am 7d93b757: (-s ours) am 68f66b94: Merge "MP3Extractor and MP3 decoder fixes - DO NOT MERGE" into gingerbread

* commit '7d93b7574f741a9cd8872771faefa3c92bed6576':
  MP3Extractor and MP3 decoder fixes - DO NOT MERGE
diff --git a/camera/Android.mk b/camera/Android.mk
index 03ff229..2f16923 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -14,7 +14,8 @@
 	libbinder \
 	libhardware \
 	libsurfaceflinger_client \
-	libui
+	libui \
+	libgui
 
 LOCAL_MODULE:= libcamera_client
 
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 743fbb2..e288312 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -80,8 +80,9 @@
         c->mStatus = NO_ERROR;
         c->mCamera = camera;
         camera->asBinder()->linkToDeath(c);
+        return c;
     }
-    return c;
+    return 0;
 }
 
 void Camera::init()
@@ -167,32 +168,34 @@
     return c->unlock();
 }
 
-// pass the buffered ISurface to the camera service
+// pass the buffered Surface to the camera service
 status_t Camera::setPreviewDisplay(const sp<Surface>& surface)
 {
-    LOGV("setPreviewDisplay");
+    LOGV("setPreviewDisplay(%p)", surface.get());
     sp <ICamera> c = mCamera;
     if (c == 0) return NO_INIT;
     if (surface != 0) {
-        return c->setPreviewDisplay(surface->getISurface());
+        return c->setPreviewDisplay(surface);
     } else {
         LOGD("app passed NULL surface");
         return c->setPreviewDisplay(0);
     }
 }
 
-status_t Camera::setPreviewDisplay(const sp<ISurface>& surface)
+// pass the buffered ISurfaceTexture to the camera service
+status_t Camera::setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture)
 {
-    LOGV("setPreviewDisplay");
-    if (surface == 0) {
-        LOGD("app passed NULL surface");
-    }
+    LOGV("setPreviewTexture(%p)", surfaceTexture.get());
     sp <ICamera> c = mCamera;
     if (c == 0) return NO_INIT;
-    return c->setPreviewDisplay(surface);
+    if (surfaceTexture != 0) {
+        return c->setPreviewTexture(surfaceTexture);
+    } else {
+        LOGD("app passed NULL surface");
+        return c->setPreviewTexture(0);
+    }
 }
 
-
 // start preview mode
 status_t Camera::startPreview()
 {
@@ -202,6 +205,31 @@
     return c->startPreview();
 }
 
+int32_t Camera::getNumberOfVideoBuffers() const
+{
+    LOGV("getNumberOfVideoBuffers");
+    sp <ICamera> c = mCamera;
+    if (c == 0) return 0;
+    return c->getNumberOfVideoBuffers();
+}
+
+sp<IMemory> Camera::getVideoBuffer(int32_t index) const
+{
+    LOGV("getVideoBuffer: %d", index);
+    sp <ICamera> c = mCamera;
+    if (c == 0) return 0;
+    return c->getVideoBuffer(index);
+}
+
+status_t Camera::storeMetaDataInBuffers(bool enabled)
+{
+    LOGV("storeMetaDataInBuffers: %s",
+            enabled? "true": "false");
+    sp <ICamera> c = mCamera;
+    if (c == 0) return NO_INIT;
+    return c->storeMetaDataInBuffers(enabled);
+}
+
 // start recording mode, must call setPreviewDisplay first
 status_t Camera::startRecording()
 {
@@ -273,12 +301,12 @@
 }
 
 // take a picture
-status_t Camera::takePicture()
+status_t Camera::takePicture(int msgType)
 {
-    LOGV("takePicture");
+    LOGV("takePicture: 0x%x", msgType);
     sp <ICamera> c = mCamera;
     if (c == 0) return NO_INIT;
-    return c->takePicture();
+    return c->takePicture(msgType);
 }
 
 // set preview/capture parameters - key/value pairs
@@ -378,4 +406,3 @@
 }
 
 }; // namespace android
-
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index 83e5e57..0fd79a4 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -73,6 +73,9 @@
 const char CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED[] = "smooth-zoom-supported";
 const char CameraParameters::KEY_FOCUS_DISTANCES[] = "focus-distances";
 const char CameraParameters::KEY_VIDEO_FRAME_FORMAT[] = "video-frame-format";
+const char CameraParameters::KEY_VIDEO_SIZE[] = "video-size";
+const char CameraParameters::KEY_SUPPORTED_VIDEO_SIZES[] = "video-size-values";
+const char CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video";
 
 const char CameraParameters::TRUE[] = "true";
 const char CameraParameters::FOCUS_DISTANCE_INFINITY[] = "Infinity";
@@ -129,10 +132,10 @@
 const char CameraParameters::SCENE_MODE_CANDLELIGHT[] = "candlelight";
 const char CameraParameters::SCENE_MODE_BARCODE[] = "barcode";
 
-// Formats for setPreviewFormat and setPictureFormat.
 const char CameraParameters::PIXEL_FORMAT_YUV422SP[] = "yuv422sp";
 const char CameraParameters::PIXEL_FORMAT_YUV420SP[] = "yuv420sp";
 const char CameraParameters::PIXEL_FORMAT_YUV422I[] = "yuv422i-yuyv";
+const char CameraParameters::PIXEL_FORMAT_YUV420P[]  = "yuv420p";
 const char CameraParameters::PIXEL_FORMAT_RGB565[] = "rgb565";
 const char CameraParameters::PIXEL_FORMAT_JPEG[] = "jpeg";
 
@@ -331,12 +334,41 @@
     parse_pair(p, width, height, 'x');
 }
 
+void CameraParameters::getPreferredPreviewSizeForVideo(int *width, int *height) const
+{
+    *width = *height = -1;
+    const char *p = get(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO);
+    if (p == 0)  return;
+    parse_pair(p, width, height, 'x');
+}
+
 void CameraParameters::getSupportedPreviewSizes(Vector<Size> &sizes) const
 {
     const char *previewSizesStr = get(KEY_SUPPORTED_PREVIEW_SIZES);
     parseSizesList(previewSizesStr, sizes);
 }
 
+void CameraParameters::setVideoSize(int width, int height)
+{
+    char str[32];
+    sprintf(str, "%dx%d", width, height);
+    set(KEY_VIDEO_SIZE, str);
+}
+
+void CameraParameters::getVideoSize(int *width, int *height) const
+{
+    *width = *height = -1;
+    const char *p = get(KEY_VIDEO_SIZE);
+    if (p == 0) return;
+    parse_pair(p, width, height, 'x');
+}
+
+void CameraParameters::getSupportedVideoSizes(Vector<Size> &sizes) const
+{
+    const char *videoSizesStr = get(KEY_SUPPORTED_VIDEO_SIZES);
+    parseSizesList(videoSizesStr, sizes);
+}
+
 void CameraParameters::setPreviewFrameRate(int fps)
 {
     set(KEY_PREVIEW_FRAME_RATE, fps);
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index 13673b5..931b57d 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -28,6 +28,7 @@
 enum {
     DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
     SET_PREVIEW_DISPLAY,
+    SET_PREVIEW_TEXTURE,
     SET_PREVIEW_CALLBACK_FLAG,
     START_PREVIEW,
     STOP_PREVIEW,
@@ -45,6 +46,9 @@
     STOP_RECORDING,
     RECORDING_ENABLED,
     RELEASE_RECORDING_FRAME,
+    GET_NUM_VIDEO_BUFFERS,
+    GET_VIDEO_BUFFER,
+    STORE_META_DATA_IN_BUFFERS,
 };
 
 class BpCamera: public BpInterface<ICamera>
@@ -64,17 +68,29 @@
         remote()->transact(DISCONNECT, data, &reply);
     }
 
-    // pass the buffered ISurface to the camera service
-    status_t setPreviewDisplay(const sp<ISurface>& surface)
+    // pass the buffered Surface to the camera service
+    status_t setPreviewDisplay(const sp<Surface>& surface)
     {
         LOGV("setPreviewDisplay");
         Parcel data, reply;
         data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
-        data.writeStrongBinder(surface->asBinder());
+        Surface::writeToParcel(surface, &data);
         remote()->transact(SET_PREVIEW_DISPLAY, data, &reply);
         return reply.readInt32();
     }
 
+    // pass the buffered SurfaceTexture to the camera service
+    status_t setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture)
+    {
+        LOGV("setPreviewTexture");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+        sp<IBinder> b(surfaceTexture->asBinder());
+        data.writeStrongBinder(b);
+        remote()->transact(SET_PREVIEW_TEXTURE, data, &reply);
+        return reply.readInt32();
+    }
+
     // set the preview callback flag to affect how the received frames from
     // preview are handled. See Camera.h for details.
     void setPreviewCallbackFlag(int flag)
@@ -133,6 +149,37 @@
         remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
     }
 
+    int32_t getNumberOfVideoBuffers() const
+    {
+        LOGV("getNumberOfVideoBuffers");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+        remote()->transact(GET_NUM_VIDEO_BUFFERS, data, &reply);
+        return reply.readInt32();
+    }
+
+    sp<IMemory> getVideoBuffer(int32_t index) const
+    {
+        LOGV("getVideoBuffer: %d", index);
+        Parcel data, reply;
+        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+        data.writeInt32(index);
+        remote()->transact(GET_VIDEO_BUFFER, data, &reply);
+        sp<IMemory> mem = interface_cast<IMemory>(
+                            reply.readStrongBinder());
+        return mem;
+    }
+
+    status_t storeMetaDataInBuffers(bool enabled)
+    {
+        LOGV("storeMetaDataInBuffers: %s", enabled? "true": "false");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+        data.writeInt32(enabled);
+        remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
+        return reply.readInt32();
+    }
+
     // check preview state
     bool previewEnabled()
     {
@@ -176,11 +223,12 @@
     }
 
     // take a picture - returns an IMemory (ref-counted mmap)
-    status_t takePicture()
+    status_t takePicture(int msgType)
     {
-        LOGV("takePicture");
+        LOGV("takePicture: 0x%x", msgType);
         Parcel data, reply;
         data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+        data.writeInt32(msgType);
         remote()->transact(TAKE_PICTURE, data, &reply);
         status_t ret = reply.readInt32();
         return ret;
@@ -258,10 +306,17 @@
         case SET_PREVIEW_DISPLAY: {
             LOGV("SET_PREVIEW_DISPLAY");
             CHECK_INTERFACE(ICamera, data, reply);
-            sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+            sp<Surface> surface = Surface::readFromParcel(data);
             reply->writeInt32(setPreviewDisplay(surface));
             return NO_ERROR;
         } break;
+        case SET_PREVIEW_TEXTURE: {
+            LOGV("SET_PREVIEW_TEXTURE");
+            CHECK_INTERFACE(ICamera, data, reply);
+            sp<ISurfaceTexture> st = interface_cast<ISurfaceTexture>(data.readStrongBinder());
+            reply->writeInt32(setPreviewTexture(st));
+            return NO_ERROR;
+        } break;
         case SET_PREVIEW_CALLBACK_FLAG: {
             LOGV("SET_PREVIEW_CALLBACK_TYPE");
             CHECK_INTERFACE(ICamera, data, reply);
@@ -300,6 +355,26 @@
             releaseRecordingFrame(mem);
             return NO_ERROR;
         } break;
+        case GET_NUM_VIDEO_BUFFERS: {
+            LOGV("GET_NUM_VIDEO_BUFFERS");
+            CHECK_INTERFACE(ICamera, data, reply);
+            reply->writeInt32(getNumberOfVideoBuffers());
+            return NO_ERROR;
+        } break;
+        case GET_VIDEO_BUFFER: {
+            LOGV("GET_VIDEO_BUFFER");
+            CHECK_INTERFACE(ICamera, data, reply);
+            int32_t index = data.readInt32();
+            reply->writeStrongBinder(getVideoBuffer(index)->asBinder());
+            return NO_ERROR;
+        } break;
+        case STORE_META_DATA_IN_BUFFERS: {
+            LOGV("STORE_META_DATA_IN_BUFFERS");
+            CHECK_INTERFACE(ICamera, data, reply);
+            bool enabled = data.readInt32();
+            reply->writeInt32(storeMetaDataInBuffers(enabled));
+            return NO_ERROR;
+        } break;
         case PREVIEW_ENABLED: {
             LOGV("PREVIEW_ENABLED");
             CHECK_INTERFACE(ICamera, data, reply);
@@ -327,7 +402,8 @@
         case TAKE_PICTURE: {
             LOGV("TAKE_PICTURE");
             CHECK_INTERFACE(ICamera, data, reply);
-            reply->writeInt32(takePicture());
+            int msgType = data.readInt32();
+            reply->writeInt32(takePicture(msgType));
             return NO_ERROR;
         } break;
         case SET_PARAMETERS: {
@@ -376,4 +452,3 @@
 // ----------------------------------------------------------------------------
 
 }; // namespace android
-
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 5b74007..1b13dd9 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -7,13 +7,16 @@
 	SineSource.cpp
 
 LOCAL_SHARED_LIBRARIES := \
-	libstagefright libmedia libutils libbinder libstagefright_foundation
+	libstagefright libmedia libutils libbinder libstagefright_foundation \
+        libskia libsurfaceflinger_client libgui
 
 LOCAL_C_INCLUDES:= \
 	$(JNI_H_INCLUDE) \
 	frameworks/base/media/libstagefright \
 	frameworks/base/media/libstagefright/include \
-	$(TOP)/frameworks/base/include/media/stagefright/openmax
+	$(TOP)/frameworks/base/include/media/stagefright/openmax \
+        external/skia/include/core \
+        external/skia/include/images \
 
 LOCAL_CFLAGS += -Wno-multichar
 
@@ -53,6 +56,31 @@
 
 LOCAL_SRC_FILES:=         \
         SineSource.cpp    \
+        recordvideo.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+	libstagefright liblog libutils libbinder
+
+LOCAL_C_INCLUDES:= \
+	$(JNI_H_INCLUDE) \
+	frameworks/base/media/libstagefright \
+	$(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= recordvideo
+
+include $(BUILD_EXECUTABLE)
+
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=         \
+        SineSource.cpp    \
         audioloop.cpp
 
 LOCAL_SHARED_LIBRARIES := \
@@ -70,3 +98,53 @@
 LOCAL_MODULE:= audioloop
 
 include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=         \
+        stream.cpp    \
+
+LOCAL_SHARED_LIBRARIES := \
+	libstagefright liblog libutils libbinder libsurfaceflinger_client \
+        libstagefright_foundation libmedia
+
+LOCAL_C_INCLUDES:= \
+	$(JNI_H_INCLUDE) \
+	frameworks/base/media/libstagefright \
+	$(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= stream
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=         \
+        sf2.cpp    \
+
+LOCAL_SHARED_LIBRARIES := \
+	libstagefright liblog libutils libbinder libstagefright_foundation \
+        libmedia libsurfaceflinger_client libcutils libui
+
+LOCAL_C_INCLUDES:= \
+	$(JNI_H_INCLUDE) \
+	frameworks/base/media/libstagefright \
+	$(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= sf2
+
+include $(BUILD_EXECUTABLE)
+
+
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp
new file mode 100644
index 0000000..1264215
--- /dev/null
+++ b/cmds/stagefright/recordvideo.cpp
@@ -0,0 +1,303 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SineSource.h"
+
+#include <binder/ProcessState.h>
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/MediaPlayerInterface.h>
+
+using namespace android;
+
+// Print usage showing how to use this utility to record videos
+static void usage(const char *me) {
+    fprintf(stderr, "usage: %s\n", me);
+    fprintf(stderr, "       -h(elp)\n");
+    fprintf(stderr, "       -b bit rate in bits per second (default: 300000)\n");
+    fprintf(stderr, "       -c YUV420 color format: [0] semi planar or [1] planar (default: 1)\n");
+    fprintf(stderr, "       -f frame rate in frames per second (default: 30)\n");
+    fprintf(stderr, "       -i I frame interval in seconds (default: 1)\n");
+    fprintf(stderr, "       -n number of frames to be recorded (default: 300)\n");
+    fprintf(stderr, "       -w width in pixels (default: 176)\n");
+    fprintf(stderr, "       -t height in pixels (default: 144)\n");
+    fprintf(stderr, "       -l encoder level. see omx il header (default: encoder specific)\n");
+    fprintf(stderr, "       -p encoder profile. see omx il header (default: encoder specific)\n");
+    fprintf(stderr, "       -v video codec: [0] AVC [1] M4V [2] H263 (default: 0)\n");
+    fprintf(stderr, "The output file is /sdcard/output.mp4\n");
+    exit(1);
+}
+
+class DummySource : public MediaSource {
+
+public:
+    DummySource(int width, int height, int nFrames, int fps, int colorFormat)
+        : mWidth(width),
+          mHeight(height),
+          mMaxNumFrames(nFrames),
+          mFrameRate(fps),
+          mColorFormat(colorFormat),
+          mSize((width * height * 3) / 2) {
+
+        mGroup.add_buffer(new MediaBuffer(mSize));
+
+        // Check the color format to make sure
+        // that the buffer size mSize is set correctly above.
+        CHECK(colorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+              colorFormat == OMX_COLOR_FormatYUV420Planar);
+    }
+
+    virtual sp<MetaData> getFormat() {
+        sp<MetaData> meta = new MetaData;
+        meta->setInt32(kKeyWidth, mWidth);
+        meta->setInt32(kKeyHeight, mHeight);
+        meta->setInt32(kKeyColorFormat, mColorFormat);
+        meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+
+        return meta;
+    }
+
+    virtual status_t start(MetaData *params) {
+        mNumFramesOutput = 0;
+        return OK;
+    }
+
+    virtual status_t stop() {
+        return OK;
+    }
+
+    virtual status_t read(
+            MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+
+        if (mNumFramesOutput % 10 == 0) {
+            fprintf(stderr, ".");
+        }
+        if (mNumFramesOutput == mMaxNumFrames) {
+            return ERROR_END_OF_STREAM;
+        }
+
+        status_t err = mGroup.acquire_buffer(buffer);
+        if (err != OK) {
+            return err;
+        }
+
+        // We don't care about the contents; we just test the video encoder.
+        // Also, by skipping the content generation, we can return from
+        // read() much faster.
+        //char x = (char)((double)rand() / RAND_MAX * 255);
+        //memset((*buffer)->data(), x, mSize);
+        (*buffer)->set_range(0, mSize);
+        (*buffer)->meta_data()->clear();
+        (*buffer)->meta_data()->setInt64(
+                kKeyTime, (mNumFramesOutput * 1000000) / mFrameRate);
+        ++mNumFramesOutput;
+
+        return OK;
+    }
+
+protected:
+    virtual ~DummySource() {}
+
+private:
+    MediaBufferGroup mGroup;
+    int mWidth, mHeight;
+    int mMaxNumFrames;
+    int mFrameRate;
+    int mColorFormat;
+    size_t mSize;
+    int64_t mNumFramesOutput;
+
+    DummySource(const DummySource &);
+    DummySource &operator=(const DummySource &);
+};
+
+enum {
+    kYUV420SP = 0,
+    kYUV420P  = 1,
+};
+
+// returns -1 if mapping of the given color is unsuccessful
+// returns an omx color enum value otherwise
+static int translateColorToOmxEnumValue(int color) {
+    switch (color) {
+        case kYUV420SP:
+            return OMX_COLOR_FormatYUV420SemiPlanar;
+        case kYUV420P:
+            return OMX_COLOR_FormatYUV420Planar;
+        default:
+            fprintf(stderr, "Unsupported color: %d\n", color);
+            return -1;
+    }
+}
+
+int main(int argc, char **argv) {
+
+    // Default values for the program if not overridden
+    int frameRateFps = 30;
+    int width = 176;
+    int height = 144;
+    int bitRateBps = 300000;
+    int iFramesIntervalSeconds = 1;
+    int colorFormat = OMX_COLOR_FormatYUV420Planar;
+    int nFrames = 300;
+    int level = -1;        // Encoder specific default
+    int profile = -1;      // Encoder specific default
+    int codec = 0;
+    const char *fileName = "/sdcard/output.mp4";
+
+    android::ProcessState::self()->startThreadPool();
+    int res;
+    while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:h")) >= 0) {
+        switch (res) {
+            case 'b':
+            {
+                bitRateBps = atoi(optarg);
+                break;
+            }
+
+            case 'c':
+            {
+                colorFormat = translateColorToOmxEnumValue(atoi(optarg));
+                if (colorFormat == -1) {
+                    usage(argv[0]);
+                }
+                break;
+            }
+
+            case 'f':
+            {
+                frameRateFps = atoi(optarg);
+                break;
+            }
+
+            case 'i':
+            {
+                iFramesIntervalSeconds = atoi(optarg);
+                break;
+            }
+
+            case 'n':
+            {
+                nFrames = atoi(optarg);
+                break;
+            }
+
+            case 'w':
+            {
+                width = atoi(optarg);
+                break;
+            }
+
+            case 't':
+            {
+                height = atoi(optarg);
+                break;
+            }
+
+            case 'l':
+            {
+                level = atoi(optarg);
+                break;
+            }
+
+            case 'p':
+            {
+                profile = atoi(optarg);
+                break;
+            }
+
+            case 'v':
+            {
+                codec = atoi(optarg);
+                if (codec < 0 || codec > 2) {
+                    usage(argv[0]);
+                }
+                break;
+            }
+
+            case 'h':
+            default:
+            {
+                usage(argv[0]);
+                break;
+            }
+        }
+    }
+
+    OMXClient client;
+    CHECK_EQ(client.connect(), OK);
+
+    status_t err = OK;
+    sp<MediaSource> source =
+        new DummySource(width, height, nFrames, frameRateFps, colorFormat);
+
+    sp<MetaData> enc_meta = new MetaData;
+    switch (codec) {
+        case 1:
+            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+            break;
+        case 2:
+            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+            break;
+        default:
+            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+            break;
+    }
+    enc_meta->setInt32(kKeyWidth, width);
+    enc_meta->setInt32(kKeyHeight, height);
+    enc_meta->setInt32(kKeyFrameRate, frameRateFps);
+    enc_meta->setInt32(kKeyBitRate, bitRateBps);
+    enc_meta->setInt32(kKeyStride, width);
+    enc_meta->setInt32(kKeySliceHeight, height);
+    enc_meta->setInt32(kKeyIFramesInterval, iFramesIntervalSeconds);
+    enc_meta->setInt32(kKeyColorFormat, colorFormat);
+    if (level != -1) {
+        enc_meta->setInt32(kKeyVideoLevel, level);
+    }
+    if (profile != -1) {
+        enc_meta->setInt32(kKeyVideoProfile, profile);
+    }
+
+    sp<MediaSource> encoder =
+        OMXCodec::Create(
+                client.interface(), enc_meta, true /* createEncoder */, source);
+
+    sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
+    writer->addSource(encoder);
+    int64_t start = systemTime();
+    CHECK_EQ(OK, writer->start());
+    while (!writer->reachedEOS()) {
+    }
+    err = writer->stop();
+    int64_t end = systemTime();
+
+    fprintf(stderr, "$\n");
+    client.disconnect();
+
+    if (err != OK && err != ERROR_END_OF_STREAM) {
+        fprintf(stderr, "record failed: %d\n", err);
+        return 1;
+    }
+    fprintf(stderr, "encoding %d frames in %lld us\n", nFrames, (end-start)/1000);
+    fprintf(stderr, "encoding speed is: %.2f fps\n", (nFrames * 1E9) / (end-start));
+    return 0;
+}
diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp
new file mode 100644
index 0000000..74649a9
--- /dev/null
+++ b/cmds/stagefright/sf2.cpp
@@ -0,0 +1,565 @@
+#include <binder/ProcessState.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+
+#include "include/ESDS.h"
+
+using namespace android;
+
+struct Controller : public AHandler {
+    Controller(const char *uri, bool decodeAudio, const sp<Surface> &surface)
+        : mURI(uri),
+          mDecodeAudio(decodeAudio),
+          mSurface(surface),
+          mCodec(new ACodec) {
+        CHECK(!mDecodeAudio || mSurface == NULL);
+    }
+
+    void startAsync() {
+        (new AMessage(kWhatStart, id()))->post();
+    }
+
+protected:
+    virtual ~Controller() {
+    }
+
+    virtual void onMessageReceived(const sp<AMessage> &msg) {
+        switch (msg->what()) {
+            case kWhatStart:
+            {
+#if 1
+                mDecodeLooper = looper();
+#else
+                mDecodeLooper = new ALooper;
+                mDecodeLooper->setName("sf2 decode looper");
+                mDecodeLooper->start();
+#endif
+
+                sp<DataSource> dataSource =
+                    DataSource::CreateFromURI(mURI.c_str());
+
+                sp<MediaExtractor> extractor =
+                    MediaExtractor::Create(dataSource);
+
+                for (size_t i = 0; i < extractor->countTracks(); ++i) {
+                    sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+                    const char *mime;
+                    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+                    if (!strncasecmp(mDecodeAudio ? "audio/" : "video/",
+                                     mime, 6)) {
+                        mSource = extractor->getTrack(i);
+                        break;
+                    }
+                }
+                CHECK(mSource != NULL);
+
+                CHECK_EQ(mSource->start(), (status_t)OK);
+
+                mDecodeLooper->registerHandler(mCodec);
+
+                mCodec->setNotificationMessage(
+                        new AMessage(kWhatCodecNotify, id()));
+
+                sp<AMessage> format = makeFormat(mSource->getFormat());
+
+                if (mSurface != NULL) {
+                    format->setObject("surface", mSurface);
+                }
+
+                mCodec->initiateSetup(format);
+
+                mCSDIndex = 0;
+                mStartTimeUs = ALooper::GetNowUs();
+                mNumOutputBuffersReceived = 0;
+                mTotalBytesReceived = 0;
+                mLeftOverBuffer = NULL;
+                mFinalResult = OK;
+                mSeekState = SEEK_NONE;
+
+                // (new AMessage(kWhatSeek, id()))->post(5000000ll);
+                break;
+            }
+
+            case kWhatSeek:
+            {
+                printf("+");
+                fflush(stdout);
+
+                CHECK(mSeekState == SEEK_NONE
+                        || mSeekState == SEEK_FLUSH_COMPLETED);
+
+                if (mLeftOverBuffer != NULL) {
+                    mLeftOverBuffer->release();
+                    mLeftOverBuffer = NULL;
+                }
+
+                mSeekState = SEEK_FLUSHING;
+                mSeekTimeUs = 30000000ll;
+
+                mCodec->signalFlush();
+                break;
+            }
+
+            case kWhatStop:
+            {
+                if (mLeftOverBuffer != NULL) {
+                    mLeftOverBuffer->release();
+                    mLeftOverBuffer = NULL;
+                }
+
+                CHECK_EQ(mSource->stop(), (status_t)OK);
+                mSource.clear();
+
+                mCodec->initiateShutdown();
+                break;
+            }
+
+            case kWhatCodecNotify:
+            {
+                int32_t what;
+                CHECK(msg->findInt32("what", &what));
+
+                if (what == ACodec::kWhatFillThisBuffer) {
+                    onFillThisBuffer(msg);
+                } else if (what == ACodec::kWhatDrainThisBuffer) {
+                    if ((mNumOutputBuffersReceived++ % 16) == 0) {
+                        printf(".");
+                        fflush(stdout);
+                    }
+
+                    onDrainThisBuffer(msg);
+                } else if (what == ACodec::kWhatEOS) {
+                    printf("$\n");
+
+                    int64_t delayUs = ALooper::GetNowUs() - mStartTimeUs;
+
+                    if (mDecodeAudio) {
+                        printf("%lld bytes received. %.2f KB/sec\n",
+                               mTotalBytesReceived,
+                               mTotalBytesReceived * 1E6 / 1024 / delayUs);
+                    } else {
+                        printf("%d frames decoded, %.2f fps. %lld bytes "
+                               "received. %.2f KB/sec\n",
+                               mNumOutputBuffersReceived,
+                               mNumOutputBuffersReceived * 1E6 / delayUs,
+                               mTotalBytesReceived,
+                               mTotalBytesReceived * 1E6 / 1024 / delayUs);
+                    }
+
+                    (new AMessage(kWhatStop, id()))->post();
+                } else if (what == ACodec::kWhatFlushCompleted) {
+                    mSeekState = SEEK_FLUSH_COMPLETED;
+                    mCodec->signalResume();
+
+                    (new AMessage(kWhatSeek, id()))->post(5000000ll);
+                } else if (what == ACodec::kWhatOutputFormatChanged) {
+                } else {
+                    CHECK_EQ(what, (int32_t)ACodec::kWhatShutdownCompleted);
+
+                    mDecodeLooper->unregisterHandler(mCodec->id());
+
+                    if (mDecodeLooper != looper()) {
+                        mDecodeLooper->stop();
+                    }
+
+                    looper()->stop();
+                }
+                break;
+            }
+
+            default:
+                TRESPASS();
+                break;
+        }
+    }
+
+private:
+    enum {
+        kWhatStart             = 'strt',
+        kWhatStop              = 'stop',
+        kWhatCodecNotify       = 'noti',
+        kWhatSeek              = 'seek',
+    };
+
+    sp<ALooper> mDecodeLooper;
+
+    AString mURI;
+    bool mDecodeAudio;
+    sp<Surface> mSurface;
+    sp<ACodec> mCodec;
+    sp<MediaSource> mSource;
+
+    Vector<sp<ABuffer> > mCSD;
+    size_t mCSDIndex;
+
+    MediaBuffer *mLeftOverBuffer;
+    status_t mFinalResult;
+
+    int64_t mStartTimeUs;
+    int32_t mNumOutputBuffersReceived;
+    int64_t mTotalBytesReceived;
+
+    enum SeekState {
+        SEEK_NONE,
+        SEEK_FLUSHING,
+        SEEK_FLUSH_COMPLETED,
+    };
+    SeekState mSeekState;
+    int64_t mSeekTimeUs;
+
+    sp<AMessage> makeFormat(const sp<MetaData> &meta) {
+        CHECK(mCSD.isEmpty());
+
+        const char *mime;
+        CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+        sp<AMessage> msg = new AMessage;
+        msg->setString("mime", mime);
+
+        if (!strncasecmp("video/", mime, 6)) {
+            int32_t width, height;
+            CHECK(meta->findInt32(kKeyWidth, &width));
+            CHECK(meta->findInt32(kKeyHeight, &height));
+
+            msg->setInt32("width", width);
+            msg->setInt32("height", height);
+        } else {
+            CHECK(!strncasecmp("audio/", mime, 6));
+
+            int32_t numChannels, sampleRate;
+            CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+            CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+            msg->setInt32("channel-count", numChannels);
+            msg->setInt32("sample-rate", sampleRate);
+        }
+
+        uint32_t type;
+        const void *data;
+        size_t size;
+        if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+            // Parse the AVCDecoderConfigurationRecord
+
+            const uint8_t *ptr = (const uint8_t *)data;
+
+            CHECK(size >= 7);
+            CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
+            uint8_t profile = ptr[1];
+            uint8_t level = ptr[3];
+
+            // There is decodable content out there that fails the following
+            // assertion, let's be lenient for now...
+            // CHECK((ptr[4] >> 2) == 0x3f);  // reserved
+
+            size_t lengthSize = 1 + (ptr[4] & 3);
+
+            // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
+            // violates it...
+            // CHECK((ptr[5] >> 5) == 7);  // reserved
+
+            size_t numSeqParameterSets = ptr[5] & 31;
+
+            ptr += 6;
+            size -= 6;
+
+            sp<ABuffer> buffer = new ABuffer(1024);
+            buffer->setRange(0, 0);
+
+            for (size_t i = 0; i < numSeqParameterSets; ++i) {
+                CHECK(size >= 2);
+                size_t length = U16_AT(ptr);
+
+                ptr += 2;
+                size -= 2;
+
+                CHECK(size >= length);
+
+                memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+                memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+                buffer->setRange(0, buffer->size() + 4 + length);
+
+                ptr += length;
+                size -= length;
+            }
+
+            buffer->meta()->setInt32("csd", true);
+            mCSD.push(buffer);
+
+            buffer = new ABuffer(1024);
+            buffer->setRange(0, 0);
+
+            CHECK(size >= 1);
+            size_t numPictureParameterSets = *ptr;
+            ++ptr;
+            --size;
+
+            for (size_t i = 0; i < numPictureParameterSets; ++i) {
+                CHECK(size >= 2);
+                size_t length = U16_AT(ptr);
+
+                ptr += 2;
+                size -= 2;
+
+                CHECK(size >= length);
+
+                memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+                memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+                buffer->setRange(0, buffer->size() + 4 + length);
+
+                ptr += length;
+                size -= length;
+            }
+
+            buffer->meta()->setInt32("csd", true);
+            mCSD.push(buffer);
+
+            msg->setObject("csd", buffer);
+        } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
+            ESDS esds((const char *)data, size);
+            CHECK_EQ(esds.InitCheck(), (status_t)OK);
+
+            const void *codec_specific_data;
+            size_t codec_specific_data_size;
+            esds.getCodecSpecificInfo(
+                    &codec_specific_data, &codec_specific_data_size);
+
+            sp<ABuffer> buffer = new ABuffer(codec_specific_data_size);
+
+            memcpy(buffer->data(), codec_specific_data,
+                   codec_specific_data_size);
+
+            buffer->meta()->setInt32("csd", true);
+            mCSD.push(buffer);
+        }
+
+        int32_t maxInputSize;
+        if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
+            msg->setInt32("max-input-size", maxInputSize);
+        }
+
+        return msg;
+    }
+
+    void onFillThisBuffer(const sp<AMessage> &msg) {
+        sp<AMessage> reply;
+        CHECK(msg->findMessage("reply", &reply));
+
+        if (mSeekState == SEEK_FLUSHING) {
+            reply->post();
+            return;
+        }
+
+        sp<RefBase> obj;
+        CHECK(msg->findObject("buffer", &obj));
+        sp<ABuffer> outBuffer = static_cast<ABuffer *>(obj.get());
+
+        if (mCSDIndex < mCSD.size()) {
+            outBuffer = mCSD.editItemAt(mCSDIndex++);
+            outBuffer->meta()->setInt64("timeUs", 0);
+        } else {
+            size_t sizeLeft = outBuffer->capacity();
+            outBuffer->setRange(0, 0);
+
+            int32_t n = 0;
+
+            for (;;) {
+                MediaBuffer *inBuffer;
+
+                if (mLeftOverBuffer != NULL) {
+                    inBuffer = mLeftOverBuffer;
+                    mLeftOverBuffer = NULL;
+                } else if (mFinalResult != OK) {
+                    break;
+                } else {
+                    MediaSource::ReadOptions options;
+                    if (mSeekState == SEEK_FLUSH_COMPLETED) {
+                        options.setSeekTo(mSeekTimeUs);
+                        mSeekState = SEEK_NONE;
+                    }
+                    status_t err = mSource->read(&inBuffer, &options);
+
+                    if (err != OK) {
+                        mFinalResult = err;
+                        break;
+                    }
+                }
+
+                if (inBuffer->range_length() > sizeLeft) {
+                    if (outBuffer->size() == 0) {
+                        LOGE("Unable to fit even a single input buffer of size %d.",
+                             inBuffer->range_length());
+                    }
+                    CHECK_GT(outBuffer->size(), 0u);
+
+                    mLeftOverBuffer = inBuffer;
+                    break;
+                }
+
+                ++n;
+
+                if (outBuffer->size() == 0) {
+                    int64_t timeUs;
+                    CHECK(inBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
+
+                    outBuffer->meta()->setInt64("timeUs", timeUs);
+                }
+
+                memcpy(outBuffer->data() + outBuffer->size(),
+                       (const uint8_t *)inBuffer->data()
+                        + inBuffer->range_offset(),
+                       inBuffer->range_length());
+
+                outBuffer->setRange(
+                        0, outBuffer->size() + inBuffer->range_length());
+
+                sizeLeft -= inBuffer->range_length();
+
+                inBuffer->release();
+                inBuffer = NULL;
+
+                // break;  // Don't coalesce
+            }
+
+            LOGV("coalesced %d input buffers", n);
+
+            if (outBuffer->size() == 0) {
+                CHECK_NE(mFinalResult, (status_t)OK);
+
+                reply->setInt32("err", mFinalResult);
+                reply->post();
+                return;
+            }
+        }
+
+        reply->setObject("buffer", outBuffer);
+        reply->post();
+    }
+
+    void onDrainThisBuffer(const sp<AMessage> &msg) {
+        sp<RefBase> obj;
+        CHECK(msg->findObject("buffer", &obj));
+
+        sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+        mTotalBytesReceived += buffer->size();
+
+        sp<AMessage> reply;
+        CHECK(msg->findMessage("reply", &reply));
+
+        reply->post();
+    }
+
+    DISALLOW_EVIL_CONSTRUCTORS(Controller);
+};
+
+static void usage(const char *me) {
+    fprintf(stderr, "usage: %s\n", me);
+    fprintf(stderr, "       -h(elp)\n");
+    fprintf(stderr, "       -a(udio)\n");
+
+    fprintf(stderr,
+            "       -s(surface) Allocate output buffers on a surface.\n");
+}
+
+int main(int argc, char **argv) {
+    android::ProcessState::self()->startThreadPool();
+
+    bool decodeAudio = false;
+    bool useSurface = false;
+
+    int res;
+    while ((res = getopt(argc, argv, "has")) >= 0) {
+        switch (res) {
+            case 'a':
+                decodeAudio = true;
+                break;
+
+            case 's':
+                useSurface = true;
+                break;
+
+            case '?':
+            case 'h':
+            default:
+            {
+                usage(argv[0]);
+                return 1;
+            }
+        }
+    }
+
+    argc -= optind;
+    argv += optind;
+
+    if (argc != 1) {
+        usage(argv[-optind]);
+        return 1;
+    }
+
+    DataSource::RegisterDefaultSniffers();
+
+    sp<ALooper> looper = new ALooper;
+    looper->setName("sf2");
+
+    sp<SurfaceComposerClient> composerClient;
+    sp<SurfaceControl> control;
+    sp<Surface> surface;
+
+    if (!decodeAudio && useSurface) {
+        composerClient = new SurfaceComposerClient;
+        CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+        control = composerClient->createSurface(
+                getpid(),
+                String8("A Surface"),
+                0,
+                1280,
+                800,
+                PIXEL_FORMAT_RGB_565,
+                0);
+
+        CHECK(control != NULL);
+        CHECK(control->isValid());
+
+        CHECK_EQ(composerClient->openTransaction(), (status_t)OK);
+        CHECK_EQ(control->setLayer(30000), (status_t)OK);
+        CHECK_EQ(control->show(), (status_t)OK);
+        CHECK_EQ(composerClient->closeTransaction(), (status_t)OK);
+
+        surface = control->getSurface();
+        CHECK(surface != NULL);
+    }
+
+    sp<Controller> controller = new Controller(argv[0], decodeAudio, surface);
+    looper->registerHandler(controller);
+
+    controller->startAsync();
+
+    CHECK_EQ(looper->start(true /* runOnCallingThread */), (status_t)OK);
+
+    looper->unregisterHandler(controller->id());
+
+    if (!decodeAudio && useSurface) {
+        composerClient->dispose();
+    }
+
+    return 0;
+}
+
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index ff92431..a875c3a 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -31,7 +31,7 @@
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include "include/ARTSPController.h"
-#include "include/LiveSource.h"
+#include "include/LiveSession.h"
 #include "include/NuCachedSource2.h"
 #include <media/stagefright/AudioPlayer.h>
 #include <media/stagefright/DataSource.h>
@@ -49,8 +49,17 @@
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MPEG4Writer.h>
 
+#include <private/media/VideoFrame.h>
+#include <SkBitmap.h>
+#include <SkImageEncoder.h>
+
 #include <fcntl.h>
 
+#include <gui/SurfaceTextureClient.h>
+
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+
 using namespace android;
 
 static long gNumRepetitions;
@@ -59,8 +68,13 @@
 static bool gPreferSoftwareCodec;
 static bool gPlaybackAudio;
 static bool gWriteMP4;
+static bool gDisplayHistogram;
 static String8 gWriteMP4Filename;
 
+static sp<ANativeWindow> gSurface;
+
+#define USE_SURFACE_COMPOSER 0
+
 static int64_t getNowUs() {
     struct timeval tv;
     gettimeofday(&tv, NULL);
@@ -68,6 +82,58 @@
     return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
 }
 
+static int CompareIncreasing(const int64_t *a, const int64_t *b) {
+    return (*a) < (*b) ? -1 : (*a) > (*b) ? 1 : 0;
+}
+
+static void displayDecodeHistogram(Vector<int64_t> *decodeTimesUs) {
+    printf("decode times:\n");
+
+    decodeTimesUs->sort(CompareIncreasing);
+
+    size_t n = decodeTimesUs->size();
+    int64_t minUs = decodeTimesUs->itemAt(0);
+    int64_t maxUs = decodeTimesUs->itemAt(n - 1);
+
+    printf("min decode time %lld us (%.2f secs)\n", minUs, minUs / 1E6);
+    printf("max decode time %lld us (%.2f secs)\n", maxUs, maxUs / 1E6);
+
+    size_t counts[100];
+    for (size_t i = 0; i < 100; ++i) {
+        counts[i] = 0;
+    }
+
+    for (size_t i = 0; i < n; ++i) {
+        int64_t x = decodeTimesUs->itemAt(i);
+
+        size_t slot = ((x - minUs) * 100) / (maxUs - minUs);
+        if (slot == 100) { slot = 99; }
+
+        ++counts[slot];
+    }
+
+    for (size_t i = 0; i < 100; ++i) {
+        int64_t slotUs = minUs + (i * (maxUs - minUs) / 100);
+
+        double fps = 1E6 / slotUs;
+        printf("[%.2f fps]: %d\n", fps, counts[i]);
+    }
+}
+
+static void displayAVCProfileLevelIfPossible(const sp<MetaData>& meta) {
+    uint32_t type;
+    const void *data;
+    size_t size;
+    if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+        const uint8_t *ptr = (const uint8_t *)data;
+        CHECK(size >= 7);
+        CHECK(ptr[0] == 1);  // configurationVersion == 1
+        uint8_t profile = ptr[1];
+        uint8_t level = ptr[3];
+        fprintf(stderr, "AVC video profile %d and level %d\n", profile, level);
+    }
+}
+
 static void playSource(OMXClient *client, sp<MediaSource> &source) {
     sp<MetaData> meta = source->getFormat();
 
@@ -81,12 +147,14 @@
         rawSource = OMXCodec::Create(
             client->interface(), meta, false /* createEncoder */, source,
             NULL /* matchComponentName */,
-            gPreferSoftwareCodec ? OMXCodec::kPreferSoftwareCodecs : 0);
+            gPreferSoftwareCodec ? OMXCodec::kPreferSoftwareCodecs : 0,
+            gSurface);
 
         if (rawSource == NULL) {
             fprintf(stderr, "Failed to instantiate decoder for '%s'.\n", mime);
             return;
         }
+        displayAVCProfileLevelIfPossible(meta);
     }
 
     source.clear();
@@ -201,6 +269,8 @@
     int64_t sumDecodeUs = 0;
     int64_t totalBytes = 0;
 
+    Vector<int64_t> decodeTimesUs;
+
     while (numIterationsLeft-- > 0) {
         long numFrames = 0;
 
@@ -224,9 +294,17 @@
                 break;
             }
 
-            if (buffer->range_length() > 0 && (n++ % 16) == 0) {
-                printf(".");
-                fflush(stdout);
+            if (buffer->range_length() > 0) {
+                if (gDisplayHistogram && n > 0) {
+                    // Ignore the first time since it includes some setup
+                    // cost.
+                    decodeTimesUs.push(delayDecodeUs);
+                }
+
+                if ((n++ % 16) == 0) {
+                    printf(".");
+                    fflush(stdout);
+                }
             }
 
             sumDecodeUs += delayDecodeUs;
@@ -266,6 +344,10 @@
                (double)sumDecodeUs / n);
 
         printf("decoded a total of %d frame(s).\n", n);
+
+        if (gDisplayHistogram) {
+            displayDecodeHistogram(&decodeTimesUs);
+        }
     } else if (!strncasecmp("audio/", mime, 6)) {
         // Frame count makes less sense for audio, as the output buffer
         // sizes may be different across decoders.
@@ -466,6 +548,9 @@
     fprintf(stderr, "       -o playback audio\n");
     fprintf(stderr, "       -w(rite) filename (write to .mp4 file)\n");
     fprintf(stderr, "       -k seek test\n");
+    fprintf(stderr, "       -x display a histogram of decoding times/fps "
+                    "(video only)\n");
+    fprintf(stderr, "       -S allocate buffers from a surface\n");
 }
 
 int main(int argc, char **argv) {
@@ -476,18 +561,21 @@
     bool dumpProfiles = false;
     bool extractThumbnail = false;
     bool seekTest = false;
+    bool useSurfaceAlloc = false;
     gNumRepetitions = 1;
     gMaxNumFrames = 0;
     gReproduceBug = -1;
     gPreferSoftwareCodec = false;
     gPlaybackAudio = false;
     gWriteMP4 = false;
+    gDisplayHistogram = false;
 
     sp<ALooper> looper;
     sp<ARTSPController> rtspController;
+    sp<LiveSession> liveSession;
 
     int res;
-    while ((res = getopt(argc, argv, "han:lm:b:ptsow:k")) >= 0) {
+    while ((res = getopt(argc, argv, "han:lm:b:ptsow:kxS")) >= 0) {
         switch (res) {
             case 'a':
             {
@@ -560,6 +648,18 @@
                 break;
             }
 
+            case 'x':
+            {
+                gDisplayHistogram = true;
+                break;
+            }
+
+            case 'S':
+            {
+                useSurfaceAlloc = true;
+                break;
+            }
+
             case '?':
             case 'h':
             default:
@@ -602,6 +702,19 @@
 
             if (mem != NULL) {
                 printf("getFrameAtTime(%s) => OK\n", filename);
+
+                VideoFrame *frame = (VideoFrame *)mem->pointer();
+
+                SkBitmap bitmap;
+                bitmap.setConfig(
+                        SkBitmap::kRGB_565_Config, frame->mWidth, frame->mHeight);
+
+                bitmap.setPixels((uint8_t *)frame + sizeof(VideoFrame));
+
+                CHECK(SkImageEncoder::EncodeFile(
+                            "/sdcard/out.jpg", bitmap,
+                            SkImageEncoder::kJPEG_Type,
+                            SkImageEncoder::kDefaultQuality));
             } else {
                 mem = retriever->extractAlbumArt();
 
@@ -685,6 +798,39 @@
         }
     }
 
+    sp<SurfaceComposerClient> composerClient;
+    sp<SurfaceControl> control;
+
+    if (useSurfaceAlloc && !audioOnly) {
+#if USE_SURFACE_COMPOSER
+        composerClient = new SurfaceComposerClient;
+        CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+        control = composerClient->createSurface(
+                getpid(),
+                String8("A Surface"),
+                0,
+                1280,
+                800,
+                PIXEL_FORMAT_RGB_565,
+                0);
+
+        CHECK(control != NULL);
+        CHECK(control->isValid());
+
+        CHECK_EQ(composerClient->openTransaction(), (status_t)OK);
+        CHECK_EQ(control->setLayer(30000), (status_t)OK);
+        CHECK_EQ(control->show(), (status_t)OK);
+        CHECK_EQ(composerClient->closeTransaction(), (status_t)OK);
+
+        gSurface = control->getSurface();
+        CHECK(gSurface != NULL);
+#else
+        sp<SurfaceTexture> texture = new SurfaceTexture(0 /* tex */);
+        gSurface = new SurfaceTextureClient(texture);
+#endif
+    }
+
     DataSource::RegisterDefaultSniffers();
 
     OMXClient client;
@@ -754,8 +900,15 @@
                 String8 uri("http://");
                 uri.append(filename + 11);
 
-                dataSource = new LiveSource(uri.string());
-                dataSource = new NuCachedSource2(dataSource);
+                if (looper == NULL) {
+                    looper = new ALooper;
+                    looper->start();
+                }
+                liveSession = new LiveSession;
+                looper->registerHandler(liveSession);
+
+                liveSession->connect(uri.string());
+                dataSource = liveSession->getDataSource();
 
                 extractor =
                     MediaExtractor::Create(
@@ -855,6 +1008,14 @@
         }
     }
 
+    if (useSurfaceAlloc && !audioOnly) {
+        gSurface.clear();
+
+#if USE_SURFACE_COMPOSER
+        composerClient->dispose();
+#endif
+    }
+
     client.disconnect();
 
     return 0;
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
new file mode 100644
index 0000000..9246a00
--- /dev/null
+++ b/cmds/stagefright/stream.cpp
@@ -0,0 +1,190 @@
+#include <binder/ProcessState.h>
+
+#include <media/IStreamSource.h>
+#include <media/mediaplayer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+
+#include <fcntl.h>
+
+using namespace android;
+
+struct MyStreamSource : public BnStreamSource {
+    // Caller retains ownership of fd.
+    MyStreamSource(int fd);
+
+    virtual void setListener(const sp<IStreamListener> &listener);
+    virtual void setBuffers(const Vector<sp<IMemory> > &buffers);
+
+    virtual void onBufferAvailable(size_t index);
+
+protected:
+    virtual ~MyStreamSource();
+
+private:
+    int mFd;
+    off64_t mFileSize;
+    int64_t mNextSeekTimeUs;
+
+    sp<IStreamListener> mListener;
+    Vector<sp<IMemory> > mBuffers;
+
+    DISALLOW_EVIL_CONSTRUCTORS(MyStreamSource);
+};
+
+MyStreamSource::MyStreamSource(int fd)
+    : mFd(fd),
+      mFileSize(0),
+      mNextSeekTimeUs(-1) {  // ALooper::GetNowUs() + 5000000ll) {
+    CHECK_GE(fd, 0);
+
+    mFileSize = lseek64(fd, 0, SEEK_END);
+    lseek64(fd, 0, SEEK_SET);
+}
+
+MyStreamSource::~MyStreamSource() {
+}
+
+void MyStreamSource::setListener(const sp<IStreamListener> &listener) {
+    mListener = listener;
+}
+
+void MyStreamSource::setBuffers(const Vector<sp<IMemory> > &buffers) {
+    mBuffers = buffers;
+}
+
+void MyStreamSource::onBufferAvailable(size_t index) {
+    CHECK_LT(index, mBuffers.size());
+
+    if (mNextSeekTimeUs >= 0 && mNextSeekTimeUs <= ALooper::GetNowUs()) {
+        off64_t offset = (off64_t)(((float)rand() / RAND_MAX) * mFileSize * 0.8);
+        offset = (offset / 188) * 188;
+
+        lseek(mFd, offset, SEEK_SET);
+
+        mListener->issueCommand(
+                IStreamListener::DISCONTINUITY, false /* synchronous */);
+
+        mNextSeekTimeUs = -1;
+        mNextSeekTimeUs = ALooper::GetNowUs() + 5000000ll;
+    }
+
+    sp<IMemory> mem = mBuffers.itemAt(index);
+
+    ssize_t n = read(mFd, mem->pointer(), mem->size());
+    if (n <= 0) {
+        mListener->issueCommand(IStreamListener::EOS, false /* synchronous */);
+    } else {
+        mListener->queueBuffer(index, n);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct MyClient : public BnMediaPlayerClient {
+    MyClient()
+        : mEOS(false) {
+    }
+
+    virtual void notify(int msg, int ext1, int ext2) {
+        Mutex::Autolock autoLock(mLock);
+
+        if (msg == MEDIA_ERROR || msg == MEDIA_PLAYBACK_COMPLETE) {
+            mEOS = true;
+            mCondition.signal();
+        }
+    }
+
+    void waitForEOS() {
+        Mutex::Autolock autoLock(mLock);
+        while (!mEOS) {
+            mCondition.wait(mLock);
+        }
+    }
+
+protected:
+    virtual ~MyClient() {
+    }
+
+private:
+    Mutex mLock;
+    Condition mCondition;
+
+    bool mEOS;
+
+    DISALLOW_EVIL_CONSTRUCTORS(MyClient);
+};
+
+int main(int argc, char **argv) {
+    android::ProcessState::self()->startThreadPool();
+
+    if (argc != 2) {
+        fprintf(stderr, "Usage: %s filename\n", argv[0]);
+        return 1;
+    }
+
+    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+    CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+    sp<SurfaceControl> control =
+        composerClient->createSurface(
+                getpid(),
+                String8("A Surface"),
+                0,
+                1280,
+                800,
+                PIXEL_FORMAT_RGB_565,
+                0);
+
+    CHECK(control != NULL);
+    CHECK(control->isValid());
+
+    CHECK_EQ(composerClient->openTransaction(), (status_t)OK);
+    CHECK_EQ(control->setLayer(30000), (status_t)OK);
+    CHECK_EQ(control->show(), (status_t)OK);
+    CHECK_EQ(composerClient->closeTransaction(), (status_t)OK);
+
+    sp<Surface> surface = control->getSurface();
+    CHECK(surface != NULL);
+
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->getService(String16("media.player"));
+    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+
+    CHECK(service.get() != NULL);
+
+    int fd = open(argv[1], O_RDONLY);
+
+    if (fd < 0) {
+        fprintf(stderr, "Failed to open file '%s'.", argv[1]);
+        return 1;
+    }
+
+    sp<MyClient> client = new MyClient;
+
+    sp<IMediaPlayer> player =
+        service->create(getpid(), client, new MyStreamSource(fd), 0);
+
+    if (player != NULL) {
+        player->setVideoSurface(surface);
+        player->start();
+
+        client->waitForEOS();
+
+        player->stop();
+    } else {
+        fprintf(stderr, "failed to instantiate player.\n");
+    }
+
+    close(fd);
+    fd = -1;
+
+    composerClient->dispose();
+
+    return 0;
+}
diff --git a/drm/common/Android.mk b/drm/common/Android.mk
index c79a91a..f1136c9 100644
--- a/drm/common/Android.mk
+++ b/drm/common/Android.mk
@@ -26,7 +26,6 @@
     DrmInfoStatus.cpp \
     DrmRights.cpp \
     DrmSupportInfo.cpp \
-    IDrmIOService.cpp \
     IDrmManagerService.cpp \
     IDrmServiceListener.cpp \
     DrmInfoEvent.cpp \
diff --git a/drm/common/DrmEngineBase.cpp b/drm/common/DrmEngineBase.cpp
index ac360eb..9b16c36 100644
--- a/drm/common/DrmEngineBase.cpp
+++ b/drm/common/DrmEngineBase.cpp
@@ -84,7 +84,7 @@
 }
 
 status_t DrmEngineBase::setPlaybackStatus(
-    int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+    int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     return onSetPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position);
 }
 
@@ -120,7 +120,7 @@
 }
 
 status_t DrmEngineBase::openDecryptSession(
-    int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) {
+    int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) {
     return onOpenDecryptSession(uniqueId, decryptHandle, fd, offset, length);
 }
 
@@ -150,7 +150,7 @@
 }
 
 ssize_t DrmEngineBase::pread(
-    int uniqueId, DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset) {
+    int uniqueId, DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset) {
     return onPread(uniqueId, decryptHandle, buffer, numBytes, offset);
 }
 
diff --git a/drm/common/DrmSupportInfo.cpp b/drm/common/DrmSupportInfo.cpp
index ffc8953..3e02093 100644
--- a/drm/common/DrmSupportInfo.cpp
+++ b/drm/common/DrmSupportInfo.cpp
@@ -56,7 +56,7 @@
     for (unsigned int i = 0; i < mFileSuffixVector.size(); i++) {
         const String8 item = mFileSuffixVector.itemAt(i);
 
-        if (String8("") != fileType && item.find(fileType) != -1) {
+        if (item.find(fileType) != -1) {
             return true;
         }
     }
diff --git a/drm/common/IDrmIOService.cpp b/drm/common/IDrmIOService.cpp
deleted file mode 100644
index e44ca55..0000000
--- a/drm/common/IDrmIOService.cpp
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <binder/IPCThreadState.h>
-#include <drm/drm_framework_common.h>
-#include "IDrmIOService.h"
-
-using namespace android;
-
-void BpDrmIOService::writeToFile(const String8& filePath, const String8& dataBuffer) {
-    Parcel data, reply;
-
-    data.writeInterfaceToken(IDrmIOService::getInterfaceDescriptor());
-    data.writeString8(filePath);
-    data.writeString8(dataBuffer);
-
-    remote()->transact(WRITE_TO_FILE, data, &reply);
-}
-
-String8 BpDrmIOService::readFromFile(const String8& filePath) {
-
-    Parcel data, reply;
-
-    data.writeInterfaceToken(IDrmIOService::getInterfaceDescriptor());
-    data.writeString8(filePath);
-
-    remote()->transact(READ_FROM_FILE, data, &reply);
-    return reply.readString8();
-}
-
-IMPLEMENT_META_INTERFACE(DrmIOService, "drm.IDrmIOService");
-
-status_t BnDrmIOService::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
-
-    switch (code) {
-    case WRITE_TO_FILE:
-    {
-        CHECK_INTERFACE(IDrmIOService, data, reply);
-
-        writeToFile(data.readString8(), data.readString8());
-        return DRM_NO_ERROR;
-    }
-
-    case READ_FROM_FILE:
-    {
-        CHECK_INTERFACE(IDrmIOService, data, reply);
-
-        String8 dataBuffer = readFromFile(data.readString8());
-        reply->writeString8(dataBuffer);
-        return DRM_NO_ERROR;
-    }
-
-    default:
-        return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
diff --git a/drm/common/IDrmManagerService.cpp b/drm/common/IDrmManagerService.cpp
index 723b50e..696e305 100644
--- a/drm/common/IDrmManagerService.cpp
+++ b/drm/common/IDrmManagerService.cpp
@@ -363,7 +363,7 @@
 }
 
 status_t BpDrmManagerService::setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     LOGV("setPlaybackStatus");
     Parcel data, reply;
 
@@ -382,7 +382,7 @@
     }
 
     data.writeInt32(playbackStatus);
-    data.writeInt32(position);
+    data.writeInt64(position);
 
     remote()->transact(SET_PLAYBACK_STATUS, data, &reply);
     return reply.readInt32();
@@ -459,7 +459,7 @@
     if (0 != reply.dataAvail()) {
         //Filling DRM Converted Status
         const int statusCode = reply.readInt32();
-        const int offset = reply.readInt32();
+        const off64_t offset = reply.readInt64();
 
         DrmBuffer* convertedData = NULL;
         if (0 != reply.dataAvail()) {
@@ -491,7 +491,7 @@
     if (0 != reply.dataAvail()) {
         //Filling DRM Converted Status
         const int statusCode = reply.readInt32();
-        const int offset = reply.readInt32();
+        const off64_t offset = reply.readInt64();
 
         DrmBuffer* convertedData = NULL;
         if (0 != reply.dataAvail()) {
@@ -545,15 +545,15 @@
 }
 
 DecryptHandle* BpDrmManagerService::openDecryptSession(
-            int uniqueId, int fd, int offset, int length) {
+            int uniqueId, int fd, off64_t offset, off64_t length) {
     LOGV("Entering BpDrmManagerService::openDecryptSession");
     Parcel data, reply;
 
     data.writeInterfaceToken(IDrmManagerService::getInterfaceDescriptor());
     data.writeInt32(uniqueId);
     data.writeFileDescriptor(fd);
-    data.writeInt32(offset);
-    data.writeInt32(length);
+    data.writeInt64(offset);
+    data.writeInt64(length);
 
     remote()->transact(OPEN_DECRYPT_SESSION, data, &reply);
 
@@ -569,8 +569,6 @@
             handle->decryptInfo = new DecryptInfo();
             handle->decryptInfo->decryptBufferLength = reply.readInt32();
         }
-    } else {
-        LOGE("no decryptHandle is generated in service side");
     }
     return handle;
 }
@@ -598,7 +596,7 @@
             handle->decryptInfo->decryptBufferLength = reply.readInt32();
         }
     } else {
-        LOGE("no decryptHandle is generated in service side");
+        LOGV("no decryptHandle is generated in service side");
     }
     return handle;
 }
@@ -729,7 +727,7 @@
 
 ssize_t BpDrmManagerService::pread(
             int uniqueId, DecryptHandle* decryptHandle, void* buffer,
-            ssize_t numBytes, off_t offset) {
+            ssize_t numBytes, off64_t offset) {
     LOGV("read");
     Parcel data, reply;
     int result;
@@ -749,7 +747,7 @@
     }
 
     data.writeInt32(numBytes);
-    data.writeInt32(offset);
+    data.writeInt64(offset);
 
     remote()->transact(PREAD, data, &reply);
     result = reply.readInt32();
@@ -1113,7 +1111,7 @@
         }
 
         const status_t status
-            = setPlaybackStatus(uniqueId, &handle, data.readInt32(), data.readInt32());
+            = setPlaybackStatus(uniqueId, &handle, data.readInt32(), data.readInt64());
         reply->writeInt32(status);
 
         delete handle.decryptInfo; handle.decryptInfo = NULL;
@@ -1185,7 +1183,7 @@
         if (NULL != drmConvertedStatus) {
             //Filling Drm Converted Ststus
             reply->writeInt32(drmConvertedStatus->statusCode);
-            reply->writeInt32(drmConvertedStatus->offset);
+            reply->writeInt64(drmConvertedStatus->offset);
 
             if (NULL != drmConvertedStatus->convertedData) {
                 const DrmBuffer* convertedData = drmConvertedStatus->convertedData;
@@ -1214,7 +1212,7 @@
         if (NULL != drmConvertedStatus) {
             //Filling Drm Converted Ststus
             reply->writeInt32(drmConvertedStatus->statusCode);
-            reply->writeInt32(drmConvertedStatus->offset);
+            reply->writeInt64(drmConvertedStatus->offset);
 
             if (NULL != drmConvertedStatus->convertedData) {
                 const DrmBuffer* convertedData = drmConvertedStatus->convertedData;
@@ -1274,7 +1272,7 @@
         const int fd = data.readFileDescriptor();
 
         DecryptHandle* handle
-            = openDecryptSession(uniqueId, fd, data.readInt32(), data.readInt32());
+            = openDecryptSession(uniqueId, fd, data.readInt64(), data.readInt64());
 
         if (NULL != handle) {
             reply->writeInt32(handle->decryptId);
@@ -1285,8 +1283,6 @@
                 reply->writeInt32(handle->decryptInfo->decryptBufferLength);
                 delete handle->decryptInfo; handle->decryptInfo = NULL;
             }
-        } else {
-            LOGE("NULL decryptHandle is returned");
         }
         delete handle; handle = NULL;
         return DRM_NO_ERROR;
@@ -1312,7 +1308,7 @@
                 delete handle->decryptInfo; handle->decryptInfo = NULL;
             }
         } else {
-            LOGE("NULL decryptHandle is returned");
+            LOGV("NULL decryptHandle is returned");
         }
         delete handle; handle = NULL;
         return DRM_NO_ERROR;
@@ -1481,7 +1477,7 @@
         const int numBytes = data.readInt32();
         char* buffer = new char[numBytes];
 
-        const off_t offset = data.readInt32();
+        const off64_t offset = data.readInt64();
 
         ssize_t result = pread(uniqueId, &handle, buffer, numBytes, offset);
         reply->writeInt32(result);
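The hunks above widen DRM offsets and lengths across the Binder boundary, pairing writeInt64() on the proxy side with readInt64() on the stub side. A minimal sketch of that pairing follows, assuming only binder/Parcel.h; the helper names are illustrative and not part of this change.

#include <sys/types.h>
#include <binder/Parcel.h>

// Illustrative only: whatever width the proxy writes, the stub must read with
// the matching call, or the parcel read cursor drifts out of sync.
static void writeFdRange(android::Parcel* data, int fd, off64_t offset, off64_t length) {
    data->writeFileDescriptor(fd);
    data->writeInt64(offset);   // was writeInt32(offset) before this change
    data->writeInt64(length);   // was writeInt32(length) before this change
}

static void readFdRange(const android::Parcel& data, int* fd, off64_t* offset, off64_t* length) {
    *fd = data.readFileDescriptor();
    *offset = data.readInt64();  // mirrors the writer exactly
    *length = data.readInt64();
}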
diff --git a/drm/common/ReadWriteUtils.cpp b/drm/common/ReadWriteUtils.cpp
index 7ec4fa2..c16214e 100644
--- a/drm/common/ReadWriteUtils.cpp
+++ b/drm/common/ReadWriteUtils.cpp
@@ -42,7 +42,7 @@
         struct stat sb;
 
         if (fstat(fd, &sb) == 0 && sb.st_size > 0) {
-            int length = sb.st_size;
+            off64_t length = sb.st_size;
             char* bytes = new char[length];
             if (length == read(fd, (void*) bytes, length)) {
                 string.append(bytes, length);
@@ -57,7 +57,7 @@
 int ReadWriteUtils::readBytes(const String8& filePath, char** buffer) {
     FILE* file = NULL;
     file = fopen(filePath.string(), "r");
-    int length = 0;
+    off64_t length = 0;
 
     if (NULL != file) {
         int fd = fileno(file);
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index 49df1c8..1eee5f2 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -51,6 +51,7 @@
 }
 
 int DrmManager::addUniqueId(int uniqueId) {
+    Mutex::Autolock _l(mLock);
     if (0 == uniqueId) {
         int temp = 0;
         bool foundUniqueId = false;
@@ -78,6 +79,7 @@
 }
 
 void DrmManager::removeUniqueId(int uniqueId) {
+    Mutex::Autolock _l(mLock);
     for (unsigned int i = 0; i < mUniqueIdVector.size(); i++) {
         if (uniqueId == mUniqueIdVector.itemAt(i)) {
             mUniqueIdVector.removeAt(i);
@@ -87,7 +89,7 @@
 }
 
 status_t DrmManager::loadPlugIns() {
-    String8 pluginDirPath("/system/lib/drm/plugins/native");
+    String8 pluginDirPath("/system/lib/drm");
     return loadPlugIns(pluginDirPath);
 }
 
@@ -107,6 +109,7 @@
 }
 
 status_t DrmManager::unloadPlugIns() {
+    Mutex::Autolock _l(mLock);
     mConvertSessionMap.clear();
     mDecryptSessionMap.clear();
     mPlugInManager.unloadPlugIns();
@@ -116,7 +119,7 @@
 
 status_t DrmManager::setDrmServiceListener(
             int uniqueId, const sp<IDrmServiceListener>& drmServiceListener) {
-    Mutex::Autolock _l(mLock);
+    Mutex::Autolock _l(mListenerLock);
     if (NULL != drmServiceListener.get()) {
         mServiceListeners.add(uniqueId, drmServiceListener);
     } else {
@@ -126,6 +129,7 @@
 }
 
 void DrmManager::addClient(int uniqueId) {
+    Mutex::Autolock _l(mLock);
     if (!mSupportInfoToPlugInIdMap.isEmpty()) {
         Vector<String8> plugInIdList = mPlugInManager.getPlugInIdList();
         for (unsigned int index = 0; index < plugInIdList.size(); index++) {
@@ -137,6 +141,7 @@
 }
 
 void DrmManager::removeClient(int uniqueId) {
+    Mutex::Autolock _l(mLock);
     Vector<String8> plugInIdList = mPlugInManager.getPlugInIdList();
     for (unsigned int index = 0; index < plugInIdList.size(); index++) {
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInIdList.itemAt(index));
@@ -145,6 +150,7 @@
 }
 
 DrmConstraints* DrmManager::getConstraints(int uniqueId, const String8* path, const int action) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, *path);
     if (EMPTY_STRING != plugInId) {
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
@@ -154,6 +160,7 @@
 }
 
 DrmMetadata* DrmManager::getMetadata(int uniqueId, const String8* path) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, *path);
     if (EMPTY_STRING != plugInId) {
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
@@ -163,6 +170,7 @@
 }
 
 status_t DrmManager::installDrmEngine(int uniqueId, const String8& absolutePath) {
+    Mutex::Autolock _l(mLock);
     mPlugInManager.loadPlugIn(absolutePath);
 
     IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(absolutePath);
@@ -176,6 +184,7 @@
 }
 
 bool DrmManager::canHandle(int uniqueId, const String8& path, const String8& mimeType) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInId(mimeType);
     bool result = (EMPTY_STRING != plugInId) ? true : false;
 
@@ -184,13 +193,17 @@
             IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
             result = rDrmEngine.canHandle(uniqueId, path);
         } else {
-            result = canHandle(uniqueId, path);
+            String8 extension = path.getPathExtension();
+            if (String8("") != extension) {
+                result = canHandle(uniqueId, path);
+            }
         }
     }
     return result;
 }
 
 DrmInfoStatus* DrmManager::processDrmInfo(int uniqueId, const DrmInfo* drmInfo) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInId(drmInfo->getMimeType());
     if (EMPTY_STRING != plugInId) {
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
@@ -215,6 +228,7 @@
 }
 
 DrmInfo* DrmManager::acquireDrmInfo(int uniqueId, const DrmInfoRequest* drmInfoRequest) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInId(drmInfoRequest->getMimeType());
     if (EMPTY_STRING != plugInId) {
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
@@ -225,6 +239,7 @@
 
 status_t DrmManager::saveRights(int uniqueId, const DrmRights& drmRights,
             const String8& rightsPath, const String8& contentPath) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInId(drmRights.getMimeType());
     status_t result = DRM_ERROR_UNKNOWN;
     if (EMPTY_STRING != plugInId) {
@@ -235,6 +250,7 @@
 }
 
 String8 DrmManager::getOriginalMimeType(int uniqueId, const String8& path) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, path);
     if (EMPTY_STRING != plugInId) {
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
@@ -244,6 +260,7 @@
 }
 
 int DrmManager::getDrmObjectType(int uniqueId, const String8& path, const String8& mimeType) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInId(uniqueId, path, mimeType);
     if (EMPTY_STRING != plugInId) {
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
@@ -253,6 +270,7 @@
 }
 
 int DrmManager::checkRightsStatus(int uniqueId, const String8& path, int action) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, path);
     if (EMPTY_STRING != plugInId) {
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
@@ -264,6 +282,7 @@
 status_t DrmManager::consumeRights(
     int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve) {
     status_t result = DRM_ERROR_UNKNOWN;
+    Mutex::Autolock _l(mDecryptLock);
     if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
         IDrmEngine* drmEngine = mDecryptSessionMap.valueFor(decryptHandle->decryptId);
         result = drmEngine->consumeRights(uniqueId, decryptHandle, action, reserve);
@@ -272,8 +291,9 @@
 }
 
 status_t DrmManager::setPlaybackStatus(
-    int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+    int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     status_t result = DRM_ERROR_UNKNOWN;
+    Mutex::Autolock _l(mDecryptLock);
     if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
         IDrmEngine* drmEngine = mDecryptSessionMap.valueFor(decryptHandle->decryptId);
         result = drmEngine->setPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position);
@@ -283,6 +303,7 @@
 
 bool DrmManager::validateAction(
     int uniqueId, const String8& path, int action, const ActionDescription& description) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, path);
     if (EMPTY_STRING != plugInId) {
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
@@ -292,6 +313,7 @@
 }
 
 status_t DrmManager::removeRights(int uniqueId, const String8& path) {
+    Mutex::Autolock _l(mLock);
     const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, path);
     status_t result = DRM_ERROR_UNKNOWN;
     if (EMPTY_STRING != plugInId) {
@@ -315,6 +337,7 @@
 }
 
 int DrmManager::openConvertSession(int uniqueId, const String8& mimeType) {
+    Mutex::Autolock _l(mConvertLock);
     int convertId = -1;
 
     const String8 plugInId = getSupportedPlugInId(mimeType);
@@ -322,7 +345,6 @@
         IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
 
         if (DRM_NO_ERROR == rDrmEngine.openConvertSession(uniqueId, mConvertId + 1)) {
-            Mutex::Autolock _l(mConvertLock);
             ++mConvertId;
             convertId = mConvertId;
             mConvertSessionMap.add(convertId, &rDrmEngine);
@@ -335,6 +357,7 @@
             int uniqueId, int convertId, const DrmBuffer* inputData) {
     DrmConvertedStatus *drmConvertedStatus = NULL;
 
+    Mutex::Autolock _l(mConvertLock);
     if (mConvertSessionMap.indexOfKey(convertId) != NAME_NOT_FOUND) {
         IDrmEngine* drmEngine = mConvertSessionMap.valueFor(convertId);
         drmConvertedStatus = drmEngine->convertData(uniqueId, convertId, inputData);
@@ -343,6 +366,7 @@
 }
 
 DrmConvertedStatus* DrmManager::closeConvertSession(int uniqueId, int convertId) {
+    Mutex::Autolock _l(mConvertLock);
     DrmConvertedStatus *drmConvertedStatus = NULL;
 
     if (mConvertSessionMap.indexOfKey(convertId) != NAME_NOT_FOUND) {
@@ -355,6 +379,7 @@
 
 status_t DrmManager::getAllSupportInfo(
                     int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray) {
+    Mutex::Autolock _l(mLock);
     Vector<String8> plugInPathList = mPlugInManager.getPlugInIdList();
     int size = plugInPathList.size();
     int validPlugins = 0;
@@ -384,7 +409,7 @@
     return DRM_NO_ERROR;
 }
 
-DecryptHandle* DrmManager::openDecryptSession(int uniqueId, int fd, int offset, int length) {
+DecryptHandle* DrmManager::openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length) {
     Mutex::Autolock _l(mDecryptLock);
     status_t result = DRM_ERROR_CANNOT_HANDLE;
     Vector<String8> plugInIdList = mPlugInManager.getPlugInIdList();
@@ -407,7 +432,6 @@
     }
     if (DRM_NO_ERROR != result) {
         delete handle; handle = NULL;
-        LOGE("DrmManager::openDecryptSession: no capable plug-in found");
     }
     return handle;
 }
@@ -435,7 +459,7 @@
     }
     if (DRM_NO_ERROR != result) {
         delete handle; handle = NULL;
-        LOGE("DrmManager::openDecryptSession: no capable plug-in found");
+        LOGV("DrmManager::openDecryptSession: no capable plug-in found");
     }
     return handle;
 }
@@ -456,6 +480,7 @@
 status_t DrmManager::initializeDecryptUnit(
     int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId, const DrmBuffer* headerInfo) {
     status_t result = DRM_ERROR_UNKNOWN;
+    Mutex::Autolock _l(mDecryptLock);
     if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
         IDrmEngine* drmEngine = mDecryptSessionMap.valueFor(decryptHandle->decryptId);
         result = drmEngine->initializeDecryptUnit(uniqueId, decryptHandle, decryptUnitId, headerInfo);
@@ -466,6 +491,8 @@
 status_t DrmManager::decrypt(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId,
             const DrmBuffer* encBuffer, DrmBuffer** decBuffer, DrmBuffer* IV) {
     status_t result = DRM_ERROR_UNKNOWN;
+
+    Mutex::Autolock _l(mDecryptLock);
     if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
         IDrmEngine* drmEngine = mDecryptSessionMap.valueFor(decryptHandle->decryptId);
         result = drmEngine->decrypt(
@@ -477,6 +504,7 @@
 status_t DrmManager::finalizeDecryptUnit(
             int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId) {
     status_t result = DRM_ERROR_UNKNOWN;
+    Mutex::Autolock _l(mDecryptLock);
     if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
         IDrmEngine* drmEngine = mDecryptSessionMap.valueFor(decryptHandle->decryptId);
         result = drmEngine->finalizeDecryptUnit(uniqueId, decryptHandle, decryptUnitId);
@@ -485,9 +513,10 @@
 }
 
 ssize_t DrmManager::pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) {
+            void* buffer, ssize_t numBytes, off64_t offset) {
     ssize_t result = DECRYPT_FILE_ERROR;
 
+    Mutex::Autolock _l(mDecryptLock);
     if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
         IDrmEngine* drmEngine = mDecryptSessionMap.valueFor(decryptHandle->decryptId);
         result = drmEngine->pread(uniqueId, decryptHandle, buffer, numBytes, offset);
@@ -544,7 +573,7 @@
 }
 
 void DrmManager::onInfo(const DrmInfoEvent& event) {
-    Mutex::Autolock _l(mLock);
+    Mutex::Autolock _l(mListenerLock);
     for (unsigned int index = 0; index < mServiceListeners.size(); index++) {
         int uniqueId = mServiceListeners.keyAt(index);
 
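DrmManager now serializes access with per-resource locks rather than one global lock: mLock guards plug-in and rights state, mListenerLock guards the listener map (so onInfo() delivery does not contend with plug-in calls holding mLock), and mDecryptLock/mConvertLock guard the session maps. A minimal sketch of the idiom, with hypothetical class and member names, follows; it is not part of this change.

#include <utils/Mutex.h>
#include <utils/Vector.h>
#include <utils/KeyedVector.h>

namespace android {

// Illustrative only: each map gets its own lock, so event delivery and
// decrypt-session bookkeeping never block each other.
class SessionRegistry {
public:
    void addDecryptSession(int id, void* engine) {
        Mutex::Autolock _l(mDecryptLock);    // lock scope == block scope
        mDecryptSessions.add(id, engine);
    }

    void notifyListeners(int infoType) {
        Mutex::Autolock _l(mListenerLock);   // independent of mDecryptLock
        for (size_t i = 0; i < mListenerIds.size(); ++i) {
            // dispatch infoType to the listener registered under mListenerIds[i] ...
        }
    }

private:
    Mutex mDecryptLock;
    Mutex mListenerLock;
    KeyedVector<int, void*> mDecryptSessions;
    Vector<int> mListenerIds;
};

}  // namespace android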
diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp
index 4dcfa72..0901a44 100644
--- a/drm/drmserver/DrmManagerService.cpp
+++ b/drm/drmserver/DrmManagerService.cpp
@@ -162,7 +162,7 @@
 }
 
 status_t DrmManagerService::setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     LOGV("Entering setPlaybackStatus");
     return mDrmManager->setPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position);
 }
@@ -207,7 +207,7 @@
 }
 
 DecryptHandle* DrmManagerService::openDecryptSession(
-            int uniqueId, int fd, int offset, int length) {
+            int uniqueId, int fd, off64_t offset, off64_t length) {
     LOGV("Entering DrmManagerService::openDecryptSession");
     if (isProtectedCallAllowed()) {
         return mDrmManager->openDecryptSession(uniqueId, fd, offset, length);
@@ -251,7 +251,7 @@
 }
 
 ssize_t DrmManagerService::pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) {
+            void* buffer, ssize_t numBytes, off64_t offset) {
     LOGV("Entering pread");
     return mDrmManager->pread(uniqueId, decryptHandle, buffer, numBytes, offset);
 }
diff --git a/drm/libdrmframework/DrmManagerClient.cpp b/drm/libdrmframework/DrmManagerClient.cpp
index 8bb00c3..1d1e258 100644
--- a/drm/libdrmframework/DrmManagerClient.cpp
+++ b/drm/libdrmframework/DrmManagerClient.cpp
@@ -78,12 +78,11 @@
 }
 
 status_t DrmManagerClient::consumeRights(DecryptHandle* decryptHandle, int action, bool reserve) {
-    Mutex::Autolock _l(mDecryptLock);
     return mDrmManagerClientImpl->consumeRights(mUniqueId, decryptHandle, action, reserve);
 }
 
 status_t DrmManagerClient::setPlaybackStatus(
-            DecryptHandle* decryptHandle, int playbackStatus, int position) {
+            DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     return mDrmManagerClientImpl
             ->setPlaybackStatus(mUniqueId, decryptHandle, playbackStatus, position);
 }
@@ -117,7 +116,7 @@
     return mDrmManagerClientImpl->getAllSupportInfo(mUniqueId, length, drmSupportInfoArray);
 }
 
-DecryptHandle* DrmManagerClient::openDecryptSession(int fd, int offset, int length) {
+DecryptHandle* DrmManagerClient::openDecryptSession(int fd, off64_t offset, off64_t length) {
     return mDrmManagerClientImpl->openDecryptSession(mUniqueId, fd, offset, length);
 }
 
@@ -131,7 +130,6 @@
 
 status_t DrmManagerClient::initializeDecryptUnit(
             DecryptHandle* decryptHandle, int decryptUnitId, const DrmBuffer* headerInfo) {
-    Mutex::Autolock _l(mDecryptLock);
     return mDrmManagerClientImpl->initializeDecryptUnit(
             mUniqueId, decryptHandle, decryptUnitId, headerInfo);
 }
@@ -139,19 +137,16 @@
 status_t DrmManagerClient::decrypt(
     DecryptHandle* decryptHandle, int decryptUnitId,
     const DrmBuffer* encBuffer, DrmBuffer** decBuffer, DrmBuffer* IV) {
-    Mutex::Autolock _l(mDecryptLock);
     return mDrmManagerClientImpl->decrypt(
             mUniqueId, decryptHandle, decryptUnitId, encBuffer, decBuffer, IV);
 }
 
 status_t DrmManagerClient::finalizeDecryptUnit(DecryptHandle* decryptHandle, int decryptUnitId) {
-    Mutex::Autolock _l(mDecryptLock);
     return mDrmManagerClientImpl->finalizeDecryptUnit(mUniqueId, decryptHandle, decryptUnitId);
 }
 
 ssize_t DrmManagerClient::pread(
-            DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset) {
-    Mutex::Autolock _l(mDecryptLock);
+            DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset) {
     return mDrmManagerClientImpl->pread(mUniqueId, decryptHandle, buffer, numBytes, offset);
 }
 
diff --git a/drm/libdrmframework/DrmManagerClientImpl.cpp b/drm/libdrmframework/DrmManagerClientImpl.cpp
index eea312b..d20de92 100644
--- a/drm/libdrmframework/DrmManagerClientImpl.cpp
+++ b/drm/libdrmframework/DrmManagerClientImpl.cpp
@@ -179,7 +179,7 @@
 }
 
 status_t DrmManagerClientImpl::setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     status_t status = DRM_ERROR_UNKNOWN;
     if (NULL != decryptHandle) {
         status = getDrmManagerService()->setPlaybackStatus(
@@ -240,7 +240,7 @@
 }
 
 DecryptHandle* DrmManagerClientImpl::openDecryptSession(
-            int uniqueId, int fd, int offset, int length) {
+            int uniqueId, int fd, off64_t offset, off64_t length) {
     return getDrmManagerService()->openDecryptSession(uniqueId, fd, offset, length);
 }
 
@@ -292,7 +292,7 @@
 }
 
 ssize_t DrmManagerClientImpl::pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) {
+            void* buffer, ssize_t numBytes, off64_t offset) {
     ssize_t retCode = INVALID_VALUE;
     if ((NULL != decryptHandle) && (NULL != buffer) && (0 < numBytes)) {
         retCode = getDrmManagerService()->pread(uniqueId, decryptHandle, buffer, numBytes, offset);
diff --git a/drm/libdrmframework/include/DrmIOService.h b/drm/libdrmframework/include/DrmIOService.h
deleted file mode 100644
index 244124e..0000000
--- a/drm/libdrmframework/include/DrmIOService.h
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __DRM_IO_SERVICE_H__
-#define __DRM_IO_SERVICE_H__
-
-#include "IDrmIOService.h"
-
-namespace android {
-
-/**
- * This is the implementation class for DRM IO service.
- *
- * The instance of this class is created while starting the DRM IO service.
- *
- */
-class DrmIOService : public BnDrmIOService {
-public:
-    static void instantiate();
-
-private:
-    DrmIOService();
-    virtual ~DrmIOService();
-
-public:
-    void writeToFile(const String8& filePath, const String8& dataBuffer);
-    String8 readFromFile(const String8& filePath);
-};
-
-};
-
-#endif /* __DRM_IO_SERVICE_H__ */
-
diff --git a/drm/libdrmframework/include/DrmManager.h b/drm/libdrmframework/include/DrmManager.h
index bc462c2..c7276f9 100644
--- a/drm/libdrmframework/include/DrmManager.h
+++ b/drm/libdrmframework/include/DrmManager.h
@@ -95,7 +95,7 @@
     status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     bool validateAction(
             int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -112,7 +112,7 @@
 
     status_t getAllSupportInfo(int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray);
 
-    DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+    DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
 
     DecryptHandle* openDecryptSession(int uniqueId, const char* uri);
 
@@ -127,7 +127,7 @@
     status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
     void onInfo(const DrmInfoEvent& event);
 
@@ -147,6 +147,7 @@
     int mDecryptSessionId;
     int mConvertId;
     Mutex mLock;
+    Mutex mListenerLock;
     Mutex mDecryptLock;
     Mutex mConvertLock;
     TPlugInManager<IDrmEngine> mPlugInManager;
diff --git a/drm/libdrmframework/include/DrmManagerClientImpl.h b/drm/libdrmframework/include/DrmManagerClientImpl.h
index ff84fc7..0a7fcd1 100644
--- a/drm/libdrmframework/include/DrmManagerClientImpl.h
+++ b/drm/libdrmframework/include/DrmManagerClientImpl.h
@@ -203,7 +203,7 @@
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
      */
     status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     /**
      * Validates whether an action on the DRM content is allowed or not.
@@ -303,7 +303,7 @@
      * @return
      *     Handle for the decryption session
      */
-    DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+    DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
 
     /**
      * Open the decrypt session to decrypt the given protected content
@@ -381,7 +381,7 @@
      * @return Number of bytes read. Returns -1 for Failure.
      */
     ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
     /**
      * Notify the event to the registered listener
diff --git a/drm/libdrmframework/include/DrmManagerService.h b/drm/libdrmframework/include/DrmManagerService.h
index f346356..d0a0db7 100644
--- a/drm/libdrmframework/include/DrmManagerService.h
+++ b/drm/libdrmframework/include/DrmManagerService.h
@@ -81,7 +81,7 @@
     status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     bool validateAction(int uniqueId, const String8& path,
             int action, const ActionDescription& description);
@@ -98,7 +98,7 @@
 
     status_t getAllSupportInfo(int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray);
 
-    DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+    DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
 
     DecryptHandle* openDecryptSession(int uniqueId, const char* uri);
 
@@ -113,7 +113,7 @@
     status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
 private:
     DrmManager* mDrmManager;
diff --git a/drm/libdrmframework/include/IDrmIOService.h b/drm/libdrmframework/include/IDrmIOService.h
deleted file mode 100644
index 5e0d907..0000000
--- a/drm/libdrmframework/include/IDrmIOService.h
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __IDRM_IO_SERVICE_H__
-#define __IDRM_IO_SERVICE_H__
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-
-namespace android {
-
-/**
- * This is the interface class for DRM IO service.
- *
- */
-class IDrmIOService : public IInterface
-{
-public:
-    enum {
-        WRITE_TO_FILE = IBinder::FIRST_CALL_TRANSACTION,
-        READ_FROM_FILE
-    };
-
-public:
-    DECLARE_META_INTERFACE(DrmIOService);
-
-public:
-    /**
-     * Writes the data into the file path provided
-     *
-     * @param[in] filePath Path of the file
-     * @param[in] dataBuffer Data to write
-     */
-    virtual void writeToFile(const String8& filePath, const String8& dataBuffer) = 0;
-
-    /**
-     * Reads the data from the file path provided
-     *
-     * @param[in] filePath Path of the file
-     * @return Data read from the file
-     */
-    virtual String8 readFromFile(const String8& filePath) = 0;
-};
-
-/**
- * This is the Binder implementation class for DRM IO service.
- */
-class BpDrmIOService: public BpInterface<IDrmIOService>
-{
-public:
-    BpDrmIOService(const sp<IBinder>& impl)
-            : BpInterface<IDrmIOService>(impl) {}
-
-    virtual void writeToFile(const String8& filePath, const String8& dataBuffer);
-
-    virtual String8 readFromFile(const String8& filePath);
-};
-
-/**
- * This is the Binder implementation class for DRM IO service.
- */
-class BnDrmIOService: public BnInterface<IDrmIOService>
-{
-public:
-    virtual status_t onTransact(
-            uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags = 0);
-};
-
-};
-
-#endif /* __IDRM_IO_SERVICE_H__ */
-
diff --git a/drm/libdrmframework/include/IDrmManagerService.h b/drm/libdrmframework/include/IDrmManagerService.h
index f1dabd3..2424ea5 100644
--- a/drm/libdrmframework/include/IDrmManagerService.h
+++ b/drm/libdrmframework/include/IDrmManagerService.h
@@ -120,7 +120,7 @@
             int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve) = 0;
 
     virtual status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) = 0;
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) = 0;
 
     virtual bool validateAction(
             int uniqueId, const String8& path,
@@ -140,7 +140,7 @@
     virtual status_t getAllSupportInfo(
             int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray) = 0;
 
-    virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length) = 0;
+    virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length) = 0;
 
     virtual DecryptHandle* openDecryptSession(int uniqueId, const char* uri) = 0;
 
@@ -156,7 +156,7 @@
             int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId) = 0;
 
     virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes,off_t offset) = 0;
+            void* buffer, ssize_t numBytes,off64_t offset) = 0;
 };
 
 /**
@@ -204,7 +204,7 @@
             int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     virtual status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     virtual bool validateAction(
             int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -223,7 +223,7 @@
     virtual status_t getAllSupportInfo(
             int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray);
 
-    virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+    virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
 
     virtual DecryptHandle* openDecryptSession(int uniqueId, const char* uri);
 
@@ -239,7 +239,7 @@
             int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 };
 
 /**
diff --git a/drm/libdrmframework/plugins/common/include/DrmEngineBase.h b/drm/libdrmframework/plugins/common/include/DrmEngineBase.h
index 67b6355..b61e3d3 100644
--- a/drm/libdrmframework/plugins/common/include/DrmEngineBase.h
+++ b/drm/libdrmframework/plugins/common/include/DrmEngineBase.h
@@ -62,7 +62,7 @@
     status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     bool validateAction(
             int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -80,7 +80,7 @@
     DrmSupportInfo* getSupportInfo(int uniqueId);
 
     status_t openDecryptSession(
-            int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length);
+            int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length);
 
     status_t openDecryptSession(
             int uniqueId, DecryptHandle* decryptHandle, const char* uri);
@@ -96,7 +96,7 @@
     status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
 protected:
     /////////////////////////////////////////////////////
@@ -268,7 +268,7 @@
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
      */
     virtual status_t onSetPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) = 0;
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) = 0;
 
     /**
      * Validates whether an action on the DRM content is allowed or not.
@@ -369,7 +369,7 @@
      *     DRM_ERROR_CANNOT_HANDLE for failure and DRM_NO_ERROR for success
      */
     virtual status_t onOpenDecryptSession(
-            int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) = 0;
+            int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) = 0;
 
     /**
      * Open the decrypt session to decrypt the given protected content
@@ -450,7 +450,7 @@
      * @return Number of bytes read. Returns -1 for Failure.
      */
     virtual ssize_t onPread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) = 0;
+            void* buffer, ssize_t numBytes, off64_t offset) = 0;
 };
 
 };
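With the engine-facing API above taking off64_t, a plug-in's read path can pass the offset straight through to a 64-bit-clean syscall. A minimal sketch of such an onPread() helper follows, with a hypothetical session-fd argument; it is not part of this change.

#include <unistd.h>
#include <sys/types.h>

// Illustrative only: the offset arrives as off64_t and is handed to pread64(),
// which accepts offsets beyond 2 GiB and leaves the file position untouched.
static ssize_t readClearBytes(int sessionFd, void* buffer,
                              ssize_t numBytes, off64_t offset) {
    if (sessionFd < 0 || buffer == NULL || numBytes <= 0) {
        return -1;
    }
    return pread64(sessionFd, buffer, numBytes, offset);
}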
diff --git a/drm/libdrmframework/plugins/common/include/IDrmEngine.h b/drm/libdrmframework/plugins/common/include/IDrmEngine.h
index f839070..d05c24f 100644
--- a/drm/libdrmframework/plugins/common/include/IDrmEngine.h
+++ b/drm/libdrmframework/plugins/common/include/IDrmEngine.h
@@ -224,7 +224,7 @@
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
      */
     virtual status_t setPlaybackStatus(int uniqueId, DecryptHandle* decryptHandle,
-            int playbackStatus, int position) = 0;
+            int playbackStatus, int64_t position) = 0;
 
     /**
      * Validates whether an action on the DRM content is allowed or not.
@@ -325,7 +325,7 @@
      *     DRM_ERROR_CANNOT_HANDLE for failure and DRM_NO_ERROR for success
      */
     virtual status_t openDecryptSession(
-        int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) = 0;
+        int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) = 0;
 
     /**
      * Open the decrypt session to decrypt the given protected content
@@ -406,7 +406,7 @@
      * @return Number of bytes read. Returns -1 for Failure.
      */
     virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) = 0;
+            void* buffer, ssize_t numBytes, off64_t offset) = 0;
 };
 
 };
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.mk b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.mk
index d4a6f18..af67aa3 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.mk
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.mk
@@ -60,7 +60,7 @@
     $(LOCAL_PATH)/include \
     external/openssl/include
 
-LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/drm/plugins/native
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/drm
 
 LOCAL_MODULE_TAGS := optional
 
diff --git a/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h b/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h
index bbcd9ed..f941f70 100644
--- a/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h
+++ b/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h
@@ -56,7 +56,7 @@
     status_t onConsumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     status_t onSetPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     bool onValidateAction(
             int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -74,7 +74,7 @@
     DrmSupportInfo* onGetSupportInfo(int uniqueId);
 
     status_t onOpenDecryptSession(
-            int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length);
+            int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length);
 
     status_t onOpenDecryptSession(
             int uniqueId, DecryptHandle* decryptHandle, const char* uri);
@@ -90,7 +90,7 @@
     status_t onFinalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     ssize_t onPread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
 private:
     DecryptHandle* openDecryptSessionImpl();
diff --git a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp
index dee1fdb..976978f 100644
--- a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp
+++ b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp
@@ -187,7 +187,7 @@
 }
 
 status_t DrmPassthruPlugIn::onSetPlaybackStatus(int uniqueId, DecryptHandle* decryptHandle,
-            int playbackStatus, int position) {
+            int playbackStatus, int64_t position) {
     LOGD("DrmPassthruPlugIn::onSetPlaybackStatus() : %d", uniqueId);
     return DRM_NO_ERROR;
 }
@@ -234,7 +234,7 @@
 }
 
 status_t DrmPassthruPlugIn::onOpenDecryptSession(
-            int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) {
+            int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) {
     LOGD("DrmPassthruPlugIn::onOpenDecryptSession() : %d", uniqueId);
 
 #ifdef ENABLE_PASSTHRU_DECRYPTION
@@ -292,7 +292,7 @@
 }
 
 ssize_t DrmPassthruPlugIn::onPread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) {
+            void* buffer, ssize_t numBytes, off64_t offset) {
     LOGD("DrmPassthruPlugIn::onPread() : %d", uniqueId);
     return 0;
 }
diff --git a/include/camera/Camera.h b/include/camera/Camera.h
index e734c38..f3c8f64 100644
--- a/include/camera/Camera.h
+++ b/include/camera/Camera.h
@@ -19,11 +19,10 @@
 
 #include <utils/Timers.h>
 #include <camera/ICameraClient.h>
+#include <gui/ISurfaceTexture.h>
 
 namespace android {
 
-class ISurface;
-
 /*
  * A set of bit masks for specifying how the received preview frames are
  * handled before the previewCallback() call.
@@ -67,16 +66,17 @@
 
 // msgType in notifyCallback and dataCallback functions
 enum {
-    CAMERA_MSG_ERROR            = 0x001,
-    CAMERA_MSG_SHUTTER          = 0x002,
-    CAMERA_MSG_FOCUS            = 0x004,
-    CAMERA_MSG_ZOOM             = 0x008,
-    CAMERA_MSG_PREVIEW_FRAME    = 0x010,
-    CAMERA_MSG_VIDEO_FRAME      = 0x020,
-    CAMERA_MSG_POSTVIEW_FRAME   = 0x040,
-    CAMERA_MSG_RAW_IMAGE        = 0x080,
-    CAMERA_MSG_COMPRESSED_IMAGE = 0x100,
-    CAMERA_MSG_ALL_MSGS         = 0x1FF
+    CAMERA_MSG_ERROR            = 0x0001,
+    CAMERA_MSG_SHUTTER          = 0x0002,
+    CAMERA_MSG_FOCUS            = 0x0004,
+    CAMERA_MSG_ZOOM             = 0x0008,
+    CAMERA_MSG_PREVIEW_FRAME    = 0x0010,
+    CAMERA_MSG_VIDEO_FRAME      = 0x0020,
+    CAMERA_MSG_POSTVIEW_FRAME   = 0x0040,
+    CAMERA_MSG_RAW_IMAGE        = 0x0080,
+    CAMERA_MSG_COMPRESSED_IMAGE = 0x0100,
+    CAMERA_MSG_RAW_IMAGE_NOTIFY = 0x0200,
+    CAMERA_MSG_ALL_MSGS         = 0xFFFF
 };
 
 // cmdType in sendCommand functions
@@ -96,11 +96,19 @@
     // or CAMERA_MSG_COMPRESSED_IMAGE. This is not allowed to be set during
     // preview.
     CAMERA_CMD_SET_DISPLAY_ORIENTATION = 3,
+
+    // cmdType to disable/enable shutter sound.
+    // In sendCommand passing arg1 = 0 will disable,
+    // while passing arg1 = 1 will enable the shutter sound.
+    CAMERA_CMD_ENABLE_SHUTTER_SOUND = 4,
+
+    // cmdType to play recording sound.
+    CAMERA_CMD_PLAY_RECORDING_SOUND = 5,
 };
 
 // camera fatal errors
 enum {
-    CAMERA_ERROR_UKNOWN  = 1,
+    CAMERA_ERROR_UNKNOWN  = 1,
     CAMERA_ERROR_SERVER_DIED = 100
 };
 
@@ -166,9 +174,11 @@
 
             status_t    getStatus() { return mStatus; }
 
-            // pass the buffered ISurface to the camera service
+            // pass the buffered Surface to the camera service
             status_t    setPreviewDisplay(const sp<Surface>& surface);
-            status_t    setPreviewDisplay(const sp<ISurface>& surface);
+
+            // pass the buffered ISurfaceTexture to the camera service
+            status_t    setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture);
 
             // start preview mode, must call setPreviewDisplay first
             status_t    startPreview();
@@ -198,7 +208,7 @@
             status_t    cancelAutoFocus();
 
             // take a picture - picture returned from callback
-            status_t    takePicture();
+            status_t    takePicture(int msgType);
 
             // set preview/capture parameters - key/value pairs
             status_t    setParameters(const String8& params);
@@ -209,6 +219,15 @@
             // send command to camera driver
             status_t    sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
 
+            // return the total number of available video buffers.
+            int32_t     getNumberOfVideoBuffers() const;
+
+            // return the individual video buffer corresponding to the given index.
+            sp<IMemory> getVideoBuffer(int32_t index) const;
+
+            // tell camera hal to store meta data or real YUV in video buffers.
+            status_t    storeMetaDataInBuffers(bool enabled);
+
             void        setListener(const sp<CameraListener>& listener);
             void        setPreviewCallbackFlags(int preview_callback_flag);
 
diff --git a/include/camera/CameraHardwareInterface.h b/include/camera/CameraHardwareInterface.h
index 35c5aa1..86bd849 100644
--- a/include/camera/CameraHardwareInterface.h
+++ b/include/camera/CameraHardwareInterface.h
@@ -18,15 +18,16 @@
 #define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
 
 #include <binder/IMemory.h>
+#include <ui/egl/android_natives.h>
 #include <utils/RefBase.h>
 #include <surfaceflinger/ISurface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBuffer.h>
 #include <camera/Camera.h>
 #include <camera/CameraParameters.h>
 
 namespace android {
 
-class Overlay;
-
 /**
  *  The size of image for display.
  */
@@ -86,8 +87,8 @@
 public:
     virtual ~CameraHardwareInterface() { }
 
-    /** Return the IMemoryHeap for the preview image heap */
-    virtual sp<IMemoryHeap>         getPreviewHeap() const = 0;
+    /** Set the ANativeWindow to which preview frames are sent */
+    virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) = 0;
 
     /** Return the IMemoryHeap for the raw image heap */
     virtual sp<IMemoryHeap>         getRawHeap() const = 0;
@@ -111,6 +112,13 @@
 
     /**
      * Disable a message, or a set of messages.
+     *
+     * Once it receives a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), the
+     * camera hal should not rely on its client to call releaseRecordingFrame()
+     * to release video recording frames sent out by the camera hal before and
+     * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera hal clients
+     * must not modify/access any video recording frame after calling
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
      */
     virtual void        disableMsgType(int32_t msgType) = 0;
 
@@ -127,12 +135,6 @@
     virtual status_t    startPreview() = 0;
 
     /**
-     * Only used if overlays are used for camera preview.
-     */
-    virtual bool         useOverlay() {return false;}
-    virtual status_t     setOverlay(const sp<Overlay> &overlay) {return BAD_VALUE;}
-
-    /**
      * Stop a previously started preview.
      */
     virtual void        stopPreview() = 0;
@@ -143,9 +145,89 @@
     virtual bool        previewEnabled() = 0;
 
     /**
+     * Retrieve the total number of available buffers from camera hal for passing
+     * video frame data in a recording session. Must be called again if a new
+     * recording session is started.
+     *
+     * This method should be called after startRecording(), since some
+     * camera hals may choose to allocate the video buffers only after
+     * recording is started.
+     *
+     * Some camera hal may not implement this method, and 0 can be returned to
+     * indicate that this feature is not available.
+     *
+     * @return the number of video buffers that camera hal makes available.
+     *      Zero (0) is returned to indicate that camera hal does not support
+     *      this feature.
+     */
+    virtual int32_t     getNumberOfVideoBuffers() const { return 0; }
+
+    /**
+     * Retrieve the video buffer corresponding to the given index in a
+     * recording session. Must be called again if a new recording session
+     * is started.
+     *
+     * It allows a client to retrieve all video buffers that the camera hal makes
+     * available for passing video frame data by calling this method with all
+     * valid index values. The valid index value ranges from 0 to n, where
+     * n = getNumberOfVideoBuffers() - 1. With an index outside of the valid
+     * range, 0 must be returned. This method should be called after
+     * startRecording().
+     *
+     * The video buffers should NOT be modified/released by camera hal
+     * until stopRecording() is called and all outstanding video buffers
+     * previously sent out via CAMERA_MSG_VIDEO_FRAME have been released
+     * via releaseVideoBuffer().
+     *
+     * @param index an index to retrieve the corresponding video buffer.
+     *
+     * @return the video buffer corresponding to the given index.
+     */
+    virtual sp<IMemory> getVideoBuffer(int32_t index) const { return 0; }
+
+    /**
+     * Request the camera hal to store meta data or real YUV data in
+     * the video buffers sent out via CAMERA_MSG_VIDEO_FRAME for a
+     * recording session. If it is not called, the default camera
+     * hal behavior is to store real YUV data in the video buffers.
+     *
+     * This method should be called before startRecording() in order
+     * to be effective.
+     *
+     * If meta data is stored in the video buffers, it is up to the
+     * receiver of the video buffers to interpret the contents and
+     * to find the actual frame data with the help of the meta data
+     * in the buffer. How this is done is outside of the scope of
+     * this method.
+     *
+     * Some camera hals may not support storing meta data in the video
+     * buffers, but all camera hals should support storing real YUV data
+     * in the video buffers. If the camera hal does not support storing
+     * the meta data in the video buffers when it is requested to do
+     * so, INVALID_OPERATION must be returned. It is very useful for
+     * the camera hal to pass meta data rather than the actual frame
+     * data directly to the video encoder, since the amount of the
+     * uncompressed frame data can be very large if video size is large.
+     *
+     * @param enable true to instruct the camera hal to store
+     *      meta data in the video buffers; false to instruct
+     *      the camera hal to store real YUV data in the video
+     *      buffers.
+     *
+     * @return OK on success.
+     */
+    virtual status_t    storeMetaDataInBuffers(bool enable) {
+                            return enable? INVALID_OPERATION: OK;
+                        }
+
+    /**
      * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
      * message is sent with the corresponding frame. Every record frame must be released
-     * by calling releaseRecordingFrame().
+     * by a camera hal client via releaseRecordingFrame() before the client calls
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility
+     * to manage the life-cycle of the video recording frames, and the client must
+     * not modify/access any video recording frames.
      */
     virtual status_t    startRecording() = 0;
 
@@ -161,6 +243,13 @@
 
     /**
      * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+     *
+     * It is camera hal client's responsibility to release video recording
+     * frames sent out by the camera hal before the camera hal receives
+     * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
+     * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
+     * responsibility of managing the life-cycle of the video recording
+     * frames.
      */
     virtual void        releaseRecordingFrame(const sp<IMemory>& mem) = 0;
 
diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h
index 4e770fd..da2f049 100644
--- a/include/camera/CameraParameters.h
+++ b/include/camera/CameraParameters.h
@@ -59,6 +59,35 @@
     void setPreviewSize(int width, int height);
     void getPreviewSize(int *width, int *height) const;
     void getSupportedPreviewSizes(Vector<Size> &sizes) const;
+
+    // Set the dimensions (width and height) in pixels for video
+    // frames. The given width and height must be one of the
+    // supported dimensions returned from getSupportedVideoSizes().
+    // Must not be called if getSupportedVideoSizes() returns an
+    // empty Vector of Size.
+    void setVideoSize(int width, int height);
+    // Retrieve the current dimensions (width and height)
+    // in pixels for video frames, which must be one of the
+    // supported dimensions returned from getSupportedVideoSizes().
+    // Must not be called if getSupportedVideoSizes() returns an
+    // empty Vector of Size.
+    void getVideoSize(int *width, int *height) const;
+    // Retrieve a Vector of supported dimensions (width and height)
+    // in pixels for video frames. If the returned Vector is empty,
+    // the camera does not support calls to setVideoSize() or
+    // getVideoSize(). In addition, it indicates that the camera
+    // has only a single output, rather than separate outputs for
+    // video frames and preview frames.
+    void getSupportedVideoSizes(Vector<Size> &sizes) const;
+    // Retrieve the preferred preview size (width and height) in pixels
+    // for video recording. The returned width and height must be one of
+    // the supported preview sizes returned from getSupportedPreviewSizes().
+    // Must not be called if getSupportedVideoSizes() returns an empty
+    // Vector of Size. If getSupportedVideoSizes() returns an empty
+    // Vector of Size, the width and height returned from this method
+    // are invalid ("-1x-1").
+    void getPreferredPreviewSizeForVideo(int *width, int *height) const;
+
     void setPreviewFrameRate(int fps);
     int getPreviewFrameRate() const;
     void getPreviewFpsRange(int *min_fps, int *max_fps) const;
@@ -288,6 +317,31 @@
     // Example value: "0.95,1.9,Infinity" or "0.049,0.05,0.051". Read only.
     static const char KEY_FOCUS_DISTANCES[];
 
+    // The current dimensions in pixels (width x height) for video frames.
+    // The width and height must be one of the supported sizes retrieved
+    // via KEY_SUPPORTED_VIDEO_SIZES.
+    // Example value: "1280x720". Read/write.
+    static const char KEY_VIDEO_SIZE[];
+    // A list of the supported dimensions in pixels (width x height)
+    // for video frames. See CAMERA_MSG_VIDEO_FRAME for details in
+    // frameworks/base/include/camera/Camera.h.
+    // Example: "176x144,1280x720". Read only.
+    static const char KEY_SUPPORTED_VIDEO_SIZES[];
+
+    // Preferred preview frame size in pixels for video recording.
+    // The width and height must be one of the supported sizes retrieved
+    // via KEY_SUPPORTED_PREVIEW_SIZES. This key can be used only when
+    // getSupportedVideoSizes() does not return an empty Vector of Size.
+    // Camcorder applications are recommended to set the preview size
+    // to a value that is not larger than the preferred preview size.
+    // In other words, the product of the width and height of the
+    // preview size should not be larger than that of the preferred
+    // preview size. In addition, we recommend choosing a preview size
+    // that has the same aspect ratio as the resolution of the video to
+    // be recorded.
+    // Example value: "800x600". Read only.
+    static const char KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[];
+
     // The image format for video frames. See CAMERA_MSG_VIDEO_FRAME in
     // frameworks/base/include/camera/Camera.h.
     // Example value: "yuv420sp" or PIXEL_FORMAT_XXX constants. Read only.
@@ -361,10 +415,12 @@
     // for barcode reading.
     static const char SCENE_MODE_BARCODE[];
 
-    // Formats for setPreviewFormat and setPictureFormat.
+    // Pixel color formats for KEY_PREVIEW_FORMAT, KEY_PICTURE_FORMAT,
+    // and KEY_VIDEO_FRAME_FORMAT
     static const char PIXEL_FORMAT_YUV422SP[];
     static const char PIXEL_FORMAT_YUV420SP[]; // NV21
     static const char PIXEL_FORMAT_YUV422I[]; // YUY2
+    static const char PIXEL_FORMAT_YUV420P[]; // YV12
     static const char PIXEL_FORMAT_RGB565[];
     static const char PIXEL_FORMAT_JPEG[];
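
A minimal sketch (not part of the patch) of how an application might use the video-size calls and keys declared above. It assumes "camera" is an already-connected sp<Camera>; the 1280x720 choice is illustrative and must come from the supported list.

    CameraParameters params(camera->getParameters());
    Vector<Size> videoSizes;
    params.getSupportedVideoSizes(videoSizes);
    if (!videoSizes.isEmpty()) {
        // Separate video and preview outputs are available.
        params.setVideoSize(1280, 720);              // must be one of videoSizes
        int width, height;
        params.getPreferredPreviewSizeForVideo(&width, &height);
        params.setPreviewSize(width, height);        // keep preview <= preferred size
    }
    camera->setParameters(params.flatten());
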
 
diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h
index 6fcf9e5..2344b3f 100644
--- a/include/camera/ICamera.h
+++ b/include/camera/ICamera.h
@@ -20,10 +20,11 @@
 #include <utils/RefBase.h>
 #include <binder/IInterface.h>
 #include <binder/Parcel.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
 #include <binder/IMemory.h>
 #include <utils/String8.h>
 #include <camera/Camera.h>
+#include <gui/ISurfaceTexture.h>
 
 namespace android {
 
@@ -45,8 +46,12 @@
     // allow other processes to use this ICamera interface
     virtual status_t        unlock() = 0;
 
-    // pass the buffered ISurface to the camera service
-    virtual status_t        setPreviewDisplay(const sp<ISurface>& surface) = 0;
+    // pass the buffered Surface to the camera service
+    virtual status_t        setPreviewDisplay(const sp<Surface>& surface) = 0;
+
+    // pass the buffered ISurfaceTexture to the camera service
+    virtual status_t        setPreviewTexture(
+            const sp<ISurfaceTexture>& surfaceTexture) = 0;
 
     // set the preview callback flag to affect how the received frames from
     // preview are handled.
@@ -65,7 +70,7 @@
     virtual status_t        startRecording() = 0;
 
     // stop recording mode
-    virtual void            stopRecording() = 0;    
+    virtual void            stopRecording() = 0;
 
     // get recording state
     virtual bool            recordingEnabled() = 0;
@@ -79,8 +84,14 @@
     // cancel auto focus
     virtual status_t        cancelAutoFocus() = 0;
 
-    // take a picture
-    virtual status_t        takePicture() = 0;
+    /*
+     * take a picture.
+     * @param msgType the message types an application can selectively turn on/off
+     * on a photo-by-photo basis. The supported message types are:
+     * CAMERA_MSG_SHUTTER, CAMERA_MSG_RAW_IMAGE, CAMERA_MSG_COMPRESSED_IMAGE,
+     * and CAMERA_MSG_POSTVIEW_FRAME. Any other message types will be ignored.
+     */
+    virtual status_t        takePicture(int msgType) = 0;
 
     // set preview/capture parameters - key/value pairs
     virtual status_t        setParameters(const String8& params) = 0;
@@ -90,6 +101,15 @@
 
     // send command to camera driver
     virtual status_t        sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0;
+
+    // return the total number of available video buffers
+    virtual int32_t         getNumberOfVideoBuffers() const  = 0;
+
+    // return the individual video buffer corresponding to the given index.
+    virtual sp<IMemory>     getVideoBuffer(int32_t index) const = 0;
+
+    // tell the camera hal to store meta data or real YUV data in video buffers.
+    virtual status_t        storeMetaDataInBuffers(bool enabled) = 0;
 };
 
 // ----------------------------------------------------------------------------
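
A minimal sketch (not part of the patch) of the new per-shot message mask; "camera" is assumed to be a connected sp<Camera>, whose takePicture() wrapper (shown earlier in this patch) forwards the mask to ICamera::takePicture().

    // Request only the shutter and JPEG callbacks for this capture; the raw and
    // postview callbacks stay off for this shot.
    status_t err = camera->takePicture(
            CAMERA_MSG_SHUTTER | CAMERA_MSG_COMPRESSED_IMAGE);
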
diff --git a/include/drm/DrmManagerClient.h b/include/drm/DrmManagerClient.h
index 004556f..12142bc 100644
--- a/include/drm/DrmManagerClient.h
+++ b/include/drm/DrmManagerClient.h
@@ -69,7 +69,7 @@
      * @return
      *     Handle for the decryption session
      */
-    DecryptHandle* openDecryptSession(int fd, int offset, int length);
+    DecryptHandle* openDecryptSession(int fd, off64_t offset, off64_t length);
 
     /**
      * Open the decrypt session to decrypt the given protected content
@@ -113,7 +113,7 @@
      * @return status_t
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
      */
-    status_t setPlaybackStatus(DecryptHandle* decryptHandle, int playbackStatus, int position);
+    status_t setPlaybackStatus(DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     /**
      * Initialize decryption for the given unit of the protected content
@@ -167,7 +167,7 @@
      *
      * @return Number of bytes read. Returns -1 for Failure.
      */
-    ssize_t pread(DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset);
+    ssize_t pread(DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset);
 
     /**
      * Validates whether an action on the DRM content is allowed or not.
@@ -365,7 +365,6 @@
 
 private:
     int mUniqueId;
-    Mutex mDecryptLock;
     DrmManagerClientImpl* mDrmManagerClientImpl;
 };
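
A minimal sketch (not part of the patch) of why the offsets above widen to off64_t: protected files larger than 2 GB can now be addressed. "client", "fd" and "fileSize" are assumed to be set up elsewhere; error handling is omitted.

    DecryptHandle *handle = client.openDecryptSession(fd, 0 /* offset */, fileSize);
    if (handle != NULL) {
        char buffer[4096];
        off64_t offset = 3LL * 1024 * 1024 * 1024;   // a position past the 2 GB mark
        ssize_t numBytes = client.pread(handle, buffer, sizeof(buffer), offset);
        client.closeDecryptSession(handle);
    }
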
 
diff --git a/include/drm/drm_framework_common.h b/include/drm/drm_framework_common.h
index c5765a9..1758cdd 100644
--- a/include/drm/drm_framework_common.h
+++ b/include/drm/drm_framework_common.h
@@ -217,6 +217,10 @@
      * POSIX based Decrypt API set for container based DRM
      */
     static const int CONTAINER_BASED = 0x02;
+    /**
+     * Decrypt API for Widevine streams
+     */
+    static const int WV_BASED = 0x3;
 };
 
 /**
diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h
index c967efb..cda2be0 100644
--- a/include/media/AudioEffect.h
+++ b/include/media/AudioEffect.h
@@ -403,7 +403,7 @@
      static status_t guidToString(const effect_uuid_t *guid, char *str, size_t maxLen);
 
 protected:
-     volatile int32_t        mEnabled;           // enable state
+     bool                    mEnabled;           // enable state
      int32_t                 mSessionId;         // audio session ID
      int32_t                 mPriority;          // priority for effect control
      status_t                mStatus;            // effect status
@@ -412,6 +412,7 @@
      void*                   mUserData;          // client context for callback function
      effect_descriptor_t     mDescriptor;        // effect descriptor
      int32_t                 mId;                // system wide unique effect engine instance ID
+     Mutex                   mLock;               // Mutex for mEnabled access
 
 private:
 
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 38e3d44..293764d 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -346,17 +346,19 @@
     };
 
             bool processAudioBuffer(const sp<ClientRecordThread>& thread);
-            status_t openRecord(uint32_t sampleRate,
+            status_t openRecord_l(uint32_t sampleRate,
                                 int format,
                                 int channelCount,
                                 int frameCount,
                                 uint32_t flags,
                                 audio_io_handle_t input);
+            audio_io_handle_t getInput_l();
+            status_t restoreRecord_l(audio_track_cblk_t*& cblk);
 
     sp<IAudioRecord>        mAudioRecord;
     sp<IMemory>             mCblkMemory;
     sp<ClientRecordThread>  mClientRecordThread;
-    Mutex                   mRecordThreadLock;
+    Mutex                   mLock;
 
     uint32_t                mFrameCount;
 
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index e881747..2dc4beb 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -204,8 +204,9 @@
     // set audio mode in audio hardware (see AudioSystem::audio_mode)
     static status_t setMode(int mode);
 
-    // returns true in *state if tracks are active on the specified stream
-    static status_t isStreamActive(int stream, bool *state);
+    // returns true in *state if tracks are active on the specified stream or have been
+    // active in the past inPastMs milliseconds
+    static status_t isStreamActive(int stream, bool *state, uint32_t inPastMs = 0);
 
     // set/get audio hardware parameters. The function accepts a list of parameters
     // key value pairs in the form: key1=value1;key2=value2;...
@@ -263,11 +264,15 @@
         DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100,
         DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 0x200,
         DEVICE_OUT_AUX_DIGITAL = 0x400,
+        DEVICE_OUT_ANLG_DOCK_HEADSET = 0x800,
+        DEVICE_OUT_DGTL_DOCK_HEADSET = 0x1000,
         DEVICE_OUT_DEFAULT = 0x8000,
         DEVICE_OUT_ALL = (DEVICE_OUT_EARPIECE | DEVICE_OUT_SPEAKER | DEVICE_OUT_WIRED_HEADSET |
                 DEVICE_OUT_WIRED_HEADPHONE | DEVICE_OUT_BLUETOOTH_SCO | DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
                 DEVICE_OUT_BLUETOOTH_SCO_CARKIT | DEVICE_OUT_BLUETOOTH_A2DP | DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
-                DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER | DEVICE_OUT_AUX_DIGITAL | DEVICE_OUT_DEFAULT),
+                DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER | DEVICE_OUT_AUX_DIGITAL |
+                DEVICE_OUT_ANLG_DOCK_HEADSET | DEVICE_OUT_DGTL_DOCK_HEADSET |
+                DEVICE_OUT_DEFAULT),
         DEVICE_OUT_ALL_A2DP = (DEVICE_OUT_BLUETOOTH_A2DP | DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
                 DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
 
@@ -310,6 +315,8 @@
         FORCE_WIRED_ACCESSORY,
         FORCE_BT_CAR_DOCK,
         FORCE_BT_DESK_DOCK,
+        FORCE_ANALOG_DOCK,
+        FORCE_DIGITAL_DOCK,
         NUM_FORCE_CONFIG,
         FORCE_DEFAULT = FORCE_NONE
     };
@@ -385,6 +392,7 @@
     static status_t getStreamVolumeIndex(stream_type stream, int *index);
 
     static uint32_t getStrategyForStream(stream_type stream);
+    static uint32_t getDevicesForStream(stream_type stream);
 
     static audio_io_handle_t getOutputForEffect(effect_descriptor_t *desc);
     static status_t registerEffect(effect_descriptor_t *desc,
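
A minimal sketch (not part of the patch) of the new inPastMs argument; the 5000 ms window is illustrative.

    bool active = false;
    AudioSystem::isStreamActive(AudioSystem::MUSIC, &active, 5000 /* inPastMs */);
    if (active) {
        // The music stream is playing now or was playing within the last 5 seconds.
    }
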
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 4475d4a..3e346db 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -437,7 +437,7 @@
     };
 
             bool processAudioBuffer(const sp<AudioTrackThread>& thread);
-            status_t createTrack(int streamType,
+            status_t createTrack_l(int streamType,
                                  uint32_t sampleRate,
                                  int format,
                                  int channelCount,
@@ -446,6 +446,10 @@
                                  const sp<IMemory>& sharedBuffer,
                                  audio_io_handle_t output,
                                  bool enforceFrameCount);
+            void flush_l();
+            status_t setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount);
+            audio_io_handle_t getOutput_l();
+            status_t restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart);
 
     sp<IAudioTrack>         mAudioTrack;
     sp<IMemory>             mCblkMemory;
@@ -480,6 +484,7 @@
     uint32_t                mFlags;
     int                     mSessionId;
     int                     mAuxEffectId;
+    Mutex                   mLock;
 };
 
 
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 70e505e..589f7cd 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -102,9 +102,6 @@
     virtual     status_t    setMicMute(bool state) = 0;
     virtual     bool        getMicMute() const = 0;
 
-    // is any track active on this stream?
-    virtual     bool        isStreamActive(int stream) const = 0;
-
     virtual     status_t    setParameters(int ioHandle, const String8& keyValuePairs) = 0;
     virtual     String8     getParameters(int ioHandle, const String8& keys) = 0;
 
diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h
index 49eee59..720a562 100644
--- a/include/media/IAudioPolicyService.h
+++ b/include/media/IAudioPolicyService.h
@@ -74,6 +74,7 @@
     virtual status_t setStreamVolumeIndex(AudioSystem::stream_type stream, int index) = 0;
     virtual status_t getStreamVolumeIndex(AudioSystem::stream_type stream, int *index) = 0;
     virtual uint32_t getStrategyForStream(AudioSystem::stream_type stream) = 0;
+    virtual uint32_t getDevicesForStream(AudioSystem::stream_type stream) = 0;
     virtual audio_io_handle_t getOutputForEffect(effect_descriptor_t *desc) = 0;
     virtual status_t registerEffect(effect_descriptor_t *desc,
                                     audio_io_handle_t output,
@@ -81,6 +82,7 @@
                                     int session,
                                     int id) = 0;
     virtual status_t unregisterEffect(int id) = 0;
+    virtual bool     isStreamActive(int stream, uint32_t inPastMs = 0) const = 0;
 };
 
 
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index af9a7ed..70519ef 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -25,6 +25,8 @@
 
 class Parcel;
 class ISurface;
+class Surface;
+class ISurfaceTexture;
 
 class IMediaPlayer: public IInterface
 {
@@ -33,7 +35,9 @@
 
     virtual void            disconnect() = 0;
 
-    virtual status_t        setVideoSurface(const sp<ISurface>& surface) = 0;
+    virtual status_t        setVideoSurface(const sp<Surface>& surface) = 0;
+    virtual status_t        setVideoSurfaceTexture(
+                                    const sp<ISurfaceTexture>& surfaceTexture) = 0;
     virtual status_t        prepareAsync() = 0;
     virtual status_t        start() = 0;
     virtual status_t        stop() = 0;
@@ -46,8 +50,6 @@
     virtual status_t        setAudioStreamType(int type) = 0;
     virtual status_t        setLooping(int loop) = 0;
     virtual status_t        setVolume(float leftVolume, float rightVolume) = 0;
-    virtual status_t        suspend() = 0;
-    virtual status_t        resume() = 0;
     virtual status_t        setAuxEffectSendLevel(float level) = 0;
     virtual status_t        attachAuxEffect(int effectId) = 0;
 
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index 9416ca1..7956788 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -32,6 +32,7 @@
 
 class IMediaRecorder;
 class IOMX;
+struct IStreamSource;
 
 class IMediaPlayerService: public IInterface
 {
@@ -45,9 +46,38 @@
             int audioSessionId = 0) = 0;
     virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client,
             int fd, int64_t offset, int64_t length, int audioSessionId) = 0;
+
+    virtual sp<IMediaPlayer> create(
+            pid_t pid, const sp<IMediaPlayerClient> &client,
+            const sp<IStreamSource> &source, int audioSessionId) = 0;
+
     virtual sp<IMemory>         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
     virtual sp<IMemory>         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
     virtual sp<IOMX>            getOMX() = 0;
+
+    // codec and audio device usage tracking for the battery app
+    enum BatteryDataBits {
+        // tracking audio codec
+        kBatteryDataTrackAudio          = 0x1,
+        // tracking video codec
+        kBatteryDataTrackVideo          = 0x2,
+        // codec is started, otherwise codec is paused
+        kBatteryDataCodecStarted        = 0x4,
+        // tracking decoder (for media player),
+        // otherwise tracking encoder (for media recorder)
+        kBatteryDataTrackDecoder        = 0x8,
+        // start to play an audio on an audio device
+        kBatteryDataAudioFlingerStart   = 0x10,
+        // stop/pause the audio playback
+        kBatteryDataAudioFlingerStop    = 0x20,
+        // audio is routed to the speaker
+        kBatteryDataSpeakerOn           = 0x40,
+        // audio is routed to devices other than the speaker
+        kBatteryDataOtherAudioDeviceOn  = 0x80,
+    };
+
+    virtual void addBatteryData(uint32_t params) = 0;
+    virtual status_t pullBatteryData(Parcel* reply) = 0;
 };
 
 // ----------------------------------------------------------------------------
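
A minimal sketch (not part of the patch) of how a codec might report activity through the BatteryDataBits enum above; "service" is assumed to be a connected sp<IMediaPlayerService>.

    // A video decoder starts:
    service->addBatteryData(IMediaPlayerService::kBatteryDataTrackVideo
                          | IMediaPlayerService::kBatteryDataTrackDecoder
                          | IMediaPlayerService::kBatteryDataCodecStarted);

    // The same decoder pauses (kBatteryDataCodecStarted is simply omitted):
    service->addBatteryData(IMediaPlayerService::kBatteryDataTrackVideo
                          | IMediaPlayerService::kBatteryDataTrackDecoder);
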
diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h
index 54adca8..28be7c1 100644
--- a/include/media/IMediaRecorder.h
+++ b/include/media/IMediaRecorder.h
@@ -22,7 +22,7 @@
 
 namespace android {
 
-class ISurface;
+class Surface;
 class ICamera;
 class IMediaRecorderClient;
 
@@ -32,7 +32,7 @@
     DECLARE_META_INTERFACE(MediaRecorder);
 
     virtual	status_t		setCamera(const sp<ICamera>& camera) = 0;
-    virtual	status_t		setPreviewSurface(const sp<ISurface>& surface) = 0;
+    virtual	status_t		setPreviewSurface(const sp<Surface>& surface) = 0;
     virtual	status_t		setVideoSource(int vs) = 0;
     virtual	status_t		setAudioSource(int as) = 0;
     virtual	status_t		setOutputFormat(int of) = 0;
@@ -40,6 +40,7 @@
     virtual	status_t		setAudioEncoder(int ae) = 0;
     virtual	status_t		setOutputFile(const char* path) = 0;
     virtual	status_t		setOutputFile(int fd, int64_t offset, int64_t length) = 0;
+    virtual	status_t		setOutputFileAuxiliary(int fd) = 0;
     virtual	status_t		setVideoSize(int width, int height) = 0;
     virtual	status_t		setVideoFrameRate(int frames_per_second) = 0;
     virtual     status_t                setParameters(const String8& params) = 0;
@@ -68,4 +69,3 @@
 }; // namespace android
 
 #endif // ANDROID_IMEDIARECORDER_H
-
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index f794766..16a9342 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -19,6 +19,7 @@
 #define ANDROID_IOMX_H_
 
 #include <binder/IInterface.h>
+#include <ui/GraphicBuffer.h>
 #include <utils/List.h>
 #include <utils/String8.h>
 
@@ -78,10 +79,23 @@
             node_id node, OMX_INDEXTYPE index,
             const void *params, size_t size) = 0;
 
+    virtual status_t storeMetaDataInBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+
+    virtual status_t enableGraphicBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+
+    virtual status_t getGraphicBufferUsage(
+            node_id node, OMX_U32 port_index, OMX_U32* usage) = 0;
+
     virtual status_t useBuffer(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
             buffer_id *buffer) = 0;
 
+    virtual status_t useGraphicBuffer(
+            node_id node, OMX_U32 port_index,
+            const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) = 0;
+
     // This API clearly only makes sense if the caller lives in the
     // same process as the callee, i.e. is the media_server, as the
     // returned "buffer_data" pointer is just that, a pointer into local
@@ -109,33 +123,6 @@
             node_id node,
             const char *parameter_name,
             OMX_INDEXTYPE *index) = 0;
-
-    virtual sp<IOMXRenderer> createRenderer(
-            const sp<ISurface> &surface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees) = 0;
-
-    // Note: These methods are _not_ virtual, it exists as a wrapper around
-    // the virtual "createRenderer" method above facilitating extraction
-    // of the ISurface from a regular Surface or a java Surface object.
-    sp<IOMXRenderer> createRenderer(
-            const sp<Surface> &surface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees);
-
-    sp<IOMXRenderer> createRendererFromJavaSurface(
-            JNIEnv *env, jobject javaSurface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees);
 };
 
 struct omx_message {
@@ -182,13 +169,6 @@
     virtual void onMessage(const omx_message &msg) = 0;
 };
 
-class IOMXRenderer : public IInterface {
-public:
-    DECLARE_META_INTERFACE(OMXRenderer);
-
-    virtual void render(IOMX::buffer_id buffer) = 0;
-};
-
 ////////////////////////////////////////////////////////////////////////////////
 
 class BnOMX : public BnInterface<IOMX> {
@@ -205,13 +185,6 @@
             uint32_t flags = 0);
 };
 
-class BnOMXRenderer : public BnInterface<IOMXRenderer> {
-public:
-    virtual status_t onTransact(
-            uint32_t code, const Parcel &data, Parcel *reply,
-            uint32_t flags = 0);
-};
-
 }  // namespace android
 
 #endif  // ANDROID_IOMX_H_
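
A minimal sketch (not part of the patch) of the graphic-buffer path that replaces the removed IOMXRenderer API. It assumes "omx" and "node" come from IOMX::allocateNode(), that the output port index is 1, and that the buffer geometry, pixel format and gralloc usage flag shown suit the component; all of these are illustrative.

    const OMX_U32 kPortIndexOutput = 1;
    OMX_U32 usage = 0;
    omx->getGraphicBufferUsage(node, kPortIndexOutput, &usage);
    omx->enableGraphicBuffers(node, kPortIndexOutput, OMX_TRUE);

    // Allocate a GraphicBuffer compatible with the component's usage flags and
    // register it on the output port in place of an IMemory-backed buffer.
    sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
            1280, 720, HAL_PIXEL_FORMAT_YV12, usage | GRALLOC_USAGE_HW_TEXTURE);
    IOMX::buffer_id bufferId;
    omx->useGraphicBuffer(node, kPortIndexOutput, graphicBuffer, &bufferId);
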
diff --git a/include/media/IStreamSource.h b/include/media/IStreamSource.h
new file mode 100644
index 0000000..d310cee
--- /dev/null
+++ b/include/media/IStreamSource.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_ISTREAMSOURCE_H_
+
+#define ANDROID_ISTREAMSOURCE_H_
+
+#include <binder/IInterface.h>
+
+namespace android {
+
+struct AMessage;
+struct IMemory;
+struct IStreamListener;
+
+struct IStreamSource : public IInterface {
+    DECLARE_META_INTERFACE(StreamSource);
+
+    virtual void setListener(const sp<IStreamListener> &listener) = 0;
+    virtual void setBuffers(const Vector<sp<IMemory> > &buffers) = 0;
+
+    virtual void onBufferAvailable(size_t index) = 0;
+};
+
+struct IStreamListener : public IInterface {
+    DECLARE_META_INTERFACE(StreamListener);
+
+    enum Command {
+        EOS,
+        DISCONTINUITY,
+    };
+
+    virtual void queueBuffer(size_t index, size_t size) = 0;
+
+    // When signalling a discontinuity you can optionally
+    // specify an int64_t PTS timestamp in "msg".
+    // If present, rendering of data following the discontinuity
+    // will be suppressed until media time reaches this timestamp.
+    static const char *const kKeyResumeAtPTS;
+
+    virtual void issueCommand(
+            Command cmd, bool synchronous, const sp<AMessage> &msg = NULL) = 0;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct BnStreamSource : public BnInterface<IStreamSource> {
+    virtual status_t onTransact(
+            uint32_t code, const Parcel &data, Parcel *reply,
+            uint32_t flags = 0);
+};
+
+struct BnStreamListener : public BnInterface<IStreamListener> {
+    virtual status_t onTransact(
+            uint32_t code, const Parcel &data, Parcel *reply,
+            uint32_t flags = 0);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_ISTREAMSOURCE_H_
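
A minimal sketch (not part of the patch) of the buffer handshake the two interfaces above imply: the player hands the source a listener and a set of shared IMemory buffers, and the source refills a buffer whenever onBufferAvailable() reports that its slot is free. MyStreamSource and fillWithTSData() are hypothetical names.

    struct MyStreamSource : public BnStreamSource {
        sp<IStreamListener> mListener;
        Vector<sp<IMemory> > mBuffers;

        virtual void setListener(const sp<IStreamListener> &listener) {
            mListener = listener;
        }
        virtual void setBuffers(const Vector<sp<IMemory> > &buffers) {
            mBuffers = buffers;
        }
        virtual void onBufferAvailable(size_t index) {
            // Slot "index" is free again: refill it and queue it back to the player.
            size_t filled = fillWithTSData(mBuffers.itemAt(index));  // hypothetical helper
            if (filled > 0) {
                mListener->queueBuffer(index, filled);
            } else {
                mListener->issueCommand(IStreamListener::EOS, false /* synchronous */);
            }
        }
    };
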
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 0521709..117d7eb 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -33,13 +33,16 @@
 
 class Parcel;
 class ISurface;
+class Surface;
+class ISurfaceTexture;
 
 template<typename T> class SortedVector;
 
 enum player_type {
     PV_PLAYER = 1,
     SONIVOX_PLAYER = 2,
-    STAGEFRIGHT_PLAYER = 4,
+    STAGEFRIGHT_PLAYER = 3,
+    NU_PLAYER = 4,
     // Test players are available only in the 'test' and 'eng' builds.
     // The shared library with the test player is passed as an
     // argument to the 'test:' url in the setDataSource call.
@@ -105,7 +108,18 @@
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
-    virtual status_t    setVideoSurface(const sp<ISurface>& surface) = 0;
+
+    virtual status_t    setDataSource(const sp<IStreamSource> &source) {
+        return INVALID_OPERATION;
+    }
+
+    // pass the buffered Surface to the media player service
+    virtual status_t    setVideoSurface(const sp<Surface>& surface) = 0;
+
+    // pass the buffered ISurfaceTexture to the media player service
+    virtual status_t    setVideoSurfaceTexture(
+                                const sp<ISurfaceTexture>& surfaceTexture) = 0;
+
     virtual status_t    prepare() = 0;
     virtual status_t    prepareAsync() = 0;
     virtual status_t    start() = 0;
@@ -118,11 +132,7 @@
     virtual status_t    reset() = 0;
     virtual status_t    setLooping(int loop) = 0;
     virtual player_type playerType() = 0;
-    virtual status_t    suspend() { return INVALID_OPERATION; }
-    virtual status_t    resume() { return INVALID_OPERATION; }
 
-    virtual void        setNotifyCallback(void* cookie, notify_callback_f notifyFunc) {
-                            mCookie = cookie; mNotify = notifyFunc; }
     // Invoke a generic method on the player by using opaque parcels
     // for the request and reply.
     //
@@ -144,9 +154,21 @@
         return INVALID_OPERATION;
     };
 
-    virtual void        sendEvent(int msg, int ext1=0, int ext2=0) { if (mNotify) mNotify(mCookie, msg, ext1, ext2); }
+    void        setNotifyCallback(
+            void* cookie, notify_callback_f notifyFunc) {
+        Mutex::Autolock autoLock(mNotifyLock);
+        mCookie = cookie; mNotify = notifyFunc;
+    }
 
-protected:
+    void        sendEvent(int msg, int ext1=0, int ext2=0) {
+        Mutex::Autolock autoLock(mNotifyLock);
+        if (mNotify) mNotify(mCookie, msg, ext1, ext2);
+    }
+
+private:
+    friend class MediaPlayerService;
+
+    Mutex               mNotifyLock;
     void*               mCookie;
     notify_callback_f   mNotify;
 };
@@ -162,7 +184,7 @@
     sp<AudioSink>       mAudioSink;
 };
 
-// Implement this class for media players that output directo to hardware
+// Implement this class for media players that output audio directly to hardware
 class MediaPlayerHWInterface : public MediaPlayerBase
 {
 public:
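
The change above makes sendEvent()/setNotifyCallback() non-virtual and guards them with mNotifyLock, so a player implementation reports state only through sendEvent() and can no longer touch mCookie/mNotify directly. A minimal sketch (not part of the patch); MyPlayer is a hypothetical MediaPlayerBase subclass and the event constants come from mediaplayer.h.

    void MyPlayer::onPrepareFinished() {
        sendEvent(MEDIA_PREPARED);
    }

    void MyPlayer::onSeekFinished() {
        // Safe even if MediaPlayerService swaps the callback concurrently,
        // because sendEvent() and setNotifyCallback() share mNotifyLock.
        sendEvent(MEDIA_SEEK_COMPLETE);
    }
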
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index c3cd361..f2107ec 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -24,8 +24,25 @@
 namespace android {
 
 enum camcorder_quality {
+    CAMCORDER_QUALITY_LIST_START = 0,
     CAMCORDER_QUALITY_LOW  = 0,
-    CAMCORDER_QUALITY_HIGH = 1
+    CAMCORDER_QUALITY_HIGH = 1,
+    CAMCORDER_QUALITY_QCIF = 2,
+    CAMCORDER_QUALITY_CIF = 3,
+    CAMCORDER_QUALITY_480P = 4,
+    CAMCORDER_QUALITY_720P = 5,
+    CAMCORDER_QUALITY_1080P = 6,
+    CAMCORDER_QUALITY_LIST_END = 6,
+
+    CAMCORDER_QUALITY_TIME_LAPSE_LIST_START = 1000,
+    CAMCORDER_QUALITY_TIME_LAPSE_LOW  = 1000,
+    CAMCORDER_QUALITY_TIME_LAPSE_HIGH = 1001,
+    CAMCORDER_QUALITY_TIME_LAPSE_QCIF = 1002,
+    CAMCORDER_QUALITY_TIME_LAPSE_CIF = 1003,
+    CAMCORDER_QUALITY_TIME_LAPSE_480P = 1004,
+    CAMCORDER_QUALITY_TIME_LAPSE_720P = 1005,
+    CAMCORDER_QUALITY_TIME_LAPSE_1080P = 1006,
+    CAMCORDER_QUALITY_TIME_LAPSE_LIST_END = 1006,
 };
 
 enum video_decoder {
@@ -68,6 +85,12 @@
                                        camcorder_quality quality) const;
 
     /**
+     * Returns true if a profile for the given camera at the given quality exists,
+     * or false if not.
+     */
+    bool hasCamcorderProfile(int cameraId, camcorder_quality quality) const;
+
+    /**
      * Returns the output file formats supported.
      */
     Vector<output_format> getOutputFileFormats() const;
@@ -128,6 +151,11 @@
     Vector<int> getImageEncodingQualityLevels(int cameraId) const;
 
 private:
+    enum {
+        // Camcorder profiles (high/low) and timelapse profiles (high/low)
+        kNumRequiredProfiles = 4,
+    };
+
     MediaProfiles& operator=(const MediaProfiles&);  // Don't call me
     MediaProfiles(const MediaProfiles&);             // Don't call me
     MediaProfiles() {}                               // Dummy default constructor
@@ -141,6 +169,14 @@
               mFrameHeight(frameHeight),
               mFrameRate(frameRate) {}
 
+        VideoCodec(const VideoCodec& copy) {
+            mCodec = copy.mCodec;
+            mBitRate = copy.mBitRate;
+            mFrameWidth = copy.mFrameWidth;
+            mFrameHeight = copy.mFrameHeight;
+            mFrameRate = copy.mFrameRate;
+        }
+
         ~VideoCodec() {}
 
         video_encoder mCodec;
@@ -157,6 +193,13 @@
               mSampleRate(sampleRate),
               mChannels(channels) {}
 
+        AudioCodec(const AudioCodec& copy) {
+            mCodec = copy.mCodec;
+            mBitRate = copy.mBitRate;
+            mSampleRate = copy.mSampleRate;
+            mChannels = copy.mChannels;
+        }
+
         ~AudioCodec() {}
 
         audio_encoder mCodec;
@@ -174,6 +217,15 @@
               mVideoCodec(0),
               mAudioCodec(0) {}
 
+        CamcorderProfile(const CamcorderProfile& copy) {
+            mCameraId = copy.mCameraId;
+            mFileFormat = copy.mFileFormat;
+            mQuality = copy.mQuality;
+            mDuration = copy.mDuration;
+            mVideoCodec = new VideoCodec(*copy.mVideoCodec);
+            mAudioCodec = new AudioCodec(*copy.mAudioCodec);
+        }
+
         ~CamcorderProfile() {
             delete mVideoCodec;
             delete mAudioCodec;
@@ -252,6 +304,10 @@
         Vector<int> mLevels;
     };
 
+    int getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const;
+    void initRequiredProfileRefs(const Vector<int>& cameraIds);
+    int getRequiredProfileRefIndex(int cameraId);
+
     // Debug
     static void logVideoCodec(const VideoCodec& codec);
     static void logAudioCodec(const AudioCodec& codec);
@@ -270,7 +326,10 @@
     static VideoDecoderCap* createVideoDecoderCap(const char **atts);
     static VideoEncoderCap* createVideoEncoderCap(const char **atts);
     static AudioEncoderCap* createAudioEncoderCap(const char **atts);
-    static CamcorderProfile* createCamcorderProfile(int cameraId, const char **atts);
+
+    static CamcorderProfile* createCamcorderProfile(
+                int cameraId, const char **atts, Vector<int>& cameraIds);
+
     static int getCameraId(const char **atts);
 
     ImageEncodingQualityLevels* findImageEncodingQualityLevels(int cameraId) const;
@@ -281,8 +340,25 @@
 
     // If the xml configuration file does not exist, use hard-coded values
     static MediaProfiles* createDefaultInstance();
-    static CamcorderProfile *createDefaultCamcorderLowProfile();
-    static CamcorderProfile *createDefaultCamcorderHighProfile();
+
+    static CamcorderProfile *createDefaultCamcorderQcifProfile(camcorder_quality quality);
+    static CamcorderProfile *createDefaultCamcorderCifProfile(camcorder_quality quality);
+    static void createDefaultCamcorderLowProfiles(
+            MediaProfiles::CamcorderProfile **lowProfile,
+            MediaProfiles::CamcorderProfile **lowSpecificProfile);
+    static void createDefaultCamcorderHighProfiles(
+            MediaProfiles::CamcorderProfile **highProfile,
+            MediaProfiles::CamcorderProfile **highSpecificProfile);
+
+    static CamcorderProfile *createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality);
+    static CamcorderProfile *createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality);
+    static void createDefaultCamcorderTimeLapseLowProfiles(
+            MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
+            MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile);
+    static void createDefaultCamcorderTimeLapseHighProfiles(
+            MediaProfiles::CamcorderProfile **highTimeLapseProfile,
+            MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile);
+
     static void createDefaultCamcorderProfiles(MediaProfiles *profiles);
     static void createDefaultVideoEncoders(MediaProfiles *profiles);
     static void createDefaultAudioEncoders(MediaProfiles *profiles);
@@ -297,6 +373,21 @@
 
     static int findTagForName(const NameToTagMap *map, size_t nMappings, const char *name);
 
+    /**
+     * Check existing profiles against the following criteria:
+     * 1. Low quality profile must have the lowest video
+     *    resolution product (width x height)
+     * 2. High quality profile must have the highest video
+     *    resolution product (width x height)
+     *
+     * and add the required low/high quality camcorder/timelapse
+     * profiles if they are not found. This allows duplicate
+     * profile definitions to be removed from the
+     * media_profiles.xml file.
+     */
+    void checkAndAddRequiredProfilesIfNecessary();
+
+
     // Mappings from name (for instance, codec name) to enum value
     static const NameToTagMap sVideoEncoderNameMap[];
     static const NameToTagMap sAudioEncoderNameMap[];
@@ -317,6 +408,20 @@
     Vector<VideoDecoderCap*>  mVideoDecoders;
     Vector<output_format>     mEncoderOutputFileFormats;
     Vector<ImageEncodingQualityLevels *>  mImageEncodingQualityLevels;
+
+    typedef struct {
+        bool mHasRefProfile;      // Refers to an existing profile
+        int  mRefProfileIndex;    // Reference profile index
+        int  mResolutionProduct;  // width x height
+    } RequiredProfileRefInfo;     // Required low and high profiles
+
+    typedef struct {
+        RequiredProfileRefInfo mRefs[kNumRequiredProfiles];
+        int mCameraId;
+    } RequiredProfiles;
+
+    RequiredProfiles *mRequiredProfileRefs;
+    Vector<int>              mCameraIds;
 };
 
 }; // namespace android
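
A minimal sketch (not part of the patch) of querying the expanded camcorder_quality levels; camera id 0 is illustrative, and the "vid.width"/"vid.height" parameter names follow the existing getCamcorderProfileParamByName() usage.

    MediaProfiles *profiles = MediaProfiles::getInstance();
    if (profiles->hasCamcorderProfile(0 /* cameraId */, CAMCORDER_QUALITY_720P)) {
        int width  = profiles->getCamcorderProfileParamByName(
                "vid.width", 0 /* cameraId */, CAMCORDER_QUALITY_720P);
        int height = profiles->getCamcorderProfileParamByName(
                "vid.height", 0 /* cameraId */, CAMCORDER_QUALITY_720P);
        // width x height is the recording resolution of camera 0's 720p profile.
    }
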
diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h
index 5e9e368..c42346e 100644
--- a/include/media/MediaRecorderBase.h
+++ b/include/media/MediaRecorderBase.h
@@ -22,7 +22,7 @@
 
 namespace android {
 
-class ISurface;
+class Surface;
 
 struct MediaRecorderBase {
     MediaRecorderBase() {}
@@ -37,9 +37,10 @@
     virtual status_t setVideoSize(int width, int height) = 0;
     virtual status_t setVideoFrameRate(int frames_per_second) = 0;
     virtual status_t setCamera(const sp<ICamera>& camera) = 0;
-    virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0;
+    virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0;
     virtual status_t setOutputFile(const char *path) = 0;
     virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0;
+    virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;}
     virtual status_t setParameters(const String8& params) = 0;
     virtual status_t setListener(const sp<IMediaRecorderClient>& listener) = 0;
     virtual status_t prepare() = 0;
diff --git a/include/media/PVMediaRecorder.h b/include/media/PVMediaRecorder.h
deleted file mode 100644
index c091c39..0000000
--- a/include/media/PVMediaRecorder.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- **
- ** Copyright 2008, The Android Open Source Project
- **
- ** Licensed under the Apache License, Version 2.0 (the "License");
- ** you may not use this file except in compliance with the License.
- ** You may obtain a copy of the License at
- **
- **     http://www.apache.org/licenses/LICENSE-2.0
- **
- ** Unless required by applicable law or agreed to in writing, software
- ** distributed under the License is distributed on an "AS IS" BASIS,
- ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ** See the License for the specific language governing permissions and
- ** limitations under the License.
- */
-
-#ifndef ANDROID_PVMEDIARECORDER_H
-#define ANDROID_PVMEDIARECORDER_H
-
-#include <media/IMediaRecorderClient.h>
-#include <media/MediaRecorderBase.h>
-
-namespace android {
-
-class ISurface;
-class ICamera;
-class AuthorDriverWrapper;
-
-class PVMediaRecorder : public MediaRecorderBase {
-public:
-    PVMediaRecorder();
-    virtual ~PVMediaRecorder();
-
-    virtual status_t init();
-    virtual status_t setAudioSource(audio_source as);
-    virtual status_t setVideoSource(video_source vs);
-    virtual status_t setOutputFormat(output_format of);
-    virtual status_t setAudioEncoder(audio_encoder ae);
-    virtual status_t setVideoEncoder(video_encoder ve);
-    virtual status_t setVideoSize(int width, int height);
-    virtual status_t setVideoFrameRate(int frames_per_second);
-    virtual status_t setCamera(const sp<ICamera>& camera);
-    virtual status_t setPreviewSurface(const sp<ISurface>& surface);
-    virtual status_t setOutputFile(const char *path);
-    virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
-    virtual status_t setParameters(const String8& params);
-    virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
-    virtual status_t prepare();
-    virtual status_t start();
-    virtual status_t stop();
-    virtual status_t close();
-    virtual status_t reset();
-    virtual status_t getMaxAmplitude(int *max);
-    virtual status_t dump(int fd, const Vector<String16>& args) const;
-
-private:
-    status_t doStop();
-
-    AuthorDriverWrapper*            mAuthorDriverWrapper;
-
-    PVMediaRecorder(const PVMediaRecorder &);
-    PVMediaRecorder &operator=(const PVMediaRecorder &);
-};
-
-}; // namespace android
-
-#endif // ANDROID_PVMEDIARECORDER_H
-
diff --git a/include/media/PVMetadataRetriever.h b/include/media/PVMetadataRetriever.h
deleted file mode 100644
index c202dfe..0000000
--- a/include/media/PVMetadataRetriever.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-**
-** Copyright (C) 2008 The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#ifndef ANDROID_PVMETADATARETRIEVER_H
-#define ANDROID_PVMETADATARETRIEVER_H
-
-#include <utils/Errors.h>
-#include <media/MediaMetadataRetrieverInterface.h>
-#include <private/media/VideoFrame.h>
-
-namespace android {
-
-class MetadataDriver;
-
-class PVMetadataRetriever : public MediaMetadataRetrieverInterface
-{
-public:
-                        PVMetadataRetriever();
-    virtual             ~PVMetadataRetriever();
-
-    virtual status_t    setDataSource(const char *url);
-    virtual status_t    setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t    setMode(int mode);
-    virtual status_t    getMode(int* mode) const;
-    virtual VideoFrame* captureFrame();
-    virtual MediaAlbumArt* extractAlbumArt();
-    virtual const char* extractMetadata(int keyCode);
-
-private:
-    mutable Mutex       mLock;
-    MetadataDriver*     mMetadataDriver;
-    char*               mDataSourcePath;
-};
-
-}; // namespace android
-
-#endif // ANDROID_PVMETADATARETRIEVER_H
diff --git a/include/media/PVPlayer.h b/include/media/PVPlayer.h
deleted file mode 100644
index df50981..0000000
--- a/include/media/PVPlayer.h
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_PVPLAYER_H
-#define ANDROID_PVPLAYER_H
-
-#include <utils/Errors.h>
-#include <media/MediaPlayerInterface.h>
-#include <media/Metadata.h>
-
-#define MAX_OPENCORE_INSTANCES 25
-
-#ifdef MAX_OPENCORE_INSTANCES
-#include <cutils/atomic.h>
-#endif
-
-class PlayerDriver;
-
-namespace android {
-
-class PVPlayer : public MediaPlayerInterface
-{
-public:
-                        PVPlayer();
-    virtual             ~PVPlayer();
-
-    virtual status_t    initCheck();
-
-    virtual status_t    setDataSource(
-            const char *url, const KeyedVector<String8, String8> *headers);
-
-    virtual status_t    setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t    setVideoSurface(const sp<ISurface>& surface);
-    virtual status_t    prepare();
-    virtual status_t    prepareAsync();
-    virtual status_t    start();
-    virtual status_t    stop();
-    virtual status_t    pause();
-    virtual bool        isPlaying();
-    virtual status_t    seekTo(int msec);
-    virtual status_t    getCurrentPosition(int *msec);
-    virtual status_t    getDuration(int *msec);
-    virtual status_t    reset();
-    virtual status_t    setLooping(int loop);
-    virtual player_type playerType() { return PV_PLAYER; }
-    virtual status_t    invoke(const Parcel& request, Parcel *reply);
-    virtual status_t    getMetadata(
-        const SortedVector<media::Metadata::Type>& ids,
-        Parcel *records);
-
-    // make available to PlayerDriver
-    void        sendEvent(int msg, int ext1=0, int ext2=0) { MediaPlayerBase::sendEvent(msg, ext1, ext2); }
-
-private:
-    static void         do_nothing(status_t s, void *cookie, bool cancelled) { }
-    static void         run_init(status_t s, void *cookie, bool cancelled);
-    static void         run_set_video_surface(status_t s, void *cookie, bool cancelled);
-    static void         run_set_audio_output(status_t s, void *cookie, bool cancelled);
-    static void         run_prepare(status_t s, void *cookie, bool cancelled);
-    static void         check_for_live_streaming(status_t s, void *cookie, bool cancelled);
-
-    PlayerDriver*               mPlayerDriver;
-    char *                      mDataSourcePath;
-    bool                        mIsDataSourceSet;
-    sp<ISurface>                mSurface;
-    int                         mSharedFd;
-    status_t                    mInit;
-    int                         mDuration;
-
-#ifdef MAX_OPENCORE_INSTANCES
-    static volatile int32_t     sNumInstances;
-#endif
-};
-
-}; // namespace android
-
-#endif // ANDROID_PVPLAYER_H
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 207191d..528eeb9 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -28,6 +28,7 @@
 namespace android {
 
 class Surface;
+class ISurfaceTexture;
 
 enum media_event_type {
     MEDIA_NOP               = 0, // interface test message
@@ -146,6 +147,8 @@
 
             status_t        setDataSource(int fd, int64_t offset, int64_t length);
             status_t        setVideoSurface(const sp<Surface>& surface);
+            status_t        setVideoSurfaceTexture(
+                                    const sp<ISurfaceTexture>& surfaceTexture);
             status_t        setListener(const sp<MediaPlayerListener>& listener);
             status_t        prepare();
             status_t        prepareAsync();
@@ -169,8 +172,6 @@
             status_t        invoke(const Parcel& request, Parcel *reply);
             status_t        setMetadataFilter(const Parcel& filter);
             status_t        getMetadata(bool update_only, bool apply_filter, Parcel *metadata);
-            status_t        suspend();
-            status_t        resume();
             status_t        setAudioSessionId(int sessionId);
             int             getAudioSessionId();
             status_t        setAuxEffectSendLevel(float level);
diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h
index 9a76393..a710546 100644
--- a/include/media/mediarecorder.h
+++ b/include/media/mediarecorder.h
@@ -174,6 +174,7 @@
     status_t    setAudioEncoder(int ae);
     status_t    setOutputFile(const char* path);
     status_t    setOutputFile(int fd, int64_t offset, int64_t length);
+    status_t    setOutputFileAuxiliary(int fd);
     status_t    setVideoSize(int width, int height);
     status_t    setVideoFrameRate(int frames_per_second);
     status_t    setParameters(const String8& params);
@@ -200,6 +201,7 @@
     bool                        mIsAudioEncoderSet;
     bool                        mIsVideoEncoderSet;
     bool                        mIsOutputFileSet;
+    bool                        mIsAuxiliaryOutputFileSet;
     Mutex                       mLock;
     Mutex                       mNotifyLock;
 };
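
A minimal sketch (not part of the patch) of where setOutputFileAuxiliary() fits in the usual native MediaRecorder sequence. "mainFd" and "auxFd" are assumed open file descriptors; most configuration calls and all error checks are omitted, and the intended contents of the auxiliary file are not described by this patch.

    sp<MediaRecorder> recorder = new MediaRecorder();
    recorder->setVideoSource(VIDEO_SOURCE_CAMERA);
    recorder->setOutputFormat(OUTPUT_FORMAT_MPEG_4);
    recorder->setVideoEncoder(VIDEO_ENCODER_H264);
    recorder->setOutputFile(mainFd, 0 /* offset */, 0 /* length */);
    recorder->setOutputFileAuxiliary(auxFd);   // second output file descriptor
    recorder->prepare();
    recorder->start();
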
diff --git a/include/media/mediascanner.h b/include/media/mediascanner.h
index 0d397ac..df5be32 100644
--- a/include/media/mediascanner.h
+++ b/include/media/mediascanner.h
@@ -38,8 +38,7 @@
 
     typedef bool (*ExceptionCheck)(void* env);
     virtual status_t processDirectory(
-            const char *path, const char *extensions,
-            MediaScannerClient &client,
+            const char *path, MediaScannerClient &client,
             ExceptionCheck exceptionCheck, void *exceptionEnv);
 
     void setLocale(const char *locale);
@@ -55,9 +54,8 @@
     char *mLocale;
 
     status_t doProcessDirectory(
-            char *path, int pathRemaining, const char *extensions,
-            MediaScannerClient &client, ExceptionCheck exceptionCheck,
-            void *exceptionEnv);
+            char *path, int pathRemaining, MediaScannerClient &client,
+            ExceptionCheck exceptionCheck, void *exceptionEnv);
 
     MediaScanner(const MediaScanner &);
     MediaScanner &operator=(const MediaScanner &);
@@ -73,7 +71,8 @@
     bool addStringTag(const char* name, const char* value);
     void endFile();
 
-    virtual bool scanFile(const char* path, long long lastModified, long long fileSize) = 0;
+    virtual bool scanFile(const char* path, long long lastModified,
+            long long fileSize, bool isDirectory) = 0;
     virtual bool handleStringTag(const char* name, const char* value) = 0;
     virtual bool setMimeType(const char* mimeType) = 0;
     virtual bool addNoMediaFolder(const char* path) = 0;
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
new file mode 100644
index 0000000..a969796
--- /dev/null
+++ b/include/media/stagefright/ACodec.h
@@ -0,0 +1,157 @@
+#ifndef A_CODEC_H_
+
+#define A_CODEC_H_
+
+#include <stdint.h>
+#include <android/native_window.h>
+#include <media/IOMX.h>
+#include <media/stagefright/foundation/AHierarchicalStateMachine.h>
+
+namespace android {
+
+struct ABuffer;
+struct MemoryDealer;
+
+struct ACodec : public AHierarchicalStateMachine {
+    enum {
+        kWhatFillThisBuffer      = 'fill',
+        kWhatDrainThisBuffer     = 'drai',
+        kWhatEOS                 = 'eos ',
+        kWhatShutdownCompleted   = 'scom',
+        kWhatFlushCompleted      = 'fcom',
+        kWhatOutputFormatChanged = 'outC',
+    };
+
+    ACodec();
+
+    void setNotificationMessage(const sp<AMessage> &msg);
+    void initiateSetup(const sp<AMessage> &msg);
+    void signalFlush();
+    void signalResume();
+    void initiateShutdown();
+
+protected:
+    virtual ~ACodec();
+
+private:
+    struct BaseState;
+    struct UninitializedState;
+    struct LoadedToIdleState;
+    struct IdleToExecutingState;
+    struct ExecutingState;
+    struct OutputPortSettingsChangedState;
+    struct ExecutingToIdleState;
+    struct IdleToLoadedState;
+    struct ErrorState;
+    struct FlushingState;
+
+    enum {
+        kWhatSetup                   = 'setu',
+        kWhatOMXMessage              = 'omx ',
+        kWhatInputBufferFilled       = 'inpF',
+        kWhatOutputBufferDrained     = 'outD',
+        kWhatShutdown                = 'shut',
+        kWhatFlush                   = 'flus',
+        kWhatResume                  = 'resm',
+        kWhatDrainDeferredMessages   = 'drai',
+    };
+
+    enum {
+        kPortIndexInput  = 0,
+        kPortIndexOutput = 1
+    };
+
+    struct BufferInfo {
+        enum Status {
+            OWNED_BY_US,
+            OWNED_BY_COMPONENT,
+            OWNED_BY_UPSTREAM,
+            OWNED_BY_DOWNSTREAM,
+            OWNED_BY_NATIVE_WINDOW,
+        };
+
+        IOMX::buffer_id mBufferID;
+        Status mStatus;
+
+        sp<ABuffer> mData;
+        sp<GraphicBuffer> mGraphicBuffer;
+    };
+
+    sp<AMessage> mNotify;
+
+    sp<UninitializedState> mUninitializedState;
+    sp<LoadedToIdleState> mLoadedToIdleState;
+    sp<IdleToExecutingState> mIdleToExecutingState;
+    sp<ExecutingState> mExecutingState;
+    sp<OutputPortSettingsChangedState> mOutputPortSettingsChangedState;
+    sp<ExecutingToIdleState> mExecutingToIdleState;
+    sp<IdleToLoadedState> mIdleToLoadedState;
+    sp<ErrorState> mErrorState;
+    sp<FlushingState> mFlushingState;
+
+    AString mComponentName;
+    sp<IOMX> mOMX;
+    IOMX::node_id mNode;
+    sp<MemoryDealer> mDealer[2];
+
+    sp<ANativeWindow> mNativeWindow;
+
+    Vector<BufferInfo> mBuffers[2];
+    bool mPortEOS[2];
+
+    List<sp<AMessage> > mDeferredQueue;
+
+    bool mSentFormat;
+
+    status_t allocateBuffersOnPort(OMX_U32 portIndex);
+    status_t freeBuffersOnPort(OMX_U32 portIndex);
+    status_t freeBuffer(OMX_U32 portIndex, size_t i);
+
+    status_t allocateOutputBuffersFromNativeWindow();
+    status_t cancelBufferToNativeWindow(BufferInfo *info);
+    status_t freeOutputBuffersNotOwnedByComponent();
+    BufferInfo *dequeueBufferFromNativeWindow();
+
+    BufferInfo *findBufferByID(
+            uint32_t portIndex, IOMX::buffer_id bufferID,
+            ssize_t *index = NULL);
+
+    void setComponentRole(bool isEncoder, const char *mime);
+    void configureCodec(const char *mime, const sp<AMessage> &msg);
+
+    status_t setVideoPortFormatType(
+            OMX_U32 portIndex,
+            OMX_VIDEO_CODINGTYPE compressionFormat,
+            OMX_COLOR_FORMATTYPE colorFormat);
+
+    status_t setSupportedOutputFormat();
+
+    status_t setupVideoDecoder(
+            const char *mime, int32_t width, int32_t height);
+
+    status_t setVideoFormatOnPort(
+            OMX_U32 portIndex,
+            int32_t width, int32_t height,
+            OMX_VIDEO_CODINGTYPE compressionFormat);
+
+    status_t setupAACDecoder(int32_t numChannels, int32_t sampleRate);
+    status_t setMinBufferSize(OMX_U32 portIndex, size_t size);
+
+    status_t initNativeWindow();
+
+    // Returns true iff all buffers on the given port have status OWNED_BY_US.
+    bool allYourBuffersAreBelongToUs(OMX_U32 portIndex);
+
+    bool allYourBuffersAreBelongToUs();
+
+    void deferMessage(const sp<AMessage> &msg);
+    void processDeferredMessages();
+
+    void sendFormatChange();
+
+    DISALLOW_EVIL_CONSTRUCTORS(ACodec);
+};
+
+}  // namespace android
+
+#endif  // A_CODEC_H_
diff --git a/include/media/stagefright/AMRWriter.h b/include/media/stagefright/AMRWriter.h
index aa965e1..62d57b4 100644
--- a/include/media/stagefright/AMRWriter.h
+++ b/include/media/stagefright/AMRWriter.h
@@ -44,7 +44,7 @@
     virtual ~AMRWriter();
 
 private:
-    FILE *mFile;
+    int   mFd;
     status_t mInitCheck;
     sp<MediaSource> mSource;
     bool mStarted;
diff --git a/include/media/stagefright/AudioPlayer.h b/include/media/stagefright/AudioPlayer.h
index 37af032..d12ee9c 100644
--- a/include/media/stagefright/AudioPlayer.h
+++ b/include/media/stagefright/AudioPlayer.h
@@ -65,6 +65,7 @@
     bool reachedEOS(status_t *finalStatus);
 
 private:
+    friend class VideoEditorAudioPlayer;
     sp<MediaSource> mSource;
     AudioTrack *mAudioTrack;
 
diff --git a/include/media/stagefright/AudioSource.h b/include/media/stagefright/AudioSource.h
index d484d60..9e6f0e2 100644
--- a/include/media/stagefright/AudioSource.h
+++ b/include/media/stagefright/AudioSource.h
@@ -18,15 +18,17 @@
 
 #define AUDIO_SOURCE_H_
 
+#include <media/AudioRecord.h>
 #include <media/AudioSystem.h>
 #include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <utils/List.h>
 
 namespace android {
 
 class AudioRecord;
-struct MediaBufferGroup;
 
-struct AudioSource : public MediaSource {
+struct AudioSource : public MediaSource, public MediaBufferObserver {
     // Note that the "channels" parameter is _not_ the number of channels,
     // but a bitmask of AudioSystem::audio_channels constants.
     AudioSource(
@@ -45,6 +47,9 @@
     virtual status_t read(
             MediaBuffer **buffer, const ReadOptions *options = NULL);
 
+    status_t dataCallbackTimestamp(const AudioRecord::Buffer& buffer, int64_t timeUs);
+    virtual void signalBufferReturned(MediaBuffer *buffer);
+
 protected:
     virtual ~AudioSource();
 
@@ -54,27 +59,31 @@
 
         // After the initial mute, we raise the volume linearly
         // over kAutoRampDurationUs.
-        kAutoRampDurationUs = 700000,
+        kAutoRampDurationUs = 300000,
 
         // This is the initial mute duration to suppress
         // the video recording signal tone
-        kAutoRampStartUs = 1000000,
-      };
+        kAutoRampStartUs = 0,
+    };
+
+    Mutex mLock;
+    Condition mFrameAvailableCondition;
+    Condition mFrameEncodingCompletionCondition;
 
     AudioRecord *mRecord;
     status_t mInitCheck;
     bool mStarted;
+    int32_t mSampleRate;
 
-    bool mCollectStats;
     bool mTrackMaxAmplitude;
     int64_t mStartTimeUs;
     int16_t mMaxAmplitude;
     int64_t mPrevSampleTimeUs;
-    int64_t mTotalLostFrames;
-    int64_t mPrevLostBytes;
     int64_t mInitialReadTimeUs;
+    int64_t mNumFramesReceived;
+    int64_t mNumClientOwnedBuffers;
 
-    MediaBufferGroup *mGroup;
+    List<MediaBuffer * > mBuffersReceived;
 
     void trackMaxAmplitude(int16_t *data, int nSamples);
 
@@ -84,6 +93,9 @@
         int32_t startFrame, int32_t rampDurationFrames,
         uint8_t *data,   size_t bytes);
 
+    void releaseQueuedFrames_l();
+    void waitOutstandingEncodingFrames_l();
+
     AudioSource(const AudioSource &);
     AudioSource &operator=(const AudioSource &);
 };
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index 3192d03..4a39fbf 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -20,39 +20,168 @@
 
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaSource.h>
+#include <camera/ICamera.h>
+#include <camera/CameraParameters.h>
 #include <utils/List.h>
 #include <utils/RefBase.h>
-#include <utils/threads.h>
 
 namespace android {
 
-class ICamera;
 class IMemory;
 class Camera;
+class Surface;
 
 class CameraSource : public MediaSource, public MediaBufferObserver {
 public:
+    /**
+     * Factory method to create a new CameraSource using the current
+     * settings (such as video size, frame rate, color format, etc)
+     * from the default camera.
+     *
+     * @return NULL on error.
+     */
     static CameraSource *Create();
-    static CameraSource *CreateFromCamera(const sp<Camera> &camera);
+
+    /**
+     * Factory method to create a new CameraSource.
+     *
+     * @param camera the video input frame data source. If it is NULL,
+     *          we will try to connect to the camera with the given
+     *          cameraId.
+     *
+     * @param cameraId the id of the camera that the source will connect
+     *          to if camera is NULL; otherwise ignored.
+     *
+     * @param videoSize the dimension (in pixels) of the video frame
+     * @param frameRate the target frames per second
+     * @param surface the preview surface for display where preview
+     *          frames are sent to
+     * @param storeMetaDataInVideoBuffers true to request the camera
+     *          source to store meta data in video buffers; false to
+     *          request the camera source to store real YUV frame data
+     *          in the video buffers. The camera source may not support
+     *          storing meta data in video buffers, if so, a request
+     *          to do that will NOT be honored. To find out whether
+     *          meta data is actually being stored in video buffers
+     *          during recording, call isMetaDataStoredInVideoBuffers().
+     *
+     * @return NULL on error.
+     */
+    static CameraSource *CreateFromCamera(const sp<ICamera> &camera,
+                                          int32_t cameraId,
+                                          Size videoSize,
+                                          int32_t frameRate,
+                                          const sp<Surface>& surface,
+                                          bool storeMetaDataInVideoBuffers = false);
 
     virtual ~CameraSource();
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
-
-    virtual sp<MetaData> getFormat();
-
     virtual status_t read(
             MediaBuffer **buffer, const ReadOptions *options = NULL);
 
+    /**
+     * Check whether a CameraSource object is properly initialized.
+     * Must call this method before stop().
+     * @return OK if initialization has successfully completed.
+     */
+    virtual status_t initCheck() const;
+
+    /**
+     * Returns the MetaData associated with the CameraSource,
+     * including:
+     * kKeyColorFormat: YUV color format of the video frames
+     * kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames
+     * kKeySampleRate: frame rate in frames per second
+     * kKeyMIMEType: always fixed to be MEDIA_MIMETYPE_VIDEO_RAW
+     */
+    virtual sp<MetaData> getFormat();
+
+    /**
+     * Retrieve the total number of video buffers available from
+     * this source.
+     *
+     * This method is useful if these video buffers are used
+     * for passing video frame data to other media components,
+     * such as OMX video encoders, in order to eliminate the
+     * memcpy of the data.
+     *
+     * @return the total number of video buffers. Returns 0 to
+     *      indicate that this source does not make the video
+     *      buffer information available.
+     */
+    size_t getNumberOfVideoBuffers() const;
+
+    /**
+     * Retrieve the individual video buffer available from
+     * this source.
+     *
+     * @param index the index corresponding to the video buffer.
+     *      Valid range of the index is [0, n], where n =
+     *      getNumberOfVideoBuffers() - 1.
+     *
+     * @return the video buffer corresponding to the given index.
+     *      If index is out of range, 0 should be returned.
+     */
+    sp<IMemory> getVideoBuffer(size_t index) const;
+
+    /**
+     * Tell whether this camera source stores meta data or real YUV
+     * frame data in video buffers.
+     *
+     * @return true if meta data is stored in the video
+     *      buffers; false if real YUV data is stored in
+     *      the video buffers.
+     */
+    bool isMetaDataStoredInVideoBuffers() const;
+
     virtual void signalBufferReturned(MediaBuffer* buffer);
 
+protected:
+    enum CameraFlags {
+        FLAGS_SET_CAMERA = 1L << 0,
+        FLAGS_HOT_CAMERA = 1L << 1,
+    };
+
+    int32_t  mCameraFlags;
+    Size     mVideoSize;
+    int32_t  mVideoFrameRate;
+    int32_t  mColorFormat;
+    status_t mInitCheck;
+
+    sp<Camera>   mCamera;
+    sp<Surface>  mSurface;
+    sp<MetaData> mMeta;
+
+    int64_t mStartTimeUs;
+    int32_t mNumFramesReceived;
+    int64_t mLastFrameTimestampUs;
+    bool mStarted;
+    int32_t mNumFramesEncoded;
+
+    CameraSource(const sp<ICamera>& camera, int32_t cameraId,
+                 Size videoSize, int32_t frameRate,
+                 const sp<Surface>& surface,
+                 bool storeMetaDataInVideoBuffers);
+
+    virtual void startCameraRecording();
+    virtual void stopCameraRecording();
+    virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+    // Returns true if need to skip the current frame.
+    // Called from dataCallbackTimestamp.
+    virtual bool skipCurrentFrame(int64_t timestampUs) {return false;}
+
+    // Callback called when still camera raw data is available.
+    virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {}
+
+    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+            const sp<IMemory> &data);
+
 private:
     friend class CameraSourceListener;
 
-    sp<Camera> mCamera;
-    sp<MetaData> mMeta;
-
     Mutex mLock;
     Condition mFrameAvailableCondition;
     Condition mFrameCompleteCondition;
@@ -60,25 +189,34 @@
     List<sp<IMemory> > mFramesBeingEncoded;
     List<int64_t> mFrameTimes;
 
-    int64_t mStartTimeUs;
     int64_t mFirstFrameTimeUs;
-    int64_t mLastFrameTimestampUs;
-    int32_t mNumFramesReceived;
-    int32_t mNumFramesEncoded;
     int32_t mNumFramesDropped;
     int32_t mNumGlitches;
     int64_t mGlitchDurationThresholdUs;
     bool mCollectStats;
-    bool mStarted;
-
-    CameraSource(const sp<Camera> &camera);
-
-    void dataCallbackTimestamp(
-            int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);
+    bool mIsMetaDataStoredInVideoBuffers;
 
     void releaseQueuedFrames();
     void releaseOneRecordingFrame(const sp<IMemory>& frame);
 
+
+    status_t init(const sp<ICamera>& camera, int32_t cameraId,
+                Size videoSize, int32_t frameRate,
+                bool storeMetaDataInVideoBuffers);
+    status_t isCameraAvailable(const sp<ICamera>& camera, int32_t cameraId);
+    status_t isCameraColorFormatSupported(const CameraParameters& params);
+    status_t configureCamera(CameraParameters* params,
+                    int32_t width, int32_t height,
+                    int32_t frameRate);
+
+    status_t checkVideoSize(const CameraParameters& params,
+                    int32_t width, int32_t height);
+
+    status_t checkFrameRate(const CameraParameters& params,
+                    int32_t frameRate);
+
+    void releaseCamera();
+
     CameraSource(const CameraSource &);
     CameraSource &operator=(const CameraSource &);
 };
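The expanded CreateFromCamera() documentation above covers all of the new parameters. As a quick orientation, here is a hedged usage sketch against the declarations in this header; the 720x480 / 30 fps numbers and the helper name are illustrative, not values from the source:

    #include <media/stagefright/CameraSource.h>
    #include <surfaceflinger/Surface.h>

    using namespace android;

    // Sketch only: connect to camera id 0 and request metadata-in-buffers mode.
    static sp<CameraSource> makeCameraSource(const sp<Surface> &previewSurface) {
        sp<CameraSource> source = CameraSource::CreateFromCamera(
                NULL,                 // camera: NULL means connect via cameraId
                0,                    // cameraId
                Size(720, 480),       // videoSize (illustrative)
                30,                   // frameRate (illustrative)
                previewSurface,       // where preview frames are sent
                true);                // ask for meta data in video buffers
        if (source != NULL && source->initCheck() != OK) {
            source.clear();           // camera negotiation failed
        }
        return source;
    }

Whether metadata mode was actually granted can then be checked with isMetaDataStoredInVideoBuffers(), as the parameter documentation notes.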
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
new file mode 100644
index 0000000..0e5d534
--- /dev/null
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -0,0 +1,243 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERA_SOURCE_TIME_LAPSE_H_
+
+#define CAMERA_SOURCE_TIME_LAPSE_H_
+
+#include <pthread.h>
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+
+namespace android {
+
+class ICamera;
+class IMemory;
+class Camera;
+
+class CameraSourceTimeLapse : public CameraSource {
+public:
+    static CameraSourceTimeLapse *CreateFromCamera(
+        const sp<ICamera> &camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t videoFrameRate,
+        const sp<Surface>& surface,
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
+
+    virtual ~CameraSourceTimeLapse();
+
+    // If the frame capture interval is large, read will block for a long time.
+    // Due to the way the mediaRecorder framework works, a stop() call from
+    // mediaRecorder waits until the read returns, causing a long wait for
+    // stop() to return. To avoid this, we can make read() return a copy of the
+    // last read frame with the same time stamp frequently. This keeps the
+    // read() call from blocking too long. Calling this function quickly
+    // captures another frame, keeps its copy, and enables this mode of read()
+    // returning quickly.
+    void startQuickReadReturns();
+
+private:
+    // If true, will use still camera takePicture() for time lapse frames
+    // If false, will use the videocamera frames instead.
+    bool mUseStillCameraForTimeLapse;
+
+    // Size of picture taken from still camera. This may be larger than the size
+    // of the video, as the still camera may not support the exact video resolution
+    // demanded. See setPictureSizeToClosestSupported().
+    int32_t mPictureWidth;
+    int32_t mPictureHeight;
+
+    // size of the encoded video.
+    int32_t mVideoWidth;
+    int32_t mVideoHeight;
+
+    // True if we need to crop the still camera image to get the video frame.
+    bool mNeedCropping;
+
+    // Start location of the cropping rectangle.
+    int32_t mCropRectStartX;
+    int32_t mCropRectStartY;
+
+    // Time between capture of two frames during time lapse recording
+    // Negative value indicates that timelapse is disabled.
+    int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+
+    // Time between two frames in final video (1/frameRate)
+    int64_t mTimeBetweenTimeLapseVideoFramesUs;
+
+    // Real timestamp of the last encoded time lapse frame
+    int64_t mLastTimeLapseFrameRealTimestampUs;
+
+    // Thread id of thread which takes still picture and sleeps in a loop.
+    pthread_t mThreadTimeLapse;
+
+    // Variable set in dataCallbackTimestamp() to help skipCurrentFrame()
+    // to know if current frame needs to be skipped.
+    bool mSkipCurrentFrame;
+
+    // Lock for accessing mCameraIdle
+    Mutex mCameraIdleLock;
+
+    // Condition variable to wait on if camera is not yet idle. Once the
+    // camera gets idle, this variable will be signalled.
+    Condition mCameraIdleCondition;
+
+    // True if camera is in preview mode and ready for takePicture().
+    // False after a call to takePicture() but before the final compressed
+    // data callback has been called and preview has been restarted.
+    volatile bool mCameraIdle;
+
+    // True if stop() is waiting for camera to get idle, i.e. for the last
+    // takePicture() to complete. This is needed so that dataCallbackTimestamp()
+    // can return immediately.
+    volatile bool mStopWaitingForIdleCamera;
+
+    // Lock for accessing quick stop variables.
+    Mutex mQuickStopLock;
+
+    // Condition variable to wake up still picture thread.
+    Condition mTakePictureCondition;
+
+    // mQuickStop is set to true if we use quick read() returns; otherwise it is set
+    // to false. Once in this mode, read() returns a copy of the last read frame
+    // with the same time stamp. See startQuickReadReturns().
+    volatile bool mQuickStop;
+
+    // Forces the next frame passed to dataCallbackTimestamp() to be read
+    // as a time lapse frame. Used by startQuickReadReturns() so that the next
+    // frame wakes up any blocking read.
+    volatile bool mForceRead;
+
+    // Stores a copy of the MediaBuffer read in the last read() call after
+    // mQuickStop was true.
+    MediaBuffer* mLastReadBufferCopy;
+
+    // Status code for last read.
+    status_t mLastReadStatus;
+
+    CameraSourceTimeLapse(
+        const sp<ICamera> &camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t videoFrameRate,
+        const sp<Surface>& surface,
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
+
+    // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
+    // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
+    // it calls the base class' function.
+    virtual void signalBufferReturned(MediaBuffer* buffer);
+
+    // Wrapper over CameraSource::read() to implement quick stop.
+    virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+    // For still camera case starts a thread which calls camera's takePicture()
+    // in a loop. For video camera case, just starts the camera's video recording.
+    virtual void startCameraRecording();
+
+    // For still camera case joins the thread created in startCameraRecording().
+    // For video camera case, just stops the camera's video recording.
+    virtual void stopCameraRecording();
+
+    // For the still camera case, nothing needs to be done here, as the memory is
+    // locally allocated with refcounting.
+    // For video camera case just tell the camera to release the frame.
+    virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+    // mSkipCurrentFrame is set to true in dataCallbackTimestamp() if the current
+    // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
+    virtual bool skipCurrentFrame(int64_t timestampUs);
+
+    // Handles the callback to handle raw frame data from the still camera.
+    // Creates a copy of the frame data as the camera can reuse the frame memory
+    // once this callback returns. The function also sets a new timestamp corresponding
+    // to one frame time ahead of the last encoded frame's time stamp. It then
+    // calls dataCallbackTimestamp() of the base class with the copied data and the
+    // modified timestamp, which will think that it received the frame from a video
+    // camera and proceed as usual.
+    virtual void dataCallback(int32_t msgType, const sp<IMemory> &data);
+
+    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
+    // timestamp and set mSkipCurrentFrame.
+    // Then it calls the base CameraSource::dataCallbackTimestamp()
+    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+            const sp<IMemory> &data);
+
+    // Convenience function to fill mLastReadBufferCopy from the just read
+    // buffer.
+    void fillLastReadBufferCopy(MediaBuffer& sourceBuffer);
+
+    // If the passed in size (width x height) is a supported video/preview size,
+    // the function sets the camera's video/preview size to it and returns true.
+    // Otherwise returns false.
+    bool trySettingVideoSize(int32_t width, int32_t height);
+
+    // The still camera may not support the demanded video width and height.
+    // We look for the supported picture sizes from the still camera and
+    // choose the smallest one whose dimensions are no smaller than the corresponding
+    // video dimensions. The still picture will be cropped to get the video frame.
+    // The function returns true if the camera supports picture sizes greater than
+    // or equal to the passed in width and height, and false otherwise.
+    bool setPictureSizeToClosestSupported(int32_t width, int32_t height);
+
+    // Computes the offset of the rectangle from where to start cropping the
+    // still image into the video frame. We choose the center of the image to be
+    // cropped. The offset is stored in (mCropRectStartX, mCropRectStartY).
+    bool computeCropRectangleOffset();
+
+    // Crops the source data into a smaller image starting at
+    // (mCropRectStartX, mCropRectStartY) and of the size of the video frame.
+    // The data is returned into a newly allocated IMemory.
+    sp<IMemory> cropYUVImage(const sp<IMemory> &source_data);
+
+    // When video camera is used for time lapse capture, returns true
+    // until enough time has passed for the next time lapse frame. When
+    // the frame needs to be encoded, it returns false and also modifies
+    // the time stamp to be one frame time ahead of the last encoded
+    // frame's time stamp.
+    bool skipFrameAndModifyTimeStamp(int64_t *timestampUs);
+
+    // Wrapper to enter threadTimeLapseEntry()
+    static void *ThreadTimeLapseWrapper(void *me);
+
+    // Runs a loop which sleeps until a still picture is required
+    // and then calls mCamera->takePicture() to take the still picture.
+    // Used only in the case mUseStillCameraForTimeLapse = true.
+    void threadTimeLapseEntry();
+
+    // Wrapper to enter threadStartPreview()
+    static void *ThreadStartPreviewWrapper(void *me);
+
+    // Starts the camera's preview.
+    void threadStartPreview();
+
+    // Starts thread ThreadStartPreviewWrapper() for restarting preview.
+    // Needs to be done in a thread so that dataCallback() which calls this function
+    // can return, and the camera can know that takePicture() is done.
+    void restartPreview();
+
+    // Creates a copy of source_data into a new memory of final type MemoryBase.
+    sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);
+
+    CameraSourceTimeLapse(const CameraSourceTimeLapse &);
+    CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
+};
+
+}  // namespace android
+
+#endif  // CAMERA_SOURCE_TIME_LAPSE_H_
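The comments on skipFrameAndModifyTimeStamp() describe the core of the video-camera time-lapse path: drop frames until the capture interval elapses, then stamp the kept frame one video-frame duration after the previous one. A standalone sketch of that gating logic, under those stated assumptions; it mirrors the description above, not the class's private implementation:

    #include <stdint.h>

    // Hedged sketch of the time-lapse gating described above.
    struct TimeLapseGate {
        int64_t timeBetweenCapturesUs;    // real time between captured frames
        int64_t timeBetweenVideoFramesUs; // 1000000 / video frame rate
        int64_t lastCaptureRealUs;        // real timestamp of the last kept frame
        int64_t lastOutputUs;             // timestamp assigned to the last kept frame

        TimeLapseGate(int64_t captureIntervalUs, int64_t frameDurationUs)
            : timeBetweenCapturesUs(captureIntervalUs),
              timeBetweenVideoFramesUs(frameDurationUs),
              lastCaptureRealUs(0),
              lastOutputUs(0) {
        }

        // Returns true if the frame should be skipped; otherwise rewrites
        // *timestampUs to be one frame duration after the last kept frame.
        bool skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
            if (lastCaptureRealUs != 0 &&
                    *timestampUs < lastCaptureRealUs + timeBetweenCapturesUs) {
                return true;                    // interval not yet elapsed: drop it
            }
            lastCaptureRealUs = *timestampUs;   // remember the real capture time
            lastOutputUs += timeBetweenVideoFramesUs;
            *timestampUs = lastOutputUs;        // compressed, evenly spaced timeline
            return false;
        }
    };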
diff --git a/include/media/stagefright/ColorConverter.h b/include/media/stagefright/ColorConverter.h
index bc3f464..2ae8a5b 100644
--- a/include/media/stagefright/ColorConverter.h
+++ b/include/media/stagefright/ColorConverter.h
@@ -21,6 +21,7 @@
 #include <sys/types.h>
 
 #include <stdint.h>
+#include <utils/Errors.h>
 
 #include <OMX_Video.h>
 
@@ -32,36 +33,48 @@
 
     bool isValid() const;
 
-    void convert(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convert(
+            const void *srcBits,
+            size_t srcWidth, size_t srcHeight,
+            size_t srcCropLeft, size_t srcCropTop,
+            size_t srcCropRight, size_t srcCropBottom,
+            void *dstBits,
+            size_t dstWidth, size_t dstHeight,
+            size_t dstCropLeft, size_t dstCropTop,
+            size_t dstCropRight, size_t dstCropBottom);
 
 private:
+    struct BitmapParams {
+        BitmapParams(
+                void *bits,
+                size_t width, size_t height,
+                size_t cropLeft, size_t cropTop,
+                size_t cropRight, size_t cropBottom);
+
+        size_t cropWidth() const;
+        size_t cropHeight() const;
+
+        void *mBits;
+        size_t mWidth, mHeight;
+        size_t mCropLeft, mCropTop, mCropRight, mCropBottom;
+    };
+
     OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
     uint8_t *mClip;
 
     uint8_t *initClip();
 
-    void convertCbYCrY(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convertCbYCrY(
+            const BitmapParams &src, const BitmapParams &dst);
 
-    void convertYUV420Planar(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convertYUV420Planar(
+            const BitmapParams &src, const BitmapParams &dst);
 
-    void convertQCOMYUV420SemiPlanar(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convertQCOMYUV420SemiPlanar(
+            const BitmapParams &src, const BitmapParams &dst);
 
-    void convertYUV420SemiPlanar(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convertYUV420SemiPlanar(
+            const BitmapParams &src, const BitmapParams &dst);
 
     ColorConverter(const ColorConverter &);
     ColorConverter &operator=(const ColorConverter &);
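The reworked convert() replaces the old width/height/skip arguments with full bitmap dimensions plus crop rectangles on both sides, and now reports failure through a status_t. A call sketch, assuming the existing two-format constructor ColorConverter(srcFormat, dstFormat) and crop values taken from the decoder's output metadata; the buffer pointers are placeholders:

    #include <media/stagefright/ColorConverter.h>

    using namespace android;

    // Sketch: convert the visible region of a YUV420 planar frame to RGB565.
    // srcYuv/dstRgb, the dimensions and the crop rect are assumed to come from
    // the decoder's output format (kKeyWidth / kKeyHeight / kKeyCropRect).
    status_t convertFrame(
            const void *srcYuv, void *dstRgb,
            int32_t width, int32_t height,
            int32_t cropLeft, int32_t cropTop,
            int32_t cropRight, int32_t cropBottom) {
        ColorConverter converter(
                OMX_COLOR_FormatYUV420Planar, OMX_COLOR_Format16bitRGB565);
        if (!converter.isValid()) {
            return UNKNOWN_ERROR;   // this format pair is not supported
        }
        size_t dstWidth = cropRight - cropLeft + 1;
        size_t dstHeight = cropBottom - cropTop + 1;
        return converter.convert(
                srcYuv, width, height,
                cropLeft, cropTop, cropRight, cropBottom,
                dstRgb, dstWidth, dstHeight,
                0, 0, dstWidth - 1, dstHeight - 1);
    }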
diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h
index d0b9fcd..f95e56a 100644
--- a/include/media/stagefright/DataSource.h
+++ b/include/media/stagefright/DataSource.h
@@ -48,13 +48,13 @@
 
     virtual status_t initCheck() const = 0;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size) = 0;
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
 
     // Convenience methods:
-    bool getUInt16(off_t offset, uint16_t *x);
+    bool getUInt16(off64_t offset, uint16_t *x);
 
     // May return ERROR_UNSUPPORTED.
-    virtual status_t getSize(off_t *size);
+    virtual status_t getSize(off64_t *size);
 
     virtual uint32_t flags() {
         return 0;
@@ -75,11 +75,14 @@
     static void RegisterDefaultSniffers();
 
     // for DRM
-    virtual DecryptHandle* DrmInitialization(DrmManagerClient *client) {
+    virtual DecryptHandle* DrmInitialization() {
         return NULL;
     }
     virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {};
 
+    virtual String8 getUri() {
+        return String8();
+    }
 
 protected:
     virtual ~DataSource() {}
diff --git a/include/media/stagefright/FileSource.h b/include/media/stagefright/FileSource.h
index 4307263..51a4343 100644
--- a/include/media/stagefright/FileSource.h
+++ b/include/media/stagefright/FileSource.h
@@ -34,11 +34,11 @@
 
     virtual status_t initCheck() const;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
 
-    virtual status_t getSize(off_t *size);
+    virtual status_t getSize(off64_t *size);
 
-    virtual DecryptHandle* DrmInitialization(DrmManagerClient *client);
+    virtual DecryptHandle* DrmInitialization();
 
     virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client);
 
@@ -46,7 +46,6 @@
     virtual ~FileSource();
 
 private:
-    FILE *mFile;
     int mFd;
     int64_t mOffset;
     int64_t mLength;
@@ -59,7 +58,7 @@
     int64_t mDrmBufSize;
     unsigned char *mDrmBuf;
 
-    ssize_t readAtDRM(off_t offset, void *data, size_t size);
+    ssize_t readAtDRM(off64_t offset, void *data, size_t size);
 
     FileSource(const FileSource &);
     FileSource &operator=(const FileSource &);
diff --git a/include/media/stagefright/HardwareAPI.h b/include/media/stagefright/HardwareAPI.h
index 63f11d1..d1ecaaf 100644
--- a/include/media/stagefright/HardwareAPI.h
+++ b/include/media/stagefright/HardwareAPI.h
@@ -19,28 +19,88 @@
 #define HARDWARE_API_H_
 
 #include <media/stagefright/OMXPluginBase.h>
-#include <media/stagefright/VideoRenderer.h>
-#include <surfaceflinger/ISurface.h>
+#include <ui/android_native_buffer.h>
 #include <utils/RefBase.h>
 
 #include <OMX_Component.h>
 
-extern android::VideoRenderer *createRenderer(
-        const android::sp<android::ISurface> &surface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t displayWidth, size_t displayHeight,
-        size_t decodedWidth, size_t decodedHeight);
+namespace android {
 
-extern android::VideoRenderer *createRendererWithRotation(
-        const android::sp<android::ISurface> &surface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t displayWidth, size_t displayHeight,
-        size_t decodedWidth, size_t decodedHeight,
-        int32_t rotationDegrees);
+// A pointer to this struct is passed to OMX_SetParameter() when the extension
+// index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension
+// is given.
+//
+// When Android native buffer use is disabled for a port (the default state),
+// the OMX node should operate as normal, and expect UseBuffer calls to set its
+// buffers.  This is the mode that will be used when CPU access to the buffer is
+// required.
+//
+// When Android native buffer use has been enabled for a given port, the video
+// color format for the port is to be interpreted as an Android pixel format
+// rather than an OMX color format.  The node should then expect to receive
+// UseAndroidNativeBuffer calls (via OMX_SetParameter) rather than UseBuffer
+// calls for that port.
+struct EnableAndroidNativeBuffersParams {
+    OMX_U32 nSize;
+    OMX_VERSIONTYPE nVersion;
+    OMX_U32 nPortIndex;
+    OMX_BOOL enable;
+};
+
+// A pointer to this struct is passed to OMX_SetParameter() when the extension
+// index "OMX.google.android.index.storeMetaDataInBuffers"
+// is given.
+//
+// When meta data is stored in the video buffers passed between OMX clients
+// and OMX components, interpretation of the buffer data is up to the
+// buffer receiver, and the data may or may not be the actual video data; it may
+// instead be information that helps the receiver locate the actual data.
+// The buffer receiver thus needs to know how to interpret what is stored
+// in these buffers, with mechanisms pre-determined externally. How to
+// interpret the meta data is outside of the scope of this method.
+//
+// Currently, this is specifically used to pass meta data from video source
+// (camera component, for instance) to video encoder to avoid memcpying of
+// input video frame data. To do this, bStoreMetaData is set to OMX_TRUE.
+// If bStoreMetaData is set to OMX_FALSE, real YUV frame data will be stored
+// in the buffers. In addition, if no OMX_SetParameter() call is made
+// with the corresponding extension index, real YUV data is stored
+// in the buffers.
+struct StoreMetaDataInBuffersParams {
+    OMX_U32 nSize;
+    OMX_VERSIONTYPE nVersion;
+    OMX_U32 nPortIndex;
+    OMX_BOOL bStoreMetaData;
+};
+
+// A pointer to this struct is passed to OMX_SetParameter when the extension
+// index for the 'OMX.google.android.index.useAndroidNativeBuffer' extension is
+// given.  This call will only be performed if a prior call was made with the
+// 'OMX.google.android.index.enableAndroidNativeBuffers' extension index,
+// enabling use of Android native buffers.
+struct UseAndroidNativeBufferParams {
+    OMX_U32 nSize;
+    OMX_VERSIONTYPE nVersion;
+    OMX_U32 nPortIndex;
+    OMX_PTR pAppPrivate;
+    OMX_BUFFERHEADERTYPE **bufferHeader;
+    const sp<android_native_buffer_t>& nativeBuffer;
+};
+
+// A pointer to this struct is passed to OMX_GetParameter when the extension
+// index for the 'OMX.google.android.index.getAndroidNativeBufferUsage'
+// extension is given.  The usage bits returned from this query will be used to
+// allocate the Gralloc buffers that get passed to the useAndroidNativeBuffer
+// command.
+struct GetAndroidNativeBufferUsageParams {
+    OMX_U32 nSize;              // IN
+    OMX_VERSIONTYPE nVersion;   // IN
+    OMX_U32 nPortIndex;         // IN
+    OMX_U32 nUsage;             // OUT
+};
+
+}  // namespace android
 
 extern android::OMXPluginBase *createOMXPlugin();
 
 #endif  // HARDWARE_API_H_
-
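The StoreMetaDataInBuffersParams comment above describes an OMX_SetParameter() call keyed by a vendor extension string. A sketch of that handshake using the standard OMX IL entry points, assuming a raw OMX_HANDLETYPE; inside stagefright the same request is typically routed through IOMX rather than a raw handle:

    #include <OMX_Core.h>
    #include <media/stagefright/HardwareAPI.h>

    // Sketch: enable metadata-in-buffers mode on input port 0 of an OMX component.
    // Error handling is reduced to returning the OMX error code.
    OMX_ERRORTYPE enableMetaDataMode(OMX_HANDLETYPE component) {
        OMX_INDEXTYPE index;
        OMX_ERRORTYPE err = OMX_GetExtensionIndex(
                component,
                const_cast<OMX_STRING>(
                        "OMX.google.android.index.storeMetaDataInBuffers"),
                &index);
        if (err != OMX_ErrorNone) {
            return err;   // component does not support the extension
        }

        android::StoreMetaDataInBuffersParams params;
        params.nSize = sizeof(params);
        params.nVersion.s.nVersionMajor = 1;
        params.nVersion.s.nVersionMinor = 0;
        params.nVersion.s.nRevision = 0;
        params.nVersion.s.nStep = 0;
        params.nPortIndex = 0;            // input port
        params.bStoreMetaData = OMX_TRUE; // store meta data, not YUV, in buffers

        return OMX_SetParameter(component, index, &params);
    }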
diff --git a/include/media/stagefright/JPEGSource.h b/include/media/stagefright/JPEGSource.h
index 9d0a700..1b7e91b 100644
--- a/include/media/stagefright/JPEGSource.h
+++ b/include/media/stagefright/JPEGSource.h
@@ -42,9 +42,9 @@
     sp<DataSource> mSource;
     MediaBufferGroup *mGroup;
     bool mStarted;
-    off_t mSize;
+    off64_t mSize;
     int32_t mWidth, mHeight;
-    off_t mOffset;
+    off64_t mOffset;
 
     status_t parseJPEG();
 
diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h
index 7bf07eb..5c5229d 100644
--- a/include/media/stagefright/MPEG4Writer.h
+++ b/include/media/stagefright/MPEG4Writer.h
@@ -61,20 +61,21 @@
 private:
     class Track;
 
-    FILE *mFile;
+    int  mFd;
+    status_t mInitCheck;
     bool mUse4ByteNalLength;
     bool mUse32BitOffset;
     bool mIsFileSizeLimitExplicitlyRequested;
     bool mPaused;
     bool mStarted;
-    off_t mOffset;
+    off64_t mOffset;
     off_t mMdatOffset;
     uint8_t *mMoovBoxBuffer;
-    off_t mMoovBoxBufferOffset;
+    off64_t mMoovBoxBufferOffset;
     bool  mWriteMoovBoxToMemory;
-    off_t mFreeBoxOffset;
+    off64_t mFreeBoxOffset;
     bool mStreamableFile;
-    off_t mEstimatedMoovBoxSize;
+    off64_t mEstimatedMoovBoxSize;
     uint32_t mInterleaveDurationUs;
     int32_t mTimeScale;
     int64_t mStartTimestampUs;
@@ -83,7 +84,7 @@
 
     List<Track *> mTracks;
 
-    List<off_t> mBoxes;
+    List<off64_t> mBoxes;
 
     void setStartTimestampUs(int64_t timeUs);
     int64_t getStartTimestampUs();  // Not const
@@ -97,6 +98,8 @@
         List<MediaBuffer *> mSamples;       // Sample data
 
         // Convenient constructor
+        Chunk(): mTrack(NULL), mTimeStampUs(0) {}
+
         Chunk(Track *track, int64_t timeUs, List<MediaBuffer *> samples)
             : mTrack(track), mTimeStampUs(timeUs), mSamples(samples) {
         }
@@ -123,13 +126,14 @@
     void bufferChunk(const Chunk& chunk);
 
     // Write all buffered chunks from all tracks
-    void writeChunks();
+    void writeAllChunks();
 
-    // Write a chunk if there is one
-    status_t writeOneChunk();
+    // Retrieve the proper chunk to write if there is one
+    // Return true if a chunk is found; otherwise, return false.
+    bool findChunkToWrite(Chunk *chunk);
 
-    // Write the first chunk from the given ChunkInfo.
-    void writeFirstChunk(ChunkInfo* info);
+    // Actually write the given chunk to the file.
+    void writeChunkToFile(Chunk* chunk);
 
     // Adjust other track media clock (presumably wall clock)
     // based on audio track media clock with the drift time.
@@ -145,10 +149,10 @@
     void unlock();
 
     // Acquire lock before calling these methods
-    off_t addSample_l(MediaBuffer *buffer);
-    off_t addLengthPrefixedSample_l(MediaBuffer *buffer);
+    off64_t addSample_l(MediaBuffer *buffer);
+    off64_t addLengthPrefixedSample_l(MediaBuffer *buffer);
 
-    inline size_t write(const void *ptr, size_t size, size_t nmemb, FILE* stream);
+    inline size_t write(const void *ptr, size_t size, size_t nmemb);
     bool exceedsFileSizeLimit();
     bool use32BitFileOffset() const;
     bool exceedsFileDurationLimit();
diff --git a/include/media/stagefright/MediaBuffer.h b/include/media/stagefright/MediaBuffer.h
index 339e6fb..c1c4f94 100644
--- a/include/media/stagefright/MediaBuffer.h
+++ b/include/media/stagefright/MediaBuffer.h
@@ -25,6 +25,7 @@
 
 namespace android {
 
+class GraphicBuffer;
 class MediaBuffer;
 class MediaBufferObserver;
 class MetaData;
@@ -48,6 +49,8 @@
 
     MediaBuffer(size_t size);
 
+    MediaBuffer(const sp<GraphicBuffer>& graphicBuffer);
+
     // Decrements the reference count and returns the buffer to its
     // associated MediaBufferGroup if the reference count drops to 0.
     void release();
@@ -63,6 +66,8 @@
 
     void set_range(size_t offset, size_t length);
 
+    sp<GraphicBuffer> graphicBuffer() const;
+
     sp<MetaData> meta_data();
 
     // Clears meta data and resets the range to the full extent.
@@ -94,6 +99,7 @@
 
     void *mData;
     size_t mSize, mRangeOffset, mRangeLength;
+    sp<GraphicBuffer> mGraphicBuffer;
 
     bool mOwnsData;
 
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index 92ce068..66dfff6 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -37,6 +37,8 @@
 extern const char *MEDIA_MIMETYPE_AUDIO_G711_ALAW;
 extern const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW;
 extern const char *MEDIA_MIMETYPE_AUDIO_RAW;
+extern const char *MEDIA_MIMETYPE_AUDIO_FLAC;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS;
 
 extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG4;
 extern const char *MEDIA_MIMETYPE_CONTAINER_WAV;
@@ -44,6 +46,8 @@
 extern const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA;
 extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS;
 
+extern const char *MEDIA_MIMETYPE_CONTAINER_WVM;
+
 }  // namespace android
 
 #endif  // MEDIA_DEFS_H_
diff --git a/include/media/stagefright/MediaSource.h b/include/media/stagefright/MediaSource.h
index dafc621..a31395e 100644
--- a/include/media/stagefright/MediaSource.h
+++ b/include/media/stagefright/MediaSource.h
@@ -78,31 +78,18 @@
         void clearSeekTo();
         bool getSeekTo(int64_t *time_us, SeekMode *mode) const;
 
-        // Option allows encoder to skip some frames until the specified
-        // time stamp.
-        // To prevent from being abused, when the skipFrame timestamp is
-        // found to be more than 1 second later than the current timestamp,
-        // an error will be returned from read().
-        void clearSkipFrame();
-        bool getSkipFrame(int64_t *timeUs) const;
-        void setSkipFrame(int64_t timeUs);
-
         void setLateBy(int64_t lateness_us);
         int64_t getLateBy() const;
 
     private:
         enum Options {
-            // Bit map
             kSeekTo_Option      = 1,
-            kSkipFrame_Option   = 2,
         };
 
         uint32_t mOptions;
         int64_t mSeekTimeUs;
         SeekMode mSeekMode;
         int64_t mLatenessUs;
-
-        int64_t mSkipFrameUntilTimeUs;
     };
 
     // Causes this source to suspend pulling data from its upstream source
diff --git a/include/media/stagefright/MediaSourceSplitter.h b/include/media/stagefright/MediaSourceSplitter.h
new file mode 100644
index 0000000..568f4c2
--- /dev/null
+++ b/include/media/stagefright/MediaSourceSplitter.h
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This class provides a way to split a single media source into multiple sources.
+// The constructor takes in the real mediaSource and createClient() can then be
+// used to create multiple sources served from this real mediaSource.
+//
+// Usage:
+// - Create MediaSourceSplitter by passing in a real mediaSource from which
+// multiple duplicate channels are needed.
+// - Create a client using createClient() and use it as any other mediaSource.
+//
+// Note that multiple clients can be created using createClient() and
+// started/stopped in any order. MediaSourceSplitter stops the real source only
+// when all clients have been stopped.
+//
+// If a new client is created/started after some existing clients have already
+// started, the new client will start getting its read frames from the current
+// time.
+
+#ifndef MEDIA_SOURCE_SPLITTER_H_
+
+#define MEDIA_SOURCE_SPLITTER_H_
+
+#include <media/stagefright/MediaSource.h>
+#include <utils/threads.h>
+#include <utils/Vector.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class MediaBuffer;
+class MetaData;
+
+class MediaSourceSplitter : public RefBase {
+public:
+    // Constructor
+    // mediaSource: The real mediaSource. The class keeps a reference to it to
+    // implement the various clients.
+    MediaSourceSplitter(sp<MediaSource> mediaSource);
+
+    ~MediaSourceSplitter();
+
+    // Creates a new client of base type MediaSource. Multiple clients can be
+    // created which get their data through the same real mediaSource. These
+    // clients can then be used like any other MediaSource, all of which provide
+    // data from the same real source.
+    sp<MediaSource> createClient();
+
+private:
+    // Total number of clients created through createClient().
+    int32_t mNumberOfClients;
+
+    // reference to the real MediaSource passed to the constructor.
+    sp<MediaSource> mSource;
+
+    // Stores pointer to the MediaBuffer read from the real MediaSource.
+    // All clients use this to implement the read() call.
+    MediaBuffer *mLastReadMediaBuffer;
+
+    // Status code for read from the real MediaSource. All clients return
+    // this for their read().
+    status_t mLastReadStatus;
+
+    // Boolean telling whether the real MediaSource has started.
+    bool mSourceStarted;
+
+    // List of booleans, one for each client, storing whether the corresponding
+    // client's start() has been called.
+    Vector<bool> mClientsStarted;
+
+    // Stores the number of clients which are currently started.
+    int32_t mNumberOfClientsStarted;
+
+    // Since different clients call read() asynchronously, we need to keep track
+    // of what data is currently read into the mLastReadMediaBuffer.
+    // mCurrentReadBit stores the bit for the current read buffer. This bit
+    // flips each time a new buffer is read from the source.
+    // mClientsDesiredReadBit stores the bit for the next desired read buffer
+    // for each client. This bit flips each time read() is completed for this
+    // client.
+    bool mCurrentReadBit;
+    Vector<bool> mClientsDesiredReadBit;
+
+    // Number of clients whose current read has been completed.
+    int32_t mNumberOfCurrentReads;
+
+    // Boolean telling whether the last read has been completed for all clients.
+    // The variable is reset to false each time buffer is read from the real
+    // source.
+    bool mLastReadCompleted;
+
+    // A global mutex for access to critical sections.
+    Mutex mLock;
+
+    // Condition variable for waiting on read from source to complete.
+    Condition mReadFromSourceCondition;
+
+    // Condition variable for waiting on all client's last read to complete.
+    Condition mAllReadsCompleteCondition;
+
+    // Functions used by Client to implement the MediaSource interface.
+
+    // If the real source has not been started yet by any client, starts it.
+    status_t start(int clientId, MetaData *params);
+
+    // Stops the real source after all clients have called stop().
+    status_t stop(int clientId);
+
+    // returns the real source's getFormat().
+    sp<MetaData> getFormat(int clientId);
+
+    // If the client's desired buffer has already been read into
+    // mLastReadMediaBuffer, points the buffer to that. Otherwise if it is the
+    // master client, reads the buffer from source or else waits for the master
+    // client to read the buffer and uses that.
+    status_t read(int clientId,
+            MediaBuffer **buffer, const MediaSource::ReadOptions *options = NULL);
+
+    // Not implemented right now.
+    status_t pause(int clientId);
+
+    // Function which reads a buffer from the real source into
+    // mLastReadMediaBuffer
+    void readFromSource_lock(const MediaSource::ReadOptions *options);
+
+    // Waits until read from the real source has been completed.
+    // _lock means that the function should be called when the thread has already
+    // obtained the lock for the mutex mLock.
+    void waitForReadFromSource_lock(int32_t clientId);
+
+    // Waits until all clients have read the current buffer in
+    // mLastReadCompleted.
+    void waitForAllClientsLastRead_lock(int32_t clientId);
+
+    // Each client calls this after it completes its read(). Once all clients
+    // have called this for the current buffer, the function calls
+    // mAllReadsCompleteCondition.broadcast() to signal the waiting clients.
+    void signalReadComplete_lock(bool readAborted);
+
+    // Make these constructors private.
+    MediaSourceSplitter();
+    MediaSourceSplitter(const MediaSourceSplitter &);
+    MediaSourceSplitter &operator=(const MediaSourceSplitter &);
+
+    // This class implements the MediaSource interface. Each client stores a
+    // reference to the parent MediaSourceSplitter and uses it to complete the
+    // various calls.
+    class Client : public MediaSource {
+    public:
+        // Constructor stores reference to the parent MediaSourceSplitter and its
+        // client id.
+        Client(sp<MediaSourceSplitter> splitter, int32_t clientId);
+
+        // MediaSource interface
+        virtual status_t start(MetaData *params = NULL);
+
+        virtual status_t stop();
+
+        virtual sp<MetaData> getFormat();
+
+        virtual status_t read(
+                MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+        virtual status_t pause();
+
+    private:
+        // Reference to the parent MediaSourceSplitter
+        sp<MediaSourceSplitter> mSplitter;
+
+        // Id of this client.
+        int32_t mClientId;
+    };
+
+    friend class Client;
+};
+
+}  // namespace android
+
+#endif  // MEDIA_SOURCE_SPLITTER_H_
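The usage notes at the top of this header outline the intended flow. A minimal sketch of that flow with two clients, against the declarations above; the consumers of the buffers are left as placeholders:

    #include <media/stagefright/MediaSourceSplitter.h>
    #include <media/stagefright/MediaSource.h>
    #include <media/stagefright/MediaBuffer.h>

    using namespace android;

    // Sketch: split one real source into two independent clients.
    // realSource would typically be a CameraSource or similar.
    void splitExample(const sp<MediaSource> &realSource) {
        sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(realSource);

        sp<MediaSource> clientA = splitter->createClient();
        sp<MediaSource> clientB = splitter->createClient();

        clientA->start();
        clientB->start();   // real source starts once; both clients see its frames

        MediaBuffer *buffer = NULL;
        if (clientA->read(&buffer) == OK && buffer != NULL) {
            // ... hand the buffer to a consumer ...
            buffer->release();
        }

        clientA->stop();
        clientB->stop();    // real source stops only after the last client stops
    }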
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index ea2fa52..f7f2235 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -30,17 +30,25 @@
 // The following keys map to int32_t data unless indicated otherwise.
 enum {
     kKeyMIMEType          = 'mime',  // cstring
-    kKeyWidth             = 'widt',  // int32_t
-    kKeyHeight            = 'heig',  // int32_t
+    kKeyWidth             = 'widt',  // int32_t, image pixel
+    kKeyHeight            = 'heig',  // int32_t, image pixel
+    kKeyDisplayWidth      = 'dWid',  // int32_t, display/presentation
+    kKeyDisplayHeight     = 'dHgt',  // int32_t, display/presentation
+
+    // a rectangle, if absent assumed to be (0, 0, width - 1, height - 1)
+    kKeyCropRect          = 'crop',
+
     kKeyRotation          = 'rotA',  // int32_t (angle in degrees)
     kKeyIFramesInterval   = 'ifiv',  // int32_t
     kKeyStride            = 'strd',  // int32_t
     kKeySliceHeight       = 'slht',  // int32_t
     kKeyChannelCount      = '#chn',  // int32_t
-    kKeySampleRate        = 'srte',  // int32_t (also video frame rate)
+    kKeySampleRate        = 'srte',  // int32_t (audio sampling rate Hz)
+    kKeyFrameRate         = 'frmR',  // int32_t (video frame rate fps)
     kKeyBitRate           = 'brte',  // int32_t (bps)
     kKeyESDS              = 'esds',  // raw data
     kKeyAVCC              = 'avcc',  // raw data
+    kKeyD263              = 'd263',  // raw data
     kKeyVorbisInfo        = 'vinf',  // raw data
     kKeyVorbisBooks       = 'vboo',  // raw data
     kKeyWantsNALFragments = 'NALf',
@@ -94,7 +102,6 @@
     // Track authoring progress status
     // kKeyTrackTimeStatus is used to track progress in elapsed time
     kKeyTrackTimeStatus   = 'tktm',  // int64_t
-    kKeyRotationDegree    = 'rdge',  // int32_t (clockwise, in degree)
 
     kKeyNotRealTime       = 'ntrt',  // bool (int32_t)
 
@@ -104,11 +111,15 @@
     kKeyValidSamples      = 'valD',  // int32_t
 
     kKeyIsUnreadable      = 'unre',  // bool (int32_t)
+
+    // An indication that a video buffer has been rendered.
+    kKeyRendered          = 'rend',  // bool (int32_t)
 };
 
 enum {
     kTypeESDS        = 'esds',
     kTypeAVCC        = 'avcc',
+    kTypeD263        = 'd263',
 };
 
 class MetaData : public RefBase {
@@ -123,6 +134,7 @@
         TYPE_INT64    = 'in64',
         TYPE_FLOAT    = 'floa',
         TYPE_POINTER  = 'ptr ',
+        TYPE_RECT     = 'rect',
     };
 
     void clear();
@@ -134,12 +146,22 @@
     bool setFloat(uint32_t key, float value);
     bool setPointer(uint32_t key, void *value);
 
+    bool setRect(
+            uint32_t key,
+            int32_t left, int32_t top,
+            int32_t right, int32_t bottom);
+
     bool findCString(uint32_t key, const char **value);
     bool findInt32(uint32_t key, int32_t *value);
     bool findInt64(uint32_t key, int64_t *value);
     bool findFloat(uint32_t key, float *value);
     bool findPointer(uint32_t key, void **value);
 
+    bool findRect(
+            uint32_t key,
+            int32_t *left, int32_t *top,
+            int32_t *right, int32_t *bottom);
+
     bool setData(uint32_t key, uint32_t type, const void *data, size_t size);
 
     bool findData(uint32_t key, uint32_t *type,
@@ -185,6 +207,10 @@
         }
     };
 
+    struct Rect {
+        int32_t mLeft, mTop, mRight, mBottom;
+    };
+
     KeyedVector<uint32_t, typed_data> mItems;
 
     // MetaData &operator=(const MetaData &);
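kKeyCropRect plus the new setRect()/findRect() accessors let a decoder report a visible region smaller than the allocated buffer. A small round-trip sketch; the 1920x1088 buffer with a 1920x1080 crop is only an example:

    #include <media/stagefright/MetaData.h>

    using namespace android;

    // Sketch: a decoder publishes a 1920x1088 buffer that only contains
    // 1920x1080 of visible video, expressed as a crop rect.
    void cropRectExample() {
        sp<MetaData> format = new MetaData;
        format->setInt32(kKeyWidth, 1920);
        format->setInt32(kKeyHeight, 1088);
        format->setRect(kKeyCropRect, 0, 0, 1919, 1079);  // left, top, right, bottom

        int32_t left, top, right, bottom;
        if (!format->findRect(kKeyCropRect, &left, &top, &right, &bottom)) {
            // Per the comment above, an absent rect means (0, 0, width-1, height-1).
            int32_t width, height;
            format->findInt32(kKeyWidth, &width);
            format->findInt32(kKeyHeight, &height);
            left = top = 0;
            right = width - 1;
            bottom = height - 1;
        }
        // crop width = right - left + 1, crop height = bottom - top + 1
    }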
diff --git a/include/media/stagefright/NativeWindowWrapper.h b/include/media/stagefright/NativeWindowWrapper.h
new file mode 100644
index 0000000..f323cbc
--- /dev/null
+++ b/include/media/stagefright/NativeWindowWrapper.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NATIVE_WINDOW_WRAPPER_H_
+
+#define NATIVE_WINDOW_WRAPPER_H_
+
+#include <surfaceflinger/Surface.h>
+#include <gui/SurfaceTextureClient.h>
+
+namespace android {
+
+// Both Surface and SurfaceTextureClient are RefBase that implement the
+// ANativeWindow interface, but at different addresses. ANativeWindow is not
+// a RefBase but acts like one for use with sp<>.  This wrapper converts a
+// Surface or SurfaceTextureClient into a single reference-counted object
+// that holds an sp reference to the underlying Surface or SurfaceTextureClient.
+// It provides a method to get the ANativeWindow.
+
+struct NativeWindowWrapper : RefBase {
+    NativeWindowWrapper(
+            const sp<Surface> &surface) :
+        mSurface(surface) { }
+
+    NativeWindowWrapper(
+            const sp<SurfaceTextureClient> &surfaceTextureClient) :
+        mSurfaceTextureClient(surfaceTextureClient) { }
+
+    sp<ANativeWindow> getNativeWindow() const {
+        if (mSurface != NULL) {
+            return mSurface;
+        } else {
+            return mSurfaceTextureClient;
+        }
+    }
+
+    // If needed later we can provide a method to ask what kind of native window this wraps.
+
+private:
+    // At most one of mSurface and mSurfaceTextureClient will be non-NULL
+    const sp<Surface> mSurface;
+    const sp<SurfaceTextureClient> mSurfaceTextureClient;
+
+    DISALLOW_EVIL_CONSTRUCTORS(NativeWindowWrapper);
+};
+
+}  // namespace android
+
+#endif  // NATIVE_WINDOW_WRAPPER_H_
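A short usage sketch of the wrapper, turning a Surface into the sp<ANativeWindow> that the rest of the framework consumes; the helper name is made up for illustration:

    #include <media/stagefright/NativeWindowWrapper.h>

    using namespace android;

    // Sketch: wrap a Surface and expose it as an ANativeWindow. The same
    // pattern works for a SurfaceTextureClient. Callers that also need to
    // remember which kind was wrapped would keep the sp<NativeWindowWrapper>.
    sp<ANativeWindow> windowFromSurface(const sp<Surface> &surface) {
        sp<NativeWindowWrapper> wrapper = new NativeWindowWrapper(surface);
        return wrapper->getNativeWindow();
    }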
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index 8274dfb..93b5d24 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -18,6 +18,7 @@
 
 #define OMX_CODEC_H_
 
+#include <android/native_window.h>
 #include <media/IOMX.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaSource.h>
@@ -38,13 +39,28 @@
         // The client wants to access the output buffer's video
         // data for example for thumbnail extraction.
         kClientNeedsFramebuffer  = 4,
+
+        // Request for software or hardware codecs. If request
+        // can not be fullfilled, Create() returns NULL.
+        kSoftwareCodecsOnly      = 8,
+        kHardwareCodecsOnly      = 16,
+
+        // Store meta data in video buffers
+        kStoreMetaDataInVideoBuffers = 32,
+
+        // Only submit one input buffer at one time.
+        kOnlySubmitOneInputBufferAtOneTime = 64,
+
+        // Enable GRALLOC_USAGE_PROTECTED for output buffers from native window
+        kEnableGrallocUsageProtected = 128,
     };
     static sp<MediaSource> Create(
             const sp<IOMX> &omx,
             const sp<MetaData> &meta, bool createEncoder,
             const sp<MediaSource> &source,
             const char *matchComponentName = NULL,
-            uint32_t flags = 0);
+            uint32_t flags = 0,
+            const sp<ANativeWindow> &nativeWindow = NULL);
 
     static void setComponentRole(
             const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder,
@@ -114,12 +130,18 @@
         kAvoidMemcopyInputRecordingFrames     = 2048,
         kRequiresLargerEncoderOutputBuffer    = 4096,
         kOutputBuffersAreUnreadable           = 8192,
-        kStoreMetaDataInInputVideoBuffers     = 16384,
+    };
+
+    enum BufferStatus {
+        OWNED_BY_US,
+        OWNED_BY_COMPONENT,
+        OWNED_BY_NATIVE_WINDOW,
+        OWNED_BY_CLIENT,
     };
 
     struct BufferInfo {
         IOMX::buffer_id mBuffer;
-        bool mOwnedByComponent;
+        BufferStatus mStatus;
         sp<IMemory> mMem;
         size_t mSize;
         void *mData;
@@ -156,7 +178,7 @@
     int64_t mSeekTimeUs;
     ReadOptions::SeekMode mSeekMode;
     int64_t mTargetTimeUs;
-    int64_t mSkipTimeUs;
+    bool mOutputPortSettingsChangedPending;
 
     MediaBuffer *mLeftOverBuffer;
 
@@ -165,13 +187,25 @@
 
     bool mPaused;
 
+    sp<ANativeWindow> mNativeWindow;
+
+    // The index in each of the mPortBuffers arrays of the buffer that will be
+    // submitted to OMX next.  This only applies when using buffers from a
+    // native window.
+    size_t mNextNativeBufferIndex[2];
+
     // A list of indices into mPortStatus[kPortIndexOutput] filled with data.
     List<size_t> mFilledBuffers;
     Condition mBufferFilled;
 
+    bool mIsMetaDataStoredInVideoBuffers;
+    bool mOnlySubmitOneBufferAtOneTime;
+    bool mEnableGrallocUsageProtected;
+
     OMXCodec(const sp<IOMX> &omx, IOMX::node_id node, uint32_t quirks,
              bool isEncoder, const char *mime, const char *componentName,
-             const sp<MediaSource> &source);
+             const sp<MediaSource> &source,
+             const sp<ANativeWindow> &nativeWindow);
 
     void addCodecSpecificData(const void *data, size_t size);
     void clearCodecSpecificData();
@@ -222,13 +256,20 @@
 
     status_t allocateBuffers();
     status_t allocateBuffersOnPort(OMX_U32 portIndex);
+    status_t allocateOutputBuffersFromNativeWindow();
+
+    status_t queueBufferToNativeWindow(BufferInfo *info);
+    status_t cancelBufferToNativeWindow(BufferInfo *info);
+    BufferInfo* dequeueBufferFromNativeWindow();
 
     status_t freeBuffersOnPort(
             OMX_U32 portIndex, bool onlyThoseWeOwn = false);
 
-    void drainInputBuffer(IOMX::buffer_id buffer);
+    status_t freeBuffer(OMX_U32 portIndex, size_t bufIndex);
+
+    bool drainInputBuffer(IOMX::buffer_id buffer);
     void fillOutputBuffer(IOMX::buffer_id buffer);
-    void drainInputBuffer(BufferInfo *info);
+    bool drainInputBuffer(BufferInfo *info);
     void fillOutputBuffer(BufferInfo *info);
 
     void drainInputBuffers();
@@ -256,6 +297,7 @@
 
     status_t init();
     void initOutputFormat(const sp<MetaData> &inputFormat);
+    status_t initNativeWindow();
 
     void dumpPortStatus(OMX_U32 portIndex);
 
@@ -270,6 +312,10 @@
             uint32_t flags,
             Vector<String8> *matchingCodecs);
 
+    void restorePatchedDataPointer(BufferInfo *info);
+
+    status_t applyRotation();
+
     OMXCodec(const OMXCodec &);
     OMXCodec &operator=(const OMXCodec &);
 };
@@ -282,6 +328,7 @@
 struct CodecCapabilities {
     String8 mComponentName;
     Vector<CodecProfileLevel> mProfileLevels;
+    Vector<OMX_U32> mColorFormats;
 };
 
 // Return a vector of componentNames with supported profile/level pairs
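Create() now takes a flags bitmask and an optional ANativeWindow so decoded buffers can be rendered directly into a window. A call sketch for a hardware-only video decoder, where omx, the track and the native window are assumed to come from an OMXClient, an extractor and the app's Surface respectively:

    #include <media/stagefright/OMXCodec.h>
    #include <media/stagefright/MediaSource.h>
    #include <media/IOMX.h>

    using namespace android;

    // Sketch: instantiate a hardware decoder that renders into a native window.
    sp<MediaSource> makeDecoder(
            const sp<IOMX> &omx,
            const sp<MediaSource> &track,
            const sp<ANativeWindow> &nativeWindow) {
        return OMXCodec::Create(
                omx,
                track->getFormat(),         // input format from the extractor track
                false,                      // createEncoder
                track,                      // source
                NULL,                       // matchComponentName: no preference
                OMXCodec::kHardwareCodecsOnly,
                nativeWindow);              // render decoded buffers here
    }

Per the flag documentation above, Create() returns NULL if no hardware codec can satisfy the request.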
diff --git a/include/media/stagefright/StagefrightMediaScanner.h b/include/media/stagefright/StagefrightMediaScanner.h
index 4437eee..108acb4 100644
--- a/include/media/stagefright/StagefrightMediaScanner.h
+++ b/include/media/stagefright/StagefrightMediaScanner.h
@@ -22,8 +22,6 @@
 
 namespace android {
 
-struct MediaMetadataRetriever;
-
 struct StagefrightMediaScanner : public MediaScanner {
     StagefrightMediaScanner();
     virtual ~StagefrightMediaScanner();
@@ -35,8 +33,6 @@
     virtual char *extractAlbumArt(int fd);
 
 private:
-    sp<MediaMetadataRetriever> mRetriever;
-
     StagefrightMediaScanner(const StagefrightMediaScanner &);
     StagefrightMediaScanner &operator=(const StagefrightMediaScanner &);
 };
diff --git a/include/media/stagefright/VideoRenderer.h b/include/media/stagefright/VideoRenderer.h
deleted file mode 100644
index f80b277..0000000
--- a/include/media/stagefright/VideoRenderer.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef VIDEO_RENDERER_H_
-
-#define VIDEO_RENDERER_H_
-
-#include <sys/types.h>
-
-namespace android {
-
-class VideoRenderer {
-public:
-    virtual ~VideoRenderer() {}
-
-    virtual void render(
-            const void *data, size_t size, void *platformPrivate) = 0;
-
-protected:
-    VideoRenderer() {}
-
-    VideoRenderer(const VideoRenderer &);
-    VideoRenderer &operator=(const VideoRenderer &);
-};
-
-}  // namespace android
-
-#endif  // VIDEO_RENDERER_H_
diff --git a/include/media/stagefright/VideoSourceDownSampler.h b/include/media/stagefright/VideoSourceDownSampler.h
new file mode 100644
index 0000000..439918c
--- /dev/null
+++ b/include/media/stagefright/VideoSourceDownSampler.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// VideoSourceDownSampler implements the MediaSource interface,
+// downsampling frames provided by a real video source.
+
+#ifndef VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#define VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#include <media/stagefright/MediaSource.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class IMemory;
+class MediaBuffer;
+class MetaData;
+
+class VideoSourceDownSampler : public MediaSource {
+public:
+    virtual ~VideoSourceDownSampler();
+
+    // Constructor:
+    // videoSource: The real video source which provides the original frames.
+    // width, height: The desired width, height. These should be less than or equal
+    // to those of the real video source. We then downsample the original frames to
+    // this size.
+    VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+        int32_t width, int32_t height);
+
+    // MediaSource interface
+    virtual status_t start(MetaData *params = NULL);
+
+    virtual status_t stop();
+
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+    virtual status_t pause();
+
+private:
+    // Reference to the real video source.
+    sp<MediaSource> mRealVideoSource;
+
+    // Size of frames to be provided by this source.
+    int32_t mWidth;
+    int32_t mHeight;
+
+    // Size of frames provided by the real source.
+    int32_t mRealSourceWidth;
+    int32_t mRealSourceHeight;
+
+    // Down sampling parameters.
+    int32_t mDownSampleOffsetX;
+    int32_t mDownSampleOffsetY;
+    int32_t mDownSampleSkipX;
+    int32_t mDownSampleSkipY;
+
+    // True if the real source's frames need to be downsampled to the desired video frame size.
+    bool mNeedDownSampling;
+
+    // Meta data. This is a copy of the real source's meta data except for the
+    // width and height parameters.
+    sp<MetaData> mMeta;
+
+    // Computes the offset, skip parameters for downsampling the original frame
+    // to the desired size.
+    void computeDownSamplingParameters();
+
+    // Downsamples the frame in sourceBuffer to size (mWidth x mHeight). A new
+    // buffer is created which stores the downsampled image.
+    void downSampleYUVImage(const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const;
+
+    // Disallow these.
+    VideoSourceDownSampler(const VideoSourceDownSampler &);
+    VideoSourceDownSampler &operator=(const VideoSourceDownSampler &);
+};
+
+}  // namespace android
+
+#endif  // VIDEO_SOURCE_DOWN_SAMPLER_H_
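
A minimal usage sketch for the new VideoSourceDownSampler declared above, assuming an already-created MediaSource (e.g. a camera source) passed in as realSource and the usual OK/status_t constants from utils/Errors.h; the sizes are arbitrary.

    #include <media/stagefright/MediaBuffer.h>
    #include <media/stagefright/MediaSource.h>
    #include <media/stagefright/VideoSourceDownSampler.h>

    using namespace android;

    // Wrap a 640x480 source so that readers see 320x240 frames instead.
    static void readDownSampledFrames(const sp<MediaSource> &realSource) {
        sp<MediaSource> scaled =
                new VideoSourceDownSampler(realSource, 320 /* width */, 240 /* height */);

        if (scaled->start() != OK) {
            return;  // the real source could not be started
        }

        MediaBuffer *buffer = NULL;
        // Each successful read() hands back a frame already downsampled to 320x240.
        while (scaled->read(&buffer) == OK) {
            // ... encode or display the frame here ...
            buffer->release();
            buffer = NULL;
        }

        scaled->stop();
    }
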
diff --git a/include/media/stagefright/YUVCanvas.h b/include/media/stagefright/YUVCanvas.h
new file mode 100644
index 0000000..ff70923
--- /dev/null
+++ b/include/media/stagefright/YUVCanvas.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// YUVCanvas holds a reference to a YUVImage on which it can do various
+// drawing operations. It provides various utility functions for filling,
+// cropping, etc.
+
+
+#ifndef YUV_CANVAS_H_
+
+#define YUV_CANVAS_H_
+
+#include <stdint.h>
+
+namespace android {
+
+class YUVImage;
+class Rect;
+
+class YUVCanvas {
+public:
+
+    // Constructor takes a reference to a yuvImage on which it can do
+    // various drawing operations.
+    YUVCanvas(YUVImage &yuvImage);
+    ~YUVCanvas();
+
+    // Fills the entire image with the given YUV values.
+    void FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+    // Fills the rectangular region given by "rect" with the given YUV values.
+    void FillYUVRectangle(const Rect& rect,
+            uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+    // Copies the region given by srcRect from srcImage into the
+    // canvas' target image (mYUVImage) starting at
+    // (destStartX, destStartY).
+    // Note that undefined behavior may occur if srcImage is the same as the
+    // canvas' target image.
+    void CopyImageRect(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage);
+
+    // Downsamples the srcImage into the canvas' target image (mYUVImage).
+    // The downsampling copies pixels from the source image starting at
+    // (srcOffsetX, srcOffsetY) to the target image, starting at (0, 0).
+    // For each X increment in the target image, skipX pixels are skipped
+    // in the source image.
+    // Similarly for each Y increment in the target image, skipY pixels
+    // are skipped in the source image.
+    void downsample(
+            int32_t srcOffsetX, int32_t srcOffsetY,
+            int32_t skipX, int32_t skipY,
+            const YUVImage &srcImage);
+
+private:
+    YUVImage& mYUVImage;
+
+    YUVCanvas(const YUVCanvas &);
+    YUVCanvas &operator=(const YUVCanvas &);
+};
+
+}  // namespace android
+
+#endif  // YUV_CANVAS_H_
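
A small illustrative sketch of the YUVCanvas API declared above. It assumes the Rect forward-declared here is the one from <ui/Rect.h> and uses the YUVImage class added in the companion header below.

    #include <media/stagefright/YUVCanvas.h>
    #include <media/stagefright/YUVImage.h>
    #include <ui/Rect.h>   // assumed home of the Rect forward-declared above

    using namespace android;

    static void fillCopyAndDownsample() {
        // 640x480 planar YUV420 source, memory allocated and owned by the image.
        YUVImage src(YUVImage::YUV420Planar, 640, 480);
        YUVCanvas srcCanvas(src);
        srcCanvas.FillYUV(128, 128, 128);   // mid-grey everywhere

        // 320x240 destination.
        YUVImage dst(YUVImage::YUV420Planar, 320, 240);
        YUVCanvas dstCanvas(dst);

        // Copy the source's top-left 160x120 region into dst at (0,0) ...
        dstCanvas.CopyImageRect(Rect(0, 0, 160, 120), 0, 0, src);

        // ... or halve the whole source: skipping one source pixel per target
        // increment in X and Y maps 640x480 onto 320x240.
        dstCanvas.downsample(0 /* srcOffsetX */, 0 /* srcOffsetY */,
                             1 /* skipX */, 1 /* skipY */, src);
    }
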
diff --git a/include/media/stagefright/YUVImage.h b/include/media/stagefright/YUVImage.h
new file mode 100644
index 0000000..4e98618
--- /dev/null
+++ b/include/media/stagefright/YUVImage.h
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// A container class to hold YUV data and provide various utilities,
+// e.g. to set/get pixel values.
+// Supported formats:
+//  - YUV420 Planar
+//  - YUV420 Semi Planar
+//
+//  Currently does not support variable strides.
+//
+//  Implementation: Two simple abstractions are done to simplify access
+//  to YUV channels for different formats:
+//  - initializeYUVPointers() sets up pointers (mYdata, mUdata, mVdata) to
+//  point to the right start locations of the different channel data depending
+//  on the format.
+//  - getOffsets() returns the correct offset for the different channels
+//  depending on the format.
+//  Location of any pixel's YUV channels can then be easily computed using these.
+//
+
+#ifndef YUV_IMAGE_H_
+
+#define YUV_IMAGE_H_
+
+#include <stdint.h>
+#include <cstring>
+
+namespace android {
+
+class Rect;
+
+class YUVImage {
+public:
+    // Supported YUV formats
+    enum YUVFormat {
+        YUV420Planar,
+        YUV420SemiPlanar
+    };
+
+    // Constructs an image with the given size, format. Also allocates and owns
+    // the required memory.
+    YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height);
+
+    // Constructs an image with the given size, format. The memory is provided
+    // by the caller and we don't own it.
+    YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer);
+
+    // Destructor to delete the memory if it owns it.
+    ~YUVImage();
+
+    // Returns the size of the buffer required to store the YUV data for the given
+    // format and geometry. Useful when the caller wants to allocate the requisite
+    // memory.
+    static size_t bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height);
+
+    int32_t width() const {return mWidth;}
+    int32_t height() const {return mHeight;}
+
+    // Returns true if the pixel (x, y) is in the range [0, width-1] x [0, height-1]
+    // and false otherwise.
+    bool validPixel(int32_t x, int32_t y) const;
+
+    // Get the pixel YUV value at pixel (x,y).
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    // Returns true if get was successful and false otherwise.
+    bool getPixelValue(int32_t x, int32_t y,
+            uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const;
+
+    // Set the pixel YUV value at pixel (x,y).
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    // Returns true if set was successful and false otherwise.
+    bool setPixelValue(int32_t x, int32_t y,
+            uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+    // Uses memcpy to copy entire rows of data at a time.
+    static void fastCopyRectangle420Planar(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage, YUVImage &destImage);
+
+    // Uses memcpy to copy entire rows of data at a time.
+    static void fastCopyRectangle420SemiPlanar(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage, YUVImage &destImage);
+
+    // Tries to use memcpy to copy entire rows of data.
+    // Returns false if fast copy is not possible for the passed image formats.
+    static bool fastCopyRectangle(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage, YUVImage &destImage);
+
+    // Convert the given YUV value to RGB.
+    void yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+        uint8_t *r, uint8_t *g, uint8_t *b) const;
+
+    // Write the image to a human readable PPM file.
+    // Returns true if write was successful and false otherwise.
+    bool writeToPPM(const char *filename) const;
+
+private:
+    // YUV Format of the image.
+    YUVFormat mYUVFormat;
+
+    int32_t mWidth;
+    int32_t mHeight;
+
+    // Pointer to the memory buffer.
+    uint8_t *mBuffer;
+
+    // Boolean telling whether we own the memory buffer.
+    bool mOwnBuffer;
+
+    // Pointer to start of the Y data plane.
+    uint8_t *mYdata;
+
+    // Pointer to start of the U data plane. Note that in case of interleaved formats like
+    // YUV420 semiplanar, mUdata points to the start of the U data in the UV plane.
+    uint8_t *mUdata;
+
+    // Pointer to start of the V data plane. Note that in case of interleaved formats like
+    // YUV420 semiplanar, mVdata points to the start of the V data in the UV plane.
+    uint8_t *mVdata;
+
+    // Initialize the pointers mYdata, mUdata, mVdata to point to the right locations for
+    // the given format and geometry.
+    // Returns true if initialization was successful and false otherwise.
+    bool initializeYUVPointers();
+
+    // For the given pixel location, this returns the offset of the location of y, u and v
+    // data from the corresponding base pointers -- mYdata, mUdata, mVdata.
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    // Returns true if getting offsets was successful and false otherwise.
+    bool getOffsets(int32_t x, int32_t y,
+        int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const;
+
+    // Returns the offset increments incurred in going from one data row to the next data row
+    // for the YUV channels. Note that this corresponds to data rows and not pixel rows.
+    // E.g. depending on formats, U/V channels may have only one data row corresponding
+    // to two pixel rows.
+    bool getOffsetIncrementsPerDataRow(
+        int32_t *yDataOffsetIncrement,
+        int32_t *uDataOffsetIncrement,
+        int32_t *vDataOffsetIncrement) const;
+
+    // Given the offset return the address of the corresponding channel's data.
+    uint8_t* getYAddress(int32_t offset) const;
+    uint8_t* getUAddress(int32_t offset) const;
+    uint8_t* getVAddress(int32_t offset) const;
+
+    // Given the pixel location, returns the address of the corresponding channel's data.
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    bool getYUVAddresses(int32_t x, int32_t y,
+        uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const;
+
+    // Disallow implicit casting and copying.
+    YUVImage(const YUVImage &);
+    YUVImage &operator=(const YUVImage &);
+};
+
+}  // namespace android
+
+#endif  // YUV_IMAGE_H_
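
An illustrative round trip through the YUVImage accessors declared above; the geometry and the output path are arbitrary.

    #include <media/stagefright/YUVImage.h>

    using namespace android;

    static void pixelRoundTrip() {
        const int32_t w = 176, h = 144;

        // bufferSize() reports how many bytes this format and geometry need,
        // so the caller can provide (and keep ownership of) the backing store.
        size_t size = YUVImage::bufferSize(YUVImage::YUV420SemiPlanar, w, h);
        uint8_t *storage = new uint8_t[size];

        // This constructor does not take ownership of "storage".
        YUVImage img(YUVImage::YUV420SemiPlanar, w, h, storage);

        // Write one pixel and read it back; both calls report success as bool.
        if (img.setPixelValue(10, 20, 235 /* Y */, 128 /* U */, 128 /* V */)) {
            uint8_t y, u, v;
            if (img.getPixelValue(10, 20, &y, &u, &v)) {
                // y == 235, u == 128, v == 128 at this point.
            }
        }

        img.writeToPPM("/data/misc/frame.ppm");   // debug dump, path illustrative

        delete[] storage;
    }
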
diff --git a/include/media/stagefright/foundation/ABitReader.h b/include/media/stagefright/foundation/ABitReader.h
index 5135211..5510b12 100644
--- a/include/media/stagefright/foundation/ABitReader.h
+++ b/include/media/stagefright/foundation/ABitReader.h
@@ -31,6 +31,8 @@
     uint32_t getBits(size_t n);
     void skipBits(size_t n);
 
+    void putBits(uint32_t x, size_t n);
+
     size_t numBitsLeft() const;
 
     const uint8_t *data() const;
@@ -43,7 +45,6 @@
     size_t mNumBitsLeft;
 
     void fillReservoir();
-    void putBits(uint32_t x, size_t n);
 
     DISALLOW_EVIL_CONSTRUCTORS(ABitReader);
 };
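
With putBits() promoted to the public interface above, callers can push bits back after reading them. A hedged sketch, assuming the usual ABitReader(const uint8_t *data, size_t size) constructor and that putBits(x, n) returns the n bits to the front of the stream:

    #include <media/stagefright/foundation/ABitReader.h>

    using namespace android;

    // Peek at the next 8 bits without consuming them.
    static uint32_t peek8(ABitReader *br) {
        uint32_t value = br->getBits(8);
        br->putBits(value, 8);   // now callable from outside the class
        return value;
    }

    static void parse(const uint8_t *data, size_t size) {
        ABitReader br(data, size);
        if (peek8(&br) == 0x47) {   // e.g. an MPEG-TS sync byte
            br.skipBits(8);
            // ... continue parsing; numBitsLeft() says how much remains ...
        }
    }
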
diff --git a/include/media/stagefright/foundation/ADebug.h b/include/media/stagefright/foundation/ADebug.h
index 69021d8..450dcfe 100644
--- a/include/media/stagefright/foundation/ADebug.h
+++ b/include/media/stagefright/foundation/ADebug.h
@@ -32,6 +32,7 @@
 #define CHECK(condition)                                \
     LOG_ALWAYS_FATAL_IF(                                \
             !(condition),                               \
+            "%s",                                       \
             __FILE__ ":" LITERAL_TO_STRING(__LINE__)    \
             " CHECK(" #condition ") failed.")
 
@@ -58,10 +59,12 @@
     do {                                                                \
         AString ___res = Compare_##suffix(x, y);                        \
         if (!___res.empty()) {                                          \
-            LOG_ALWAYS_FATAL(                                           \
-                    __FILE__ ":" LITERAL_TO_STRING(__LINE__)            \
-                    " CHECK_" #suffix "( " #x "," #y ") failed: %s",    \
-                    ___res.c_str());                                    \
+            AString ___full =                                           \
+                __FILE__ ":" LITERAL_TO_STRING(__LINE__)                \
+                    " CHECK_" #suffix "( " #x "," #y ") failed: ";      \
+            ___full.append(___res);                                     \
+                                                                        \
+            LOG_ALWAYS_FATAL("%s", ___full.c_str());                    \
         }                                                               \
     } while (false)
 
@@ -72,7 +75,10 @@
 #define CHECK_GE(x,y)   CHECK_OP(x,y,GE,>=)
 #define CHECK_GT(x,y)   CHECK_OP(x,y,GT,>)
 
-#define TRESPASS()      LOG_ALWAYS_FATAL("Should not be here.")
+#define TRESPASS() \
+        LOG_ALWAYS_FATAL(                                       \
+            __FILE__ ":" LITERAL_TO_STRING(__LINE__)            \
+                " Should not be here.");
 
 }  // namespace android
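
The added "%s" and the AString indirection above pass the file/line/condition text as an argument instead of as the format string, so a stray '%' in a stringified argument can no longer be misread as a printf conversion. Illustrative use of the macros (values are arbitrary):

    #include <media/stagefright/foundation/ADebug.h>

    using namespace android;

    static size_t checkedIndex(size_t index, size_t size) {
        // On failure this now logs through LOG_ALWAYS_FATAL("%s", ...), with the
        // message (including the stringified values) built into an AString first.
        CHECK_GT(size, index);
        return index;
    }

    static void handle(int value) {
        switch (value) {
            case 0:
            case 1:
                break;
            default:
                TRESPASS();   // now also reports __FILE__ ":" __LINE__
        }
    }
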
 
diff --git a/include/media/stagefright/foundation/AHierarchicalStateMachine.h b/include/media/stagefright/foundation/AHierarchicalStateMachine.h
new file mode 100644
index 0000000..b5786fb
--- /dev/null
+++ b/include/media/stagefright/foundation/AHierarchicalStateMachine.h
@@ -0,0 +1,49 @@
+#ifndef A_HIERARCHICAL_STATE_MACHINE_H_
+
+#define A_HIERARCHICAL_STATE_MACHINE_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct AState : public RefBase {
+    AState(const sp<AState> &parentState = NULL);
+
+    sp<AState> parentState();
+
+protected:
+    virtual ~AState();
+
+    virtual void stateEntered();
+    virtual void stateExited();
+
+    virtual bool onMessageReceived(const sp<AMessage> &msg) = 0;
+
+private:
+    friend struct AHierarchicalStateMachine;
+
+    sp<AState> mParentState;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AState);
+};
+
+struct AHierarchicalStateMachine : public AHandler {
+    AHierarchicalStateMachine();
+
+protected:
+    virtual ~AHierarchicalStateMachine();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    // Only to be called in response to a message.
+    void changeState(const sp<AState> &state);
+
+private:
+    sp<AState> mState;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AHierarchicalStateMachine);
+};
+
+}  // namespace android
+
+#endif  // A_HIERARCHICAL_STATE_MACHINE_H_
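
An illustrative sketch of subclassing the new classes: one parent state and one child state, where a message the child does not handle is assumed to be offered to its parent. The state names and 'what' codes are made up, and a real user would also register the handler with an ALooper.

    #include <media/stagefright/foundation/AHierarchicalStateMachine.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    struct MyMachine : public AHierarchicalStateMachine {
        enum { kWhatStart = 'strt', kWhatFlush = 'flsh' };

        MyMachine();

    private:
        struct BaseState : public AState {
            BaseState(MyMachine *machine, const sp<AState> &parent = NULL)
                : AState(parent), mMachine(machine) {}

            virtual bool onMessageReceived(const sp<AMessage> &msg) {
                if (msg->what() == kWhatStart) {
                    // changeState() is documented above as only being called
                    // in response to a message, as it is here.
                    mMachine->changeState(mMachine->mRunningState);
                    return true;
                }
                return false;   // not handled at this level
            }

            MyMachine *mMachine;
        };

        struct RunningState : public BaseState {
            RunningState(MyMachine *machine, const sp<AState> &parent)
                : BaseState(machine, parent) {}

            virtual bool onMessageReceived(const sp<AMessage> &msg) {
                if (msg->what() == kWhatFlush) {
                    // ... flush, then drop back to the base state ...
                    mMachine->changeState(mMachine->mBaseState);
                    return true;
                }
                return false;   // assumed to fall through to BaseState, the parent
            }
        };

        sp<AState> mBaseState;
        sp<AState> mRunningState;
    };

    MyMachine::MyMachine()
        : mBaseState(new BaseState(this)),
          mRunningState(new RunningState(this, mBaseState)) {
    }
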
diff --git a/include/media/stagefright/foundation/AMessage.h b/include/media/stagefright/foundation/AMessage.h
index c674cba..72dc730 100644
--- a/include/media/stagefright/foundation/AMessage.h
+++ b/include/media/stagefright/foundation/AMessage.h
@@ -26,16 +26,22 @@
 namespace android {
 
 struct AString;
+struct Parcel;
 
 struct AMessage : public RefBase {
     AMessage(uint32_t what = 0, ALooper::handler_id target = 0);
 
+    static sp<AMessage> FromParcel(const Parcel &parcel);
+    void writeToParcel(Parcel *parcel) const;
+
     void setWhat(uint32_t what);
     uint32_t what() const;
 
     void setTarget(ALooper::handler_id target);
     ALooper::handler_id target() const;
 
+    void clear();
+
     void setInt32(const char *name, int32_t value);
     void setInt64(const char *name, int64_t value);
     void setSize(const char *name, size_t value);
@@ -46,6 +52,10 @@
     void setObject(const char *name, const sp<RefBase> &obj);
     void setMessage(const char *name, const sp<AMessage> &obj);
 
+    void setRect(
+            const char *name,
+            int32_t left, int32_t top, int32_t right, int32_t bottom);
+
     bool findInt32(const char *name, int32_t *value) const;
     bool findInt64(const char *name, int64_t *value) const;
     bool findSize(const char *name, size_t *value) const;
@@ -56,8 +66,15 @@
     bool findObject(const char *name, sp<RefBase> *obj) const;
     bool findMessage(const char *name, sp<AMessage> *obj) const;
 
+    bool findRect(
+            const char *name,
+            int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const;
+
     void post(int64_t delayUs = 0);
 
+    // Performs a deep copy of "this"; contained messages are in turn "dup'ed".
+    // Warning: RefBase items, i.e. "objects", are _not_ copied but only have
+    // their refcount incremented.
     sp<AMessage> dup() const;
 
     AString debugString(int32_t indent = 0) const;
@@ -76,11 +93,16 @@
         kTypeString,
         kTypeObject,
         kTypeMessage,
+        kTypeRect,
     };
 
     uint32_t mWhat;
     ALooper::handler_id mTarget;
 
+    struct Rect {
+        int32_t mLeft, mTop, mRight, mBottom;
+    };
+
     struct Item {
         union {
             int32_t int32Value;
@@ -91,6 +113,7 @@
             void *ptrValue;
             RefBase *refValue;
             AString *stringValue;
+            Rect rectValue;
         } u;
         const char *mName;
         Type mType;
@@ -102,7 +125,6 @@
     Item mItems[kMaxNumItems];
     size_t mNumItems;
 
-    void clear();
     Item *allocateItem(const char *name);
     void freeItem(Item *item);
     const Item *findItem(const char *name, Type type) const;
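
A hedged sketch exercising the additions above: setRect()/findRect() plus a parcel round trip. The 'what' codes, field names and values are arbitrary, and writeToParcel() is assumed to handle at least the plain integer items used here.

    #include <binder/Parcel.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    static void rectAndParcelRoundTrip() {
        sp<AMessage> msg = new AMessage('conf');

        // New rectangle-valued item.
        msg->setRect("crop-rect", 0 /* left */, 0 /* top */,
                     320 /* right */, 240 /* bottom */);

        int32_t left, top, right, bottom;
        if (msg->findRect("crop-rect", &left, &top, &right, &bottom)) {
            // left == 0, top == 0, right == 320, bottom == 240
        }

        // Parcel round trip of a message carrying plain data items.
        sp<AMessage> plain = new AMessage('info');
        plain->setInt32("width", 320);

        Parcel parcel;
        plain->writeToParcel(&parcel);
        parcel.setDataPosition(0);
        sp<AMessage> copy = AMessage::FromParcel(parcel);

        int32_t width;
        if (copy->findInt32("width", &width)) {
            // width == 320
        }

        // clear(), now public, is assumed to discard a message's items.
        copy->clear();
    }
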
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index c6990bf..4610135 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -31,6 +31,7 @@
 #define MAX_STARTUP_TIMEOUT_MS  3000    // Longer timeout period at startup to cope with A2DP init time
 #define MAX_RUN_TIMEOUT_MS      1000
 #define WAIT_PERIOD_MS          10
+#define RESTORE_TIMEOUT_MS      5000    // Maximum waiting time for a track to be restored
 
 #define CBLK_UNDERRUN_MSK       0x0001
 #define CBLK_UNDERRUN_ON        0x0001  // underrun (out) or overrun (in) indication
@@ -47,6 +48,12 @@
 #define CBLK_DISABLED_MSK       0x0010
 #define CBLK_DISABLED_ON        0x0010  // track disabled by AudioFlinger due to underrun:
 #define CBLK_DISABLED_OFF       0x0000  // must be re-started
+#define CBLK_RESTORING_MSK      0x0020
+#define CBLK_RESTORING_ON       0x0020  // track is being restored after invalidation
+#define CBLK_RESTORING_OFF      0x0000  // by AudioFlinger
+#define CBLK_RESTORED_MSK       0x0040
+#define CBLK_RESTORED_ON        0x0040  // track has been restored after invalidation
+#define CBLK_RESTORED_OFF       0x0000  // by AudioFlinger
 
 struct audio_track_cblk_t
 {
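
Illustrative only: how the new restore flags and RESTORE_TIMEOUT_MS are meant to combine with the cblk's mutex and condition, as the AudioTrack/AudioRecord changes later in this patch do. One thread sets CBLK_RESTORING_ON, recreates the track, then sets CBLK_RESTORED_ON and broadcasts; other threads wait, bounded by the timeout. The parameters below stand in for the cblk members.

    #include <private/media/AudioTrackShared.h>
    #include <utils/Errors.h>
    #include <utils/Timers.h>
    #include <utils/threads.h>

    using namespace android;

    // Waiter side of the restore handshake (sketch).
    static status_t waitForRestore(volatile uint32_t &flags, Mutex &lock, Condition &cv) {
        Mutex::Autolock _l(lock);
        while (!(flags & CBLK_RESTORED_MSK)) {
            status_t err = cv.waitRelative(lock, milliseconds(RESTORE_TIMEOUT_MS));
            if (err != NO_ERROR) {
                return err;   // timed out or interrupted: give up on this track
            }
        }
        return NO_ERROR;
    }
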
diff --git a/include/private/surfaceflinger/SharedBufferStack.h b/include/private/surfaceflinger/SharedBufferStack.h
index 4ae3cdf..717f837 100644
--- a/include/private/surfaceflinger/SharedBufferStack.h
+++ b/include/private/surfaceflinger/SharedBufferStack.h
@@ -65,7 +65,7 @@
     // When changing these values, the COMPILE_TIME_ASSERT at the end of this
     // file need to be updated.
     static const unsigned int NUM_LAYERS_MAX  = 31;
-    static const unsigned int NUM_BUFFER_MAX  = 16;
+    static const unsigned int NUM_BUFFER_MAX  = 32;
     static const unsigned int NUM_BUFFER_MIN  = 2;
     static const unsigned int NUM_DISPLAY_MAX = 4;
 
@@ -123,7 +123,7 @@
 
 // ----------------------------------------------------------------------------
 
-// 32 KB max
+// 64 KB max
 class SharedClient
 {
 public:
@@ -284,6 +284,8 @@
     uint32_t getTransform(int buffer) const;
 
     status_t resize(int newNumBuffers);
+    status_t grow(int newNumBuffers);
+    status_t shrink(int newNumBuffers);
 
     SharedBufferStack::Statistics getStats() const;
     
@@ -345,6 +347,13 @@
     int mNumBuffers;
     BufferList mBufferList;
 
+    struct BuffersAvailableCondition : public ConditionBase {
+        int mNumBuffers;
+        inline BuffersAvailableCondition(SharedBufferServer* sbs,
+                int numBuffers);
+        inline bool operator()() const;
+        inline const char* name() const { return "BuffersAvailableCondition"; }
+    };
 
     struct RetireUpdate : public UpdateBase {
         const int numBuffers;
@@ -385,7 +394,7 @@
 
 // ---------------------------------------------------------------------------
 
-COMPILE_TIME_ASSERT(sizeof(SharedClient) <= 32768)
+COMPILE_TIME_ASSERT(sizeof(SharedClient) <= 65536)
 COMPILE_TIME_ASSERT(sizeof(surface_flinger_cblk_t) <= 4096)
 
 // ---------------------------------------------------------------------------
diff --git a/include/private/ui/sw_gralloc_handle.h b/include/private/ui/sw_gralloc_handle.h
deleted file mode 100644
index b3d333e..0000000
--- a/include/private/ui/sw_gralloc_handle.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_UI_PRIVATE_SW_GRALLOC_HANDLE_H
-#define ANDROID_UI_PRIVATE_SW_GRALLOC_HANDLE_H
-
-#include <stdint.h>
-#include <limits.h>
-#include <sys/cdefs.h>
-#include <hardware/gralloc.h>
-#include <errno.h>
-
-#include <cutils/native_handle.h>
-
-namespace android {
-
-/*****************************************************************************/
-
-struct sw_gralloc_handle_t : public native_handle 
-{
-    // file-descriptors
-    int     fd;
-    // ints
-    int     magic;
-    int     size;
-    int     base;
-    int     prot;
-    int     pid;
-
-    static const int sNumInts = 5;
-    static const int sNumFds = 1;
-    static const int sMagic = '_sgh';
-
-    sw_gralloc_handle_t() :
-        fd(-1), magic(sMagic), size(0), base(0), prot(0), pid(getpid())
-    {
-        version = sizeof(native_handle);
-        numInts = sNumInts;
-        numFds = sNumFds;
-    }
-    ~sw_gralloc_handle_t() {
-        magic = 0;
-    }
-
-    static int validate(const native_handle* h) {
-        const sw_gralloc_handle_t* hnd = (const sw_gralloc_handle_t*)h;
-        if (!h || h->version != sizeof(native_handle) ||
-                h->numInts != sNumInts || h->numFds != sNumFds ||
-                hnd->magic != sMagic) 
-        {
-            return -EINVAL;
-        }
-        return 0;
-    }
-
-    static status_t alloc(uint32_t w, uint32_t h, int format,
-            int usage, buffer_handle_t* handle, int32_t* stride);
-    static status_t free(sw_gralloc_handle_t* hnd);
-    static status_t registerBuffer(sw_gralloc_handle_t* hnd);
-    static status_t unregisterBuffer(sw_gralloc_handle_t* hnd);
-    static status_t lock(sw_gralloc_handle_t* hnd, int usage,
-            int l, int t, int w, int h, void** vaddr);
-    static status_t unlock(sw_gralloc_handle_t* hnd);
-};
-
-/*****************************************************************************/
-
-}; // namespace android
-
-#endif /* ANDROID_UI_PRIVATE_SW_GRALLOC_HANDLE_H */
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index ebe3302..0b061db 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -53,6 +53,11 @@
 namespace android {
 namespace {
 
+// Flag to allow a one time init of global memory, only happens on first call ever
+int LvmInitFlag = LVM_FALSE;
+SessionContext GlobalSessionMemory[LVM_MAX_SESSIONS];
+int SessionIndex[LVM_MAX_SESSIONS];
+
 /* local functions */
 #define CHECK_ARG(cond) {                     \
     if (!(cond)) {                            \
@@ -61,11 +66,6 @@
     }                                         \
 }
 
-// Flag to allow a one time init of global memory, only happens on first call ever
-int LvmInitFlag = LVM_FALSE;
-SessionContext GlobalSessionMemory[LVM_MAX_SESSIONS];
-
-int SessionIndex[LVM_MAX_SESSIONS];
 
 // NXP SW BassBoost UUID
 const effect_descriptor_t gBassBoostDescriptor = {
@@ -2588,9 +2588,11 @@
             pContext->pBundledContext->SamplesToExitCountBb -= outBuffer->frameCount * 2; // STEREO
             //LOGV("\tEffect_process: Waiting to turn off BASS_BOOST, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountBb);
-        } else {
+        }
+        if(pContext->pBundledContext->SamplesToExitCountBb <= 0) {
             status = -ENODATA;
             pContext->pBundledContext->NumberEffectsEnabled--;
+            LOGV("\tEffect_process() this is the last frame for LVM_BASS_BOOST");
         }
     }
     if ((pContext->pBundledContext->bVolumeEnabled == LVM_FALSE)&&
@@ -2606,9 +2608,11 @@
             pContext->pBundledContext->SamplesToExitCountEq -= outBuffer->frameCount * 2; // STEREO
             //LOGV("\tEffect_process: Waiting to turn off EQUALIZER, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountEq);
-        } else {
+        }
+        if(pContext->pBundledContext->SamplesToExitCountEq <= 0) {
             status = -ENODATA;
             pContext->pBundledContext->NumberEffectsEnabled--;
+            LOGV("\tEffect_process() this is the last frame for LVM_EQUALIZER");
         }
     }
     if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_FALSE)&&
@@ -2618,9 +2622,11 @@
             pContext->pBundledContext->SamplesToExitCountVirt -= outBuffer->frameCount * 2;// STEREO
             //LOGV("\tEffect_process: Waiting for to turn off VIRTUALIZER, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountVirt);
-        } else {
+        }
+        if(pContext->pBundledContext->SamplesToExitCountVirt <= 0) {
             status = -ENODATA;
             pContext->pBundledContext->NumberEffectsEnabled--;
+            LOGV("\tEffect_process() this is the last frame for LVM_VIRTUALIZER");
         }
     }
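
The hunks above replace an else branch with a separate '<= 0' test, so -ENODATA and the NumberEffectsEnabled decrement happen on the same process() call that exhausts the countdown rather than one call later. A reduced sketch of the corrected pattern, with stand-in names for the context fields:

    #include <errno.h>

    // One effect's shutdown countdown, as restructured above.
    static int drainCountdown(int &samplesToExitCount, int frameCount, int &numberEffectsEnabled) {
        int status = 0;
        if (samplesToExitCount > 0) {
            samplesToExitCount -= frameCount * 2;   // STEREO
        }
        if (samplesToExitCount <= 0) {              // no longer in an else branch
            status = -ENODATA;                      // last frame for this effect
            numberEffectsEnabled--;
        }
        return status;
    }
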
 
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 26c5aca..9097e20 100755
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -166,7 +166,7 @@
     REVERB_VOLUME_RAMP,
 };
 
-#define REVERB_DEFAULT_PRESET REVERB_PRESET_MEDIUMROOM
+#define REVERB_DEFAULT_PRESET REVERB_PRESET_NONE
 
 
 #define REVERB_SEND_LEVEL   (0x0C00) // 0.75 in 4.12 format
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
index 48b45ff..e6ff654 100644
--- a/media/libeffects/visualizer/Android.mk
+++ b/media/libeffects/visualizer/Android.mk
@@ -27,4 +27,4 @@
 
 LOCAL_PRELINK_MODULE := false
 
-include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 2e5cbe3..fd4c6c6 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -15,6 +15,7 @@
     IMediaRecorderClient.cpp \
     IMediaPlayer.cpp \
     IMediaRecorder.cpp \
+    IStreamSource.cpp \
     Metadata.cpp \
     mediarecorder.cpp \
     IMediaMetadataRetriever.cpp \
@@ -35,7 +36,9 @@
     fixedfft.cpp.arm
 
 LOCAL_SHARED_LIBRARIES := \
-	libui libcutils libutils libbinder libsonivox libicuuc libexpat libsurfaceflinger_client libcamera_client
+	libui libcutils libutils libbinder libsonivox libicuuc libexpat \
+        libsurfaceflinger_client libcamera_client libstagefright_foundation \
+        libgui
 
 LOCAL_MODULE:= libmedia
 
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index 88b8c86..aadeba5 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -27,7 +27,6 @@
 #include <media/AudioEffect.h>
 
 #include <utils/Log.h>
-#include <cutils/atomic.h>
 #include <binder/IPCThreadState.h>
 
 
@@ -207,18 +206,22 @@
         return INVALID_OPERATION;
     }
 
-    if (enabled) {
-        LOGV("enable %p", this);
-        if (android_atomic_or(1, &mEnabled) == 0) {
-           return mIEffect->enable();
+    status_t status = NO_ERROR;
+
+    AutoMutex lock(mLock);
+    if (enabled != mEnabled) {
+        if (enabled) {
+            LOGV("enable %p", this);
+            status = mIEffect->enable();
+        } else {
+            LOGV("disable %p", this);
+            status = mIEffect->disable();
         }
-    } else {
-        LOGV("disable %p", this);
-        if (android_atomic_and(~1, &mEnabled) == 1) {
-           return mIEffect->disable();
+        if (status == NO_ERROR) {
+            mEnabled = enabled;
         }
     }
-    return NO_ERROR;
+    return status;
 }
 
 status_t AudioEffect::command(uint32_t cmdCode,
@@ -232,26 +235,26 @@
         return INVALID_OPERATION;
     }
 
-    if ((cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) &&
-            (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL)) {
-        return BAD_VALUE;
+    if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
+        if (mEnabled == (cmdCode == EFFECT_CMD_ENABLE)) {
+            return NO_ERROR;
+        }
+        if (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL) {
+            return BAD_VALUE;
+        }
+        mLock.lock();
     }
 
     status_t status = mIEffect->command(cmdCode, cmdSize, cmdData, replySize, replyData);
-    if (status != NO_ERROR) {
-        return status;
-    }
 
     if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
-        status = *(status_t *)replyData;
-        if (status != NO_ERROR) {
-            return status;
+        if (status == NO_ERROR) {
+            status = *(status_t *)replyData;
         }
-        if (cmdCode == EFFECT_CMD_ENABLE) {
-            android_atomic_or(1, &mEnabled);
-        } else {
-            android_atomic_and(~1, &mEnabled);
+        if (status == NO_ERROR) {
+            mEnabled = (cmdCode == EFFECT_CMD_ENABLE);
         }
+        mLock.unlock();
     }
 
     return status;
@@ -370,11 +373,7 @@
 {
     LOGV("enableStatusChanged %p enabled %d mCbf %p", this, enabled, mCbf);
     if (mStatus == ALREADY_EXISTS) {
-        if (enabled) {
-            android_atomic_or(1, &mEnabled);
-        } else {
-            android_atomic_and(~1, &mEnabled);
-        }
+        mEnabled = enabled;
         if (mCbf) {
             mCbf(EVENT_ENABLE_STATUS_CHANGED, mUserData, &enabled);
         }
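
With the change above, the enable/disable state is tracked in mEnabled under an internal mutex instead of atomics, and calling setEnabled() with the current value is a no-op. An illustrative call sequence; the AudioEffect instance is assumed to be created and initialized elsewhere:

    #include <media/AudioEffect.h>

    using namespace android;

    static void toggle(AudioEffect &effect) {
        effect.setEnabled(true);    // forwards to mIEffect->enable()
        effect.setEnabled(true);    // no-op: state unchanged, returns NO_ERROR
        effect.setEnabled(false);   // forwards to mIEffect->disable()
    }
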
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index a6c515c..a18bedb 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -35,7 +35,6 @@
 #include <binder/Parcel.h>
 #include <binder/IPCThreadState.h>
 #include <utils/Timers.h>
-#include <cutils/atomic.h>
 
 #define LIKELY( exp )       (__builtin_expect( (exp) != 0, true  ))
 #define UNLIKELY( exp )     (__builtin_expect( (exp) != 0, false ))
@@ -129,6 +128,9 @@
 {
 
     LOGV("set(): sampleRate %d, channels %d, frameCount %d",sampleRate, channels, frameCount);
+
+    AutoMutex lock(mLock);
+
     if (mAudioRecord != 0) {
         return INVALID_OPERATION;
     }
@@ -184,7 +186,7 @@
     mSessionId = sessionId;
 
     // create the IAudioRecord
-    status = openRecord(sampleRate, format, channelCount,
+    status = openRecord_l(sampleRate, format, channelCount,
                         frameCount, flags, input);
     if (status != NO_ERROR) {
         return status;
@@ -282,28 +284,39 @@
         t->mLock.lock();
      }
 
-    if (android_atomic_or(1, &mActive) == 0) {
-        ret = mAudioRecord->start();
-        if (ret == DEAD_OBJECT) {
-            LOGV("start() dead IAudioRecord: creating a new one");
-            ret = openRecord(mCblk->sampleRate, mFormat, mChannelCount,
-                    mFrameCount, mFlags, getInput());
-            if (ret == NO_ERROR) {
-                ret = mAudioRecord->start();
+    AutoMutex lock(mLock);
+    // acquire a strong reference on the IAudioRecord and IMemory so that they cannot be destroyed
+    // while we are accessing the cblk
+    sp <IAudioRecord> audioRecord = mAudioRecord;
+    sp <IMemory> iMem = mCblkMemory;
+    audio_track_cblk_t* cblk = mCblk;
+    if (mActive == 0) {
+        mActive = 1;
+
+        cblk->lock.lock();
+        if (!(cblk->flags & CBLK_INVALID_MSK)) {
+            cblk->lock.unlock();
+            ret = mAudioRecord->start();
+            cblk->lock.lock();
+            if (ret == DEAD_OBJECT) {
+                cblk->flags |= CBLK_INVALID_MSK;
             }
         }
+        if (cblk->flags & CBLK_INVALID_MSK) {
+            ret = restoreRecord_l(cblk);
+        }
+        cblk->lock.unlock();
         if (ret == NO_ERROR) {
-            mNewPosition = mCblk->user + mUpdatePeriod;
-            mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS;
-            mCblk->waitTimeMs = 0;
+            mNewPosition = cblk->user + mUpdatePeriod;
+            cblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS;
+            cblk->waitTimeMs = 0;
             if (t != 0) {
                t->run("ClientRecordThread", THREAD_PRIORITY_AUDIO_CLIENT);
             } else {
                 setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT);
             }
         } else {
-            LOGV("start() failed");
-            android_atomic_and(~1, &mActive);
+            mActive = 0;
         }
     }
 
@@ -322,9 +335,11 @@
 
     if (t != 0) {
         t->mLock.lock();
-     }
+    }
 
-    if (android_atomic_and(~1, &mActive) == 1) {
+    AutoMutex lock(mLock);
+    if (mActive == 1) {
+        mActive = 0;
         mCblk->cv.signal();
         mAudioRecord->stop();
         // the record head position will reset to 0, so if a marker is set, we need
@@ -351,6 +366,7 @@
 
 uint32_t AudioRecord::getSampleRate()
 {
+    AutoMutex lock(mLock);
     return mCblk->sampleRate;
 }
 
@@ -398,6 +414,7 @@
 {
     if (position == 0) return BAD_VALUE;
 
+    AutoMutex lock(mLock);
     *position = mCblk->user;
 
     return NO_ERROR;
@@ -413,7 +430,8 @@
 
 // -------------------------------------------------------------------------
 
-status_t AudioRecord::openRecord(
+// must be called with mLock held
+status_t AudioRecord::openRecord_l(
         uint32_t sampleRate,
         int format,
         int channelCount,
@@ -457,6 +475,7 @@
 
 status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
 {
+    AutoMutex lock(mLock);
     int active;
     status_t result;
     audio_track_cblk_t* cblk = mCblk;
@@ -481,7 +500,19 @@
                 cblk->lock.unlock();
                 return WOULD_BLOCK;
             }
-            result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs));
+            if (!(cblk->flags & CBLK_INVALID_MSK)) {
+                mLock.unlock();
+                result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs));
+                cblk->lock.unlock();
+                mLock.lock();
+                if (mActive == 0) {
+                    return status_t(STOPPED);
+                }
+                cblk->lock.lock();
+            }
+            if (cblk->flags & CBLK_INVALID_MSK) {
+                goto create_new_record;
+            }
             if (__builtin_expect(result!=NO_ERROR, false)) {
                 cblk->waitTimeMs += waitTimeMs;
                 if (cblk->waitTimeMs >= cblk->bufferTimeoutMs) {
@@ -489,16 +520,17 @@
                             "user=%08x, server=%08x", cblk->user, cblk->server);
                     cblk->lock.unlock();
                     result = mAudioRecord->start();
-                    if (result == DEAD_OBJECT) {
-                        LOGW("obtainBuffer() dead IAudioRecord: creating a new one");
-                        result = openRecord(cblk->sampleRate, mFormat, mChannelCount,
-                                            mFrameCount, mFlags, getInput());
-                        if (result == NO_ERROR) {
-                            cblk = mCblk;
-                            mAudioRecord->start();
-                        }
-                    }
                     cblk->lock.lock();
+                    if (result == DEAD_OBJECT) {
+                        cblk->flags |= CBLK_INVALID_MSK;
+create_new_record:
+                        result = AudioRecord::restoreRecord_l(cblk);
+                    }
+                    if (result != NO_ERROR) {
+                        LOGW("obtainBuffer create Track error %d", result);
+                        cblk->lock.unlock();
+                        return result;
+                    }
                     cblk->waitTimeMs = 0;
                 }
                 if (--waitCount == 0) {
@@ -538,12 +570,19 @@
 
 void AudioRecord::releaseBuffer(Buffer* audioBuffer)
 {
-    audio_track_cblk_t* cblk = mCblk;
-    cblk->stepUser(audioBuffer->frameCount);
+    AutoMutex lock(mLock);
+    mCblk->stepUser(audioBuffer->frameCount);
 }
 
 audio_io_handle_t AudioRecord::getInput()
 {
+    AutoMutex lock(mLock);
+    return getInput_l();
+}
+
+// must be called with mLock held
+audio_io_handle_t AudioRecord::getInput_l()
+{
     mInput = AudioSystem::getInput(mInputSource,
                                 mCblk->sampleRate,
                                 mFormat, mChannels,
@@ -571,6 +610,12 @@
         return BAD_VALUE;
     }
 
+    mLock.lock();
+    // acquire a strong reference on the IAudioRecord and IMemory so that they cannot be destroyed
+    // while we are accessing the cblk
+    sp <IAudioRecord> audioRecord = mAudioRecord;
+    sp <IMemory> iMem = mCblkMemory;
+    mLock.unlock();
 
     do {
 
@@ -611,9 +656,17 @@
     uint32_t frames = mRemainingFrames;
     size_t readSize;
 
+    mLock.lock();
+    // acquire a strong reference on the IAudioRecord and IMemory so that they cannot be destroyed
+    // while we are accessing the cblk
+    sp <IAudioRecord> audioRecord = mAudioRecord;
+    sp <IMemory> iMem = mCblkMemory;
+    audio_track_cblk_t* cblk = mCblk;
+    mLock.unlock();
+
     // Manage marker callback
     if (!mMarkerReached && (mMarkerPosition > 0)) {
-        if (mCblk->user >= mMarkerPosition) {
+        if (cblk->user >= mMarkerPosition) {
             mCbf(EVENT_MARKER, mUserData, (void *)&mMarkerPosition);
             mMarkerReached = true;
         }
@@ -621,7 +674,7 @@
 
     // Manage new position callback
     if (mUpdatePeriod > 0) {
-        while (mCblk->user >= mNewPosition) {
+        while (cblk->user >= mNewPosition) {
             mCbf(EVENT_NEW_POS, mUserData, (void *)&mNewPosition);
             mNewPosition += mUpdatePeriod;
         }
@@ -667,11 +720,11 @@
 
 
     // Manage overrun callback
-    if (mActive && (mCblk->framesAvailable_l() == 0)) {
-        LOGV("Overrun user: %x, server: %x, flags %04x", mCblk->user, mCblk->server, mCblk->flags);
-        if ((mCblk->flags & CBLK_UNDERRUN_MSK) == CBLK_UNDERRUN_OFF) {
+    if (mActive && (cblk->framesAvailable() == 0)) {
+        LOGV("Overrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags);
+        if ((cblk->flags & CBLK_UNDERRUN_MSK) == CBLK_UNDERRUN_OFF) {
             mCbf(EVENT_OVERRUN, mUserData, 0);
-            mCblk->flags |= CBLK_UNDERRUN_ON;
+            cblk->flags |= CBLK_UNDERRUN_ON;
         }
     }
 
@@ -683,6 +736,69 @@
     return true;
 }
 
+// must be called with mLock and cblk.lock held. Callers must also hold strong references on
+// the IAudioRecord and IMemory in case they are recreated here.
+// If the IAudioRecord is successfully restored, the cblk pointer is updated
+status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& cblk)
+{
+    status_t result;
+
+    if (!(cblk->flags & CBLK_RESTORING_MSK)) {
+        LOGW("dead IAudioRecord, creating a new one");
+
+        cblk->flags |= CBLK_RESTORING_ON;
+        // signal old cblk condition so that other threads waiting for available buffers stop
+        // waiting now
+        cblk->cv.broadcast();
+        cblk->lock.unlock();
+
+        // if the new IAudioRecord is created, openRecord_l() will modify the
+        // following member variables: mAudioRecord, mCblkMemory and mCblk.
+        // It will also delete the strong references on previous IAudioRecord and IMemory
+        result = openRecord_l(cblk->sampleRate, mFormat, mChannelCount,
+                mFrameCount, mFlags, getInput_l());
+        if (result == NO_ERROR) {
+            result = mAudioRecord->start();
+        }
+        if (result != NO_ERROR) {
+            mActive = false;
+        }
+
+        // signal old cblk condition for other threads waiting for restore completion
+        cblk->lock.lock();
+        cblk->flags |= CBLK_RESTORED_MSK;
+        cblk->cv.broadcast();
+        cblk->lock.unlock();
+    } else {
+        if (!(cblk->flags & CBLK_RESTORED_MSK)) {
+            LOGW("dead IAudioRecord, waiting for a new one to be created");
+            mLock.unlock();
+            result = cblk->cv.waitRelative(cblk->lock, milliseconds(RESTORE_TIMEOUT_MS));
+            cblk->lock.unlock();
+            mLock.lock();
+        } else {
+            LOGW("dead IAudioRecord, already restored");
+            result = NO_ERROR;
+            cblk->lock.unlock();
+        }
+        if (result != NO_ERROR || mActive == 0) {
+            result = status_t(STOPPED);
+        }
+    }
+    LOGV("restoreRecord_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x",
+         result, mActive, mCblk, cblk, mCblk->flags, cblk->flags);
+
+    if (result == NO_ERROR) {
+        // from now on we switch to the newly created cblk
+        cblk = mCblk;
+    }
+    cblk->lock.lock();
+
+    LOGW_IF(result != NO_ERROR, "restoreRecord_l() error %d", result);
+
+    return result;
+}
+
 // =========================================================================
 
 AudioRecord::ClientRecordThread::ClientRecordThread(AudioRecord& receiver, bool bCanCallJava)
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 1a3fcd6..2f694ba 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -169,15 +169,6 @@
     return af->setMode(mode);
 }
 
-
-status_t AudioSystem::isStreamActive(int stream, bool* state) {
-    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
-    if (af == 0) return PERMISSION_DENIED;
-    *state = af->isStreamActive(stream);
-    return NO_ERROR;
-}
-
-
 status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
@@ -677,6 +668,13 @@
     return aps->getStrategyForStream(stream);
 }
 
+uint32_t AudioSystem::getDevicesForStream(AudioSystem::stream_type stream)
+{
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return 0;
+    return aps->getDevicesForStream(stream);
+}
+
 audio_io_handle_t AudioSystem::getOutputForEffect(effect_descriptor_t *desc)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
@@ -702,6 +700,14 @@
     return aps->unregisterEffect(id);
 }
 
+status_t AudioSystem::isStreamActive(int stream, bool* state, uint32_t inPastMs) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+    *state = aps->isStreamActive(stream, inPastMs);
+    return NO_ERROR;
+}
+
+
 // ---------------------------------------------------------------------------
 
 void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who) {
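
isStreamActive() is now answered by the audio policy service and takes a look-back window, and getDevicesForStream() is new. A hedged usage sketch; the stream constant and the 5000 ms window are arbitrary, and whether inPastMs has a default value is not shown in this diff:

    #include <media/AudioSystem.h>

    using namespace android;

    static bool musicRecentlyActive() {
        bool active = false;
        status_t err = AudioSystem::isStreamActive(
                AudioSystem::MUSIC, &active, 5000 /* inPastMs */);

        // Also new: which output devices currently map to this stream type.
        uint32_t devices = AudioSystem::getDevicesForStream(AudioSystem::MUSIC);
        (void)devices;

        return err == NO_ERROR && active;
    }
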
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 587c8ff..8d8f67b 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -35,7 +35,6 @@
 #include <binder/Parcel.h>
 #include <binder/IPCThreadState.h>
 #include <utils/Timers.h>
-#include <cutils/atomic.h>
 
 #define LIKELY( exp )       (__builtin_expect( (exp) != 0, true  ))
 #define UNLIKELY( exp )     (__builtin_expect( (exp) != 0, false ))
@@ -149,6 +148,7 @@
 
     LOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), sharedBuffer->size());
 
+    AutoMutex lock(mLock);
     if (mAudioTrack != 0) {
         LOGE("Track already in use");
         return INVALID_OPERATION;
@@ -212,8 +212,15 @@
     mAuxEffectId = 0;
 
     // create the IAudioTrack
-    status_t status = createTrack(streamType, sampleRate, format, channelCount,
-                                  frameCount, flags, sharedBuffer, output, true);
+    status_t status = createTrack_l(streamType,
+                                  sampleRate,
+                                  format,
+                                  channelCount,
+                                  frameCount,
+                                  flags,
+                                  sharedBuffer,
+                                  output,
+                                  true);
 
     if (status != NO_ERROR) {
         return status;
@@ -312,39 +319,42 @@
         t->mLock.lock();
      }
 
-    if (android_atomic_or(1, &mActive) == 0) {
-        mNewPosition = mCblk->server + mUpdatePeriod;
-        mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS;
-        mCblk->waitTimeMs = 0;
-        mCblk->flags &= ~CBLK_DISABLED_ON;
+    AutoMutex lock(mLock);
+    // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed
+    // while we are accessing the cblk
+    sp <IAudioTrack> audioTrack = mAudioTrack;
+    sp <IMemory> iMem = mCblkMemory;
+    audio_track_cblk_t* cblk = mCblk;
+
+    if (mActive == 0) {
+        mActive = 1;
+        mNewPosition = cblk->server + mUpdatePeriod;
+        cblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS;
+        cblk->waitTimeMs = 0;
+        cblk->flags &= ~CBLK_DISABLED_ON;
         if (t != 0) {
            t->run("AudioTrackThread", THREAD_PRIORITY_AUDIO_CLIENT);
         } else {
             setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT);
         }
 
-        if (mCblk->flags & CBLK_INVALID_MSK) {
-            LOGW("start() track %p invalidated, creating a new one", this);
-            // no need to clear the invalid flag as this cblk will not be used anymore
-            // force new track creation
-            status = DEAD_OBJECT;
-        } else {
+        LOGV("start %p before lock cblk %p", this, mCblk);
+        cblk->lock.lock();
+        if (!(cblk->flags & CBLK_INVALID_MSK)) {
+            cblk->lock.unlock();
             status = mAudioTrack->start();
-        }
-        if (status == DEAD_OBJECT) {
-            LOGV("start() dead IAudioTrack: creating a new one");
-            status = createTrack(mStreamType, mCblk->sampleRate, mFormat, mChannelCount,
-                                 mFrameCount, mFlags, mSharedBuffer, getOutput(), false);
-            if (status == NO_ERROR) {
-                status = mAudioTrack->start();
-                if (status == NO_ERROR) {
-                    mNewPosition = mCblk->server + mUpdatePeriod;
-                }
+            cblk->lock.lock();
+            if (status == DEAD_OBJECT) {
+                cblk->flags |= CBLK_INVALID_MSK;
             }
         }
+        if (cblk->flags & CBLK_INVALID_MSK) {
+            status = restoreTrack_l(cblk, true);
+        }
+        cblk->lock.unlock();
         if (status != NO_ERROR) {
             LOGV("start() failed");
-            android_atomic_and(~1, &mActive);
+            mActive = 0;
             if (t != 0) {
                 t->requestExit();
             } else {
@@ -367,19 +377,21 @@
         t->mLock.lock();
     }
 
-    if (android_atomic_and(~1, &mActive) == 1) {
+    AutoMutex lock(mLock);
+    if (mActive == 1) {
+        mActive = 0;
         mCblk->cv.signal();
         mAudioTrack->stop();
         // Cancel loops (If we are in the middle of a loop, playback
         // would not stop until loopCount reaches 0).
-        setLoop(0, 0, 0);
+        setLoop_l(0, 0, 0);
         // the playback head position will reset to 0, so if a marker is set, we need
         // to activate it again
         mMarkerReached = false;
         // Force flush if a shared buffer is used otherwise audioflinger
         // will not stop before end of buffer is reached.
         if (mSharedBuffer != 0) {
-            flush();
+            flush_l();
         }
         if (t != 0) {
             t->requestExit();
@@ -400,6 +412,13 @@
 
 void AudioTrack::flush()
 {
+    AutoMutex lock(mLock);
+    flush_l();
+}
+
+// must be called with mLock held
+void AudioTrack::flush_l()
+{
     LOGV("flush");
 
     // clear playback marker and periodic update counter
@@ -407,7 +426,6 @@
     mMarkerReached = false;
     mUpdatePeriod = 0;
 
-
     if (!mActive) {
         mAudioTrack->flush();
         // Release AudioTrack callback thread in case it was waiting for new buffers
@@ -419,7 +437,9 @@
 void AudioTrack::pause()
 {
     LOGV("pause");
-    if (android_atomic_and(~1, &mActive) == 1) {
+    AutoMutex lock(mLock);
+    if (mActive == 1) {
+        mActive = 0;
         mAudioTrack->pause();
     }
 }
@@ -441,6 +461,7 @@
         return BAD_VALUE;
     }
 
+    AutoMutex lock(mLock);
     mVolume[LEFT] = left;
     mVolume[RIGHT] = right;
 
@@ -466,6 +487,7 @@
     if (level > 1.0f) {
         return BAD_VALUE;
     }
+    AutoMutex lock(mLock);
 
     mSendLevel = level;
 
@@ -491,17 +513,26 @@
     // Resampler implementation limits input sampling rate to 2 x output sampling rate.
     if (rate <= 0 || rate > afSamplingRate*2 ) return BAD_VALUE;
 
+    AutoMutex lock(mLock);
     mCblk->sampleRate = rate;
     return NO_ERROR;
 }
 
 uint32_t AudioTrack::getSampleRate()
 {
+    AutoMutex lock(mLock);
     return mCblk->sampleRate;
 }
 
 status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount)
 {
+    AutoMutex lock(mLock);
+    return setLoop_l(loopStart, loopEnd, loopCount);
+}
+
+// must be called with mLock held
+status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount)
+{
     audio_track_cblk_t* cblk = mCblk;
 
     Mutex::Autolock _l(cblk->lock);
@@ -536,6 +567,7 @@
 
 status_t AudioTrack::getLoop(uint32_t *loopStart, uint32_t *loopEnd, int *loopCount)
 {
+    AutoMutex lock(mLock);
     if (loopStart != 0) {
         *loopStart = mCblk->loopStart;
     }
@@ -595,6 +627,7 @@
 
 status_t AudioTrack::setPosition(uint32_t position)
 {
+    AutoMutex lock(mLock);
     Mutex::Autolock _l(mCblk->lock);
 
     if (!stopped()) return INVALID_OPERATION;
@@ -610,7 +643,7 @@
 status_t AudioTrack::getPosition(uint32_t *position)
 {
     if (position == 0) return BAD_VALUE;
-
+    AutoMutex lock(mLock);
     *position = mCblk->server;
 
     return NO_ERROR;
@@ -618,9 +651,11 @@
 
 status_t AudioTrack::reload()
 {
+    AutoMutex lock(mLock);
+
     if (!stopped()) return INVALID_OPERATION;
 
-    flush();
+    flush_l();
 
     mCblk->stepUser(mCblk->frameCount);
 
@@ -629,6 +664,13 @@
 
 audio_io_handle_t AudioTrack::getOutput()
 {
+    AutoMutex lock(mLock);
+    return getOutput_l();
+}
+
+// must be called with mLock held
+audio_io_handle_t AudioTrack::getOutput_l()
+{
     return AudioSystem::getOutput((AudioSystem::stream_type)mStreamType,
             mCblk->sampleRate, mFormat, mChannels, (AudioSystem::output_flags)mFlags);
 }
@@ -650,7 +692,8 @@
 
 // -------------------------------------------------------------------------
 
-status_t AudioTrack::createTrack(
+// must be called with mLock held
+status_t AudioTrack::createTrack_l(
         int streamType,
         uint32_t sampleRate,
         int format,
@@ -770,6 +813,7 @@
 
 status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
 {
+    AutoMutex lock(mLock);
     int active;
     status_t result;
     audio_track_cblk_t* cblk = mCblk;
@@ -796,12 +840,17 @@
                 return WOULD_BLOCK;
             }
             if (!(cblk->flags & CBLK_INVALID_MSK)) {
+                mLock.unlock();
                 result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs));
-            }
-            if (cblk->flags & CBLK_INVALID_MSK) {
-                LOGW("obtainBuffer() track %p invalidated, creating a new one", this);
-                // no need to clear the invalid flag as this cblk will not be used anymore
                 cblk->lock.unlock();
+                mLock.lock();
+                if (mActive == 0) {
+                    return status_t(STOPPED);
+                }
+                cblk->lock.lock();
+            }
+
+            if (cblk->flags & CBLK_INVALID_MSK) {
                 goto create_new_track;
             }
             if (__builtin_expect(result!=NO_ERROR, false)) {
@@ -815,18 +864,17 @@
                         //unlock cblk mutex before calling mAudioTrack->start() (see issue #1617140)
                         cblk->lock.unlock();
                         result = mAudioTrack->start();
-                        if (result == DEAD_OBJECT) {
-                            LOGW("obtainBuffer() dead IAudioTrack: creating a new one");
-create_new_track:
-                            result = createTrack(mStreamType, cblk->sampleRate, mFormat, mChannelCount,
-                                                 mFrameCount, mFlags, mSharedBuffer, getOutput(), false);
-                            if (result == NO_ERROR) {
-                                cblk = mCblk;
-                                cblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS;
-                                mAudioTrack->start();
-                            }
-                        }
                         cblk->lock.lock();
+                        if (result == DEAD_OBJECT) {
+                            cblk->flags |= CBLK_INVALID_MSK;
+create_new_track:
+                            result = restoreTrack_l(cblk, false);
+                        }
+                        if (result != NO_ERROR) {
+                            LOGW("obtainBuffer create Track error %d", result);
+                            cblk->lock.unlock();
+                            return result;
+                        }
                     }
                     cblk->waitTimeMs = 0;
                 }
@@ -844,7 +892,7 @@
     }
 
     // restart track if it was disabled by audioflinger due to previous underrun
-    if (cblk->flags & CBLK_DISABLED_MSK) {
+    if (mActive && (cblk->flags & CBLK_DISABLED_MSK)) {
         cblk->flags &= ~CBLK_DISABLED_ON;
         LOGW("obtainBuffer() track %p disabled, restarting", this);
         mAudioTrack->start();
@@ -879,8 +927,8 @@
 
 void AudioTrack::releaseBuffer(Buffer* audioBuffer)
 {
-    audio_track_cblk_t* cblk = mCblk;
-    cblk->stepUser(audioBuffer->frameCount);
+    AutoMutex lock(mLock);
+    mCblk->stepUser(audioBuffer->frameCount);
 }
 
 // -------------------------------------------------------------------------
@@ -899,6 +947,13 @@
 
     LOGV("write %p: %d bytes, mActive=%d", this, userSize, mActive);
 
+    // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed
+    // while we are accessing the cblk
+    mLock.lock();
+    sp <IAudioTrack> audioTrack = mAudioTrack;
+    sp <IMemory> iMem = mCblkMemory;
+    mLock.unlock();
+
     ssize_t written = 0;
     const int8_t *src = (const int8_t *)buffer;
     Buffer audioBuffer;
@@ -949,21 +1004,29 @@
     uint32_t frames;
     size_t writtenSize;
 
+    mLock.lock();
+    // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed
+    // while we are accessing the cblk
+    sp <IAudioTrack> audioTrack = mAudioTrack;
+    sp <IMemory> iMem = mCblkMemory;
+    audio_track_cblk_t* cblk = mCblk;
+    mLock.unlock();
+
     // Manage underrun callback
-    if (mActive && (mCblk->framesReady() == 0)) {
-        LOGV("Underrun user: %x, server: %x, flags %04x", mCblk->user, mCblk->server, mCblk->flags);
-        if ((mCblk->flags & CBLK_UNDERRUN_MSK) == CBLK_UNDERRUN_OFF) {
+    if (mActive && (cblk->framesReady() == 0)) {
+        LOGV("Underrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags);
+        if ((cblk->flags & CBLK_UNDERRUN_MSK) == CBLK_UNDERRUN_OFF) {
             mCbf(EVENT_UNDERRUN, mUserData, 0);
-            if (mCblk->server == mCblk->frameCount) {
+            if (cblk->server == cblk->frameCount) {
                 mCbf(EVENT_BUFFER_END, mUserData, 0);
             }
-            mCblk->flags |= CBLK_UNDERRUN_ON;
+            cblk->flags |= CBLK_UNDERRUN_ON;
             if (mSharedBuffer != 0) return false;
         }
     }
 
     // Manage loop end callback
-    while (mLoopCount > mCblk->loopCount) {
+    while (mLoopCount > cblk->loopCount) {
         int loopCount = -1;
         mLoopCount--;
         if (mLoopCount >= 0) loopCount = mLoopCount;
@@ -973,7 +1036,7 @@
 
     // Manage marker callback
     if (!mMarkerReached && (mMarkerPosition > 0)) {
-        if (mCblk->server >= mMarkerPosition) {
+        if (cblk->server >= mMarkerPosition) {
             mCbf(EVENT_MARKER, mUserData, (void *)&mMarkerPosition);
             mMarkerReached = true;
         }
@@ -981,7 +1044,7 @@
 
     // Manage new position callback
     if (mUpdatePeriod > 0) {
-        while (mCblk->server >= mNewPosition) {
+        while (cblk->server >= mNewPosition) {
             mCbf(EVENT_NEW_POS, mUserData, (void *)&mNewPosition);
             mNewPosition += mUpdatePeriod;
         }
@@ -1064,6 +1127,84 @@
     return true;
 }
 
+// must be called with mLock and cblk.lock held. Callers must also hold strong references on
+// the IAudioTrack and IMemory in case they are recreated here.
+// If the IAudioTrack is successfully restored, the cblk pointer is updated
+status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart)
+{
+    status_t result;
+
+    if (!(cblk->flags & CBLK_RESTORING_MSK)) {
+        LOGW("dead IAudioTrack, creating a new one from %s",
+             fromStart ? "start()" : "obtainBuffer()");
+
+        cblk->flags |= CBLK_RESTORING_ON;
+        // signal old cblk condition so that other threads waiting for available buffers stop
+        // waiting now
+        cblk->cv.broadcast();
+        cblk->lock.unlock();
+
+        // if the new IAudioTrack is created, createTrack_l() will modify the
+        // following member variables: mAudioTrack, mCblkMemory and mCblk.
+        // It will also delete the strong references on previous IAudioTrack and IMemory
+        result = createTrack_l(mStreamType,
+                               cblk->sampleRate,
+                               mFormat,
+                               mChannelCount,
+                               mFrameCount,
+                               mFlags,
+                               mSharedBuffer,
+                               getOutput_l(),
+                               false);
+
+        if (result == NO_ERROR) {
+            if (!fromStart) {
+                mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS;
+            }
+            result = mAudioTrack->start();
+            if (fromStart && result == NO_ERROR) {
+                mNewPosition = mCblk->server + mUpdatePeriod;
+            }
+        }
+        if (result != NO_ERROR) {
+            mActive = false;
+        }
+
+        // signal old cblk condition for other threads waiting for restore completion
+        cblk->lock.lock();
+        cblk->flags |= CBLK_RESTORED_MSK;
+        cblk->cv.broadcast();
+        cblk->lock.unlock();
+    } else {
+        if (!(cblk->flags & CBLK_RESTORED_MSK)) {
+            LOGW("dead IAudioTrack, waiting for a new one");
+            mLock.unlock();
+            result = cblk->cv.waitRelative(cblk->lock, milliseconds(RESTORE_TIMEOUT_MS));
+            cblk->lock.unlock();
+            mLock.lock();
+        } else {
+            LOGW("dead IAudioTrack, already restored");
+            result = NO_ERROR;
+            cblk->lock.unlock();
+        }
+        if (result != NO_ERROR || mActive == 0) {
+            result = status_t(STOPPED);
+        }
+    }
+    LOGV("restoreTrack_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x",
+         result, mActive, mCblk, cblk, mCblk->flags, cblk->flags);
+
+    if (result == NO_ERROR) {
+        // from now on we switch to the newly created cblk
+        cblk = mCblk;
+    }
+    cblk->lock.lock();
+
+    LOGW_IF(result != NO_ERROR, "restoreTrack_l() error %d", result);
+
+    return result;
+}
+
 status_t AudioTrack::dump(int fd, const Vector<String16>& args) const
 {
 
@@ -1193,7 +1334,9 @@
 
     this->server = s;
 
-    cv.signal();
+    if (!(flags & CBLK_INVALID_MSK)) {
+        cv.signal();
+    }
     lock.unlock();
     return true;
 }
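
The AudioTrack changes above apply one locking pattern throughout: hold mLock only long enough to copy strong references to the objects that back the shared control block (mAudioTrack, mCblkMemory), then do the slow work outside the lock, so the block cannot be freed out from under a blocked writer even if restoreTrack_l() swaps in a new one. A minimal standalone sketch of that pattern, using std::shared_ptr and std::mutex in place of the Android sp<> and Mutex types (illustrative only, not the framework code):

    #include <cstdio>
    #include <memory>
    #include <mutex>
    #include <vector>

    struct ControlBlock { std::vector<int> frames; };

    class Track {
    public:
        Track() : mCblk(std::make_shared<ControlBlock>()) {}

        // Writer path: copy the strong reference under the lock, drop the
        // lock, then use the copy. Even if another thread replaces mCblk
        // (as restoreTrack_l() does when the server-side track dies), the
        // copy keeps the old block alive until this call is done with it.
        void write(int sample) {
            std::shared_ptr<ControlBlock> cblk;
            {
                std::lock_guard<std::mutex> lock(mLock);
                cblk = mCblk;
            }
            cblk->frames.push_back(sample);  // safe: cblk cannot be destroyed here
        }

        // Restore path: swap in a fresh block while holding the lock.
        void restore() {
            std::lock_guard<std::mutex> lock(mLock);
            mCblk = std::make_shared<ControlBlock>();
        }

    private:
        std::mutex mLock;
        std::shared_ptr<ControlBlock> mCblk;
    };

    int main() {
        Track t;
        t.write(1);
        t.restore();
        t.write(2);
        std::printf("ok\n");
        return 0;
    }
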
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index 3a89e25..eec47c0 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -47,7 +47,6 @@
     SET_MODE,
     SET_MIC_MUTE,
     GET_MIC_MUTE,
-    IS_STREAM_ACTIVE,
     SET_PARAMETERS,
     GET_PARAMETERS,
     REGISTER_CLIENT,
@@ -316,15 +315,6 @@
         return reply.readInt32();
     }
 
-    virtual bool isStreamActive(int stream) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(stream);
-        remote()->transact(IS_STREAM_ACTIVE, data, &reply);
-        return reply.readInt32();
-    }
-
     virtual status_t setParameters(int ioHandle, const String8& keyValuePairs)
     {
         Parcel data, reply;
@@ -826,12 +816,6 @@
             reply->writeInt32( getMicMute() );
             return NO_ERROR;
         } break;
-        case IS_STREAM_ACTIVE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int stream = data.readInt32();
-            reply->writeInt32( isStreamActive(stream) );
-            return NO_ERROR;
-        } break;
         case SET_PARAMETERS: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
             int ioHandle = data.readInt32();
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 950c213..b89a278 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -48,7 +48,9 @@
     GET_STRATEGY_FOR_STREAM,
     GET_OUTPUT_FOR_EFFECT,
     REGISTER_EFFECT,
-    UNREGISTER_EFFECT
+    UNREGISTER_EFFECT,
+    IS_STREAM_ACTIVE,
+    GET_DEVICES_FOR_STREAM,
 };
 
 class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
@@ -262,6 +264,15 @@
         return reply.readInt32();
     }
 
+    virtual uint32_t getDevicesForStream(AudioSystem::stream_type stream)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeInt32(static_cast <uint32_t>(stream));
+        remote()->transact(GET_DEVICES_FOR_STREAM, data, &reply);
+        return (uint32_t) reply.readInt32();
+    }
+
     virtual audio_io_handle_t getOutputForEffect(effect_descriptor_t *desc)
     {
         Parcel data, reply;
@@ -297,6 +308,15 @@
         return static_cast <status_t> (reply.readInt32());
     }
 
+    virtual bool isStreamActive(int stream, uint32_t inPastMs) const
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        data.writeInt32(stream);
+        data.writeInt32(inPastMs);
+        remote()->transact(IS_STREAM_ACTIVE, data, &reply);
+        return reply.readInt32();
+    }
 };
 
 IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
@@ -485,6 +505,14 @@
             return NO_ERROR;
         } break;
 
+        case GET_DEVICES_FOR_STREAM: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            AudioSystem::stream_type stream =
+                    static_cast <AudioSystem::stream_type>(data.readInt32());
+            reply->writeInt32(static_cast <int>(getDevicesForStream(stream)));
+            return NO_ERROR;
+        } break;
+
         case GET_OUTPUT_FOR_EFFECT: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             effect_descriptor_t desc;
@@ -517,6 +545,14 @@
             return NO_ERROR;
         } break;
 
+        case IS_STREAM_ACTIVE: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            int stream = data.readInt32();
+            uint32_t inPastMs = (uint32_t)data.readInt32();
+            reply->writeInt32( isStreamActive(stream, inPastMs) );
+            return NO_ERROR;
+        } break;
+
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
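
The new getDevicesForStream() and isStreamActive(stream, inPastMs) calls depend on the proxy writing its arguments into the Parcel in exactly the order the stub reads them back out. A self-contained sketch of that write/read symmetry, with a toy FIFO standing in for Parcel (not the real Binder API):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>
    #include <deque>

    // Toy stand-in for Parcel: a FIFO of 32-bit values.
    struct ToyParcel {
        std::deque<int32_t> words;
        void writeInt32(int32_t v) { words.push_back(v); }
        int32_t readInt32() { int32_t v = words.front(); words.pop_front(); return v; }
    };

    // Arbitrary toy implementation standing in for the policy manager.
    bool impl_isStreamActive(int stream, uint32_t inPastMs) {
        return stream == 3 && inPastMs > 0;
    }

    // "Proxy" side: marshal the arguments in a fixed order.
    void proxy_isStreamActive(ToyParcel& data, int stream, uint32_t inPastMs) {
        data.writeInt32(stream);
        data.writeInt32(static_cast<int32_t>(inPastMs));
    }

    // "Stub" side: unmarshal in the same order, call the implementation,
    // and write the result into the reply.
    void stub_isStreamActive(ToyParcel& data, ToyParcel& reply) {
        int stream = data.readInt32();
        uint32_t inPastMs = static_cast<uint32_t>(data.readInt32());
        reply.writeInt32(impl_isStreamActive(stream, inPastMs));
    }

    int main() {
        ToyParcel data, reply;
        proxy_isStreamActive(data, 3, 5000);
        stub_isStreamActive(data, reply);
        assert(reply.readInt32() == 1);
        std::printf("round trip ok\n");
        return 0;
    }
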
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 0f55b19..2399216 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -22,6 +22,8 @@
 
 #include <media/IMediaPlayer.h>
 #include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
+#include <gui/ISurfaceTexture.h>
 
 namespace android {
 
@@ -43,10 +45,9 @@
     INVOKE,
     SET_METADATA_FILTER,
     GET_METADATA,
-    SUSPEND,
-    RESUME,
     SET_AUX_EFFECT_SEND_LEVEL,
-    ATTACH_AUX_EFFECT
+    ATTACH_AUX_EFFECT,
+    SET_VIDEO_SURFACETEXTURE,
 };
 
 class BpMediaPlayer: public BpInterface<IMediaPlayer>
@@ -65,15 +66,27 @@
         remote()->transact(DISCONNECT, data, &reply);
     }
 
-    status_t setVideoSurface(const sp<ISurface>& surface)
+    // pass the buffered Surface to the media player service
+    status_t setVideoSurface(const sp<Surface>& surface)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-        data.writeStrongBinder(surface->asBinder());
+        Surface::writeToParcel(surface, &data);
         remote()->transact(SET_VIDEO_SURFACE, data, &reply);
         return reply.readInt32();
     }
 
+    // pass the buffered ISurfaceTexture to the media player service
+    status_t setVideoSurfaceTexture(const sp<ISurfaceTexture>& surfaceTexture)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+        sp<IBinder> b(surfaceTexture->asBinder());
+        data.writeStrongBinder(b);
+        remote()->transact(SET_VIDEO_SURFACETEXTURE, data, &reply);
+        return reply.readInt32();
+    }
+
     status_t prepareAsync()
     {
         Parcel data, reply;
@@ -204,26 +217,6 @@
         return reply->readInt32();
     }
 
-    status_t suspend() {
-        Parcel request;
-        request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-
-        Parcel reply;
-        remote()->transact(SUSPEND, request, &reply);
-
-        return reply.readInt32();
-    }
-
-    status_t resume() {
-        Parcel request;
-        request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-
-        Parcel reply;
-        remote()->transact(RESUME, request, &reply);
-
-        return reply.readInt32();
-    }
-
     status_t setAuxEffectSendLevel(float level)
     {
         Parcel data, reply;
@@ -241,6 +234,7 @@
         remote()->transact(ATTACH_AUX_EFFECT, data, &reply);
         return reply.readInt32();
     }
+
 };
 
 IMPLEMENT_META_INTERFACE(MediaPlayer, "android.media.IMediaPlayer");
@@ -258,10 +252,17 @@
         } break;
         case SET_VIDEO_SURFACE: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
-            sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+            sp<Surface> surface = Surface::readFromParcel(data);
             reply->writeInt32(setVideoSurface(surface));
             return NO_ERROR;
         } break;
+        case SET_VIDEO_SURFACETEXTURE: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            sp<ISurfaceTexture> surfaceTexture =
+                    interface_cast<ISurfaceTexture>(data.readStrongBinder());
+            reply->writeInt32(setVideoSurfaceTexture(surfaceTexture));
+            return NO_ERROR;
+        } break;
         case PREPARE_ASYNC: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             reply->writeInt32(prepareAsync());
@@ -341,16 +342,6 @@
             reply->writeInt32(setMetadataFilter(data));
             return NO_ERROR;
         } break;
-        case SUSPEND: {
-            CHECK_INTERFACE(IMediaPlayer, data, reply);
-            reply->writeInt32(suspend());
-            return NO_ERROR;
-        } break;
-        case RESUME: {
-            CHECK_INTERFACE(IMediaPlayer, data, reply);
-            reply->writeInt32(resume());
-            return NO_ERROR;
-        } break;
         case GET_METADATA: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             const status_t retcode = getMetadata(data.readInt32(), data.readInt32(), reply);
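
Transaction codes like SET_VIDEO_SURFACETEXTURE are just positions in an enum starting at IBinder::FIRST_CALL_TRANSACTION, so the proxy and the stub above have to be built from the same list, and onTransact() forwards any code it does not recognize to the base class. A compact sketch of that dispatch shape (names and values here are illustrative, not the real codes):

    #include <cstdint>
    #include <cstdio>

    enum { SET_VIDEO_SURFACE = 1, SET_VIDEO_SURFACETEXTURE, PREPARE_ASYNC };

    // Stand-in for BBinder::onTransact(): last resort for unknown codes.
    int baseOnTransact(uint32_t code) {
        std::printf("code %u -> base handler\n", code);
        return -1;  // "unknown transaction"
    }

    // Stand-in for BnMediaPlayer::onTransact(): handle known codes, forward
    // everything else so unhandled codes still get a well-defined result.
    int onTransact(uint32_t code) {
        switch (code) {
            case SET_VIDEO_SURFACE:        std::printf("set surface\n");         return 0;
            case SET_VIDEO_SURFACETEXTURE: std::printf("set surface texture\n"); return 0;
            default:                       return baseOnTransact(code);
        }
    }

    int main() {
        onTransact(SET_VIDEO_SURFACETEXTURE);
        onTransact(99);
        return 0;
    }
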
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 4abfa75..17a0362 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -23,6 +23,7 @@
 #include <media/IMediaPlayerService.h>
 #include <media/IMediaRecorder.h>
 #include <media/IOMX.h>
+#include <media/IStreamSource.h>
 
 #include <utils/Errors.h>  // for status_t
 
@@ -31,11 +32,14 @@
 enum {
     CREATE_URL = IBinder::FIRST_CALL_TRANSACTION,
     CREATE_FD,
+    CREATE_STREAM,
     DECODE_URL,
     DECODE_FD,
     CREATE_MEDIA_RECORDER,
     CREATE_METADATA_RETRIEVER,
-    GET_OMX
+    GET_OMX,
+    ADD_BATTERY_DATA,
+    PULL_BATTERY_DATA
 };
 
 class BpMediaPlayerService: public BpInterface<IMediaPlayerService>
@@ -107,6 +111,21 @@
         return interface_cast<IMediaPlayer>(reply.readStrongBinder());;
     }
 
+    virtual sp<IMediaPlayer> create(
+            pid_t pid, const sp<IMediaPlayerClient> &client,
+            const sp<IStreamSource> &source, int audioSessionId) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(pid));
+        data.writeStrongBinder(client->asBinder());
+        data.writeStrongBinder(source->asBinder());
+        data.writeInt32(static_cast<int32_t>(audioSessionId));
+
+        remote()->transact(CREATE_STREAM, data, &reply);
+
+        return interface_cast<IMediaPlayer>(reply.readStrongBinder());
+    }
+
     virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat)
     {
         Parcel data, reply;
@@ -139,6 +158,19 @@
         remote()->transact(GET_OMX, data, &reply);
         return interface_cast<IOMX>(reply.readStrongBinder());
     }
+
+    virtual void addBatteryData(uint32_t params) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+        data.writeInt32(params);
+        remote()->transact(ADD_BATTERY_DATA, data, &reply);
+    }
+
+    virtual status_t pullBatteryData(Parcel* reply) {
+        Parcel data;
+        data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+        return remote()->transact(PULL_BATTERY_DATA, data, reply);
+    }
 };
 
 IMPLEMENT_META_INTERFACE(MediaPlayerService, "android.media.IMediaPlayerService");
@@ -184,6 +216,27 @@
             reply->writeStrongBinder(player->asBinder());
             return NO_ERROR;
         } break;
+        case CREATE_STREAM:
+        {
+            CHECK_INTERFACE(IMediaPlayerService, data, reply);
+
+            pid_t pid = static_cast<pid_t>(data.readInt32());
+
+            sp<IMediaPlayerClient> client =
+                interface_cast<IMediaPlayerClient>(data.readStrongBinder());
+
+            sp<IStreamSource> source =
+                interface_cast<IStreamSource>(data.readStrongBinder());
+
+            int audioSessionId = static_cast<int>(data.readInt32());
+
+            sp<IMediaPlayer> player =
+                create(pid, client, source, audioSessionId);
+
+            reply->writeStrongBinder(player->asBinder());
+            return OK;
+            break;
+        }
         case DECODE_URL: {
             CHECK_INTERFACE(IMediaPlayerService, data, reply);
             const char* url = data.readCString();
@@ -232,6 +285,17 @@
             reply->writeStrongBinder(omx->asBinder());
             return NO_ERROR;
         } break;
+        case ADD_BATTERY_DATA: {
+            CHECK_INTERFACE(IMediaPlayerService, data, reply);
+            uint32_t params = data.readInt32();
+            addBatteryData(params);
+            return NO_ERROR;
+        } break;
+        case PULL_BATTERY_DATA: {
+            CHECK_INTERFACE(IMediaPlayerService, data, reply);
+            pullBatteryData(reply);
+            return NO_ERROR;
+        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
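
CREATE_URL, CREATE_FD and the new CREATE_STREAM each unmarshal a different argument set and hand it to a matching create() overload on the service. A toy factory with the same overload shape (the classes and names below are made up for illustration):

    #include <cstdio>
    #include <memory>
    #include <string>

    struct Player {
        virtual ~Player() = default;
        virtual void describe() const = 0;
    };

    struct UrlPlayer : Player {
        std::string url;
        explicit UrlPlayer(std::string u) : url(std::move(u)) {}
        void describe() const override { std::printf("url player: %s\n", url.c_str()); }
    };

    struct FdPlayer : Player {
        int fd;
        explicit FdPlayer(int f) : fd(f) {}
        void describe() const override { std::printf("fd player: %d\n", fd); }
    };

    struct StreamPlayer : Player {
        void describe() const override { std::printf("stream player\n"); }
    };

    // One create() overload per transaction, mirroring CREATE_URL, CREATE_FD
    // and CREATE_STREAM: the stub picks the overload that matches the
    // transaction code it received.
    std::unique_ptr<Player> create(const std::string& url) { return std::make_unique<UrlPlayer>(url); }
    std::unique_ptr<Player> create(int fd)                 { return std::make_unique<FdPlayer>(fd); }
    std::unique_ptr<Player> create()                       { return std::make_unique<StreamPlayer>(); }

    int main() {
        create("http://example.com/a.mp3")->describe();
        create(42)->describe();
        create()->describe();
        return 0;
    }
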
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 947ff34..59cd1b7 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -19,7 +19,7 @@
 #define LOG_TAG "IMediaRecorder"
 #include <utils/Log.h>
 #include <binder/Parcel.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
 #include <camera/ICamera.h>
 #include <media/IMediaRecorderClient.h>
 #include <media/IMediaRecorder.h>
@@ -43,6 +43,7 @@
     SET_AUDIO_ENCODER,
     SET_OUTPUT_FILE_PATH,
     SET_OUTPUT_FILE_FD,
+    SET_OUTPUT_FILE_AUXILIARY_FD,
     SET_VIDEO_SIZE,
     SET_VIDEO_FRAMERATE,
     SET_PARAMETERS,
@@ -69,12 +70,12 @@
         return reply.readInt32();
     }
 
-    status_t setPreviewSurface(const sp<ISurface>& surface)
+    status_t setPreviewSurface(const sp<Surface>& surface)
     {
         LOGV("setPreviewSurface(%p)", surface.get());
         Parcel data, reply;
         data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
-        data.writeStrongBinder(surface->asBinder());
+        Surface::writeToParcel(surface, &data);
         remote()->transact(SET_PREVIEW_SURFACE, data, &reply);
         return reply.readInt32();
     }
@@ -159,6 +160,15 @@
         return reply.readInt32();
     }
 
+    status_t setOutputFileAuxiliary(int fd) {
+        LOGV("setOutputFileAuxiliary(%d)", fd);
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+        data.writeFileDescriptor(fd);
+        remote()->transact(SET_OUTPUT_FILE_AUXILIARY_FD, data, &reply);
+        return reply.readInt32();
+    }
+
     status_t setVideoSize(int width, int height)
     {
         LOGV("setVideoSize(%dx%d)", width, height);
@@ -377,6 +387,13 @@
             ::close(fd);
             return NO_ERROR;
         } break;
+        case SET_OUTPUT_FILE_AUXILIARY_FD: {
+            LOGV("SET_OUTPUT_FILE_AUXILIARY_FD");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            int fd = dup(data.readFileDescriptor());
+            reply->writeInt32(setOutputFileAuxiliary(fd));
+            return NO_ERROR;
+        } break;
         case SET_VIDEO_SIZE: {
             LOGV("SET_VIDEO_SIZE");
             CHECK_INTERFACE(IMediaRecorder, data, reply);
@@ -409,7 +426,7 @@
         case SET_PREVIEW_SURFACE: {
             LOGV("SET_PREVIEW_SURFACE");
             CHECK_INTERFACE(IMediaRecorder, data, reply);
-            sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+            sp<Surface> surface = Surface::readFromParcel(data);
             reply->writeInt32(setPreviewSurface(surface));
             return NO_ERROR;
         } break;
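
In the SET_OUTPUT_FILE_AUXILIARY_FD case the stub dup()s the descriptor it reads from the Parcel before handing it to setOutputFileAuxiliary(), just as the existing SET_OUTPUT_FILE_FD case does, because the descriptor read from the Parcel is only borrowed for the duration of the transaction. A small POSIX sketch of that ownership handoff:

    #include <cstdio>
    #include <fcntl.h>
    #include <unistd.h>

    // The caller's fd is only borrowed; keep a private duplicate before the
    // original goes away.
    int takeOwnership(int transientFd) {
        return dup(transientFd);  // new descriptor, or -1 on failure
    }

    int main() {
        int transientFd = open("/dev/null", O_WRONLY);
        if (transientFd < 0) {
            std::perror("open");
            return 1;
        }

        int ownedFd = takeOwnership(transientFd);
        close(transientFd);  // the "transaction" ends here

        // The duplicate remains valid after the original is closed.
        ssize_t n = write(ownedFd, "x", 1);
        std::printf("wrote %zd byte(s) via the dup'ed fd\n", n);
        close(ownedFd);
        return 0;
    }
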
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index ae6c2bf..d6a1757 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -5,6 +5,7 @@
 #include <binder/IMemory.h>
 #include <binder/Parcel.h>
 #include <media/IOMX.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <surfaceflinger/ISurface.h>
 #include <surfaceflinger/Surface.h>
 
@@ -21,59 +22,20 @@
     SET_PARAMETER,
     GET_CONFIG,
     SET_CONFIG,
+    ENABLE_GRAPHIC_BUFFERS,
     USE_BUFFER,
+    USE_GRAPHIC_BUFFER,
+    STORE_META_DATA_IN_BUFFERS,
     ALLOC_BUFFER,
     ALLOC_BUFFER_WITH_BACKUP,
     FREE_BUFFER,
     FILL_BUFFER,
     EMPTY_BUFFER,
     GET_EXTENSION_INDEX,
-    CREATE_RENDERER,
     OBSERVER_ON_MSG,
-    RENDERER_RENDER,
+    GET_GRAPHIC_BUFFER_USAGE,
 };
 
-sp<IOMXRenderer> IOMX::createRenderer(
-        const sp<Surface> &surface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t encodedWidth, size_t encodedHeight,
-        size_t displayWidth, size_t displayHeight,
-        int32_t rotationDegrees) {
-    return createRenderer(
-            surface->getISurface(),
-            componentName, colorFormat, encodedWidth, encodedHeight,
-            displayWidth, displayHeight,
-            rotationDegrees);
-}
-
-sp<IOMXRenderer> IOMX::createRendererFromJavaSurface(
-        JNIEnv *env, jobject javaSurface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t encodedWidth, size_t encodedHeight,
-        size_t displayWidth, size_t displayHeight,
-        int32_t rotationDegrees) {
-    jclass surfaceClass = env->FindClass("android/view/Surface");
-    if (surfaceClass == NULL) {
-        LOGE("Can't find android/view/Surface");
-        return NULL;
-    }
-
-    jfieldID surfaceID = env->GetFieldID(surfaceClass, ANDROID_VIEW_SURFACE_JNI_ID, "I");
-    if (surfaceID == NULL) {
-        LOGE("Can't find Surface.mSurface");
-        return NULL;
-    }
-
-    sp<Surface> surface = (Surface *)env->GetIntField(javaSurface, surfaceID);
-
-    return createRenderer(
-            surface, componentName, colorFormat, encodedWidth,
-            encodedHeight, displayWidth, displayHeight,
-            rotationDegrees);
-}
-
 class BpOMX : public BpInterface<IOMX> {
 public:
     BpOMX(const sp<IBinder> &impl)
@@ -220,6 +182,32 @@
         return reply.readInt32();
     }
 
+    virtual status_t enableGraphicBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeIntPtr((intptr_t)node);
+        data.writeInt32(port_index);
+        data.writeInt32((uint32_t)enable);
+        remote()->transact(ENABLE_GRAPHIC_BUFFERS, data, &reply);
+
+        status_t err = reply.readInt32();
+        return err;
+    }
+
+    virtual status_t getGraphicBufferUsage(
+            node_id node, OMX_U32 port_index, OMX_U32* usage) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeIntPtr((intptr_t)node);
+        data.writeInt32(port_index);
+        remote()->transact(GET_GRAPHIC_BUFFER_USAGE, data, &reply);
+
+        status_t err = reply.readInt32();
+        *usage = reply.readInt32();
+        return err;
+    }
+
     virtual status_t useBuffer(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
             buffer_id *buffer) {
@@ -242,6 +230,42 @@
         return err;
     }
 
+
+    virtual status_t useGraphicBuffer(
+            node_id node, OMX_U32 port_index,
+            const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeIntPtr((intptr_t)node);
+        data.writeInt32(port_index);
+        data.write(*graphicBuffer);
+        remote()->transact(USE_GRAPHIC_BUFFER, data, &reply);
+
+        status_t err = reply.readInt32();
+        if (err != OK) {
+            *buffer = 0;
+
+            return err;
+        }
+
+        *buffer = (void*)reply.readIntPtr();
+
+        return err;
+    }
+
+    virtual status_t storeMetaDataInBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeIntPtr((intptr_t)node);
+        data.writeInt32(port_index);
+        data.writeInt32((uint32_t)enable);
+        remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
+
+        status_t err = reply.readInt32();
+        return err;
+    }
+
     virtual status_t allocateBuffer(
             node_id node, OMX_U32 port_index, size_t size,
             buffer_id *buffer, void **buffer_data) {
@@ -347,30 +371,6 @@
 
         return err;
     }
-
-    virtual sp<IOMXRenderer> createRenderer(
-            const sp<ISurface> &surface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
-
-        data.writeStrongBinder(surface->asBinder());
-        data.writeCString(componentName);
-        data.writeInt32(colorFormat);
-        data.writeInt32(encodedWidth);
-        data.writeInt32(encodedHeight);
-        data.writeInt32(displayWidth);
-        data.writeInt32(displayHeight);
-        data.writeInt32(rotationDegrees);
-
-        remote()->transact(CREATE_RENDERER, data, &reply);
-
-        return interface_cast<IOMXRenderer>(reply.readStrongBinder());
-    }
 };
 
 IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX");
@@ -464,74 +464,8 @@
         }
 
         case GET_PARAMETER:
-        {
-            CHECK_INTERFACE(IOMX, data, reply);
-
-            node_id node = (void*)data.readIntPtr();
-            OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32());
-
-            size_t size = data.readInt32();
-
-            // XXX I am not happy with this but Parcel::readInplace didn't work.
-            void *params = malloc(size);
-            data.read(params, size);
-
-            status_t err = getParameter(node, index, params, size);
-
-            reply->writeInt32(err);
-
-            if (err == OK) {
-                reply->write(params, size);
-            }
-
-            free(params);
-            params = NULL;
-
-            return NO_ERROR;
-        }
-
         case SET_PARAMETER:
-        {
-            CHECK_INTERFACE(IOMX, data, reply);
-
-            node_id node = (void*)data.readIntPtr();
-            OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32());
-
-            size_t size = data.readInt32();
-            void *params = const_cast<void *>(data.readInplace(size));
-
-            reply->writeInt32(setParameter(node, index, params, size));
-
-            return NO_ERROR;
-        }
-
         case GET_CONFIG:
-        {
-            CHECK_INTERFACE(IOMX, data, reply);
-
-            node_id node = (void*)data.readIntPtr();
-            OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32());
-
-            size_t size = data.readInt32();
-
-            // XXX I am not happy with this but Parcel::readInplace didn't work.
-            void *params = malloc(size);
-            data.read(params, size);
-
-            status_t err = getConfig(node, index, params, size);
-
-            reply->writeInt32(err);
-
-            if (err == OK) {
-                reply->write(params, size);
-            }
-
-            free(params);
-            params = NULL;
-
-            return NO_ERROR;
-        }
-
         case SET_CONFIG:
         {
             CHECK_INTERFACE(IOMX, data, reply);
@@ -540,9 +474,65 @@
             OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32());
 
             size_t size = data.readInt32();
-            void *params = const_cast<void *>(data.readInplace(size));
 
-            reply->writeInt32(setConfig(node, index, params, size));
+            void *params = malloc(size);
+            data.read(params, size);
+
+            status_t err;
+            switch (code) {
+                case GET_PARAMETER:
+                    err = getParameter(node, index, params, size);
+                    break;
+                case SET_PARAMETER:
+                    err = setParameter(node, index, params, size);
+                    break;
+                case GET_CONFIG:
+                    err = getConfig(node, index, params, size);
+                    break;
+                case SET_CONFIG:
+                    err = setConfig(node, index, params, size);
+                    break;
+                default:
+                    TRESPASS();
+            }
+
+            reply->writeInt32(err);
+
+            if ((code == GET_PARAMETER || code == GET_CONFIG) && err == OK) {
+                reply->write(params, size);
+            }
+
+            free(params);
+            params = NULL;
+
+            return NO_ERROR;
+        }
+
+        case ENABLE_GRAPHIC_BUFFERS:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = (void*)data.readIntPtr();
+            OMX_U32 port_index = data.readInt32();
+            OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+            status_t err = enableGraphicBuffers(node, port_index, enable);
+            reply->writeInt32(err);
+
+            return NO_ERROR;
+        }
+
+        case GET_GRAPHIC_BUFFER_USAGE:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = (void*)data.readIntPtr();
+            OMX_U32 port_index = data.readInt32();
+
+            OMX_U32 usage = 0;
+            status_t err = getGraphicBufferUsage(node, port_index, &usage);
+            reply->writeInt32(err);
+            reply->writeInt32(usage);
 
             return NO_ERROR;
         }
@@ -567,6 +557,41 @@
             return NO_ERROR;
         }
 
+        case USE_GRAPHIC_BUFFER:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = (void*)data.readIntPtr();
+            OMX_U32 port_index = data.readInt32();
+            sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
+            data.read(*graphicBuffer);
+
+            buffer_id buffer;
+            status_t err = useGraphicBuffer(
+                    node, port_index, graphicBuffer, &buffer);
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                reply->writeIntPtr((intptr_t)buffer);
+            }
+
+            return NO_ERROR;
+        }
+
+        case STORE_META_DATA_IN_BUFFERS:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = (void*)data.readIntPtr();
+            OMX_U32 port_index = data.readInt32();
+            OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+            status_t err = storeMetaDataInBuffers(node, port_index, enable);
+            reply->writeInt32(err);
+
+            return NO_ERROR;
+        }
+
         case ALLOC_BUFFER:
         {
             CHECK_INTERFACE(IOMX, data, reply);
@@ -672,35 +697,6 @@
             return OK;
         }
 
-        case CREATE_RENDERER:
-        {
-            CHECK_INTERFACE(IOMX, data, reply);
-
-            sp<ISurface> isurface =
-                interface_cast<ISurface>(data.readStrongBinder());
-
-            const char *componentName = data.readCString();
-
-            OMX_COLOR_FORMATTYPE colorFormat =
-                static_cast<OMX_COLOR_FORMATTYPE>(data.readInt32());
-
-            size_t encodedWidth = (size_t)data.readInt32();
-            size_t encodedHeight = (size_t)data.readInt32();
-            size_t displayWidth = (size_t)data.readInt32();
-            size_t displayHeight = (size_t)data.readInt32();
-            int32_t rotationDegrees = data.readInt32();
-
-            sp<IOMXRenderer> renderer =
-                createRenderer(isurface, componentName, colorFormat,
-                               encodedWidth, encodedHeight,
-                               displayWidth, displayHeight,
-                               rotationDegrees);
-
-            reply->writeStrongBinder(renderer->asBinder());
-
-            return OK;
-        }
-
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
@@ -746,44 +742,4 @@
     }
 }
 
-////////////////////////////////////////////////////////////////////////////////
-
-class BpOMXRenderer : public BpInterface<IOMXRenderer> {
-public:
-    BpOMXRenderer(const sp<IBinder> &impl)
-        : BpInterface<IOMXRenderer>(impl) {
-    }
-
-    virtual void render(IOMX::buffer_id buffer) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IOMXRenderer::getInterfaceDescriptor());
-        data.writeIntPtr((intptr_t)buffer);
-
-        // NOTE: Do NOT make this a ONE_WAY call, it must be synchronous
-        // so that the caller knows when to recycle the buffer.
-        remote()->transact(RENDERER_RENDER, data, &reply);
-    }
-};
-
-IMPLEMENT_META_INTERFACE(OMXRenderer, "android.hardware.IOMXRenderer");
-
-status_t BnOMXRenderer::onTransact(
-    uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
-    switch (code) {
-        case RENDERER_RENDER:
-        {
-            CHECK_INTERFACE(IOMXRenderer, data, reply);
-
-            IOMX::buffer_id buffer = (void*)data.readIntPtr();
-
-            render(buffer);
-
-            return NO_ERROR;
-        }
-
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
 }  // namespace android
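
The IOMX change collapses GET_PARAMETER, SET_PARAMETER, GET_CONFIG and SET_CONFIG into one body: the four cases fall through to shared buffer handling, an inner switch picks the actual operation, and only the two getters copy the blob back into the reply. The same shape reduced to a runnable toy, with no OMX types involved:

    #include <cstdio>
    #include <vector>

    enum Code { GET_PARAMETER = 1, SET_PARAMETER, GET_CONFIG, SET_CONFIG };

    // Four transaction codes, one handler: common pre- and post-work is
    // written once, and an inner switch covers the part that differs.
    void handle(Code code, std::vector<unsigned char>& params) {
        switch (code) {
            case GET_PARAMETER:
            case SET_PARAMETER:
            case GET_CONFIG:
            case SET_CONFIG:
            {
                // common pre-work: the params blob has been "read" into `params`
                const char* what = "";
                switch (code) {
                    case GET_PARAMETER: what = "getParameter"; break;
                    case SET_PARAMETER: what = "setParameter"; break;
                    case GET_CONFIG:    what = "getConfig";    break;
                    case SET_CONFIG:    what = "setConfig";    break;
                }
                std::printf("dispatch -> %s (%zu bytes)\n", what, params.size());

                // common post-work: only the getters send the blob back
                if (code == GET_PARAMETER || code == GET_CONFIG) {
                    std::printf("reply with %zu bytes\n", params.size());
                }
                break;
            }
        }
    }

    int main() {
        std::vector<unsigned char> blob(16, 0);
        handle(GET_CONFIG, blob);
        handle(SET_PARAMETER, blob);
        return 0;
    }
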
diff --git a/media/libmedia/IStreamSource.cpp b/media/libmedia/IStreamSource.cpp
new file mode 100644
index 0000000..c14ee82
--- /dev/null
+++ b/media/libmedia/IStreamSource.cpp
@@ -0,0 +1,189 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IStreamSource"
+#include <utils/Log.h>
+
+#include <media/IStreamSource.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+// static
+const char *const IStreamListener::kKeyResumeAtPTS = "resume-at-PTS";
+
+enum {
+    // IStreamSource
+    SET_LISTENER = IBinder::FIRST_CALL_TRANSACTION,
+    SET_BUFFERS,
+    ON_BUFFER_AVAILABLE,
+
+    // IStreamListener
+    QUEUE_BUFFER,
+    ISSUE_COMMAND,
+};
+
+struct BpStreamSource : public BpInterface<IStreamSource> {
+    BpStreamSource(const sp<IBinder> &impl)
+        : BpInterface<IStreamSource>(impl) {
+    }
+
+    virtual void setListener(const sp<IStreamListener> &listener) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
+        data.writeStrongBinder(listener->asBinder());
+        remote()->transact(SET_LISTENER, data, &reply);
+    }
+
+    virtual void setBuffers(const Vector<sp<IMemory> > &buffers) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(buffers.size()));
+        for (size_t i = 0; i < buffers.size(); ++i) {
+            data.writeStrongBinder(buffers.itemAt(i)->asBinder());
+        }
+        remote()->transact(SET_BUFFERS, data, &reply);
+    }
+
+    virtual void onBufferAvailable(size_t index) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(index));
+        remote()->transact(
+                ON_BUFFER_AVAILABLE, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+};
+
+IMPLEMENT_META_INTERFACE(StreamSource, "android.hardware.IStreamSource");
+
+status_t BnStreamSource::onTransact(
+        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+    switch (code) {
+        case SET_LISTENER:
+        {
+            CHECK_INTERFACE(IStreamSource, data, reply);
+            setListener(
+                    interface_cast<IStreamListener>(data.readStrongBinder()));
+            break;
+        }
+
+        case SET_BUFFERS:
+        {
+            CHECK_INTERFACE(IStreamSource, data, reply);
+            size_t n = static_cast<size_t>(data.readInt32());
+            Vector<sp<IMemory> > buffers;
+            for (size_t i = 0; i < n; ++i) {
+                sp<IMemory> mem =
+                    interface_cast<IMemory>(data.readStrongBinder());
+
+                buffers.push(mem);
+            }
+            setBuffers(buffers);
+            break;
+        }
+
+        case ON_BUFFER_AVAILABLE:
+        {
+            CHECK_INTERFACE(IStreamSource, data, reply);
+            onBufferAvailable(static_cast<size_t>(data.readInt32()));
+            break;
+        }
+
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+
+    return OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct BpStreamListener : public BpInterface<IStreamListener> {
+    BpStreamListener(const sp<IBinder> &impl)
+        : BpInterface<IStreamListener>(impl) {
+    }
+
+    virtual void queueBuffer(size_t index, size_t size) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamListener::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(index));
+        data.writeInt32(static_cast<int32_t>(size));
+
+        remote()->transact(QUEUE_BUFFER, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+
+    virtual void issueCommand(
+            Command cmd, bool synchronous, const sp<AMessage> &msg) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamListener::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(cmd));
+        data.writeInt32(static_cast<int32_t>(synchronous));
+
+        if (msg != NULL) {
+            data.writeInt32(1);
+            msg->writeToParcel(&data);
+        } else {
+            data.writeInt32(0);
+        }
+
+        remote()->transact(ISSUE_COMMAND, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+};
+
+IMPLEMENT_META_INTERFACE(StreamListener, "android.hardware.IStreamListener");
+
+status_t BnStreamListener::onTransact(
+        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+    switch (code) {
+        case QUEUE_BUFFER:
+        {
+            CHECK_INTERFACE(IStreamListener, data, reply);
+            size_t index = static_cast<size_t>(data.readInt32());
+            size_t size = static_cast<size_t>(data.readInt32());
+
+            queueBuffer(index, size);
+            break;
+        }
+
+        case ISSUE_COMMAND:
+        {
+            CHECK_INTERFACE(IStreamListener, data, reply);
+            Command cmd = static_cast<Command>(data.readInt32());
+
+            bool synchronous = static_cast<bool>(data.readInt32());
+
+            sp<AMessage> msg;
+
+            if (data.readInt32()) {
+                msg = AMessage::FromParcel(data);
+            }
+
+            issueCommand(cmd, synchronous, msg);
+            break;
+        }
+
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+
+    return OK;
+}
+
+}  // namespace android
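
IStreamListener::issueCommand() marshals its optional AMessage with a presence flag: write 1 followed by the flattened message, or 0 when there is none, and the stub only parses a message when the flag is set. A standalone sketch of that convention using std::optional and a toy parcel (again, not the real Parcel API):

    #include <cstdint>
    #include <cstdio>
    #include <deque>
    #include <optional>
    #include <string>

    // Toy parcel: separate FIFOs for ints and strings keep the sketch short.
    struct ToyParcel {
        std::deque<int32_t> ints;
        std::deque<std::string> strings;
        void writeInt32(int32_t v) { ints.push_back(v); }
        int32_t readInt32() { int32_t v = ints.front(); ints.pop_front(); return v; }
        void writeString(std::string s) { strings.push_back(std::move(s)); }
        std::string readString() { std::string s = strings.front(); strings.pop_front(); return s; }
    };

    // Presence flag first, payload only if present.
    void writeOptional(ToyParcel& p, const std::optional<std::string>& msg) {
        if (msg) {
            p.writeInt32(1);
            p.writeString(*msg);
        } else {
            p.writeInt32(0);
        }
    }

    std::optional<std::string> readOptional(ToyParcel& p) {
        if (p.readInt32()) {
            return p.readString();
        }
        return std::nullopt;
    }

    int main() {
        ToyParcel p;
        writeOptional(p, std::string("seek-to-start"));
        writeOptional(p, std::nullopt);
        std::optional<std::string> a = readOptional(p);
        std::optional<std::string> b = readOptional(p);
        std::printf("a=%s b=%s\n", a ? a->c_str() : "(none)", b ? b->c_str() : "(none)");
        return 0;
    }
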
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 3869389..7fb7aed 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -59,8 +59,21 @@
 };
 
 const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = {
+    {"low", CAMCORDER_QUALITY_LOW},
     {"high", CAMCORDER_QUALITY_HIGH},
-    {"low",  CAMCORDER_QUALITY_LOW}
+    {"qcif", CAMCORDER_QUALITY_QCIF},
+    {"cif", CAMCORDER_QUALITY_CIF},
+    {"480p", CAMCORDER_QUALITY_480P},
+    {"720p", CAMCORDER_QUALITY_720P},
+    {"1080p", CAMCORDER_QUALITY_1080P},
+
+    {"timelapselow",  CAMCORDER_QUALITY_TIME_LAPSE_LOW},
+    {"timelapsehigh", CAMCORDER_QUALITY_TIME_LAPSE_HIGH},
+    {"timelapseqcif", CAMCORDER_QUALITY_TIME_LAPSE_QCIF},
+    {"timelapsecif", CAMCORDER_QUALITY_TIME_LAPSE_CIF},
+    {"timelapse480p", CAMCORDER_QUALITY_TIME_LAPSE_480P},
+    {"timelapse720p", CAMCORDER_QUALITY_TIME_LAPSE_720P},
+    {"timelapse1080p", CAMCORDER_QUALITY_TIME_LAPSE_1080P}
 };
 
 /*static*/ void
@@ -271,8 +284,17 @@
     return static_cast<output_format>(format);
 }
 
+static bool isCameraIdFound(int cameraId, const Vector<int>& cameraIds) {
+    for (int i = 0, n = cameraIds.size(); i < n; ++i) {
+        if (cameraId == cameraIds[i]) {
+            return true;
+        }
+    }
+    return false;
+}
+
 /*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createCamcorderProfile(int cameraId, const char **atts)
+MediaProfiles::createCamcorderProfile(int cameraId, const char **atts, Vector<int>& cameraIds)
 {
     CHECK(!strcmp("quality",    atts[0]) &&
           !strcmp("fileFormat", atts[2]) &&
@@ -288,6 +310,9 @@
 
     MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = cameraId;
+    if (!isCameraIdFound(cameraId, cameraIds)) {
+        cameraIds.add(cameraId);
+    }
     profile->mFileFormat = static_cast<output_format>(fileFormat);
     profile->mQuality = static_cast<camcorder_quality>(quality);
     profile->mDuration = atoi(atts[5]);
@@ -357,12 +382,167 @@
         profiles->mCurrentCameraId = getCameraId(atts);
     } else if (strcmp("EncoderProfile", name) == 0) {
         profiles->mCamcorderProfiles.add(
-            createCamcorderProfile(profiles->mCurrentCameraId, atts));
+            createCamcorderProfile(profiles->mCurrentCameraId, atts, profiles->mCameraIds));
     } else if (strcmp("ImageEncoding", name) == 0) {
         profiles->addImageEncodingQualityLevel(profiles->mCurrentCameraId, atts);
     }
 }
 
+static bool isCamcorderProfile(camcorder_quality quality) {
+    return quality >= CAMCORDER_QUALITY_LIST_START &&
+           quality <= CAMCORDER_QUALITY_LIST_END;
+}
+
+static bool isTimelapseProfile(camcorder_quality quality) {
+    return quality >= CAMCORDER_QUALITY_TIME_LAPSE_LIST_START &&
+           quality <= CAMCORDER_QUALITY_TIME_LAPSE_LIST_END;
+}
+
+void MediaProfiles::initRequiredProfileRefs(const Vector<int>& cameraIds) {
+    LOGV("Number of camera ids: %d", cameraIds.size());
+    CHECK(cameraIds.size() > 0);
+    mRequiredProfileRefs = new RequiredProfiles[cameraIds.size()];
+    for (size_t i = 0, n = cameraIds.size(); i < n; ++i) {
+        mRequiredProfileRefs[i].mCameraId = cameraIds[i];
+        for (size_t j = 0; j < kNumRequiredProfiles; ++j) {
+            mRequiredProfileRefs[i].mRefs[j].mHasRefProfile = false;
+            mRequiredProfileRefs[i].mRefs[j].mRefProfileIndex = -1;
+            if ((j & 1) == 0) {  // low resolution
+                mRequiredProfileRefs[i].mRefs[j].mResolutionProduct = 0x7FFFFFFF;
+            } else {             // high resolution
+                mRequiredProfileRefs[i].mRefs[j].mResolutionProduct = 0;
+            }
+        }
+    }
+}
+
+int MediaProfiles::getRequiredProfileRefIndex(int cameraId) {
+    for (size_t i = 0, n = mCameraIds.size(); i < n; ++i) {
+        if (mCameraIds[i] == cameraId) {
+            return i;
+        }
+    }
+    return -1;
+}
+
+void MediaProfiles::checkAndAddRequiredProfilesIfNecessary() {
+    if (sIsInitialized) {
+        return;
+    }
+
+    initRequiredProfileRefs(mCameraIds);
+
+    for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) {
+        int product = mCamcorderProfiles[i]->mVideoCodec->mFrameWidth *
+                      mCamcorderProfiles[i]->mVideoCodec->mFrameHeight;
+
+        camcorder_quality quality = mCamcorderProfiles[i]->mQuality;
+        int cameraId = mCamcorderProfiles[i]->mCameraId;
+        int index = -1;
+        int refIndex = getRequiredProfileRefIndex(cameraId);
+        CHECK(refIndex != -1);
+        RequiredProfileRefInfo *info;
+        camcorder_quality refQuality;
+        VideoCodec *codec = NULL;
+
+        // Check high and low from either camcorder profile or timelapse profile
+        // but not both. By default, check the camcorder profile.
+        size_t j = 0;
+        size_t n = 2;
+        if (isTimelapseProfile(quality)) {
+            // Check timelapse profile instead.
+            j = 2;
+            n = kNumRequiredProfiles;
+        } else {
+            // Must be camcorder profile.
+            CHECK(isCamcorderProfile(quality));
+        }
+        for (; j < n; ++j) {
+            info = &(mRequiredProfileRefs[refIndex].mRefs[j]);
+            if ((j % 2 == 0 && product > info->mResolutionProduct) ||  // low
+                (j % 2 != 0 && product < info->mResolutionProduct)) {  // high
+                continue;
+            }
+            switch (j) {
+                case 0:
+                   refQuality = CAMCORDER_QUALITY_LOW;
+                   break;
+                case 1:
+                   refQuality = CAMCORDER_QUALITY_HIGH;
+                   break;
+                case 2:
+                   refQuality = CAMCORDER_QUALITY_TIME_LAPSE_LOW;
+                   break;
+                case 3:
+                   refQuality = CAMCORDER_QUALITY_TIME_LAPSE_HIGH;
+                   break;
+                default:
+                    CHECK(!"Should never reach here");
+            }
+
+            if (!info->mHasRefProfile) {
+                index = getCamcorderProfileIndex(cameraId, refQuality);
+            }
+            if (index == -1) {
+                // New high or low quality profile is found.
+                // Update its reference.
+                info->mHasRefProfile = true;
+                info->mRefProfileIndex = i;
+                info->mResolutionProduct = product;
+            }
+        }
+    }
+
+    for (size_t cameraId = 0; cameraId < mCameraIds.size(); ++cameraId) {
+        for (size_t j = 0; j < kNumRequiredProfiles; ++j) {
+            int refIndex = getRequiredProfileRefIndex(cameraId);
+            CHECK(refIndex != -1);
+            RequiredProfileRefInfo *info =
+                    &mRequiredProfileRefs[refIndex].mRefs[j];
+
+            if (info->mHasRefProfile) {
+
+                CamcorderProfile *profile =
+                    new CamcorderProfile(
+                            *mCamcorderProfiles[info->mRefProfileIndex]);
+
+                // Overwrite the quality
+                switch (j % kNumRequiredProfiles) {
+                    case 0:
+                        profile->mQuality = CAMCORDER_QUALITY_LOW;
+                        break;
+                    case 1:
+                        profile->mQuality = CAMCORDER_QUALITY_HIGH;
+                        break;
+                    case 2:
+                        profile->mQuality = CAMCORDER_QUALITY_TIME_LAPSE_LOW;
+                        break;
+                    case 3:
+                        profile->mQuality = CAMCORDER_QUALITY_TIME_LAPSE_HIGH;
+                        break;
+                    default:
+                        CHECK(!"Should never come here");
+                }
+
+                int index = getCamcorderProfileIndex(cameraId, profile->mQuality);
+                if (index != -1) {
+                    LOGV("Profile quality %d for camera %d already exists",
+                        profile->mQuality, cameraId);
+                    CHECK(index == refIndex);
+                    continue;
+                }
+
+                // Insert the new profile
+                LOGV("Add a profile: quality %d=>%d for camera %d",
+                        mCamcorderProfiles[info->mRefProfileIndex]->mQuality,
+                        profile->mQuality, cameraId);
+
+                mCamcorderProfiles.add(profile);
+            }
+        }
+    }
+}
+
 /*static*/ MediaProfiles*
 MediaProfiles::getInstance()
 {
@@ -383,6 +563,9 @@
         } else {
             sInstance = createInstanceFromXmlFile(value);
         }
+        CHECK(sInstance != NULL);
+        sInstance->checkAndAddRequiredProfilesIfNecessary();
+        sIsInitialized = true;
     }
 
     return sInstance;
@@ -411,16 +594,16 @@
 }
 
 /*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderHighProfile()
+MediaProfiles::createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality)
 {
     MediaProfiles::VideoCodec *videoCodec =
-        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 1000000, 176, 144, 20);
 
     AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
     CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = 0;
     profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
-    profile->mQuality = CAMCORDER_QUALITY_HIGH;
+    profile->mQuality = quality;
     profile->mDuration = 60;
     profile->mVideoCodec = videoCodec;
     profile->mAudioCodec = audioCodec;
@@ -428,7 +611,40 @@
 }
 
 /*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderLowProfile()
+MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality)
+{
+    MediaProfiles::VideoCodec *videoCodec =
+        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 20000000, 720, 480, 20);
+
+    AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+    CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+    profile->mCameraId = 0;
+    profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+    profile->mQuality = quality;
+    profile->mDuration = 60;
+    profile->mVideoCodec = videoCodec;
+    profile->mAudioCodec = audioCodec;
+    return profile;
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseLowProfiles(
+        MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
+        MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile) {
+    *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_LOW);
+    *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseHighProfiles(
+        MediaProfiles::CamcorderProfile **highTimeLapseProfile,
+        MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile) {
+    *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_HIGH);
+    *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_480P);
+}
+
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderQcifProfile(camcorder_quality quality)
 {
     MediaProfiles::VideoCodec *videoCodec =
         new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 192000, 176, 144, 20);
@@ -439,18 +655,72 @@
     MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = 0;
     profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
-    profile->mQuality = CAMCORDER_QUALITY_LOW;
+    profile->mQuality = quality;
     profile->mDuration = 30;
     profile->mVideoCodec = videoCodec;
     profile->mAudioCodec = audioCodec;
     return profile;
 }
 
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderCifProfile(camcorder_quality quality)
+{
+    MediaProfiles::VideoCodec *videoCodec =
+        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+
+    AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+    CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+    profile->mCameraId = 0;
+    profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+    profile->mQuality = quality;
+    profile->mDuration = 60;
+    profile->mVideoCodec = videoCodec;
+    profile->mAudioCodec = audioCodec;
+    return profile;
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderLowProfiles(
+        MediaProfiles::CamcorderProfile **lowProfile,
+        MediaProfiles::CamcorderProfile **lowSpecificProfile) {
+    *lowProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_LOW);
+    *lowSpecificProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderHighProfiles(
+        MediaProfiles::CamcorderProfile **highProfile,
+        MediaProfiles::CamcorderProfile **highSpecificProfile) {
+    *highProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_HIGH);
+    *highSpecificProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_CIF);
+}
+
 /*static*/ void
 MediaProfiles::createDefaultCamcorderProfiles(MediaProfiles *profiles)
 {
-    profiles->mCamcorderProfiles.add(createDefaultCamcorderHighProfile());
-    profiles->mCamcorderProfiles.add(createDefaultCamcorderLowProfile());
+    // low camcorder profiles.
+    MediaProfiles::CamcorderProfile *lowProfile, *lowSpecificProfile;
+    createDefaultCamcorderLowProfiles(&lowProfile, &lowSpecificProfile);
+    profiles->mCamcorderProfiles.add(lowProfile);
+    profiles->mCamcorderProfiles.add(lowSpecificProfile);
+
+    // high camcorder profiles.
+    MediaProfiles::CamcorderProfile* highProfile, *highSpecificProfile;
+    createDefaultCamcorderHighProfiles(&highProfile, &highSpecificProfile);
+    profiles->mCamcorderProfiles.add(highProfile);
+    profiles->mCamcorderProfiles.add(highSpecificProfile);
+
+    // low camcorder time lapse profiles.
+    MediaProfiles::CamcorderProfile *lowTimeLapseProfile, *lowSpecificTimeLapseProfile;
+    createDefaultCamcorderTimeLapseLowProfiles(&lowTimeLapseProfile, &lowSpecificTimeLapseProfile);
+    profiles->mCamcorderProfiles.add(lowTimeLapseProfile);
+    profiles->mCamcorderProfiles.add(lowSpecificTimeLapseProfile);
+
+    // high camcorder time lapse profiles.
+    MediaProfiles::CamcorderProfile *highTimeLapseProfile, *highSpecificTimeLapseProfile;
+    createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, &highSpecificTimeLapseProfile);
+    profiles->mCamcorderProfiles.add(highTimeLapseProfile);
+    profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile);
 }
 
 /*static*/ void
@@ -513,7 +783,6 @@
     createDefaultAudioDecoders(profiles);
     createDefaultEncoderOutputFileFormats(profiles);
     createDefaultImageEncodingQualityLevels(profiles);
-    sIsInitialized = true;
     return profiles;
 }
 
@@ -567,9 +836,6 @@
 exit:
     ::XML_ParserFree(parser);
     ::fclose(fp);
-    if (profiles) {
-        sIsInitialized = true;
-    }
     return profiles;
 }
 
@@ -668,13 +934,8 @@
     return decoders;  // copy out
 }
 
-int MediaProfiles::getCamcorderProfileParamByName(const char *name,
-                                                  int cameraId,
-                                                  camcorder_quality quality) const
+int MediaProfiles::getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const
 {
-    LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
-         name, cameraId, quality);
-
     int index = -1;
     for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) {
         if (mCamcorderProfiles[i]->mCameraId == cameraId &&
@@ -683,6 +944,17 @@
             break;
         }
     }
+    return index;
+}
+
+int MediaProfiles::getCamcorderProfileParamByName(const char *name,
+                                                  int cameraId,
+                                                  camcorder_quality quality) const
+{
+    LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
+         name, cameraId, quality);
+
+    int index = getCamcorderProfileIndex(cameraId, quality);
     if (index == -1) {
         LOGE("The given camcorder profile camera %d quality %d is not found",
              cameraId, quality);
@@ -705,6 +977,11 @@
     return -1;
 }
 
+bool MediaProfiles::hasCamcorderProfile(int cameraId, camcorder_quality quality) const
+{
+    return (getCamcorderProfileIndex(cameraId, quality) != -1);
+}
+
 Vector<int> MediaProfiles::getImageEncodingQualityLevels(int cameraId) const
 {
     Vector<int> result;
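
Editor's example (not part of the patch): the new hasCamcorderProfile()/getCamcorderProfileIndex() pair lets a caller probe a (cameraId, quality) combination before asking for individual parameters. A minimal caller-side sketch, assuming the MediaProfiles::getInstance() singleton and the "vid.width"/"vid.height" keys accepted by getCamcorderProfileParamByName() (those key names are not shown in this patch and are an assumption):

    #include <media/MediaProfiles.h>

    using namespace android;

    // Fall back to CAMCORDER_QUALITY_LOW when the requested quality is not
    // defined for this camera (the low profile is assumed to always exist).
    static void getVideoSize(int cameraId, camcorder_quality quality,
                             int *width, int *height) {
        MediaProfiles *profiles = MediaProfiles::getInstance();
        if (!profiles->hasCamcorderProfile(cameraId, quality)) {
            quality = CAMCORDER_QUALITY_LOW;
        }
        *width  = profiles->getCamcorderProfileParamByName("vid.width",  cameraId, quality);
        *height = profiles->getCamcorderProfileParamByName("vid.height", cameraId, quality);
    }
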
diff --git a/media/libmedia/MediaScanner.cpp b/media/libmedia/MediaScanner.cpp
index c5112a5..5ec573e 100644
--- a/media/libmedia/MediaScanner.cpp
+++ b/media/libmedia/MediaScanner.cpp
@@ -48,8 +48,7 @@
 }
 
 status_t MediaScanner::processDirectory(
-        const char *path, const char *extensions,
-        MediaScannerClient &client,
+        const char *path, MediaScannerClient &client,
         ExceptionCheck exceptionCheck, void *exceptionEnv) {
     int pathLength = strlen(path);
     if (pathLength >= PATH_MAX) {
@@ -72,38 +71,20 @@
 
     status_t result =
         doProcessDirectory(
-                pathBuffer, pathRemaining, extensions, client,
-                exceptionCheck, exceptionEnv);
+                pathBuffer, pathRemaining, client, exceptionCheck, exceptionEnv);
 
     free(pathBuffer);
 
     return result;
 }
 
-static bool fileMatchesExtension(const char* path, const char* extensions) {
-    const char* extension = strrchr(path, '.');
-    if (!extension) return false;
-    ++extension;    // skip the dot
-    if (extension[0] == 0) return false;
-
-    while (extensions[0]) {
-        const char* comma = strchr(extensions, ',');
-        size_t length = (comma ? comma - extensions : strlen(extensions));
-        if (length == strlen(extension) && strncasecmp(extension, extensions, length) == 0) return true;
-        extensions += length;
-        if (extensions[0] == ',') ++extensions;
-    }
-
-    return false;
-}
-
 status_t MediaScanner::doProcessDirectory(
-        char *path, int pathRemaining, const char *extensions,
-        MediaScannerClient &client, ExceptionCheck exceptionCheck,
-        void *exceptionEnv) {
+        char *path, int pathRemaining, MediaScannerClient &client,
+        ExceptionCheck exceptionCheck, void *exceptionEnv) {
     // place to copy file or directory name
     char* fileSpot = path + strlen(path);
     struct dirent* entry;
+    struct stat statbuf;
 
     // ignore directories that contain a  ".nomedia" file
     if (pathRemaining >= 8 /* strlen(".nomedia") */ ) {
@@ -133,12 +114,18 @@
             continue;
         }
 
+        int nameLength = strlen(name);
+        if (nameLength + 1 > pathRemaining) {
+            // path too long!
+            continue;
+        }
+        strcpy(fileSpot, name);
+
         int type = entry->d_type;
         if (type == DT_UNKNOWN) {
             // If the type is unknown, stat() the file instead.
             // This is sometimes necessary when accessing NFS mounted filesystems, but
             // could be needed in other cases as well.
-            struct stat statbuf;
             if (stat(path, &statbuf) == 0) {
                 if (S_ISREG(statbuf.st_mode)) {
                     type = DT_REG;
@@ -150,34 +137,29 @@
             }
         }
         if (type == DT_REG || type == DT_DIR) {
-            int nameLength = strlen(name);
-            bool isDirectory = (type == DT_DIR);
-
-            if (nameLength > pathRemaining || (isDirectory && nameLength + 1 > pathRemaining)) {
-                // path too long!
-                continue;
-            }
-
-            strcpy(fileSpot, name);
-            if (isDirectory) {
+            if (type == DT_DIR) {
                 // ignore directories with a name that starts with '.'
                 // for example, the Mac ".Trashes" directory
                 if (name[0] == '.') continue;
 
+                // report the directory to the client
+                if (stat(path, &statbuf) == 0) {
+                    client.scanFile(path, statbuf.st_mtime, 0, true);
+                }
+
+                // and now process its contents
                 strcat(fileSpot, "/");
-                int err = doProcessDirectory(path, pathRemaining - nameLength - 1, extensions, client, exceptionCheck, exceptionEnv);
+                int err = doProcessDirectory(path, pathRemaining - nameLength - 1, client,
+                        exceptionCheck, exceptionEnv);
                 if (err) {
                     // pass exceptions up - ignore other errors
                     if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure;
                     LOGE("Error processing '%s' - skipping\n", path);
                     continue;
                 }
-            } else if (fileMatchesExtension(path, extensions)) {
-                struct stat statbuf;
+            } else {
                 stat(path, &statbuf);
-                if (statbuf.st_size > 0) {
-                    client.scanFile(path, statbuf.st_mtime, statbuf.st_size);
-                }
+                client.scanFile(path, statbuf.st_mtime, statbuf.st_size, false);
                 if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure;
             }
         }
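
Editor's example (not part of the patch): the rewritten doProcessDirectory() copies the entry name into the path buffer up front, reports a directory to the client before descending into it, and no longer filters files by extension. A self-contained POSIX sketch of the same traversal order, simplified (it skips every dot-entry, not only dot-directories):

    #include <dirent.h>
    #include <limits.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/stat.h>

    // Report a directory before descending into it; report every regular
    // file regardless of extension, mirroring the scanFile() calls above.
    static void walk(const char *path) {
        DIR *dir = opendir(path);
        if (dir == NULL) return;
        struct dirent *entry;
        while ((entry = readdir(dir)) != NULL) {
            const char *name = entry->d_name;
            if (name[0] == '.') continue;   // ".", ".." and hidden entries
            char child[PATH_MAX];
            if (snprintf(child, sizeof(child), "%s/%s", path, name) >= (int)sizeof(child)) {
                continue;                   // path too long
            }
            struct stat st;
            if (stat(child, &st) != 0) continue;
            if (S_ISDIR(st.st_mode)) {
                printf("dir  %s (mtime %ld)\n", child, (long)st.st_mtime);
                walk(child);                // descend after reporting
            } else if (S_ISREG(st.st_mode)) {
                printf("file %s (%lld bytes)\n", child, (long long)st.st_size);
            }
        }
        closedir(dir);
    }
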
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index cc41e66..0ee0249 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -172,16 +172,6 @@
     return INVALID_OPERATION;
 }
 
-status_t MediaPlayer::suspend() {
-    Mutex::Autolock _l(mLock);
-    return mPlayer->suspend();
-}
-
-status_t MediaPlayer::resume() {
-    Mutex::Autolock _l(mLock);
-    return mPlayer->resume();
-}
-
 status_t MediaPlayer::setMetadataFilter(const Parcel& filter)
 {
     LOGD("setMetadataFilter");
@@ -207,10 +197,18 @@
     LOGV("setVideoSurface");
     Mutex::Autolock _l(mLock);
     if (mPlayer == 0) return NO_INIT;
-    if (surface != NULL)
-        return  mPlayer->setVideoSurface(surface->getISurface());
-    else
-        return  mPlayer->setVideoSurface(NULL);
+
+    return mPlayer->setVideoSurface(surface);
+}
+
+status_t MediaPlayer::setVideoSurfaceTexture(
+        const sp<ISurfaceTexture>& surfaceTexture)
+{
+    LOGV("setVideoSurfaceTexture");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return NO_INIT;
+
+    return mPlayer->setVideoSurfaceTexture(surfaceTexture);
 }
 
 // must call with lock held
@@ -449,6 +447,9 @@
         } else {
             mCurrentState = MEDIA_PLAYER_IDLE;
         }
+        // setDataSource has to be called again to create a
+        // new mediaplayer.
+        mPlayer = 0;
         return ret;
     }
     clear_l();
@@ -616,7 +617,9 @@
     case MEDIA_INFO:
         // ext1: Media framework error code.
         // ext2: Implementation dependent error code.
-        LOGW("info/warning (%d, %d)", ext1, ext2);
+        if (ext1 != MEDIA_INFO_VIDEO_TRACK_LAGGING) {
+            LOGW("info/warning (%d, %d)", ext1, ext2);
+        }
         break;
     case MEDIA_SEEK_COMPLETE:
         LOGV("Received seek complete");
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index e20e3ba..0100a17 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -65,7 +65,7 @@
         return INVALID_OPERATION;
     }
 
-    status_t ret = mMediaRecorder->setPreviewSurface(surface->getISurface());
+    status_t ret = mMediaRecorder->setPreviewSurface(surface);
     if (OK != ret) {
         LOGV("setPreviewSurface failed: %d", ret);
         mCurrentState = MEDIA_RECORDER_ERROR;
@@ -298,6 +298,17 @@
         return INVALID_OPERATION;
     }
 
+    // It appears that if an invalid file descriptor is passed through
+    // binder calls, the server side of the inter-process function call
+    // is skipped. As a result, the server-side check that catches the
+    // invalid file descriptor never gets invoked. Work around this issue
+    // by checking the file descriptor here before passing it through the
+    // binder call.
+    if (fd < 0) {
+        LOGE("Invalid file descriptor: %d", fd);
+        return BAD_VALUE;
+    }
+
     status_t ret = mMediaRecorder->setOutputFile(fd, offset, length);
     if (OK != ret) {
         LOGV("setOutputFile failed: %d", ret);
@@ -308,6 +319,32 @@
     return ret;
 }
 
+status_t MediaRecorder::setOutputFileAuxiliary(int fd)
+{
+    LOGV("setOutputFileAuxiliary(%d)", fd);
+    if (mMediaRecorder == NULL) {
+        LOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    if (mIsAuxiliaryOutputFileSet) {
+        LOGE("output file has already been set");
+        return INVALID_OPERATION;
+    }
+    if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) {
+        LOGE("setOutputFile called in an invalid state(%d)", mCurrentState);
+        return INVALID_OPERATION;
+    }
+
+    status_t ret = mMediaRecorder->setOutputFileAuxiliary(fd);
+    if (OK != ret) {
+        LOGV("setOutputFileAuxiliary failed: %d", ret);
+        mCurrentState = MEDIA_RECORDER_ERROR;
+        return ret;
+    }
+    mIsAuxiliaryOutputFileSet = true;
+    return ret;
+}
+
 status_t MediaRecorder::setVideoSize(int width, int height)
 {
     LOGV("setVideoSize(%d, %d)", width, height);
@@ -571,6 +608,7 @@
     mIsAudioEncoderSet = false;
     mIsVideoEncoderSet = false;
     mIsOutputFileSet   = false;
+    mIsAuxiliaryOutputFileSet = false;
 }
 
 // Release should be OK in any state
@@ -643,4 +681,3 @@
 }
 
 }; // namespace android
-
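
Editor's example (not part of the patch): the fd < 0 check added above guards against a Binder quirk where an invalid descriptor makes the transaction fail before the server-side validation can run. A caller-side sketch of the same rule, using the MediaRecorder::setOutputFile(fd, offset, length) shown in this file; whether the recorder keeps its own dup() of the descriptor is an assumption based on the dup() calls elsewhere in this patch:

    #include <fcntl.h>
    #include <unistd.h>
    #include <media/mediarecorder.h>
    #include <utils/Errors.h>

    // Validate the descriptor locally before it crosses the Binder boundary.
    static android::status_t setRecorderOutput(
            const android::sp<android::MediaRecorder> &recorder, const char *path) {
        int fd = ::open(path, O_CREAT | O_RDWR, 0644);
        if (fd < 0) {
            return android::BAD_VALUE;  // never hand an invalid fd to the proxy
        }
        android::status_t err = recorder->setOutputFile(fd, 0 /* offset */, 0 /* length */);
        ::close(fd);                    // recorder is assumed to dup() the fd it keeps
        return err;
    }
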
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 55846be..e65f6d8 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -31,20 +31,13 @@
 	libandroid_runtime    			\
 	libstagefright        			\
 	libstagefright_omx    			\
-	libstagefright_color_conversion         \
 	libstagefright_foundation               \
-	libsurfaceflinger_client
+	libsurfaceflinger_client                \
+	libgui
 
 LOCAL_STATIC_LIBRARIES := \
-        libstagefright_rtsp
-
-ifneq ($(BUILD_WITHOUT_PV),true)
-LOCAL_SHARED_LIBRARIES += \
-	libopencore_player    \
-	libopencore_author
-else
-LOCAL_CFLAGS += -DNO_OPENCORE
-endif
+        libstagefright_rtsp                     \
+        libstagefright_nuplayer                 \
 
 ifneq ($(TARGET_SIMULATOR),true)
 LOCAL_SHARED_LIBRARIES += libdl
@@ -56,9 +49,11 @@
 	$(TOP)/frameworks/base/include/media/stagefright/openmax \
 	$(TOP)/frameworks/base/media/libstagefright/include             \
 	$(TOP)/frameworks/base/media/libstagefright/rtsp                \
-        $(TOP)/external/tremolo/Tremolo
+        $(TOP)/external/tremolo/Tremolo \
 
 LOCAL_MODULE:= libmediaplayerservice
 
 include $(BUILD_SHARED_LIBRARY)
 
+include $(call all-makefiles-under,$(LOCAL_PATH))
+
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index c43e9bb..a42cca5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -23,6 +23,7 @@
 
 #include <sys/types.h>
 #include <sys/stat.h>
+#include <sys/time.h>
 #include <dirent.h>
 #include <unistd.h>
 
@@ -51,14 +52,16 @@
 #include <media/Metadata.h>
 #include <media/AudioTrack.h>
 
+#include <private/android_filesystem_config.h>
+
 #include "MediaRecorderClient.h"
 #include "MediaPlayerService.h"
 #include "MetadataRetrieverClient.h"
 
 #include "MidiFile.h"
-#include <media/PVPlayer.h>
 #include "TestPlayerStub.h"
 #include "StagefrightPlayer.h"
+#include "nuplayer/NuPlayerDriver.h"
 
 #include <OMX.h>
 
@@ -196,11 +199,6 @@
         {".rtttl", SONIVOX_PLAYER},
         {".rtx", SONIVOX_PLAYER},
         {".ota", SONIVOX_PLAYER},
-#ifndef NO_OPENCORE
-        {".wma", PV_PLAYER},
-        {".wmv", PV_PLAYER},
-        {".asf", PV_PLAYER},
-#endif
 };
 
 // TODO: Find real cause of Audio/Video delay in PV framework and remove this workaround
@@ -216,6 +214,15 @@
 {
     LOGV("MediaPlayerService created");
     mNextConnId = 1;
+
+    mBatteryAudio.refCount = 0;
+    for (int i = 0; i < NUM_AUDIO_DEVICES; i++) {
+        mBatteryAudio.deviceOn[i] = 0;
+        mBatteryAudio.lastTime[i] = 0;
+        mBatteryAudio.totalTime[i] = 0;
+    }
+    // speaker is on by default
+    mBatteryAudio.deviceOn[SPEAKER] = 1;
 }
 
 MediaPlayerService::~MediaPlayerService()
@@ -284,6 +291,26 @@
     return c;
 }
 
+sp<IMediaPlayer> MediaPlayerService::create(
+        pid_t pid, const sp<IMediaPlayerClient> &client,
+        const sp<IStreamSource> &source, int audioSessionId) {
+    int32_t connId = android_atomic_inc(&mNextConnId);
+    sp<Client> c = new Client(this, pid, connId, client, audioSessionId);
+
+    LOGV("Create new client(%d) from pid %d, audioSessionId=%d",
+         connId, pid, audioSessionId);
+
+    if (OK != c->setDataSource(source)) {
+        c.clear();
+    } else {
+        wp<Client> w = c;
+        Mutex::Autolock lock(mLock);
+        mClients.add(w);
+    }
+
+    return c;
+}
+
 sp<IOMX> MediaPlayerService::getOMX() {
     Mutex::Autolock autoLock(mLock);
 
@@ -691,14 +718,6 @@
     if (ident == 0x5367674f) // 'OggS'
         return STAGEFRIGHT_PLAYER;
 
-#ifndef NO_OPENCORE
-    if (ident == 0x75b22630) {
-        // The magic number for .asf files, i.e. wmv and wma content.
-        // These are not currently supported through stagefright.
-        return PV_PLAYER;
-    }
-#endif
-
     // Some kind of MIDI?
     EAS_DATA_HANDLE easdata;
     if (EAS_Init(&easdata) == EAS_SUCCESS) {
@@ -725,6 +744,17 @@
         return TEST_PLAYER;
     }
 
+    if (!strncasecmp("http://", url, 7)) {
+        size_t len = strlen(url);
+        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
+            return NU_PLAYER;
+        }
+
+        if (strstr(url,"m3u8")) {
+            return NU_PLAYER;
+        }
+    }
+
     // use MidiFile for MIDI extensions
     int lenURL = strlen(url);
     for (int i = 0; i < NELEM(FILE_EXTS); ++i) {
@@ -737,16 +767,6 @@
         }
     }
 
-    if (!strncasecmp(url, "rtsp://", 7)) {
-        char value[PROPERTY_VALUE_MAX];
-        if (property_get("media.stagefright.enable-rtsp", value, NULL)
-            && (strcmp(value, "1") && strcasecmp(value, "true"))) {
-            // For now, we're going to use PV for rtsp-based playback
-            // by default until we can clear up a few more issues.
-            return PV_PLAYER;
-        }
-    }
-
     return getDefaultPlayerType();
 }
 
@@ -755,12 +775,6 @@
 {
     sp<MediaPlayerBase> p;
     switch (playerType) {
-#ifndef NO_OPENCORE
-        case PV_PLAYER:
-            LOGV(" create PVPlayer");
-            p = new PVPlayer();
-            break;
-#endif
         case SONIVOX_PLAYER:
             LOGV(" create MidiFile");
             p = new MidiFile();
@@ -769,10 +783,17 @@
             LOGV(" create StagefrightPlayer");
             p = new StagefrightPlayer;
             break;
+        case NU_PLAYER:
+            LOGV(" create NuPlayer");
+            p = new NuPlayerDriver;
+            break;
         case TEST_PLAYER:
             LOGV("Create Test Player stub");
             p = new TestPlayerStub();
             break;
+        default:
+            LOGE("Unknown player type: %d", playerType);
+            return NULL;
     }
     if (p != NULL) {
         if (p->initCheck() == NO_ERROR) {
@@ -891,7 +912,31 @@
     return mStatus;
 }
 
-status_t MediaPlayerService::Client::setVideoSurface(const sp<ISurface>& surface)
+status_t MediaPlayerService::Client::setDataSource(
+        const sp<IStreamSource> &source) {
+    // create the right type of player
+    sp<MediaPlayerBase> p = createPlayer(NU_PLAYER);
+
+    if (p == NULL) {
+        return NO_INIT;
+    }
+
+    if (!p->hardwareOutput()) {
+        mAudioOutput = new AudioOutput(mAudioSessionId);
+        static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
+    }
+
+    // now set data source
+    mStatus = p->setDataSource(source);
+
+    if (mStatus == OK) {
+        mPlayer = p;
+    }
+
+    return mStatus;
+}
+
+status_t MediaPlayerService::Client::setVideoSurface(const sp<Surface>& surface)
 {
     LOGV("[%d] setVideoSurface(%p)", mConnId, surface.get());
     sp<MediaPlayerBase> p = getPlayer();
@@ -899,6 +944,15 @@
     return p->setVideoSurface(surface);
 }
 
+status_t MediaPlayerService::Client::setVideoSurfaceTexture(
+        const sp<ISurfaceTexture>& surfaceTexture)
+{
+    LOGV("[%d] setVideoSurfaceTexture(%p)", mConnId, surfaceTexture.get());
+    sp<MediaPlayerBase> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->setVideoSurfaceTexture(surfaceTexture);
+}
+
 status_t MediaPlayerService::Client::invoke(const Parcel& request,
                                             Parcel *reply)
 {
@@ -966,20 +1020,6 @@
     return OK;
 }
 
-status_t MediaPlayerService::Client::suspend() {
-    sp<MediaPlayerBase> p = getPlayer();
-    if (p == 0) return UNKNOWN_ERROR;
-
-    return p->suspend();
-}
-
-status_t MediaPlayerService::Client::resume() {
-    sp<MediaPlayerBase> p = getPlayer();
-    if (p == 0) return UNKNOWN_ERROR;
-
-    return p->resume();
-}
-
 status_t MediaPlayerService::Client::prepareAsync()
 {
     LOGV("[%d] prepareAsync", mConnId);
@@ -1743,4 +1783,192 @@
     return 0;
 }
 
+void MediaPlayerService::addBatteryData(uint32_t params)
+{
+    Mutex::Autolock lock(mLock);
+
+    int32_t time = systemTime() / 1000000L;
+
+    // change audio output devices. This notification comes from AudioFlinger
+    if ((params & kBatteryDataSpeakerOn)
+            || (params & kBatteryDataOtherAudioDeviceOn)) {
+
+        int deviceOn[NUM_AUDIO_DEVICES];
+        for (int i = 0; i < NUM_AUDIO_DEVICES; i++) {
+            deviceOn[i] = 0;
+        }
+
+        if ((params & kBatteryDataSpeakerOn)
+                && (params & kBatteryDataOtherAudioDeviceOn)) {
+            deviceOn[SPEAKER_AND_OTHER] = 1;
+        } else if (params & kBatteryDataSpeakerOn) {
+            deviceOn[SPEAKER] = 1;
+        } else {
+            deviceOn[OTHER_AUDIO_DEVICE] = 1;
+        }
+
+        for (int i = 0; i < NUM_AUDIO_DEVICES; i++) {
+            if (mBatteryAudio.deviceOn[i] != deviceOn[i]){
+
+                if (mBatteryAudio.refCount > 0) { // if playing audio
+                    if (!deviceOn[i]) {
+                        mBatteryAudio.lastTime[i] += time;
+                        mBatteryAudio.totalTime[i] += mBatteryAudio.lastTime[i];
+                        mBatteryAudio.lastTime[i] = 0;
+                    } else {
+                        mBatteryAudio.lastTime[i] = 0 - time;
+                    }
+                }
+
+                mBatteryAudio.deviceOn[i] = deviceOn[i];
+            }
+        }
+        return;
+    }
+
+    // An audio stream is started.
+    if (params & kBatteryDataAudioFlingerStart) {
+        // record the start time only if currently no other audio
+        // is being played
+        if (mBatteryAudio.refCount == 0) {
+            for (int i = 0; i < NUM_AUDIO_DEVICES; i++) {
+                if (mBatteryAudio.deviceOn[i]) {
+                    mBatteryAudio.lastTime[i] -= time;
+                }
+            }
+        }
+
+        mBatteryAudio.refCount ++;
+        return;
+
+    } else if (params & kBatteryDataAudioFlingerStop) {
+        if (mBatteryAudio.refCount <= 0) {
+            LOGW("Battery track warning: refCount is <= 0");
+            return;
+        }
+
+        // record the stop time only if currently this is the only
+        // audio being played
+        if (mBatteryAudio.refCount == 1) {
+            for (int i = 0; i < NUM_AUDIO_DEVICES; i++) {
+                if (mBatteryAudio.deviceOn[i]) {
+                    mBatteryAudio.lastTime[i] += time;
+                    mBatteryAudio.totalTime[i] += mBatteryAudio.lastTime[i];
+                    mBatteryAudio.lastTime[i] = 0;
+                }
+            }
+        }
+
+        mBatteryAudio.refCount --;
+        return;
+    }
+
+    int uid = IPCThreadState::self()->getCallingUid();
+    if (uid == AID_MEDIA) {
+        return;
+    }
+    int index = mBatteryData.indexOfKey(uid);
+
+    if (index < 0) { // create a new entry for this UID
+        BatteryUsageInfo info;
+        info.audioTotalTime = 0;
+        info.videoTotalTime = 0;
+        info.audioLastTime = 0;
+        info.videoLastTime = 0;
+        info.refCount = 0;
+
+        if (mBatteryData.add(uid, info) == NO_MEMORY) {
+            LOGE("Battery track error: no memory for new app");
+            return;
+        }
+    }
+
+    BatteryUsageInfo &info = mBatteryData.editValueFor(uid);
+
+    if (params & kBatteryDataCodecStarted) {
+        if (params & kBatteryDataTrackAudio) {
+            info.audioLastTime -= time;
+            info.refCount ++;
+        }
+        if (params & kBatteryDataTrackVideo) {
+            info.videoLastTime -= time;
+            info.refCount ++;
+        }
+    } else {
+        if (info.refCount == 0) {
+            LOGW("Battery track warning: refCount is already 0");
+            return;
+        } else if (info.refCount < 0) {
+            LOGE("Battery track error: refCount < 0");
+            mBatteryData.removeItem(uid);
+            return;
+        }
+
+        if (params & kBatteryDataTrackAudio) {
+            info.audioLastTime += time;
+            info.refCount --;
+        }
+        if (params & kBatteryDataTrackVideo) {
+            info.videoLastTime += time;
+            info.refCount --;
+        }
+
+        // no stream is being played by this UID
+        if (info.refCount == 0) {
+            info.audioTotalTime += info.audioLastTime;
+            info.audioLastTime = 0;
+            info.videoTotalTime += info.videoLastTime;
+            info.videoLastTime = 0;
+        }
+    }
+}
+
+status_t MediaPlayerService::pullBatteryData(Parcel* reply) {
+    Mutex::Autolock lock(mLock);
+
+    // audio output devices usage
+    int32_t time = systemTime() / 1000000L; //in ms
+    int32_t totalTime;
+
+    for (int i = 0; i < NUM_AUDIO_DEVICES; i++) {
+        totalTime = mBatteryAudio.totalTime[i];
+
+        if (mBatteryAudio.deviceOn[i]
+            && (mBatteryAudio.lastTime[i] != 0)) {
+                int32_t tmpTime = mBatteryAudio.lastTime[i] + time;
+                totalTime += tmpTime;
+        }
+
+        reply->writeInt32(totalTime);
+        // reset the total time
+        mBatteryAudio.totalTime[i] = 0;
+    }
+
+    // codec usage
+    BatteryUsageInfo info;
+    int size = mBatteryData.size();
+
+    reply->writeInt32(size);
+    int i = 0;
+
+    while (i < size) {
+        info = mBatteryData.valueAt(i);
+
+        reply->writeInt32(mBatteryData.keyAt(i)); //UID
+        reply->writeInt32(info.audioTotalTime);
+        reply->writeInt32(info.videoTotalTime);
+
+        info.audioTotalTime = 0;
+        info.videoTotalTime = 0;
+
+        // remove the UID entry where no stream is being played
+        if (info.refCount <= 0) {
+            mBatteryData.removeItemsAt(i);
+            size --;
+            i --;
+        }
+        i++;
+    }
+    return NO_ERROR;
+}
 } // namespace android
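
Editor's example (not part of the patch): pullBatteryData() above writes NUM_AUDIO_DEVICES per-device totals, then an entry count, then one (uid, audioTotalTime, videoTotalTime) triple per UID. A reader-side sketch of that layout; the layout is inferred from the write order above and is not a documented contract:

    #include <stdio.h>
    #include <binder/Parcel.h>

    // Mirror of the write order in pullBatteryData(): three device totals
    // (SPEAKER, OTHER_AUDIO_DEVICE, SPEAKER_AND_OTHER), then the entries.
    // Assumes the caller has rewound the reply Parcel to position 0.
    static void dumpBatteryReply(const android::Parcel &reply) {
        for (int i = 0; i < 3; ++i) {
            printf("audio device %d: %d ms\n", i, reply.readInt32());
        }
        int32_t entries = reply.readInt32();
        for (int32_t i = 0; i < entries; ++i) {
            int32_t uid   = reply.readInt32();
            int32_t audio = reply.readInt32();
            int32_t video = reply.readInt32();
            printf("uid %d: audio %d ms, video %d ms\n", uid, audio, video);
        }
    }
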
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 4492e20..ff6ccf5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -191,6 +191,11 @@
             const KeyedVector<String8, String8> *headers, int audioSessionId);
 
     virtual sp<IMediaPlayer>    create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length, int audioSessionId);
+
+    virtual sp<IMediaPlayer>    create(
+            pid_t pid, const sp<IMediaPlayerClient> &client,
+            const sp<IStreamSource> &source, int audioSessionId);
+
     virtual sp<IMemory>         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
     virtual sp<IMemory>         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
     virtual sp<IOMX>            getOMX();
@@ -199,14 +204,59 @@
 
             void                removeClient(wp<Client> client);
 
+    // For battery usage tracking purposes
+    struct BatteryUsageInfo {
+        // how many streams are being played by one UID
+        int     refCount;
+        // A temporary variable that accumulates the duration (ms) of the audio codecs:
+        // when we start an audio codec, we subtract the system time from audioLastTime;
+        // when we pause it, we add the system time back to audioLastTime,
+        // so after the pause, audioLastTime = pause time - start time.
+        // If multiple audio streams are played (or recorded), then audioLastTime
+        // is the total playing time of all the streams.
+        int32_t audioLastTime;
+        // When all the audio streams are paused, we add audioLastTime into
+        // this variable, so the value can be provided to the battery app
+        // in the next pullBatteryData call.
+        int32_t audioTotalTime;
 
+        int32_t videoLastTime;
+        int32_t videoTotalTime;
+    };
+    KeyedVector<int, BatteryUsageInfo>    mBatteryData;
+
+    enum {
+        SPEAKER,
+        OTHER_AUDIO_DEVICE,
+        SPEAKER_AND_OTHER,
+        NUM_AUDIO_DEVICES
+    };
+
+    struct BatteryAudioFlingerUsageInfo {
+        int refCount; // how many audio streams are being played
+        int deviceOn[NUM_AUDIO_DEVICES]; // whether the device is currently used
+        int32_t lastTime[NUM_AUDIO_DEVICES]; // in ms
+        // totalTime[]: total time of audio output devices usage
+        int32_t totalTime[NUM_AUDIO_DEVICES]; // in ms
+    };
+
+    // This variable records the usage of the audio output devices
+    // for the battery app
+    BatteryAudioFlingerUsageInfo mBatteryAudio;
+
+    // Collect info of the codec usage from media player and media recorder
+    virtual void                addBatteryData(uint32_t params);
+    // API for the Battery app to pull the data of codecs usage
+    virtual status_t            pullBatteryData(Parcel* reply);
 private:
 
     class Client : public BnMediaPlayer {
 
         // IMediaPlayer interface
         virtual void            disconnect();
-        virtual status_t        setVideoSurface(const sp<ISurface>& surface);
+        virtual status_t        setVideoSurface(const sp<Surface>& surface);
+        virtual status_t        setVideoSurfaceTexture(
+                                        const sp<ISurfaceTexture>& surfaceTexture);
         virtual status_t        prepareAsync();
         virtual status_t        start();
         virtual status_t        stop();
@@ -224,8 +274,6 @@
         virtual status_t        getMetadata(bool update_only,
                                             bool apply_filter,
                                             Parcel *reply);
-        virtual status_t        suspend();
-        virtual status_t        resume();
         virtual status_t        setAuxEffectSendLevel(float level);
         virtual status_t        attachAuxEffect(int effectId);
 
@@ -236,6 +284,9 @@
                         const KeyedVector<String8, String8> *headers);
 
                 status_t        setDataSource(int fd, int64_t offset, int64_t length);
+
+                status_t        setDataSource(const sp<IStreamSource> &source);
+
         static  void            notify(void* cookie, int msg, int ext1, int ext2);
 
                 pid_t           pid() const { return mPid; }
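
Editor's example (not part of the patch): the audioLastTime/audioTotalTime comments above describe a negative-start accumulator. The same trick sketched in isolation:

    #include <stdint.h>

    // Store -startTime when an interval opens, add the stop time when it
    // closes, and fold the closed interval into a running total (all in ms).
    struct IntervalAccumulator {
        int32_t last;    // holds -startTime while an interval is open
        int32_t total;   // sum of closed intervals
        IntervalAccumulator() : last(0), total(0) {}
        void start(int32_t nowMs) { last -= nowMs; }
        void stop(int32_t nowMs)  { last += nowMs; total += last; last = 0; }
    };

    // start(1000); stop(1600)  ->  total == 600
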
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 19915f1..1a1780c 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -31,10 +31,6 @@
 #include <binder/MemoryHeapBase.h>
 #include <binder/MemoryBase.h>
 
-#ifndef NO_OPENCORE
-#include <media/PVMediaRecorder.h>
-#endif
-
 #include <utils/String16.h>
 
 #include <media/AudioTrack.h>
@@ -70,7 +66,7 @@
     return mRecorder->setCamera(camera);
 }
 
-status_t MediaRecorderClient::setPreviewSurface(const sp<ISurface>& surface)
+status_t MediaRecorderClient::setPreviewSurface(const sp<Surface>& surface)
 {
     LOGV("setPreviewSurface");
     Mutex::Autolock lock(mLock);
@@ -164,6 +160,17 @@
     return mRecorder->setOutputFile(fd, offset, length);
 }
 
+status_t MediaRecorderClient::setOutputFileAuxiliary(int fd)
+{
+    LOGV("setOutputFileAuxiliary(%d)", fd);
+    Mutex::Autolock lock(mLock);
+    if (mRecorder == NULL) {
+        LOGE("recorder is not initialized");
+        return NO_INIT;
+    }
+    return mRecorder->setOutputFileAuxiliary(fd);
+}
+
 status_t MediaRecorderClient::setVideoSize(int width, int height)
 {
     LOGV("setVideoSize(%dx%d)", width, height);
@@ -293,22 +300,7 @@
 {
     LOGV("Client constructor");
     mPid = pid;
-
-    char value[PROPERTY_VALUE_MAX];
-    if (!property_get("media.stagefright.enable-record", value, NULL)
-        || !strcmp(value, "1") || !strcasecmp(value, "true")) {
-        mRecorder = new StagefrightRecorder;
-    } else
-#ifndef NO_OPENCORE
-    {
-        mRecorder = new PVMediaRecorder();
-    }
-#else
-    {
-        mRecorder = NULL;
-    }
-#endif
-
+    mRecorder = new StagefrightRecorder;
     mMediaPlayerService = service;
 }
 
@@ -337,4 +329,3 @@
 }
 
 }; // namespace android
-
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 1d1913d..fded98e 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -29,7 +29,7 @@
 {
 public:
     virtual     status_t        setCamera(const sp<ICamera>& camera);
-    virtual     status_t        setPreviewSurface(const sp<ISurface>& surface);
+    virtual     status_t        setPreviewSurface(const sp<Surface>& surface);
     virtual     status_t        setVideoSource(int vs);
     virtual     status_t        setAudioSource(int as);
     virtual     status_t        setOutputFormat(int of);
@@ -37,6 +37,7 @@
     virtual     status_t        setAudioEncoder(int ae);
     virtual     status_t        setOutputFile(const char* path);
     virtual     status_t        setOutputFile(int fd, int64_t offset, int64_t length);
+    virtual     status_t        setOutputFileAuxiliary(int fd);
     virtual     status_t        setVideoSize(int width, int height);
     virtual     status_t        setVideoFrameRate(int frames_per_second);
     virtual     status_t        setParameters(const String8& params);
@@ -66,4 +67,3 @@
 }; // namespace android
 
 #endif // ANDROID_MEDIARECORDERCLIENT_H
-
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 713e441..5fcf2a7 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -35,7 +35,6 @@
 #include <binder/IServiceManager.h>
 #include <media/MediaMetadataRetrieverInterface.h>
 #include <media/MediaPlayerInterface.h>
-#include <media/PVMetadataRetriever.h>
 #include <private/media/VideoFrame.h>
 #include "MidiMetadataRetriever.h"
 #include "MetadataRetrieverClient.h"
@@ -105,12 +104,6 @@
             p = new StagefrightMetadataRetriever;
             break;
         }
-#ifndef NO_OPENCORE
-        case PV_PLAYER:
-            LOGV("create pv metadata retriever");
-            p = new PVMetadataRetriever();
-            break;
-#endif
         case SONIVOX_PLAYER:
             LOGV("create midi metadata retriever");
             p = new MidiMetadataRetriever();
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index 4a60ece..a98231c 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -35,7 +35,10 @@
             const char* path, const KeyedVector<String8, String8> *headers);
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t    setVideoSurface(const sp<ISurface>& surface) { return UNKNOWN_ERROR; }
+    virtual status_t    setVideoSurface(const sp<Surface>& surface) { return UNKNOWN_ERROR; }
+    virtual status_t    setVideoSurfaceTexture(
+                                const sp<ISurfaceTexture>& surfaceTexture)
+                            { return UNKNOWN_ERROR; }
     virtual status_t    prepare();
     virtual status_t    prepareAsync();
     virtual status_t    start();
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index 6bded09..e277121 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -33,7 +33,6 @@
 
 status_t StagefrightPlayer::setDataSource(
         const char *url, const KeyedVector<String8, String8> *headers) {
-    LOGI("setDataSource('%s')", url);
     return mPlayer->setDataSource(url, headers);
 }
 
@@ -44,10 +43,22 @@
     return mPlayer->setDataSource(dup(fd), offset, length);
 }
 
-status_t StagefrightPlayer::setVideoSurface(const sp<ISurface> &surface) {
+status_t StagefrightPlayer::setDataSource(const sp<IStreamSource> &source) {
+    return mPlayer->setDataSource(source);
+}
+
+status_t StagefrightPlayer::setVideoSurface(const sp<Surface> &surface) {
     LOGV("setVideoSurface");
 
-    mPlayer->setISurface(surface);
+    mPlayer->setSurface(surface);
+    return OK;
+}
+
+status_t StagefrightPlayer::setVideoSurfaceTexture(
+        const sp<ISurfaceTexture> &surfaceTexture) {
+    LOGV("setVideoSurfaceTexture");
+
+    mPlayer->setSurfaceTexture(surfaceTexture);
     return OK;
 }
 
@@ -140,16 +151,6 @@
     return STAGEFRIGHT_PLAYER;
 }
 
-status_t StagefrightPlayer::suspend() {
-    LOGV("suspend");
-    return mPlayer->suspend();
-}
-
-status_t StagefrightPlayer::resume() {
-    LOGV("resume");
-    return mPlayer->resume();
-}
-
 status_t StagefrightPlayer::invoke(const Parcel &request, Parcel *reply) {
     return INVALID_OPERATION;
 }
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index 781eb44..e2796d2 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -35,7 +35,12 @@
             const char *url, const KeyedVector<String8, String8> *headers);
 
     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t setVideoSurface(const sp<ISurface> &surface);
+
+    virtual status_t setDataSource(const sp<IStreamSource> &source);
+
+    virtual status_t setVideoSurface(const sp<Surface> &surface);
+    virtual status_t setVideoSurfaceTexture(
+            const sp<ISurfaceTexture> &surfaceTexture);
     virtual status_t prepare();
     virtual status_t prepareAsync();
     virtual status_t start();
@@ -50,8 +55,6 @@
     virtual player_type playerType();
     virtual status_t invoke(const Parcel &request, Parcel *reply);
     virtual void setAudioSink(const sp<AudioSink> &audioSink);
-    virtual status_t suspend();
-    virtual status_t resume();
 
     virtual status_t getMetadata(
             const media::Metadata::Filter& ids, Parcel *records);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 3261fe6..e3dfabb 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -21,9 +21,15 @@
 #include "StagefrightRecorder.h"
 
 #include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+
+#include <media/IMediaPlayerService.h>
 #include <media/stagefright/AudioSource.h>
 #include <media/stagefright/AMRWriter.h>
 #include <media/stagefright/CameraSource.h>
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaSourceSplitter.h>
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/MediaDebug.h>
@@ -33,21 +39,34 @@
 #include <media/stagefright/OMXCodec.h>
 #include <media/MediaProfiles.h>
 #include <camera/ICamera.h>
-#include <camera/Camera.h>
 #include <camera/CameraParameters.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
 #include <utils/Errors.h>
 #include <sys/types.h>
-#include <unistd.h>
 #include <ctype.h>
+#include <unistd.h>
 
 #include "ARTPWriter.h"
 
 namespace android {
 
+// To collect the encoder usage for the battery app
+static void addBatteryData(uint32_t params) {
+    sp<IBinder> binder =
+        defaultServiceManager()->getService(String16("media.player"));
+    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+    CHECK(service.get() != NULL);
+
+    service->addBatteryData(params);
+}
+
+
 StagefrightRecorder::StagefrightRecorder()
-    : mWriter(NULL),
-      mOutputFd(-1) {
+    : mWriter(NULL), mWriterAux(NULL),
+      mOutputFd(-1), mOutputFdAux(-1),
+      mAudioSource(AUDIO_SOURCE_LIST_END),
+      mVideoSource(VIDEO_SOURCE_LIST_END),
+      mStarted(false) {
 
     LOGV("Constructor");
     reset();
@@ -164,7 +183,8 @@
 
 status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) {
     LOGV("setVideoFrameRate: %d", frames_per_second);
-    if (frames_per_second <= 0 || frames_per_second > 30) {
+    if ((frames_per_second <= 0 && frames_per_second != -1) ||
+        frames_per_second > 120) {
         LOGE("Invalid video frame rate: %d", frames_per_second);
         return BAD_VALUE;
     }
@@ -182,26 +202,11 @@
         return BAD_VALUE;
     }
 
-    int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    mFlags &= ~FLAGS_HOT_CAMERA;
-    mCamera = Camera::create(camera);
-    if (mCamera == 0) {
-        LOGE("Unable to connect to camera");
-        IPCThreadState::self()->restoreCallingIdentity(token);
-        return -EBUSY;
-    }
-
-    LOGV("Connected to camera");
-    if (mCamera->previewEnabled()) {
-        LOGV("camera is hot");
-        mFlags |= FLAGS_HOT_CAMERA;
-    }
-    IPCThreadState::self()->restoreCallingIdentity(token);
-
+    mCamera = camera;
     return OK;
 }
 
-status_t StagefrightRecorder::setPreviewSurface(const sp<ISurface> &surface) {
+status_t StagefrightRecorder::setPreviewSurface(const sp<Surface> &surface) {
     LOGV("setPreviewSurface: %p", surface.get());
     mPreviewSurface = surface;
 
@@ -235,10 +240,32 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setOutputFileAuxiliary(int fd) {
+    LOGV("setOutputFileAuxiliary: %d", fd);
+
+    if (fd < 0) {
+        LOGE("Invalid file descriptor: %d", fd);
+        return -EBADF;
+    }
+
+    mCaptureAuxVideo = true;
+
+    if (mOutputFdAux >= 0) {
+        ::close(mOutputFdAux);
+    }
+    mOutputFdAux = dup(fd);
+
+    return OK;
+}
+
 // Attempt to parse an int64 literal optionally surrounded by whitespace,
 // returns true on success, false otherwise.
 static bool safe_strtoi64(const char *s, int64_t *val) {
     char *end;
+
+    // It is lame, but according to the man page, we have to set errno to 0
+    // before calling strtoll().
+    errno = 0;
     *val = strtoll(s, &end, 10);
 
     if (end == s || errno == ERANGE) {
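
Editor's example (not part of the patch): the errno comment above is the standard strtoll() idiom. The full pattern in isolation, with an explicit trailing-character check added for completeness (stricter than the hunk shown above requires):

    #include <errno.h>
    #include <stdlib.h>

    // Clear errno, call strtoll(), then check the end pointer and errno
    // before trusting the value.
    static bool parseInt64(const char *s, long long *out) {
        char *end;
        errno = 0;
        long long v = strtoll(s, &end, 10);
        if (end == s || *end != '\0' || errno == ERANGE) {
            return false;   // empty input, trailing junk, or overflow
        }
        *out = v;
        return true;
    }
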
@@ -353,6 +380,8 @@
 
 status_t StagefrightRecorder::setParamMaxFileDurationUs(int64_t timeUs) {
     LOGV("setParamMaxFileDurationUs: %lld us", timeUs);
+
+    // This is meant for backward compatibility with MediaRecorder.java
     if (timeUs <= 0) {
         LOGW("Max file duration is not positive: %lld us. Disabling duration limit.", timeUs);
         timeUs = 0; // Disable the duration limit for zero or negative values.
@@ -370,7 +399,13 @@
 
 status_t StagefrightRecorder::setParamMaxFileSizeBytes(int64_t bytes) {
     LOGV("setParamMaxFileSizeBytes: %lld bytes", bytes);
-    if (bytes <= 1024) {  // XXX: 1 kB
+
+    // This is meant for backward compatibility with MediaRecorder.java
+    if (bytes <= 0) {
+        LOGW("Max file size is not positive: %lld bytes. "
+             "Disabling file size limit.", bytes);
+        bytes = 0; // Disable the file size limit for zero or negative values.
+    } else if (bytes <= 1024) {  // XXX: 1 kB
         LOGE("Max file size is too small: %lld bytes", bytes);
         return BAD_VALUE;
     }
@@ -493,6 +528,68 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) {
+    LOGV("setParamTimeLapseEnable: %d", timeLapseEnable);
+
+    if (timeLapseEnable == 0) {
+        mCaptureTimeLapse = false;
+    } else if (timeLapseEnable == 1) {
+        mCaptureTimeLapse = true;
+    } else {
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) {
+    LOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs);
+
+    // Do not allow more than one day between captured frames
+    if (timeUs <= 0 || timeUs > 86400*1E6) {
+        LOGE("Time between time lapse frame capture (%lld us) is out of range (0, 1 day]", timeUs);
+        return BAD_VALUE;
+    }
+
+    mTimeBetweenTimeLapseFrameCaptureUs = timeUs;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoWidth(int32_t width) {
+    LOGV("setParamAuxVideoWidth : %d", width);
+
+    if (width <= 0) {
+        LOGE("Width (%d) is not positive", width);
+        return BAD_VALUE;
+    }
+
+    mAuxVideoWidth = width;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoHeight(int32_t height) {
+    LOGV("setParamAuxVideoHeight : %d", height);
+
+    if (height <= 0) {
+        LOGE("Height (%d) is not positive", height);
+        return BAD_VALUE;
+    }
+
+    mAuxVideoHeight = height;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoEncodingBitRate(int32_t bitRate) {
+    LOGV("StagefrightRecorder::setParamAuxVideoEncodingBitRate: %d", bitRate);
+
+    if (bitRate <= 0) {
+        LOGE("Invalid video encoding bit rate: %d", bitRate);
+        return BAD_VALUE;
+    }
+
+    mAuxVideoBitRate = bitRate;
+    return OK;
+}
+
 status_t StagefrightRecorder::setParameter(
         const String8 &key, const String8 &value) {
     LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -581,6 +678,32 @@
         if (safe_strtoi32(value.string(), &timeScale)) {
             return setParamVideoTimeScale(timeScale);
         }
+    } else if (key == "time-lapse-enable") {
+        int32_t timeLapseEnable;
+        if (safe_strtoi32(value.string(), &timeLapseEnable)) {
+            return setParamTimeLapseEnable(timeLapseEnable);
+        }
+    } else if (key == "time-between-time-lapse-frame-capture") {
+        int64_t timeBetweenTimeLapseFrameCaptureMs;
+        if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) {
+            return setParamTimeBetweenTimeLapseFrameCapture(
+                    1000LL * timeBetweenTimeLapseFrameCaptureMs);
+        }
+    } else if (key == "video-aux-param-width") {
+        int32_t auxWidth;
+        if (safe_strtoi32(value.string(), &auxWidth)) {
+            return setParamAuxVideoWidth(auxWidth);
+        }
+    } else if (key == "video-aux-param-height") {
+        int32_t auxHeight;
+        if (safe_strtoi32(value.string(), &auxHeight)) {
+            return setParamAuxVideoHeight(auxHeight);
+        }
+    } else if (key == "video-aux-param-encoding-bitrate") {
+        int32_t auxVideoBitRate;
+        if (safe_strtoi32(value.string(), &auxVideoBitRate)) {
+            return setParamAuxVideoEncodingBitRate(auxVideoBitRate);
+        }
     } else {
         LOGE("setParameter: failed to find key %s", key.string());
     }
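
Editor's example (not part of the patch): the new time-lapse and auxiliary-video keys are reached through the recorder's setParameters() call (declared on MediaRecorderClient later in this series). A caller-side sketch; the key names come from the hunk above, while the "key=value;key=value" packing and the millisecond unit for the capture interval are assumptions about how the parameter string is split (the hunk multiplies the value by 1000LL to get microseconds):

    #include <media/mediarecorder.h>
    #include <utils/String8.h>

    // Enable time lapse capture with one frame every 2000 ms.
    static android::status_t enableTimeLapse(
            const android::sp<android::MediaRecorder> &recorder) {
        android::String8 params(
                "time-lapse-enable=1;"
                "time-between-time-lapse-frame-capture=2000");
        return recorder->setParameters(params);
    }
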
@@ -640,30 +763,54 @@
         return UNKNOWN_ERROR;
     }
 
+    status_t status = OK;
+
     switch (mOutputFormat) {
         case OUTPUT_FORMAT_DEFAULT:
         case OUTPUT_FORMAT_THREE_GPP:
         case OUTPUT_FORMAT_MPEG_4:
-            return startMPEG4Recording();
+            status = startMPEG4Recording();
+            break;
 
         case OUTPUT_FORMAT_AMR_NB:
         case OUTPUT_FORMAT_AMR_WB:
-            return startAMRRecording();
+            status = startAMRRecording();
+            break;
 
         case OUTPUT_FORMAT_AAC_ADIF:
         case OUTPUT_FORMAT_AAC_ADTS:
-            return startAACRecording();
+            status = startAACRecording();
+            break;
 
         case OUTPUT_FORMAT_RTP_AVP:
-            return startRTPRecording();
+            status = startRTPRecording();
+            break;
 
         case OUTPUT_FORMAT_MPEG2TS:
-            return startMPEG2TSRecording();
+            status = startMPEG2TSRecording();
+            break;
 
         default:
             LOGE("Unsupported output file format: %d", mOutputFormat);
-            return UNKNOWN_ERROR;
+            status = UNKNOWN_ERROR;
+            break;
     }
+
+    if ((status == OK) && (!mStarted)) {
+        mStarted = true;
+
+        uint32_t params = IMediaPlayerService::kBatteryDataCodecStarted;
+        if (mAudioSource != AUDIO_SOURCE_LIST_END) {
+            params |= IMediaPlayerService::kBatteryDataTrackAudio;
+        }
+        if (mVideoSource != VIDEO_SOURCE_LIST_END) {
+            params |= IMediaPlayerService::kBatteryDataTrackVideo;
+        }
+
+        addBatteryData(params);
+    }
+
+    return status;
 }
 
 sp<MediaSource> StagefrightRecorder::createAudioSource() {
@@ -745,27 +892,12 @@
                     mAudioEncoder);
             return BAD_VALUE;
         }
-        if (mSampleRate != 8000) {
-            LOGE("Invalid sampling rate %d used for AMRNB recording",
-                    mSampleRate);
-            return BAD_VALUE;
-        }
     } else {  // mOutputFormat must be OUTPUT_FORMAT_AMR_WB
         if (mAudioEncoder != AUDIO_ENCODER_AMR_WB) {
             LOGE("Invlaid encoder %d used for AMRWB recording",
                     mAudioEncoder);
             return BAD_VALUE;
         }
-        if (mSampleRate != 16000) {
-            LOGE("Invalid sample rate %d used for AMRWB recording",
-                    mSampleRate);
-            return BAD_VALUE;
-        }
-    }
-    if (mAudioChannels != 1) {
-        LOGE("Invalid number of audio channels %d used for amr recording",
-                mAudioChannels);
-        return BAD_VALUE;
     }
 
     if (mAudioSource >= AUDIO_SOURCE_LIST_END) {
@@ -773,13 +905,17 @@
         return BAD_VALUE;
     }
 
-    sp<MediaSource> audioEncoder = createAudioSource();
+    status_t status = BAD_VALUE;
+    if (OK != (status = checkAudioEncoderCapabilities())) {
+        return status;
+    }
 
+    sp<MediaSource> audioEncoder = createAudioSource();
     if (audioEncoder == NULL) {
         return UNKNOWN_ERROR;
     }
 
-    mWriter = new AMRWriter(dup(mOutputFd));
+    mWriter = new AMRWriter(mOutputFd);
     mWriter->addSource(audioEncoder);
 
     if (mMaxFileDurationUs != 0) {
@@ -814,13 +950,20 @@
     if (mAudioSource != AUDIO_SOURCE_LIST_END) {
         source = createAudioSource();
     } else {
-        status_t err = setupVideoEncoder(&source);
+
+        sp<CameraSource> cameraSource;
+        status_t err = setupCameraSource(&cameraSource);
+        if (err != OK) {
+            return err;
+        }
+
+        err = setupVideoEncoder(cameraSource, mVideoBitRate, &source);
         if (err != OK) {
             return err;
         }
     }
 
-    mWriter = new ARTPWriter(dup(mOutputFd));
+    mWriter = new ARTPWriter(mOutputFd);
     mWriter->addSource(source);
     mWriter->setListener(mListener);
 
@@ -830,7 +973,7 @@
 status_t StagefrightRecorder::startMPEG2TSRecording() {
     CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
 
-    sp<MediaWriter> writer = new MPEG2TSWriter(dup(mOutputFd));
+    sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
 
     if (mAudioSource != AUDIO_SOURCE_LIST_END) {
         if (mAudioEncoder != AUDIO_ENCODER_AAC) {
@@ -850,8 +993,14 @@
             return ERROR_UNSUPPORTED;
         }
 
+        sp<CameraSource> cameraSource;
+        status_t err = setupCameraSource(&cameraSource);
+        if (err != OK) {
+            return err;
+        }
+
         sp<MediaSource> encoder;
-        status_t err = setupVideoEncoder(&encoder);
+        err = setupVideoEncoder(cameraSource, mVideoBitRate, &encoder);
 
         if (err != OK) {
             return err;
@@ -879,7 +1028,7 @@
                         "enc.vid.fps.min", mVideoEncoder);
     int maxFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
                         "enc.vid.fps.max", mVideoEncoder);
-    if (mFrameRate < minFrameRate) {
+    if (mFrameRate < minFrameRate && mFrameRate != -1) {
         LOGW("Intended video encoding frame rate (%d fps) is too small"
              " and will be set to (%d fps)", mFrameRate, minFrameRate);
         mFrameRate = minFrameRate;
@@ -924,60 +1073,91 @@
     }
 }
 
-status_t StagefrightRecorder::setupCameraSource() {
-    clipVideoBitRate();
-    clipVideoFrameRate();
-    clipVideoFrameWidth();
-    clipVideoFrameHeight();
-
-    int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    if (mCamera == 0) {
-        mCamera = Camera::connect(mCameraId);
-        if (mCamera == 0) {
-            LOGE("Camera connection could not be established.");
-            return -EBUSY;
-        }
-        mFlags &= ~FLAGS_HOT_CAMERA;
-        mCamera->lock();
+status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
+    if (!mCaptureTimeLapse) {
+        // Don't clip for time lapse capture, as the encoder will have enough
+        // time to encode because of the slow capture rate of time lapse.
+        clipVideoBitRate();
+        clipVideoFrameRate();
+        clipVideoFrameWidth();
+        clipVideoFrameHeight();
     }
-
-    // Set the actual video recording frame size
-    CameraParameters params(mCamera->getParameters());
-    params.setPreviewSize(mVideoWidth, mVideoHeight);
-    params.setPreviewFrameRate(mFrameRate);
-    String8 s = params.flatten();
-    if (OK != mCamera->setParameters(s)) {
-        LOGE("Could not change settings."
-             " Someone else is using camera %d?", mCameraId);
-        return -EBUSY;
-    }
-    CameraParameters newCameraParams(mCamera->getParameters());
-
-    // Check on video frame size
-    int frameWidth = 0, frameHeight = 0;
-    newCameraParams.getPreviewSize(&frameWidth, &frameHeight);
-    if (frameWidth  < 0 || frameWidth  != mVideoWidth ||
-        frameHeight < 0 || frameHeight != mVideoHeight) {
-        LOGE("Failed to set the video frame size to %dx%d",
-                mVideoWidth, mVideoHeight);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-        return UNKNOWN_ERROR;
-    }
-
-    // Check on video frame rate
-    int frameRate = newCameraParams.getPreviewFrameRate();
-    if (frameRate < 0 || (frameRate - mFrameRate) != 0) {
-        LOGE("Failed to set frame rate to %d fps. The actual "
-             "frame rate is %d", mFrameRate, frameRate);
-    }
-
-    // This CHECK is good, since we just passed the lock/unlock
-    // check earlier by calling mCamera->setParameters().
-    CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface));
-    IPCThreadState::self()->restoreCallingIdentity(token);
     return OK;
 }
 
+status_t StagefrightRecorder::checkAudioEncoderCapabilities() {
+    clipAudioBitRate();
+    clipAudioSampleRate();
+    clipNumberOfAudioChannels();
+    return OK;
+}
+
+void StagefrightRecorder::clipAudioBitRate() {
+    LOGV("clipAudioBitRate: encoder %d", mAudioEncoder);
+
+    int minAudioBitRate =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.bps.min", mAudioEncoder);
+    if (mAudioBitRate < minAudioBitRate) {
+        LOGW("Intended audio encoding bit rate (%d) is too small"
+            " and will be set to (%d)", mAudioBitRate, minAudioBitRate);
+        mAudioBitRate = minAudioBitRate;
+    }
+
+    int maxAudioBitRate =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.bps.max", mAudioEncoder);
+    if (mAudioBitRate > maxAudioBitRate) {
+        LOGW("Intended audio encoding bit rate (%d) is too large"
+            " and will be set to (%d)", mAudioBitRate, maxAudioBitRate);
+        mAudioBitRate = maxAudioBitRate;
+    }
+}
+
+void StagefrightRecorder::clipAudioSampleRate() {
+    LOGV("clipAudioSampleRate: encoder %d", mAudioEncoder);
+
+    int minSampleRate =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.hz.min", mAudioEncoder);
+    if (mSampleRate < minSampleRate) {
+        LOGW("Intended audio sample rate (%d) is too small"
+            " and will be set to (%d)", mSampleRate, minSampleRate);
+        mSampleRate = minSampleRate;
+    }
+
+    int maxSampleRate =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.hz.max", mAudioEncoder);
+    if (mSampleRate > maxSampleRate) {
+        LOGW("Intended audio sample rate (%d) is too large"
+            " and will be set to (%d)", mSampleRate, maxSampleRate);
+        mSampleRate = maxSampleRate;
+    }
+}
+
+void StagefrightRecorder::clipNumberOfAudioChannels() {
+    LOGV("clipNumberOfAudioChannels: encoder %d", mAudioEncoder);
+
+    int minChannels =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.ch.min", mAudioEncoder);
+    if (mAudioChannels < minChannels) {
+        LOGW("Intended number of audio channels (%d) is too small"
+            " and will be set to (%d)", mAudioChannels, minChannels);
+        mAudioChannels = minChannels;
+    }
+
+    int maxChannels =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.ch.max", mAudioEncoder);
+    if (mAudioChannels > maxChannels) {
+        LOGW("Intended number of audio channels (%d) is too large"
+            " and will be set to (%d)", mAudioChannels, maxChannels);
+        mAudioChannels = maxChannels;
+    }
+}
+
 void StagefrightRecorder::clipVideoFrameHeight() {
     LOGV("clipVideoFrameHeight: encoder %d", mVideoEncoder);
     int minFrameHeight = mEncoderProfiles->getVideoEncoderParamByName(
@@ -995,18 +1175,64 @@
     }
 }
 
-status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) {
+status_t StagefrightRecorder::setupCameraSource(
+        sp<CameraSource> *cameraSource) {
+    status_t err = OK;
+    if ((err = checkVideoEncoderCapabilities()) != OK) {
+        return err;
+    }
+    Size videoSize;
+    videoSize.width = mVideoWidth;
+    videoSize.height = mVideoHeight;
+    if (mCaptureTimeLapse) {
+        mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
+                mCamera, mCameraId,
+                videoSize, mFrameRate, mPreviewSurface,
+                mTimeBetweenTimeLapseFrameCaptureUs);
+        *cameraSource = mCameraSourceTimeLapse;
+    } else {
+        *cameraSource = CameraSource::CreateFromCamera(
+                mCamera, mCameraId, videoSize, mFrameRate,
+                mPreviewSurface, true /*storeMetaDataInVideoBuffers*/);
+    }
+    if (*cameraSource == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    if ((*cameraSource)->initCheck() != OK) {
+        (*cameraSource).clear();
+        *cameraSource = NULL;
+        return NO_INIT;
+    }
+
+    // When the frame rate is not set, fall back to the frame rate
+    // the camera is currently using.
+    if (mFrameRate == -1) {
+        int32_t frameRate = 0;
+        CHECK((*cameraSource)->getFormat()->findInt32(
+                    kKeyFrameRate, &frameRate));
+        LOGI("Frame rate is not explicitly set. Using the current frame "
+             "rate (%d fps)", frameRate);
+        mFrameRate = frameRate;
+    }
+
+    CHECK(mFrameRate != -1);
+
+    mIsMetaDataStoredInVideoBuffers =
+        (*cameraSource)->isMetaDataStoredInVideoBuffers();
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setupVideoEncoder(
+        sp<MediaSource> cameraSource,
+        int32_t videoBitRate,
+        sp<MediaSource> *source) {
     source->clear();
 
-    status_t err = setupCameraSource();
-    if (err != OK) return err;
-
-    sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
-    CHECK(cameraSource != NULL);
-
     sp<MetaData> enc_meta = new MetaData;
-    enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
-    enc_meta->setInt32(kKeySampleRate, mFrameRate);
+    enc_meta->setInt32(kKeyBitRate, videoBitRate);
+    enc_meta->setInt32(kKeyFrameRate, mFrameRate);
 
     switch (mVideoEncoder) {
         case VIDEO_ENCODER_H263:
@@ -1054,10 +1280,29 @@
     OMXClient client;
     CHECK_EQ(client.connect(), OK);
 
+    uint32_t encoder_flags = 0;
+    if (mIsMetaDataStoredInVideoBuffers) {
+        encoder_flags |= OMXCodec::kHardwareCodecsOnly;
+        encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
+    }
+
+    // Do not wait for all the input buffers to become available.
+    // This gives time lapse video recording a faster response in
+    // receiving output from the video encoder component.
+    if (mCaptureTimeLapse) {
+        encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
+    }
+
     sp<MediaSource> encoder = OMXCodec::Create(
             client.interface(), enc_meta,
-            true /* createEncoder */, cameraSource);
+            true /* createEncoder */, cameraSource,
+            NULL, encoder_flags);
     if (encoder == NULL) {
+        LOGW("Failed to create the encoder");
+        // When the encoder fails to be created, we need to
+        // release the camera source due to the camera's lock
+        // and unlock mechanism.
+        cameraSource->stop();
         return UNKNOWN_ERROR;
     }
 
@@ -1067,18 +1312,23 @@
 }
 
 status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
-    sp<MediaSource> audioEncoder;
+    status_t status = BAD_VALUE;
+    if (OK != (status = checkAudioEncoderCapabilities())) {
+        return status;
+    }
+
     switch(mAudioEncoder) {
         case AUDIO_ENCODER_AMR_NB:
         case AUDIO_ENCODER_AMR_WB:
         case AUDIO_ENCODER_AAC:
-            audioEncoder = createAudioSource();
             break;
+
         default:
             LOGE("Unsupported audio encoder: %d", mAudioEncoder);
             return UNKNOWN_ERROR;
     }
 
+    sp<MediaSource> audioEncoder = createAudioSource();
     if (audioEncoder == NULL) {
         return UNKNOWN_ERROR;
     }
@@ -1087,54 +1337,153 @@
     return OK;
 }
 
-status_t StagefrightRecorder::startMPEG4Recording() {
-    int32_t totalBitRate = 0;
+status_t StagefrightRecorder::setupMPEG4Recording(
+        bool useSplitCameraSource,
+        int outputFd,
+        int32_t videoWidth, int32_t videoHeight,
+        int32_t videoBitRate,
+        int32_t *totalBitRate,
+        sp<MediaWriter> *mediaWriter) {
+    mediaWriter->clear();
+    *totalBitRate = 0;
     status_t err = OK;
-    sp<MediaWriter> writer = new MPEG4Writer(dup(mOutputFd));
+    sp<MediaWriter> writer = new MPEG4Writer(outputFd);
 
-    // Add audio source first if it exists
-    if (mAudioSource != AUDIO_SOURCE_LIST_END) {
-        err = setupAudioEncoder(writer);
-        if (err != OK) return err;
-        totalBitRate += mAudioBitRate;
-    }
     if (mVideoSource == VIDEO_SOURCE_DEFAULT
             || mVideoSource == VIDEO_SOURCE_CAMERA) {
+
+        sp<MediaSource> cameraMediaSource;
+        if (useSplitCameraSource) {
+            LOGV("Using Split camera source");
+            cameraMediaSource = mCameraSourceSplitter->createClient();
+        } else {
+            sp<CameraSource> cameraSource;
+            err = setupCameraSource(&cameraSource);
+            cameraMediaSource = cameraSource;
+        }
+        if (err != OK) {
+            return err;
+        }
+        if ((videoWidth != mVideoWidth) || (videoHeight != mVideoHeight)) {
+            // Downsample the frames from the original source.
+            cameraMediaSource =
+                new VideoSourceDownSampler(cameraMediaSource, videoWidth, videoHeight);
+        }
+
         sp<MediaSource> encoder;
-        err = setupVideoEncoder(&encoder);
-        if (err != OK) return err;
+        err = setupVideoEncoder(cameraMediaSource, videoBitRate, &encoder);
+        if (err != OK) {
+            return err;
+        }
+
         writer->addSource(encoder);
-        totalBitRate += mVideoBitRate;
+        *totalBitRate += videoBitRate;
+    }
+
+    // Audio source is added at the end if it exists.
+    // This helps make sure that the "recording" sound is suppressed for
+    // camcorder applications in the recorded files.
+    if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_LIST_END)) {
+        err = setupAudioEncoder(writer);
+        if (err != OK) return err;
+        *totalBitRate += mAudioBitRate;
     }
 
     if (mInterleaveDurationUs > 0) {
         reinterpret_cast<MPEG4Writer *>(writer.get())->
             setInterleaveDuration(mInterleaveDurationUs);
     }
-
     if (mMaxFileDurationUs != 0) {
         writer->setMaxFileDuration(mMaxFileDurationUs);
     }
     if (mMaxFileSizeBytes != 0) {
         writer->setMaxFileSize(mMaxFileSizeBytes);
     }
-    sp<MetaData> meta = new MetaData;
-    meta->setInt64(kKeyTime, systemTime() / 1000);
-    meta->setInt32(kKeyFileType, mOutputFormat);
-    meta->setInt32(kKeyBitRate, totalBitRate);
-    meta->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
+
+    writer->setListener(mListener);
+    *mediaWriter = writer;
+    return OK;
+}
+
+void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+        sp<MetaData> *meta) {
+    (*meta)->setInt64(kKeyTime, startTimeUs);
+    (*meta)->setInt32(kKeyFileType, mOutputFormat);
+    (*meta)->setInt32(kKeyBitRate, totalBitRate);
+    (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
     if (mMovieTimeScale > 0) {
-        meta->setInt32(kKeyTimeScale, mMovieTimeScale);
+        (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
     }
     if (mTrackEveryTimeDurationUs > 0) {
-        meta->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
+        (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
     }
     if (mRotationDegrees != 0) {
-        meta->setInt32(kKeyRotationDegree, mRotationDegrees);
+        (*meta)->setInt32(kKeyRotation, mRotationDegrees);
     }
-    writer->setListener(mListener);
-    mWriter = writer;
-    return mWriter->start(meta.get());
+}
+
+status_t StagefrightRecorder::startMPEG4Recording() {
+    if (mCaptureAuxVideo) {
+        if (!mCaptureTimeLapse) {
+            LOGE("Auxiliary video can be captured only in time lapse mode");
+            return UNKNOWN_ERROR;
+        }
+        LOGV("Creating MediaSourceSplitter");
+        sp<CameraSource> cameraSource;
+        status_t err = setupCameraSource(&cameraSource);
+        if (err != OK) {
+            return err;
+        }
+        mCameraSourceSplitter = new MediaSourceSplitter(cameraSource);
+    } else {
+        mCameraSourceSplitter = NULL;
+    }
+
+    int32_t totalBitRate;
+    status_t err = setupMPEG4Recording(mCaptureAuxVideo,
+            mOutputFd, mVideoWidth, mVideoHeight,
+            mVideoBitRate, &totalBitRate, &mWriter);
+    if (err != OK) {
+        return err;
+    }
+
+    int64_t startTimeUs = systemTime() / 1000;
+    sp<MetaData> meta = new MetaData;
+    setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
+
+    err = mWriter->start(meta.get());
+    if (err != OK) {
+        return err;
+    }
+
+    if (mCaptureAuxVideo) {
+        CHECK(mOutputFdAux >= 0);
+        if (mWriterAux != NULL) {
+            LOGE("Auxiliary file writer already exists");
+            return UNKNOWN_ERROR;
+        }
+        if ((mAuxVideoWidth > mVideoWidth) || (mAuxVideoHeight > mVideoHeight) ||
+                ((mAuxVideoWidth == mVideoWidth) && mAuxVideoHeight == mVideoHeight)) {
+            LOGE("Auxiliary video size (%d x %d) is the same as or larger than the main video size (%d x %d)",
+                    mAuxVideoWidth, mAuxVideoHeight, mVideoWidth, mVideoHeight);
+            return UNKNOWN_ERROR;
+        }
+
+        int32_t totalBitrateAux;
+        err = setupMPEG4Recording(mCaptureAuxVideo,
+                mOutputFdAux, mAuxVideoWidth, mAuxVideoHeight,
+                mAuxVideoBitRate, &totalBitrateAux, &mWriterAux);
+        if (err != OK) {
+            return err;
+        }
+
+        sp<MetaData> metaAux = new MetaData;
+        setupMPEG4MetaData(startTimeUs, totalBitrateAux, &metaAux);
+
+        return mWriterAux->start(metaAux.get());
+    }
+
+    return OK;
 }
 
 status_t StagefrightRecorder::pause() {
@@ -1143,35 +1492,80 @@
         return UNKNOWN_ERROR;
     }
     mWriter->pause();
+
+    if (mCaptureAuxVideo) {
+        if (mWriterAux == NULL) {
+            return UNKNOWN_ERROR;
+        }
+        mWriterAux->pause();
+    }
+
+    if (mStarted) {
+        mStarted = false;
+
+        uint32_t params = 0;
+        if (mAudioSource != AUDIO_SOURCE_LIST_END) {
+            params |= IMediaPlayerService::kBatteryDataTrackAudio;
+        }
+        if (mVideoSource != VIDEO_SOURCE_LIST_END) {
+            params |= IMediaPlayerService::kBatteryDataTrackVideo;
+        }
+
+        addBatteryData(params);
+    }
+
+
     return OK;
 }
 
 status_t StagefrightRecorder::stop() {
     LOGV("stop");
     status_t err = OK;
+
+    if (mCaptureTimeLapse && mCameraSourceTimeLapse != NULL) {
+        mCameraSourceTimeLapse->startQuickReadReturns();
+        mCameraSourceTimeLapse = NULL;
+    }
+
+    if (mCaptureAuxVideo) {
+        if (mWriterAux != NULL) {
+            mWriterAux->stop();
+            mWriterAux.clear();
+        }
+    }
+
     if (mWriter != NULL) {
         err = mWriter->stop();
         mWriter.clear();
     }
 
-    if (mCamera != 0) {
-        LOGV("Disconnect camera");
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        if ((mFlags & FLAGS_HOT_CAMERA) == 0) {
-            LOGV("Camera was cold when we started, stopping preview");
-            mCamera->stopPreview();
-        }
-        mCamera->unlock();
-        mCamera.clear();
-        IPCThreadState::self()->restoreCallingIdentity(token);
-        mFlags = 0;
-    }
-
     if (mOutputFd >= 0) {
         ::close(mOutputFd);
         mOutputFd = -1;
     }
 
+    if (mCaptureAuxVideo) {
+        if (mOutputFdAux >= 0) {
+            ::close(mOutputFdAux);
+            mOutputFdAux = -1;
+        }
+    }
+
+    if (mStarted) {
+        mStarted = false;
+
+        uint32_t params = 0;
+        if (mAudioSource != AUDIO_SOURCE_LIST_END) {
+            params |= IMediaPlayerService::kBatteryDataTrackAudio;
+        }
+        if (mVideoSource != VIDEO_SOURCE_LIST_END) {
+            params |= IMediaPlayerService::kBatteryDataTrackVideo;
+        }
+
+        addBatteryData(params);
+    }
+
+
     return err;
 }
 
@@ -1196,8 +1590,11 @@
     mVideoEncoder  = VIDEO_ENCODER_H263;
     mVideoWidth    = 176;
     mVideoHeight   = 144;
-    mFrameRate     = 20;
+    mAuxVideoWidth    = 176;
+    mAuxVideoHeight   = 144;
+    mFrameRate     = -1;
     mVideoBitRate  = 192000;
+    mAuxVideoBitRate = 192000;
     mSampleRate    = 8000;
     mAudioChannels = 1;
     mAudioBitRate  = 12200;
@@ -1214,11 +1611,17 @@
     mMaxFileDurationUs = 0;
     mMaxFileSizeBytes = 0;
     mTrackEveryTimeDurationUs = 0;
-    mRotationDegrees = 0;
+    mCaptureTimeLapse = false;
+    mTimeBetweenTimeLapseFrameCaptureUs = -1;
+    mCaptureAuxVideo = false;
+    mCameraSourceSplitter = NULL;
+    mCameraSourceTimeLapse = NULL;
+    mIsMetaDataStoredInVideoBuffers = false;
     mEncoderProfiles = MediaProfiles::getInstance();
+    mRotationDegrees = 0;
 
     mOutputFd = -1;
-    mFlags = 0;
+    mOutputFdAux = -1;
 
     return OK;
 }
@@ -1255,6 +1658,8 @@
     snprintf(buffer, SIZE, "   Recorder: %p\n", this);
     snprintf(buffer, SIZE, "   Output file (fd %d):\n", mOutputFd);
     result.append(buffer);
+    snprintf(buffer, SIZE, "   Output file Auxiliary (fd %d):\n", mOutputFdAux);
+    result.append(buffer);
     snprintf(buffer, SIZE, "     File format: %d\n", mOutputFormat);
     result.append(buffer);
     snprintf(buffer, SIZE, "     Max file size (bytes): %lld\n", mMaxFileSizeBytes);
@@ -1287,8 +1692,6 @@
     result.append(buffer);
     snprintf(buffer, SIZE, "     Camera Id: %d\n", mCameraId);
     result.append(buffer);
-    snprintf(buffer, SIZE, "     Camera flags: %d\n", mFlags);
-    result.append(buffer);
     snprintf(buffer, SIZE, "     Encoder: %d\n", mVideoEncoder);
     result.append(buffer);
     snprintf(buffer, SIZE, "     Encoder profile: %d\n", mVideoEncoderProfile);
@@ -1299,10 +1702,14 @@
     result.append(buffer);
     snprintf(buffer, SIZE, "     Frame size (pixels): %dx%d\n", mVideoWidth, mVideoHeight);
     result.append(buffer);
+    snprintf(buffer, SIZE, "     Aux Frame size (pixels): %dx%d\n", mAuxVideoWidth, mAuxVideoHeight);
+    result.append(buffer);
     snprintf(buffer, SIZE, "     Frame rate (fps): %d\n", mFrameRate);
     result.append(buffer);
     snprintf(buffer, SIZE, "     Bit rate (bps): %d\n", mVideoBitRate);
     result.append(buffer);
+    snprintf(buffer, SIZE, "     Aux Bit rate (bps): %d\n", mAuxVideoBitRate);
+    result.append(buffer);
     ::write(fd, result.string(), result.size());
     return OK;
 }
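
The new clipAudioSampleRate() and clipNumberOfAudioChannels() helpers above clamp the requested audio settings into the [min, max] range advertised by MediaProfiles, mirroring the existing video clip helpers. A minimal sketch of that clamping pattern, using a hypothetical clampToRange() helper that is not part of this change:

    // Hypothetical helper illustrating the clamp-to-profile-range pattern;
    // the actual change keeps this logic inlined in each clip*() method.
    static int clampToRange(const char *what, int value, int minValue, int maxValue) {
        if (value < minValue) {
            LOGW("Intended %s (%d) is too small and will be set to (%d)",
                 what, value, minValue);
            return minValue;
        }
        if (value > maxValue) {
            LOGW("Intended %s (%d) is too large and will be set to (%d)",
                 what, value, maxValue);
            return maxValue;
        }
        return value;
    }

    // e.g. inside clipAudioSampleRate():
    //     mSampleRate = clampToRange("audio sample rate", mSampleRate,
    //                                minSampleRate, maxSampleRate);
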
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index e42df2e..2c440c1 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -19,13 +19,18 @@
 #define STAGEFRIGHT_RECORDER_H_
 
 #include <media/MediaRecorderBase.h>
+#include <camera/CameraParameters.h>
 #include <utils/String8.h>
 
 namespace android {
 
 class Camera;
+class CameraSource;
+class CameraSourceTimeLapse;
+class MediaSourceSplitter;
 struct MediaSource;
 struct MediaWriter;
+class MetaData;
 struct AudioSource;
 class MediaProfiles;
 
@@ -42,9 +47,10 @@
     virtual status_t setVideoSize(int width, int height);
     virtual status_t setVideoFrameRate(int frames_per_second);
     virtual status_t setCamera(const sp<ICamera>& camera);
-    virtual status_t setPreviewSurface(const sp<ISurface>& surface);
+    virtual status_t setPreviewSurface(const sp<Surface>& surface);
     virtual status_t setOutputFile(const char *path);
     virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+    virtual status_t setOutputFileAuxiliary(int fd);
     virtual status_t setParameters(const String8& params);
     virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
     virtual status_t prepare();
@@ -57,15 +63,10 @@
     virtual status_t dump(int fd, const Vector<String16>& args) const;
 
 private:
-    enum CameraFlags {
-        FLAGS_SET_CAMERA = 1L << 0,
-        FLAGS_HOT_CAMERA = 1L << 1,
-    };
-
-    sp<Camera> mCamera;
-    sp<ISurface> mPreviewSurface;
+    sp<ICamera> mCamera;
+    sp<Surface> mPreviewSurface;
     sp<IMediaRecorderClient> mListener;
-    sp<MediaWriter> mWriter;
+    sp<MediaWriter> mWriter, mWriterAux;
     sp<AudioSource> mAudioSourceNode;
 
     audio_source mAudioSource;
@@ -75,8 +76,9 @@
     video_encoder mVideoEncoder;
     bool mUse64BitFileOffset;
     int32_t mVideoWidth, mVideoHeight;
+    int32_t mAuxVideoWidth, mAuxVideoHeight;
     int32_t mFrameRate;
-    int32_t mVideoBitRate;
+    int32_t mVideoBitRate, mAuxVideoBitRate;
     int32_t mAudioBitRate;
     int32_t mAudioChannels;
     int32_t mSampleRate;
@@ -93,21 +95,43 @@
     int64_t mTrackEveryTimeDurationUs;
     int32_t mRotationDegrees;  // Clockwise
 
-    String8 mParams;
-    int mOutputFd;
-    int32_t mFlags;
+    bool mCaptureTimeLapse;
+    int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+    bool mCaptureAuxVideo;
+    sp<MediaSourceSplitter> mCameraSourceSplitter;
+    sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
 
+    String8 mParams;
+    int mOutputFd, mOutputFdAux;
+
+    bool mIsMetaDataStoredInVideoBuffers;
     MediaProfiles *mEncoderProfiles;
 
+    bool mStarted;
+
+    status_t setupMPEG4Recording(
+        bool useSplitCameraSource,
+        int outputFd,
+        int32_t videoWidth, int32_t videoHeight,
+        int32_t videoBitRate,
+        int32_t *totalBitRate,
+        sp<MediaWriter> *mediaWriter);
+    void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+        sp<MetaData> *meta);
     status_t startMPEG4Recording();
     status_t startAMRRecording();
     status_t startAACRecording();
     status_t startRTPRecording();
     status_t startMPEG2TSRecording();
     sp<MediaSource> createAudioSource();
-    status_t setupCameraSource();
+    status_t checkVideoEncoderCapabilities();
+    status_t checkAudioEncoderCapabilities();
+    status_t setupCameraSource(sp<CameraSource> *cameraSource);
     status_t setupAudioEncoder(const sp<MediaWriter>& writer);
-    status_t setupVideoEncoder(sp<MediaSource> *source);
+    status_t setupVideoEncoder(
+            sp<MediaSource> cameraSource,
+            int32_t videoBitRate,
+            sp<MediaSource> *source);
 
     // Encoding parameter handling utilities
     status_t setParameter(const String8 &key, const String8 &value);
@@ -115,6 +139,11 @@
     status_t setParamAudioNumberOfChannels(int32_t channels);
     status_t setParamAudioSamplingRate(int32_t sampleRate);
     status_t setParamAudioTimeScale(int32_t timeScale);
+    status_t setParamTimeLapseEnable(int32_t timeLapseEnable);
+    status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs);
+    status_t setParamAuxVideoHeight(int32_t height);
+    status_t setParamAuxVideoWidth(int32_t width);
+    status_t setParamAuxVideoEncodingBitRate(int32_t bitRate);
     status_t setParamVideoEncodingBitRate(int32_t bitRate);
     status_t setParamVideoIFramesInterval(int32_t seconds);
     status_t setParamVideoEncoderProfile(int32_t profile);
@@ -132,6 +161,9 @@
     void clipVideoFrameRate();
     void clipVideoFrameWidth();
     void clipVideoFrameHeight();
+    void clipAudioBitRate();
+    void clipAudioSampleRate();
+    void clipNumberOfAudioChannels();
 
     StagefrightRecorder(const StagefrightRecorder &);
     StagefrightRecorder &operator=(const StagefrightRecorder &);
@@ -140,4 +172,3 @@
 }  // namespace android
 
 #endif  // STAGEFRIGHT_RECORDER_H_
-
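
The mCameraSourceSplitter member declared above is what lets a single camera feed both the main and the auxiliary recording when mCaptureAuxVideo is set: startMPEG4Recording() wraps the CameraSource in a MediaSourceSplitter, and each call to setupMPEG4Recording() pulls its own client from it. A schematic sketch of that fan-out, under the assumption that each createClient() returns an independent MediaSource view of the same frames:

    // Schematic only: fan one camera source out to two encoder chains.
    sp<CameraSource> cameraSource;
    CHECK_EQ(setupCameraSource(&cameraSource), (status_t)OK);

    sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(cameraSource);

    sp<MediaSource> mainSource = splitter->createClient();
    sp<MediaSource> auxSource  = splitter->createClient();

    // The auxiliary chain additionally downsamples before encoding,
    // as setupMPEG4Recording() does when the requested size differs.
    sp<MediaSource> auxScaled =
        new VideoSourceDownSampler(auxSource, mAuxVideoWidth, mAuxVideoHeight);
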
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index 6e6c3cd..d9c3db3 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -75,9 +75,13 @@
 
 
     // All the methods below wrap the mPlayer instance.
-    virtual status_t setVideoSurface(const android::sp<android::ISurface>& s)  {
+    virtual status_t setVideoSurface(const android::sp<android::Surface>& s)  {
         return mPlayer->setVideoSurface(s);
     }
+    virtual status_t setVideoSurfaceTexture(
+            const android::sp<android::ISurfaceTexture>& st)  {
+        return mPlayer->setVideoSurfaceTexture(st);
+    }
     virtual status_t prepare() {return mPlayer->prepare();}
     virtual status_t prepareAsync()  {return mPlayer->prepareAsync();}
     virtual status_t start()  {return mPlayer->start();}
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
new file mode 100644
index 0000000..c20e279
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -0,0 +1,25 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=                       \
+        HTTPLiveSource.cpp              \
+        NuPlayer.cpp                    \
+        NuPlayerDecoder.cpp             \
+        NuPlayerDriver.cpp              \
+        NuPlayerRenderer.cpp            \
+        NuPlayerStreamListener.cpp      \
+        DecoderWrapper.cpp              \
+        StreamingSource.cpp             \
+
+LOCAL_C_INCLUDES := \
+        $(TOP)/frameworks/base/include/media/stagefright/openmax        \
+        $(TOP)/frameworks/base/media/libstagefright/include             \
+        $(TOP)/frameworks/base/media/libstagefright/mpeg2ts             \
+        $(TOP)/frameworks/base/media/libstagefright/httplive            \
+
+LOCAL_MODULE:= libstagefright_nuplayer
+
+LOCAL_MODULE_TAGS := eng
+
+include $(BUILD_STATIC_LIBRARY)
+
diff --git a/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp b/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp
new file mode 100644
index 0000000..802d1fb
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp
@@ -0,0 +1,576 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DecoderWrapper"
+#include <utils/Log.h>
+
+#include "DecoderWrapper.h"
+
+#include "AACDecoder.h"
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+struct DecoderWrapper::WrapperSource : public MediaSource {
+    WrapperSource(
+            const sp<MetaData> &meta,
+            const sp<AMessage> &notify);
+
+    virtual status_t start(MetaData *params);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options);
+
+    void queueBuffer(const sp<ABuffer> &buffer);
+    void queueEOS(status_t finalResult);
+    void clear();
+
+protected:
+    virtual ~WrapperSource();
+
+private:
+    Mutex mLock;
+    Condition mCondition;
+
+    sp<MetaData> mMeta;
+    sp<AMessage> mNotify;
+
+    List<sp<ABuffer> > mQueue;
+    status_t mFinalResult;
+
+    DISALLOW_EVIL_CONSTRUCTORS(WrapperSource);
+};
+
+DecoderWrapper::WrapperSource::WrapperSource(
+        const sp<MetaData> &meta, const sp<AMessage> &notify)
+    : mMeta(meta),
+      mNotify(notify),
+      mFinalResult(OK) {
+}
+
+DecoderWrapper::WrapperSource::~WrapperSource() {
+}
+
+status_t DecoderWrapper::WrapperSource::start(MetaData *params) {
+    return OK;
+}
+
+status_t DecoderWrapper::WrapperSource::stop() {
+    return OK;
+}
+
+sp<MetaData> DecoderWrapper::WrapperSource::getFormat() {
+    return mMeta;
+}
+
+status_t DecoderWrapper::WrapperSource::read(
+        MediaBuffer **out, const ReadOptions *options) {
+    Mutex::Autolock autoLock(mLock);
+
+    bool requestedBuffer = false;
+
+    while (mQueue.empty() && mFinalResult == OK) {
+        if (!requestedBuffer) {
+            mNotify->dup()->post();
+            requestedBuffer = true;
+        }
+
+        mCondition.wait(mLock);
+    }
+
+    if (mQueue.empty()) {
+        return mFinalResult;
+    }
+
+    sp<ABuffer> src = *mQueue.begin();
+    mQueue.erase(mQueue.begin());
+
+    MediaBuffer *dst = new MediaBuffer(src->size());
+    memcpy(dst->data(), src->data(), src->size());
+
+    int64_t timeUs;
+    CHECK(src->meta()->findInt64("timeUs", &timeUs));
+
+    dst->meta_data()->setInt64(kKeyTime, timeUs);
+
+    *out = dst;
+
+    return OK;
+}
+
+void DecoderWrapper::WrapperSource::queueBuffer(const sp<ABuffer> &buffer) {
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(buffer);
+    mCondition.broadcast();
+}
+
+void DecoderWrapper::WrapperSource::queueEOS(status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    Mutex::Autolock autoLock(mLock);
+    mFinalResult = finalResult;
+    mCondition.broadcast();
+}
+
+void DecoderWrapper::WrapperSource::clear() {
+    Mutex::Autolock autoLock(mLock);
+    mQueue.clear();
+    mFinalResult = OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct DecoderWrapper::WrapperReader : public AHandler {
+    WrapperReader(
+            const sp<MediaSource> &decoder,
+            const sp<AMessage> &notify);
+
+    void start();
+    void stop();
+    void readMore(bool flush = false);
+
+protected:
+    virtual ~WrapperReader();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatRead
+    };
+
+    sp<MediaSource> mDecoder;
+    sp<AMessage> mNotify;
+    bool mEOS;
+    bool mSentFormat;
+
+    void sendFormatChange();
+
+    DISALLOW_EVIL_CONSTRUCTORS(WrapperReader);
+};
+
+DecoderWrapper::WrapperReader::WrapperReader(
+        const sp<MediaSource> &decoder, const sp<AMessage> &notify)
+    : mDecoder(decoder),
+      mNotify(notify),
+      mEOS(false),
+      mSentFormat(false) {
+}
+
+DecoderWrapper::WrapperReader::~WrapperReader() {
+}
+
+void DecoderWrapper::WrapperReader::start() {
+    CHECK_EQ(mDecoder->start(), (status_t)OK);
+    readMore();
+}
+
+void DecoderWrapper::WrapperReader::stop() {
+    CHECK_EQ(mDecoder->stop(), (status_t)OK);
+}
+
+void DecoderWrapper::WrapperReader::readMore(bool flush) {
+    if (!flush && mEOS) {
+        return;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatRead, id());
+    msg->setInt32("flush", static_cast<int32_t>(flush));
+    msg->post();
+}
+
+void DecoderWrapper::WrapperReader::onMessageReceived(
+        const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatRead:
+        {
+            int32_t flush;
+            CHECK(msg->findInt32("flush", &flush));
+
+            MediaSource::ReadOptions options;
+            if (flush) {
+                // Dummy seek
+                options.setSeekTo(0);
+                mEOS = false;
+            }
+
+            CHECK(!mEOS);
+
+            MediaBuffer *src;
+            status_t err = mDecoder->read(&src, &options);
+
+            if (err == OK) {
+                if (!mSentFormat) {
+                    sendFormatChange();
+                    mSentFormat = true;
+                }
+
+                sp<AMessage> notify = mNotify->dup();
+
+                sp<AMessage> realNotify;
+                CHECK(notify->findMessage("real-notify", &realNotify));
+
+                realNotify->setInt32("what", ACodec::kWhatDrainThisBuffer);
+
+                sp<ABuffer> dst = new ABuffer(src->range_length());
+                memcpy(dst->data(),
+                       (const uint8_t *)src->data() + src->range_offset(),
+                       src->range_length());
+
+                int64_t timeUs;
+                CHECK(src->meta_data()->findInt64(kKeyTime, &timeUs));
+                src->release();
+                src = NULL;
+
+                dst->meta()->setInt64("timeUs", timeUs);
+
+                realNotify->setObject("buffer", dst);
+
+                notify->post();
+            } else if (err == INFO_FORMAT_CHANGED) {
+                sendFormatChange();
+
+                readMore(false /* flush */);
+            } else {
+                sp<AMessage> notify = mNotify->dup();
+
+                sp<AMessage> realNotify;
+                CHECK(notify->findMessage("real-notify", &realNotify));
+
+                realNotify->setInt32("what", ACodec::kWhatEOS);
+                mEOS = true;
+
+                notify->post();
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void DecoderWrapper::WrapperReader::sendFormatChange() {
+    sp<AMessage> notify = mNotify->dup();
+
+    sp<AMessage> realNotify;
+    CHECK(notify->findMessage("real-notify", &realNotify));
+
+    realNotify->setInt32("what", ACodec::kWhatOutputFormatChanged);
+
+    sp<MetaData> meta = mDecoder->getFormat();
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    realNotify->setString("mime", mime);
+
+    if (!strncasecmp("audio/", mime, 6)) {
+        int32_t numChannels;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+
+        int32_t sampleRate;
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+        realNotify->setInt32("channel-count", numChannels);
+        realNotify->setInt32("sample-rate", sampleRate);
+    } else {
+        CHECK(!strncasecmp("video/", mime, 6));
+
+        int32_t width, height;
+        CHECK(meta->findInt32(kKeyWidth, &width));
+        CHECK(meta->findInt32(kKeyHeight, &height));
+
+        realNotify->setInt32("width", width);
+        realNotify->setInt32("height", height);
+
+        int32_t cropLeft, cropTop, cropRight, cropBottom;
+        if (!meta->findRect(
+                    kKeyCropRect,
+                    &cropLeft, &cropTop, &cropRight, &cropBottom)) {
+            cropLeft = 0;
+            cropTop = 0;
+            cropRight = width - 1;
+            cropBottom = height - 1;
+        }
+
+        realNotify->setRect("crop", cropLeft, cropTop, cropRight, cropBottom);
+    }
+
+    notify->post();
+
+    mSentFormat = true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+DecoderWrapper::DecoderWrapper()
+    : mNumOutstandingInputBuffers(0),
+      mNumOutstandingOutputBuffers(0),
+      mNumPendingDecodes(0),
+      mFlushing(false) {
+}
+
+DecoderWrapper::~DecoderWrapper() {
+}
+
+void DecoderWrapper::setNotificationMessage(const sp<AMessage> &msg) {
+    mNotify = msg;
+}
+
+void DecoderWrapper::initiateSetup(const sp<AMessage> &msg) {
+    msg->setWhat(kWhatSetup);
+    msg->setTarget(id());
+    msg->post();
+}
+
+void DecoderWrapper::initiateShutdown() {
+    (new AMessage(kWhatShutdown, id()))->post();
+}
+
+void DecoderWrapper::signalFlush() {
+    (new AMessage(kWhatFlush, id()))->post();
+}
+
+void DecoderWrapper::signalResume() {
+    (new AMessage(kWhatResume, id()))->post();
+}
+
+void DecoderWrapper::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatSetup:
+            onSetup(msg);
+            break;
+
+        case kWhatShutdown:
+            onShutdown();
+            break;
+
+        case kWhatInputDataRequested:
+        {
+            postFillBuffer();
+            ++mNumOutstandingInputBuffers;
+            break;
+        }
+
+        case kWhatInputBufferFilled:
+        {
+            CHECK_GT(mNumOutstandingInputBuffers, 0);
+            --mNumOutstandingInputBuffers;
+
+            if (mFlushing) {
+                mSource->queueEOS(INFO_DISCONTINUITY);
+
+                completeFlushIfPossible();
+                break;
+            }
+
+            sp<RefBase> obj;
+            if (!msg->findObject("buffer", &obj)) {
+                int32_t err = OK;
+                CHECK(msg->findInt32("err", &err));
+
+                mSource->queueEOS(err);
+                break;
+            }
+
+            sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+            mSource->queueBuffer(buffer);
+            break;
+        }
+
+        case kWhatFillBufferDone:
+        {
+            sp<AMessage> notify;
+            CHECK(msg->findMessage("real-notify", &notify));
+
+            int32_t what;
+            CHECK(notify->findInt32("what", &what));
+
+            if (what == ACodec::kWhatDrainThisBuffer) {
+                CHECK_GT(mNumPendingDecodes, 0);
+                --mNumPendingDecodes;
+
+                sp<AMessage> reply =
+                    new AMessage(kWhatOutputBufferDrained, id());
+
+                notify->setMessage("reply", reply);
+
+                ++mNumOutstandingOutputBuffers;
+            } else if (what == ACodec::kWhatEOS) {
+                CHECK_GT(mNumPendingDecodes, 0);
+                --mNumPendingDecodes;
+
+                if (mFlushing) {
+                    completeFlushIfPossible();
+                    break;
+                }
+            }
+
+            notify->post();
+            break;
+        }
+
+        case kWhatOutputBufferDrained:
+        {
+            CHECK_GT(mNumOutstandingOutputBuffers, 0);
+            --mNumOutstandingOutputBuffers;
+
+            if (mFlushing) {
+                completeFlushIfPossible();
+                break;
+            }
+
+            ++mNumPendingDecodes;
+            mReader->readMore();
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            onFlush();
+            break;
+        }
+
+        case kWhatResume:
+        {
+            onResume();
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void DecoderWrapper::onSetup(const sp<AMessage> &msg) {
+    AString mime;
+    CHECK(msg->findString("mime", &mime));
+
+    CHECK(!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC));
+
+    int32_t numChannels, sampleRate;
+    CHECK(msg->findInt32("channel-count", &numChannels));
+    CHECK(msg->findInt32("sample-rate", &sampleRate));
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("esds", &obj));
+    sp<ABuffer> esds = static_cast<ABuffer *>(obj.get());
+
+    sp<MetaData> meta = new MetaData;
+    meta->setCString(kKeyMIMEType, mime.c_str());
+    meta->setInt32(kKeySampleRate, sampleRate);
+    meta->setInt32(kKeyChannelCount, numChannels);
+    meta->setData(kKeyESDS, 0, esds->data(), esds->size());
+
+    mSource = new WrapperSource(
+            meta, new AMessage(kWhatInputDataRequested, id()));
+
+    sp<MediaSource> decoder = new AACDecoder(mSource);
+
+    mReaderLooper = new ALooper;
+    mReaderLooper->setName("DecoderWrapper looper");
+
+    mReaderLooper->start(
+            false, /* runOnCallingThread */
+            false, /* canCallJava */
+            PRIORITY_AUDIO);
+
+    sp<AMessage> notify = new AMessage(kWhatFillBufferDone, id());
+    notify->setMessage("real-notify", mNotify);
+
+    mReader = new WrapperReader(decoder, notify);
+    mReaderLooper->registerHandler(mReader);
+
+    mReader->start();
+    ++mNumPendingDecodes;
+}
+
+void DecoderWrapper::onShutdown() {
+    mReaderLooper->stop();
+    mReaderLooper.clear();
+
+    mReader->stop();
+    mReader.clear();
+
+    mSource.clear();
+
+    mNumOutstandingInputBuffers = 0;
+    mNumOutstandingOutputBuffers = 0;
+    mNumPendingDecodes = 0;
+    mFlushing = false;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", ACodec::kWhatShutdownCompleted);
+    notify->post();
+}
+
+void DecoderWrapper::postFillBuffer() {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", ACodec::kWhatFillThisBuffer);
+    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
+    notify->setMessage("reply", reply);
+    notify->post();
+}
+
+void DecoderWrapper::onFlush() {
+    CHECK(!mFlushing);
+    mFlushing = true;
+
+    completeFlushIfPossible();
+}
+
+void DecoderWrapper::completeFlushIfPossible() {
+    CHECK(mFlushing);
+
+    if (mNumOutstandingInputBuffers > 0
+            || mNumOutstandingOutputBuffers > 0
+            || mNumPendingDecodes > 0) {
+        return;
+    }
+
+    mFlushing = false;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", ACodec::kWhatFlushCompleted);
+    notify->post();
+}
+
+void DecoderWrapper::onResume() {
+    CHECK(!mFlushing);
+
+    ++mNumPendingDecodes;
+
+    mSource->clear();
+    mReader->readMore(true /* flush */);
+}
+
+}  // namespace android
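
WrapperSource above bridges the push-style AMessage traffic and the pull-style MediaSource::read() interface: read() blocks on a condition variable until a buffer has been queued or an EOS status has been recorded (it also posts a notify to request more input, which the sketch below omits). A stripped-down illustration of that producer/consumer core:

    // Illustrative only; names simplified, MediaBuffer conversion omitted.
    struct BlockingBufferQueue {
        BlockingBufferQueue() : mFinalResult(OK) {}

        void queueBuffer(const sp<ABuffer> &buffer) {
            Mutex::Autolock autoLock(mLock);
            mQueue.push_back(buffer);
            mCondition.broadcast();          // wake a blocked reader
        }

        void queueEOS(status_t finalResult) {
            Mutex::Autolock autoLock(mLock);
            mFinalResult = finalResult;      // any non-OK status ends the stream
            mCondition.broadcast();
        }

        status_t dequeue(sp<ABuffer> *out) {
            Mutex::Autolock autoLock(mLock);
            while (mQueue.empty() && mFinalResult == OK) {
                mCondition.wait(mLock);      // block until data or EOS arrives
            }
            if (mQueue.empty()) {
                return mFinalResult;         // EOS or error
            }
            *out = *mQueue.begin();
            mQueue.erase(mQueue.begin());
            return OK;
        }

    private:
        Mutex mLock;
        Condition mCondition;
        List<sp<ABuffer> > mQueue;
        status_t mFinalResult;
    };
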
diff --git a/media/libmediaplayerservice/nuplayer/DecoderWrapper.h b/media/libmediaplayerservice/nuplayer/DecoderWrapper.h
new file mode 100644
index 0000000..b9be12c
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/DecoderWrapper.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DECODER_WRAPPER_H_
+
+#define DECODER_WRAPPER_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct MediaSource;
+
+struct DecoderWrapper : public AHandler {
+    DecoderWrapper();
+
+    void setNotificationMessage(const sp<AMessage> &msg);
+    void initiateSetup(const sp<AMessage> &msg);
+    void initiateShutdown();
+    void signalFlush();
+    void signalResume();
+
+protected:
+    virtual ~DecoderWrapper();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    struct WrapperSource;
+    struct WrapperReader;
+
+    enum {
+        kWhatSetup,
+        kWhatInputBufferFilled,
+        kWhatOutputBufferDrained,
+        kWhatShutdown,
+        kWhatFillBufferDone,
+        kWhatInputDataRequested,
+        kWhatFlush,
+        kWhatResume,
+    };
+
+    sp<AMessage> mNotify;
+
+    sp<WrapperSource> mSource;
+
+    sp<ALooper> mReaderLooper;
+    sp<WrapperReader> mReader;
+
+    int32_t mNumOutstandingInputBuffers;
+    int32_t mNumOutstandingOutputBuffers;
+    int32_t mNumPendingDecodes;
+    bool mFlushing;
+
+    void onSetup(const sp<AMessage> &msg);
+    void onShutdown();
+    void onFlush();
+    void onResume();
+
+    void postFillBuffer();
+    void completeFlushIfPossible();
+
+    DISALLOW_EVIL_CONSTRUCTORS(DecoderWrapper);
+};
+
+}  // namespace android
+
+#endif  // DECODER_WRAPPER_H_
+
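
Both DecoderWrapper and its WrapperReader are AHandlers: every state change is requested by posting an AMessage to the handler's id() and performed on the looper thread inside onMessageReceived(), so the counters above need no explicit locking. A minimal, hypothetical handler showing that round-trip (EchoHandler is not part of this change and assumes the stagefright foundation headers):

    struct EchoHandler : public AHandler {
        enum { kWhatEcho };

        void signalEcho(const char *text) {
            sp<AMessage> msg = new AMessage(kWhatEcho, id());
            msg->setString("text", text);
            msg->post();                      // delivered on the looper thread
        }

    protected:
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            switch (msg->what()) {
                case kWhatEcho:
                {
                    AString text;
                    CHECK(msg->findString("text", &text));
                    LOGV("echo: %s", text.c_str());
                    break;
                }

                default:
                    TRESPASS();
                    break;
            }
        }
    };

    // Usage: register with a started ALooper before posting.
    //     sp<ALooper> looper = new ALooper;
    //     looper->start();
    //     sp<EchoHandler> handler = new EchoHandler;
    //     looper->registerHandler(handler);
    //     handler->signalEcho("hello");
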
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
new file mode 100644
index 0000000..d07ea1b
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -0,0 +1,157 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HTTPLiveSource"
+#include <utils/Log.h>
+
+#include "HTTPLiveSource.h"
+
+#include "ATSParser.h"
+#include "AnotherPacketSource.h"
+#include "LiveDataSource.h"
+#include "LiveSession.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+NuPlayer::HTTPLiveSource::HTTPLiveSource(const char *url, uint32_t flags)
+    : mURL(url),
+      mFlags(flags),
+      mEOS(false),
+      mOffset(0) {
+}
+
+NuPlayer::HTTPLiveSource::~HTTPLiveSource() {
+    mLiveSession->disconnect();
+    mLiveLooper->stop();
+}
+
+void NuPlayer::HTTPLiveSource::start() {
+    mLiveLooper = new ALooper;
+    mLiveLooper->setName("http live");
+    mLiveLooper->start();
+
+    mLiveSession = new LiveSession(
+            (mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0);
+
+    mLiveLooper->registerHandler(mLiveSession);
+
+    mLiveSession->connect(mURL.c_str());
+
+    mTSParser = new ATSParser;
+}
+
+sp<MetaData> NuPlayer::HTTPLiveSource::getFormat(bool audio) {
+    ATSParser::SourceType type =
+        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;
+
+    sp<AnotherPacketSource> source =
+        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+bool NuPlayer::HTTPLiveSource::feedMoreTSData() {
+    if (mEOS) {
+        return false;
+    }
+
+    sp<LiveDataSource> source =
+        static_cast<LiveDataSource *>(mLiveSession->getDataSource().get());
+
+    for (int32_t i = 0; i < 50; ++i) {
+        char buffer[188];
+        ssize_t n = source->readAtNonBlocking(mOffset, buffer, sizeof(buffer));
+
+        if (n == -EWOULDBLOCK) {
+            break;
+        } else if (n < 0) {
+            LOGI("input data EOS reached.");
+            mTSParser->signalEOS(n);
+            mEOS = true;
+            break;
+        } else {
+            if (buffer[0] == 0x00) {
+                // XXX legacy
+                sp<AMessage> extra;
+                mTSParser->signalDiscontinuity(
+                        buffer[1] == 0x00
+                            ? ATSParser::DISCONTINUITY_SEEK
+                            : ATSParser::DISCONTINUITY_FORMATCHANGE,
+                        extra);
+            } else {
+                mTSParser->feedTSPacket(buffer, sizeof(buffer));
+            }
+
+            mOffset += n;
+        }
+    }
+
+    return true;
+}
+
+status_t NuPlayer::HTTPLiveSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    ATSParser::SourceType type =
+        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;
+
+    sp<AnotherPacketSource> source =
+        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+
+    if (source == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        return finalResult == OK ? -EWOULDBLOCK : finalResult;
+    }
+
+    return source->dequeueAccessUnit(accessUnit);
+}
+
+status_t NuPlayer::HTTPLiveSource::getDuration(int64_t *durationUs) {
+    return mLiveSession->getDuration(durationUs);
+}
+
+status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) {
+    // We need to make sure we're not seeking until we have seen the very
+    // first PTS timestamp of the whole stream.
+    while (!mTSParser->PTSTimeDeltaEstablished() && feedMoreTSData()) {
+        usleep(100000);
+    }
+
+    mLiveSession->seekTo(seekTimeUs);
+
+    return OK;
+}
+
+bool NuPlayer::HTTPLiveSource::isSeekable() {
+    return mLiveSession->isSeekable();
+}
+
+}  // namespace android
+
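
feedMoreTSData() above drains at most 50 transport-stream packets per call: readAtNonBlocking() returning -EWOULDBLOCK just means nothing is buffered yet, any other negative value is treated as the final status, a 0x00 lead byte marks an in-band discontinuity, and everything else is a 188-byte packet for the ATSParser. A reduced sketch of that polling loop, with the discontinuity branch collapsed into a comment:

    // Condensed sketch of the per-call packet pump in feedMoreTSData().
    static const size_t kTSPacketSize = 188;    // fixed MPEG-2 TS packet size

    bool pumpOnce(const sp<LiveDataSource> &source,
                  const sp<ATSParser> &parser,
                  off64_t *offset, bool *eos) {
        for (int32_t i = 0; i < 50; ++i) {
            char packet[kTSPacketSize];
            ssize_t n = source->readAtNonBlocking(*offset, packet, sizeof(packet));

            if (n == -EWOULDBLOCK) {
                break;                   // nothing buffered yet, try again later
            } else if (n < 0) {
                parser->signalEOS(n);    // propagate the final status downstream
                *eos = true;
                break;
            }

            // (The real code checks for a 0x00 lead byte here and signals a
            //  seek or format-change discontinuity instead of parsing.)
            parser->feedTSPacket(packet, sizeof(packet));
            *offset += n;
        }
        return !*eos;
    }
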
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
new file mode 100644
index 0000000..a8ce7f4
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HTTP_LIVE_SOURCE_H_
+
+#define HTTP_LIVE_SOURCE_H_
+
+#include "NuPlayer.h"
+#include "NuPlayerSource.h"
+
+namespace android {
+
+struct ATSParser;
+struct LiveSession;
+
+struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
+    enum Flags {
+        // Don't log any URLs.
+        kFlagIncognito = 1,
+    };
+    HTTPLiveSource(const char *url, uint32_t flags = 0);
+
+    virtual void start();
+
+    // Returns true iff more data was available, false on EOS.
+    virtual bool feedMoreTSData();
+
+    virtual sp<MetaData> getFormat(bool audio);
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t seekTo(int64_t seekTimeUs);
+    virtual bool isSeekable();
+
+protected:
+    virtual ~HTTPLiveSource();
+
+private:
+    AString mURL;
+    uint32_t mFlags;
+    bool mEOS;
+    off64_t mOffset;
+    sp<ALooper> mLiveLooper;
+    sp<LiveSession> mLiveSession;
+    sp<ATSParser> mTSParser;
+
+    DISALLOW_EVIL_CONSTRUCTORS(HTTPLiveSource);
+};
+
+}  // namespace android
+
+#endif  // HTTP_LIVE_SOURCE_H_
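
The Source contract above is deliberately non-blocking: dequeueAccessUnit() returns -EWOULDBLOCK while data is still being buffered, and any other non-OK status is final. A sketch of how a caller is expected to drive it, loosely mirroring what NuPlayer does when a decoder asks for input (the real caller reposts a message rather than spinning):

    // Illustrative consumer of a NuPlayer::Source; error handling trimmed.
    status_t drainOneAccessUnit(const sp<NuPlayer::Source> &source, bool audio) {
        sp<ABuffer> accessUnit;
        status_t err = source->dequeueAccessUnit(audio, &accessUnit);

        if (err == -EWOULDBLOCK) {
            source->feedMoreTSData();   // keep the pipeline fed, retry later
            return -EWOULDBLOCK;
        }
        if (err != OK) {
            return err;                 // final result, e.g. end of stream
        }

        // Each access unit carries its presentation time in its "timeUs"
        // meta entry, as used elsewhere in this change.
        int64_t timeUs;
        CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
        LOGV("got %s access unit at %lld us", audio ? "audio" : "video", timeUs);
        return OK;
    }
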
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
new file mode 100644
index 0000000..d439f6e
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -0,0 +1,731 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer"
+#include <utils/Log.h>
+
+#include "NuPlayer.h"
+
+#include "HTTPLiveSource.h"
+#include "NuPlayerDecoder.h"
+#include "NuPlayerDriver.h"
+#include "NuPlayerRenderer.h"
+#include "NuPlayerSource.h"
+#include "StreamingSource.h"
+
+#include "ATSParser.h"
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <surfaceflinger/Surface.h>
+#include <gui/ISurfaceTexture.h>
+
+namespace android {
+
+////////////////////////////////////////////////////////////////////////////////
+
+NuPlayer::NuPlayer()
+    : mAudioEOS(false),
+      mVideoEOS(false),
+      mScanSourcesPending(false),
+      mScanSourcesGeneration(0),
+      mFlushingAudio(NONE),
+      mFlushingVideo(NONE),
+      mResetInProgress(false),
+      mResetPostponed(false) {
+}
+
+NuPlayer::~NuPlayer() {
+}
+
+void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) {
+    mDriver = driver;
+}
+
+void NuPlayer::setDataSource(const sp<IStreamSource> &source) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
+
+    msg->setObject("source", new StreamingSource(source));
+    msg->post();
+}
+
+void NuPlayer::setDataSource(
+        const char *url, const KeyedVector<String8, String8> *headers) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
+
+    uint32_t flags = 0;
+
+    if (headers) {
+        ssize_t index = headers->indexOfKey(String8("x-hide-urls-from-log"));
+
+        if (index >= 0) {
+            flags |= HTTPLiveSource::kFlagIncognito;
+        }
+    }
+
+    msg->setObject("source", new HTTPLiveSource(url, flags));
+    msg->post();
+}
+
+void NuPlayer::setVideoSurface(const sp<Surface> &surface) {
+    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());
+    msg->setObject("native-window", new NativeWindowWrapper(surface));
+    msg->post();
+}
+
+void NuPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
+    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());
+    sp<SurfaceTextureClient> surfaceTextureClient(surfaceTexture != NULL ?
+                new SurfaceTextureClient(surfaceTexture) : NULL);
+    msg->setObject("native-window", new NativeWindowWrapper(surfaceTextureClient));
+    msg->post();
+}
+
+void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
+    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
+    msg->setObject("sink", sink);
+    msg->post();
+}
+
+void NuPlayer::start() {
+    (new AMessage(kWhatStart, id()))->post();
+}
+
+void NuPlayer::pause() {
+    (new AMessage(kWhatPause, id()))->post();
+}
+
+void NuPlayer::resume() {
+    (new AMessage(kWhatResume, id()))->post();
+}
+
+void NuPlayer::resetAsync() {
+    (new AMessage(kWhatReset, id()))->post();
+}
+
+void NuPlayer::seekToAsync(int64_t seekTimeUs) {
+    sp<AMessage> msg = new AMessage(kWhatSeek, id());
+    msg->setInt64("seekTimeUs", seekTimeUs);
+    msg->post();
+}
+
+// static
+bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
+    switch (state) {
+        case FLUSHING_DECODER:
+            if (needShutdown != NULL) {
+                *needShutdown = false;
+            }
+            return true;
+
+        case FLUSHING_DECODER_SHUTDOWN:
+            if (needShutdown != NULL) {
+                *needShutdown = true;
+            }
+            return true;
+
+        default:
+            return false;
+    }
+}
+
+void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatSetDataSource:
+        {
+            LOGV("kWhatSetDataSource");
+
+            CHECK(mSource == NULL);
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("source", &obj));
+
+            mSource = static_cast<Source *>(obj.get());
+            break;
+        }
+
+        case kWhatSetVideoNativeWindow:
+        {
+            LOGV("kWhatSetVideoNativeWindow");
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("native-window", &obj));
+
+            mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
+            break;
+        }
+
+        case kWhatSetAudioSink:
+        {
+            LOGV("kWhatSetAudioSink");
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("sink", &obj));
+
+            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
+            break;
+        }
+
+        case kWhatStart:
+        {
+            LOGV("kWhatStart");
+
+            mAudioEOS = false;
+            mVideoEOS = false;
+            mSkipRenderingAudioUntilMediaTimeUs = -1;
+            mSkipRenderingVideoUntilMediaTimeUs = -1;
+
+            mSource->start();
+
+            mRenderer = new Renderer(
+                    mAudioSink,
+                    new AMessage(kWhatRendererNotify, id()));
+
+            looper()->registerHandler(mRenderer);
+
+            postScanSources();
+            break;
+        }
+
+        case kWhatScanSources:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+            if (generation != mScanSourcesGeneration) {
+                // Drop obsolete msg.
+                break;
+            }
+
+            mScanSourcesPending = false;
+
+            LOGV("scanning sources haveAudio=%d, haveVideo=%d",
+                 mAudioDecoder != NULL, mVideoDecoder != NULL);
+
+            instantiateDecoder(false, &mVideoDecoder);
+
+            if (mAudioSink != NULL) {
+                instantiateDecoder(true, &mAudioDecoder);
+            }
+
+            if (!mSource->feedMoreTSData()) {
+                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
+                    // We're not currently decoding anything (no audio or
+                    // video tracks found) and we just ran out of input data.
+                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
+                }
+                break;
+            }
+
+            if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
+                msg->post(100000ll);
+                mScanSourcesPending = true;
+            }
+            break;
+        }
+
+        case kWhatVideoNotify:
+        case kWhatAudioNotify:
+        {
+            bool audio = msg->what() == kWhatAudioNotify;
+
+            sp<AMessage> codecRequest;
+            CHECK(msg->findMessage("codec-request", &codecRequest));
+
+            int32_t what;
+            CHECK(codecRequest->findInt32("what", &what));
+
+            if (what == ACodec::kWhatFillThisBuffer) {
+                status_t err = feedDecoderInputData(
+                        audio, codecRequest);
+
+                if (err == -EWOULDBLOCK) {
+                    if (mSource->feedMoreTSData()) {
+                        msg->post();
+                    }
+                }
+            } else if (what == ACodec::kWhatEOS) {
+                mRenderer->queueEOS(audio, ERROR_END_OF_STREAM);
+            } else if (what == ACodec::kWhatFlushCompleted) {
+                bool needShutdown;
+
+                if (audio) {
+                    CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
+                    mFlushingAudio = FLUSHED;
+                } else {
+                    CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
+                    mFlushingVideo = FLUSHED;
+                }
+
+                LOGV("decoder %s flush completed", audio ? "audio" : "video");
+
+                if (needShutdown) {
+                    LOGV("initiating %s decoder shutdown",
+                         audio ? "audio" : "video");
+
+                    (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();
+
+                    if (audio) {
+                        mFlushingAudio = SHUTTING_DOWN_DECODER;
+                    } else {
+                        mFlushingVideo = SHUTTING_DOWN_DECODER;
+                    }
+                }
+
+                finishFlushIfPossible();
+            } else if (what == ACodec::kWhatOutputFormatChanged) {
+                if (audio) {
+                    int32_t numChannels;
+                    CHECK(codecRequest->findInt32("channel-count", &numChannels));
+
+                    int32_t sampleRate;
+                    CHECK(codecRequest->findInt32("sample-rate", &sampleRate));
+
+                    LOGV("Audio output format changed to %d Hz, %d channels",
+                         sampleRate, numChannels);
+
+                    mAudioSink->close();
+                    CHECK_EQ(mAudioSink->open(sampleRate, numChannels), (status_t)OK);
+                    mAudioSink->start();
+
+                    mRenderer->signalAudioSinkChanged();
+                } else {
+                    // video
+
+                    int32_t width, height;
+                    CHECK(codecRequest->findInt32("width", &width));
+                    CHECK(codecRequest->findInt32("height", &height));
+
+                    int32_t cropLeft, cropTop, cropRight, cropBottom;
+                    CHECK(codecRequest->findRect(
+                                "crop",
+                                &cropLeft, &cropTop, &cropRight, &cropBottom));
+
+                    LOGV("Video output format changed to %d x %d "
+                         "(crop: %d, %d, %d, %d)",
+                         width, height,
+                         cropLeft, cropTop, cropRight, cropBottom);
+
+                    notifyListener(
+                            MEDIA_SET_VIDEO_SIZE,
+                            cropRight - cropLeft + 1,
+                            cropBottom - cropTop + 1);
+                }
+            } else if (what == ACodec::kWhatShutdownCompleted) {
+                LOGV("%s shutdown completed", audio ? "audio" : "video");
+                if (audio) {
+                    mAudioDecoder.clear();
+
+                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
+                    mFlushingAudio = SHUT_DOWN;
+                } else {
+                    mVideoDecoder.clear();
+
+                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
+                    mFlushingVideo = SHUT_DOWN;
+                }
+
+                finishFlushIfPossible();
+            } else {
+                CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);
+
+                renderBuffer(audio, codecRequest);
+            }
+
+            break;
+        }
+
+        case kWhatRendererNotify:
+        {
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == Renderer::kWhatEOS) {
+                int32_t audio;
+                CHECK(msg->findInt32("audio", &audio));
+
+                if (audio) {
+                    mAudioEOS = true;
+                } else {
+                    mVideoEOS = true;
+                }
+
+                LOGV("reached %s EOS", audio ? "audio" : "video");
+
+                if ((mAudioEOS || mAudioDecoder == NULL)
+                        && (mVideoEOS || mVideoDecoder == NULL)) {
+                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
+                }
+            } else if (what == Renderer::kWhatPosition) {
+                int64_t positionUs;
+                CHECK(msg->findInt64("positionUs", &positionUs));
+
+                if (mDriver != NULL) {
+                    sp<NuPlayerDriver> driver = mDriver.promote();
+                    if (driver != NULL) {
+                        driver->notifyPosition(positionUs);
+                    }
+                }
+            } else {
+                CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete);
+
+                int32_t audio;
+                CHECK(msg->findInt32("audio", &audio));
+
+                LOGV("renderer %s flush completed.", audio ? "audio" : "video");
+            }
+            break;
+        }
+
+        case kWhatMoreDataQueued:
+        {
+            break;
+        }
+
+        case kWhatReset:
+        {
+            LOGV("kWhatReset");
+
+            if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
+                // We're currently flushing, postpone the reset until that's
+                // completed.
+
+                LOGV("postponing reset");
+
+                mResetPostponed = true;
+                break;
+            }
+
+            if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
+                finishReset();
+                break;
+            }
+
+            if (mAudioDecoder != NULL) {
+                flushDecoder(true /* audio */, true /* needShutdown */);
+            }
+
+            if (mVideoDecoder != NULL) {
+                flushDecoder(false /* audio */, true /* needShutdown */);
+            }
+
+            mResetInProgress = true;
+            break;
+        }
+
+        case kWhatSeek:
+        {
+            int64_t seekTimeUs;
+            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+
+            LOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)",
+                 seekTimeUs, seekTimeUs / 1E6);
+
+            mSource->seekTo(seekTimeUs);
+
+            if (mDriver != NULL) {
+                sp<NuPlayerDriver> driver = mDriver.promote();
+                if (driver != NULL) {
+                    driver->notifySeekComplete();
+                }
+            }
+
+            break;
+        }
+
+        case kWhatPause:
+        {
+            CHECK(mRenderer != NULL);
+            mRenderer->pause();
+            break;
+        }
+
+        case kWhatResume:
+        {
+            CHECK(mRenderer != NULL);
+            mRenderer->resume();
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer::finishFlushIfPossible() {
+    if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) {
+        return;
+    }
+
+    if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) {
+        return;
+    }
+
+    LOGV("both audio and video are flushed now.");
+
+    mRenderer->signalTimeDiscontinuity();
+
+    if (mAudioDecoder != NULL) {
+        mAudioDecoder->signalResume();
+    }
+
+    if (mVideoDecoder != NULL) {
+        mVideoDecoder->signalResume();
+    }
+
+    mFlushingAudio = NONE;
+    mFlushingVideo = NONE;
+
+    if (mResetInProgress) {
+        LOGV("reset completed");
+
+        mResetInProgress = false;
+        finishReset();
+    } else if (mResetPostponed) {
+        (new AMessage(kWhatReset, id()))->post();
+        mResetPostponed = false;
+    } else if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
+        postScanSources();
+    }
+}
+
+void NuPlayer::finishReset() {
+    CHECK(mAudioDecoder == NULL);
+    CHECK(mVideoDecoder == NULL);
+
+    mRenderer.clear();
+    mSource.clear();
+
+    if (mDriver != NULL) {
+        sp<NuPlayerDriver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifyResetComplete();
+        }
+    }
+}
+
+void NuPlayer::postScanSources() {
+    if (mScanSourcesPending) {
+        return;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatScanSources, id());
+    msg->setInt32("generation", mScanSourcesGeneration);
+    msg->post();
+
+    mScanSourcesPending = true;
+}
+
+status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
+    if (*decoder != NULL) {
+        return OK;
+    }
+
+    sp<MetaData> meta = mSource->getFormat(audio);
+
+    if (meta == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    sp<AMessage> notify =
+        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
+                     id());
+
+    *decoder = audio ? new Decoder(notify) :
+                       new Decoder(notify, mNativeWindow);
+    looper()->registerHandler(*decoder);
+
+    (*decoder)->configure(meta);
+
+    int64_t durationUs;
+    if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
+        sp<NuPlayerDriver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifyDuration(durationUs);
+        }
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
+    sp<AMessage> reply;
+    CHECK(msg->findMessage("reply", &reply));
+
+    if ((audio && IsFlushingState(mFlushingAudio))
+            || (!audio && IsFlushingState(mFlushingVideo))) {
+        reply->setInt32("err", INFO_DISCONTINUITY);
+        reply->post();
+        return OK;
+    }
+
+    sp<ABuffer> accessUnit;
+    status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);
+
+    if (err == -EWOULDBLOCK) {
+        return err;
+    } else if (err != OK) {
+        if (err == INFO_DISCONTINUITY) {
+            int32_t type;
+            CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+            bool formatChange =
+                type == ATSParser::DISCONTINUITY_FORMATCHANGE;
+
+            LOGV("%s discontinuity (formatChange=%d)",
+                 audio ? "audio" : "video", formatChange);
+
+            if (audio) {
+                mSkipRenderingAudioUntilMediaTimeUs = -1;
+            } else {
+                mSkipRenderingVideoUntilMediaTimeUs = -1;
+            }
+
+            sp<AMessage> extra;
+            if (accessUnit->meta()->findMessage("extra", &extra)
+                    && extra != NULL) {
+                int64_t resumeAtMediaTimeUs;
+                if (extra->findInt64(
+                            "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
+                    LOGI("suppressing rendering of %s until %lld us",
+                            audio ? "audio" : "video", resumeAtMediaTimeUs);
+
+                    if (audio) {
+                        mSkipRenderingAudioUntilMediaTimeUs =
+                            resumeAtMediaTimeUs;
+                    } else {
+                        mSkipRenderingVideoUntilMediaTimeUs =
+                            resumeAtMediaTimeUs;
+                    }
+                }
+            }
+
+            flushDecoder(audio, formatChange);
+        }
+
+        reply->setInt32("err", err);
+        reply->post();
+        return OK;
+    }
+
+    // LOGV("returned a valid buffer of %s data", audio ? "audio" : "video");
+
+#if 0
+    int64_t mediaTimeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+    LOGV("feeding %s input buffer at media time %.2f secs",
+         audio ? "audio" : "video",
+         mediaTimeUs / 1E6);
+#endif
+
+    reply->setObject("buffer", accessUnit);
+    reply->post();
+
+    return OK;
+}
+
+void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
+    // LOGV("renderBuffer %s", audio ? "audio" : "video");
+
+    sp<AMessage> reply;
+    CHECK(msg->findMessage("reply", &reply));
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("buffer", &obj));
+
+    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+    int64_t &skipUntilMediaTimeUs =
+        audio
+            ? mSkipRenderingAudioUntilMediaTimeUs
+            : mSkipRenderingVideoUntilMediaTimeUs;
+
+    if (skipUntilMediaTimeUs >= 0) {
+        int64_t mediaTimeUs;
+        CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+        if (mediaTimeUs < skipUntilMediaTimeUs) {
+            LOGV("dropping %s buffer at time %lld as requested.",
+                 audio ? "audio" : "video",
+                 mediaTimeUs);
+
+            reply->post();
+            return;
+        }
+
+        skipUntilMediaTimeUs = -1;
+    }
+
+    mRenderer->queueBuffer(audio, buffer, reply);
+}
+
+void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
+    if (mDriver == NULL) {
+        return;
+    }
+
+    sp<NuPlayerDriver> driver = mDriver.promote();
+
+    if (driver == NULL) {
+        return;
+    }
+
+    driver->sendEvent(msg, ext1, ext2);
+}
+
+void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
+    // Make sure we don't continue to scan sources until we finish flushing.
+    ++mScanSourcesGeneration;
+    mScanSourcesPending = false;
+
+    (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
+    mRenderer->flush(audio);
+
+    FlushStatus newStatus =
+        needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;
+
+    if (audio) {
+        CHECK(mFlushingAudio == NONE
+                || mFlushingAudio == AWAITING_DISCONTINUITY);
+
+        mFlushingAudio = newStatus;
+
+        if (mFlushingVideo == NONE) {
+            mFlushingVideo = (mVideoDecoder != NULL)
+                ? AWAITING_DISCONTINUITY
+                : FLUSHED;
+        }
+    } else {
+        CHECK(mFlushingVideo == NONE
+                || mFlushingVideo == AWAITING_DISCONTINUITY);
+
+        mFlushingVideo = newStatus;
+
+        if (mFlushingAudio == NONE) {
+            mFlushingAudio = (mAudioDecoder != NULL)
+                ? AWAITING_DISCONTINUITY
+                : FLUSHED;
+        }
+    }
+}
+
+}  // namespace android
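
The flush bookkeeping above leans on the IsFlushingState() helper that is declared in NuPlayer.h below; its definition sits earlier in NuPlayer.cpp and is not part of this hunk. As orientation only, a standalone sketch that is consistent with the call sites (true for the two FLUSHING_DECODER states, with needShutdown reporting whether the decoder must also be torn down afterwards) could look like this; the body is inferred from usage, not quoted from the patch.

    // Sketch only: inferred from the call sites in onMessageReceived() above.
    #include <cstddef>

    enum FlushStatus {
        NONE,
        AWAITING_DISCONTINUITY,
        FLUSHING_DECODER,
        FLUSHING_DECODER_SHUTDOWN,
        SHUTTING_DOWN_DECODER,
        FLUSHED,
        SHUT_DOWN,
    };

    static bool IsFlushingState(FlushStatus state, bool *needShutdown = NULL) {
        switch (state) {
            case FLUSHING_DECODER:
                if (needShutdown != NULL) {
                    *needShutdown = false;
                }
                return true;

            case FLUSHING_DECODER_SHUTDOWN:
                if (needShutdown != NULL) {
                    *needShutdown = true;
                }
                return true;

            default:
                return false;
        }
    }
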
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
new file mode 100644
index 0000000..fb5b001
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NU_PLAYER_H_
+
+#define NU_PLAYER_H_
+
+#include <media/MediaPlayerInterface.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/NativeWindowWrapper.h>
+#include <gui/SurfaceTextureClient.h>
+#include <surfaceflinger/Surface.h>
+
+namespace android {
+
+struct ACodec;
+struct MetaData;
+struct NuPlayerDriver;
+
+struct NuPlayer : public AHandler {
+    NuPlayer();
+
+    void setDriver(const wp<NuPlayerDriver> &driver);
+
+    void setDataSource(const sp<IStreamSource> &source);
+
+    void setDataSource(
+            const char *url, const KeyedVector<String8, String8> *headers);
+
+    void setVideoSurface(const sp<Surface> &surface);
+    void setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
+    void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink);
+    void start();
+
+    void pause();
+    void resume();
+
+    // Will notify the driver through "notifyResetComplete" once finished.
+    void resetAsync();
+
+    // Will notify the driver through "notifySeekComplete" once finished.
+    void seekToAsync(int64_t seekTimeUs);
+
+protected:
+    virtual ~NuPlayer();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    struct Decoder;
+    struct HTTPLiveSource;
+    struct NuPlayerStreamListener;
+    struct Renderer;
+    struct Source;
+    struct StreamingSource;
+
+    enum {
+        kWhatSetDataSource,
+        kWhatSetVideoNativeWindow,
+        kWhatSetAudioSink,
+        kWhatMoreDataQueued,
+        kWhatStart,
+        kWhatScanSources,
+        kWhatVideoNotify,
+        kWhatAudioNotify,
+        kWhatRendererNotify,
+        kWhatReset,
+        kWhatSeek,
+        kWhatPause,
+        kWhatResume,
+    };
+
+    wp<NuPlayerDriver> mDriver;
+    sp<Source> mSource;
+    sp<NativeWindowWrapper> mNativeWindow;
+    sp<MediaPlayerBase::AudioSink> mAudioSink;
+    sp<Decoder> mVideoDecoder;
+    sp<Decoder> mAudioDecoder;
+    sp<Renderer> mRenderer;
+
+    bool mAudioEOS;
+    bool mVideoEOS;
+
+    bool mScanSourcesPending;
+    int32_t mScanSourcesGeneration;
+
+    enum FlushStatus {
+        NONE,
+        AWAITING_DISCONTINUITY,
+        FLUSHING_DECODER,
+        FLUSHING_DECODER_SHUTDOWN,
+        SHUTTING_DOWN_DECODER,
+        FLUSHED,
+        SHUT_DOWN,
+    };
+
+    FlushStatus mFlushingAudio;
+    FlushStatus mFlushingVideo;
+    bool mResetInProgress;
+    bool mResetPostponed;
+
+    int64_t mSkipRenderingAudioUntilMediaTimeUs;
+    int64_t mSkipRenderingVideoUntilMediaTimeUs;
+
+    status_t instantiateDecoder(bool audio, sp<Decoder> *decoder);
+
+    status_t feedDecoderInputData(bool audio, const sp<AMessage> &msg);
+    void renderBuffer(bool audio, const sp<AMessage> &msg);
+
+    void notifyListener(int msg, int ext1, int ext2);
+
+    void finishFlushIfPossible();
+
+    void flushDecoder(bool audio, bool needShutdown);
+
+    static bool IsFlushingState(FlushStatus state, bool *needShutdown = NULL);
+
+    void finishReset();
+    void postScanSources();
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayer);
+};
+
+}  // namespace android
+
+#endif  // NU_PLAYER_H_
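
All of the kWhat constants in this header follow the same AHandler/AMessage pattern: a state change is requested by posting a message to the handler's id() and is applied on the looper thread inside onMessageReceived(). A minimal self-contained handler, using only the foundation calls already seen in this patch (AMessage setInt32/findInt32/post, ALooper::registerHandler), might look roughly like the sketch below; the class and its message are illustrative and not part of the change.

    //#define LOG_NDEBUG 0
    #define LOG_TAG "HandlerSketch"
    #include <utils/Log.h>

    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>

    namespace android {

    struct PingHandler : public AHandler {
        enum {
            kWhatPing,
        };

        virtual void onMessageReceived(const sp<AMessage> &msg) {
            switch (msg->what()) {
                case kWhatPing:
                {
                    int32_t count;
                    CHECK(msg->findInt32("count", &count));
                    LOGV("ping #%d delivered on the looper thread", count);
                    break;
                }

                default:
                    TRESPASS();
                    break;
            }
        }
    };

    // Typical wiring: register with a looper, then post to the handler's id().
    static void postPing() {
        sp<ALooper> looper = new ALooper;
        looper->setName("ping");
        looper->start();

        sp<PingHandler> handler = new PingHandler;
        looper->registerHandler(handler);

        sp<AMessage> msg = new AMessage(PingHandler::kWhatPing, handler->id());
        msg->setInt32("count", 1);
        msg->post();
    }

    }  // namespace android
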
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
new file mode 100644
index 0000000..517acc9
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -0,0 +1,298 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerDecoder"
+#include <utils/Log.h>
+
+#include "NuPlayerDecoder.h"
+
+#include "DecoderWrapper.h"
+#include "ESDS.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include <surfaceflinger/Surface.h>
+#include <gui/ISurfaceTexture.h>
+
+namespace android {
+
+NuPlayer::Decoder::Decoder(
+        const sp<AMessage> &notify,
+        const sp<NativeWindowWrapper> &nativeWindow)
+    : mNotify(notify),
+      mNativeWindow(nativeWindow) {
+}
+
+NuPlayer::Decoder::~Decoder() {
+}
+
+void NuPlayer::Decoder::configure(const sp<MetaData> &meta) {
+    CHECK(mCodec == NULL);
+    CHECK(mWrapper == NULL);
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    sp<AMessage> notifyMsg =
+        new AMessage(kWhatCodecNotify, id());
+
+    sp<AMessage> format = makeFormat(meta);
+
+    if (mNativeWindow != NULL) {
+        format->setObject("native-window", mNativeWindow);
+    }
+
+    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
+        mWrapper = new DecoderWrapper;
+        looper()->registerHandler(mWrapper);
+
+        mWrapper->setNotificationMessage(notifyMsg);
+        mWrapper->initiateSetup(format);
+    } else {
+        mCodec = new ACodec;
+        looper()->registerHandler(mCodec);
+
+        mCodec->setNotificationMessage(notifyMsg);
+        mCodec->initiateSetup(format);
+    }
+}
+
+void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatCodecNotify:
+        {
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == ACodec::kWhatFillThisBuffer) {
+                onFillThisBuffer(msg);
+            } else {
+                sp<AMessage> notify = mNotify->dup();
+                notify->setMessage("codec-request", msg);
+                notify->post();
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+sp<AMessage> NuPlayer::Decoder::makeFormat(const sp<MetaData> &meta) {
+    CHECK(mCSD.isEmpty());
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    sp<AMessage> msg = new AMessage;
+    msg->setString("mime", mime);
+
+    if (!strncasecmp("video/", mime, 6)) {
+        int32_t width, height;
+        CHECK(meta->findInt32(kKeyWidth, &width));
+        CHECK(meta->findInt32(kKeyHeight, &height));
+
+        msg->setInt32("width", width);
+        msg->setInt32("height", height);
+    } else {
+        CHECK(!strncasecmp("audio/", mime, 6));
+
+        int32_t numChannels, sampleRate;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+        msg->setInt32("channel-count", numChannels);
+        msg->setInt32("sample-rate", sampleRate);
+    }
+
+    int32_t maxInputSize;
+    if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
+        msg->setInt32("max-input-size", maxInputSize);
+    }
+
+    mCSDIndex = 0;
+
+    uint32_t type;
+    const void *data;
+    size_t size;
+    if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+        // Parse the AVCDecoderConfigurationRecord
+
+        const uint8_t *ptr = (const uint8_t *)data;
+
+        CHECK(size >= 7);
+        CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
+        uint8_t profile = ptr[1];
+        uint8_t level = ptr[3];
+
+        // There is decodable content out there that fails the following
+        // assertion; let's be lenient for now...
+        // CHECK((ptr[4] >> 2) == 0x3f);  // reserved
+
+        size_t lengthSize = 1 + (ptr[4] & 3);
+
+        // The check below is commented out because H264_QVGA_500_NO_AUDIO.3gp
+        // violates it...
+        // CHECK((ptr[5] >> 5) == 7);  // reserved
+
+        size_t numSeqParameterSets = ptr[5] & 31;
+
+        ptr += 6;
+        size -= 6;
+
+        sp<ABuffer> buffer = new ABuffer(1024);
+        buffer->setRange(0, 0);
+
+        for (size_t i = 0; i < numSeqParameterSets; ++i) {
+            CHECK(size >= 2);
+            size_t length = U16_AT(ptr);
+
+            ptr += 2;
+            size -= 2;
+
+            CHECK(size >= length);
+
+            memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+            memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+            buffer->setRange(0, buffer->size() + 4 + length);
+
+            ptr += length;
+            size -= length;
+        }
+
+        buffer->meta()->setInt32("csd", true);
+        mCSD.push(buffer);
+
+        buffer = new ABuffer(1024);
+        buffer->setRange(0, 0);
+
+        CHECK(size >= 1);
+        size_t numPictureParameterSets = *ptr;
+        ++ptr;
+        --size;
+
+        for (size_t i = 0; i < numPictureParameterSets; ++i) {
+            CHECK(size >= 2);
+            size_t length = U16_AT(ptr);
+
+            ptr += 2;
+            size -= 2;
+
+            CHECK(size >= length);
+
+            memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+            memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+            buffer->setRange(0, buffer->size() + 4 + length);
+
+            ptr += length;
+            size -= length;
+        }
+
+        buffer->meta()->setInt32("csd", true);
+        mCSD.push(buffer);
+
+        msg->setObject("csd", buffer);
+    } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
+#if 0
+        ESDS esds((const char *)data, size);
+        CHECK_EQ(esds.InitCheck(), (status_t)OK);
+
+        const void *codec_specific_data;
+        size_t codec_specific_data_size;
+        esds.getCodecSpecificInfo(
+                &codec_specific_data, &codec_specific_data_size);
+
+        sp<ABuffer> buffer = new ABuffer(codec_specific_data_size);
+
+        memcpy(buffer->data(), codec_specific_data,
+               codec_specific_data_size);
+
+        buffer->meta()->setInt32("csd", true);
+        mCSD.push(buffer);
+#else
+        sp<ABuffer> buffer = new ABuffer(size);
+        memcpy(buffer->data(), data, size);
+
+        msg->setObject("esds", buffer);
+#endif
+    }
+
+    return msg;
+}
+
+void NuPlayer::Decoder::onFillThisBuffer(const sp<AMessage> &msg) {
+    sp<AMessage> reply;
+    CHECK(msg->findMessage("reply", &reply));
+
+#if 0
+    sp<RefBase> obj;
+    CHECK(msg->findObject("buffer", &obj));
+    sp<ABuffer> outBuffer = static_cast<ABuffer *>(obj.get());
+#else
+    sp<ABuffer> outBuffer;
+#endif
+
+    if (mCSDIndex < mCSD.size()) {
+        outBuffer = mCSD.editItemAt(mCSDIndex++);
+        outBuffer->meta()->setInt64("timeUs", 0);
+
+        reply->setObject("buffer", outBuffer);
+        reply->post();
+        return;
+    }
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setMessage("codec-request", msg);
+    notify->post();
+}
+
+void NuPlayer::Decoder::signalFlush() {
+    if (mCodec != NULL) {
+        mCodec->signalFlush();
+    } else {
+        CHECK(mWrapper != NULL);
+        mWrapper->signalFlush();
+    }
+}
+
+void NuPlayer::Decoder::signalResume() {
+    if (mCodec != NULL) {
+        mCodec->signalResume();
+    } else {
+        CHECK(mWrapper != NULL);
+        mWrapper->signalResume();
+    }
+}
+
+void NuPlayer::Decoder::initiateShutdown() {
+    if (mCodec != NULL) {
+        mCodec->initiateShutdown();
+    } else {
+        CHECK(mWrapper != NULL);
+        mWrapper->initiateShutdown();
+    }
+}
+
+}  // namespace android
+
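
makeFormat() above repacks the SPS and PPS sets of an AVCDecoderConfigurationRecord into buffers prefixed with 00 00 00 01 start codes. The same parsing, collapsed into a single output buffer and written against the standard library instead of ABuffer, is sketched below as an illustration of the byte layout; it is not code from the patch, and it returns false on malformed input where the original CHECKs.

    #include <stdint.h>
    #include <stddef.h>
    #include <vector>

    // Repacks the SPS/PPS sets of an "avcC" blob into one Annex-B style buffer
    // (each parameter set preceded by a 00 00 00 01 start code).
    static bool avccToAnnexB(
            const uint8_t *data, size_t size, std::vector<uint8_t> *out) {
        if (size < 7 || data[0] != 1) {  // configurationVersion must be 1
            return false;
        }

        const uint8_t *ptr = data + 5;
        size_t remaining = size - 5;

        // Pass 0 reads the sequence parameter sets, pass 1 the picture
        // parameter sets; both share the count + (u16 length, payload) layout.
        for (int pass = 0; pass < 2; ++pass) {
            if (remaining < 1) {
                return false;
            }
            size_t count = (pass == 0) ? (ptr[0] & 31) : ptr[0];
            ++ptr;
            --remaining;

            for (size_t i = 0; i < count; ++i) {
                if (remaining < 2) {
                    return false;
                }
                size_t length = (size_t(ptr[0]) << 8) | ptr[1];  // U16_AT
                ptr += 2;
                remaining -= 2;

                if (remaining < length) {
                    return false;
                }

                static const uint8_t kStartCode[4] = { 0, 0, 0, 1 };
                out->insert(out->end(), kStartCode, kStartCode + 4);
                out->insert(out->end(), ptr, ptr + length);

                ptr += length;
                remaining -= length;
            }
        }

        return true;
    }

The profile, level, and lengthSize fields that makeFormat() also reads are not needed for this repacking, so the sketch skips them.
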
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
new file mode 100644
index 0000000..732f090
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_DECODER_H_
+
+#define NUPLAYER_DECODER_H_
+
+#include "NuPlayer.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct DecoderWrapper;
+
+struct NuPlayer::Decoder : public AHandler {
+    Decoder(const sp<AMessage> &notify,
+            const sp<NativeWindowWrapper> &nativeWindow = NULL);
+
+    void configure(const sp<MetaData> &meta);
+
+    void signalFlush();
+    void signalResume();
+    void initiateShutdown();
+
+protected:
+    virtual ~Decoder();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatCodecNotify,
+    };
+
+    sp<AMessage> mNotify;
+    sp<NativeWindowWrapper> mNativeWindow;
+
+    sp<ACodec> mCodec;
+    sp<DecoderWrapper> mWrapper;
+
+    Vector<sp<ABuffer> > mCSD;
+    size_t mCSDIndex;
+
+    sp<AMessage> makeFormat(const sp<MetaData> &meta);
+
+    void onFillThisBuffer(const sp<AMessage> &msg);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Decoder);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER_DECODER_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
new file mode 100644
index 0000000..0eca958
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -0,0 +1,275 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerDriver"
+#include <utils/Log.h>
+
+#include "NuPlayerDriver.h"
+
+#include "NuPlayer.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+
+namespace android {
+
+NuPlayerDriver::NuPlayerDriver()
+    : mResetInProgress(false),
+      mDurationUs(-1),
+      mPositionUs(-1),
+      mLooper(new ALooper),
+      mState(UNINITIALIZED),
+      mStartupSeekTimeUs(-1) {
+    mLooper->setName("NuPlayerDriver Looper");
+
+    mLooper->start(
+            false, /* runOnCallingThread */
+            true,  /* canCallJava */
+            PRIORITY_AUDIO);
+
+    mPlayer = new NuPlayer;
+    mLooper->registerHandler(mPlayer);
+
+    mPlayer->setDriver(this);
+}
+
+NuPlayerDriver::~NuPlayerDriver() {
+    mLooper->stop();
+}
+
+status_t NuPlayerDriver::initCheck() {
+    return OK;
+}
+
+status_t NuPlayerDriver::setDataSource(
+        const char *url, const KeyedVector<String8, String8> *headers) {
+    CHECK_EQ((int)mState, (int)UNINITIALIZED);
+
+    mPlayer->setDataSource(url, headers);
+
+    mState = STOPPED;
+
+    return OK;
+}
+
+status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) {
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {
+    CHECK_EQ((int)mState, (int)UNINITIALIZED);
+
+    mPlayer->setDataSource(source);
+
+    mState = STOPPED;
+
+    return OK;
+}
+
+status_t NuPlayerDriver::setVideoSurface(const sp<Surface> &surface) {
+    mPlayer->setVideoSurface(surface);
+
+    return OK;
+}
+
+status_t NuPlayerDriver::setVideoSurfaceTexture(
+        const sp<ISurfaceTexture> &surfaceTexture) {
+    mPlayer->setVideoSurfaceTexture(surfaceTexture);
+
+    return OK;
+}
+
+status_t NuPlayerDriver::prepare() {
+    return OK;
+}
+
+status_t NuPlayerDriver::prepareAsync() {
+    sendEvent(MEDIA_PREPARED);
+
+    return OK;
+}
+
+status_t NuPlayerDriver::start() {
+    switch (mState) {
+        case UNINITIALIZED:
+            return INVALID_OPERATION;
+        case STOPPED:
+        {
+            mPlayer->start();
+
+            if (mStartupSeekTimeUs >= 0) {
+                mPlayer->seekToAsync(mStartupSeekTimeUs);
+                mStartupSeekTimeUs = -1;
+            }
+            break;
+        }
+        case PLAYING:
+            return OK;
+        default:
+        {
+            CHECK_EQ((int)mState, (int)PAUSED);
+
+            mPlayer->resume();
+            break;
+        }
+    }
+
+    mState = PLAYING;
+
+    return OK;
+}
+
+status_t NuPlayerDriver::stop() {
+    return pause();
+}
+
+status_t NuPlayerDriver::pause() {
+    switch (mState) {
+        case UNINITIALIZED:
+            return INVALID_OPERATION;
+        case STOPPED:
+            return OK;
+        case PLAYING:
+            mPlayer->pause();
+            break;
+        default:
+        {
+            CHECK_EQ((int)mState, (int)PAUSED);
+            return OK;
+        }
+    }
+
+    mState = PAUSED;
+
+    return OK;
+}
+
+bool NuPlayerDriver::isPlaying() {
+    return mState == PLAYING;
+}
+
+status_t NuPlayerDriver::seekTo(int msec) {
+    int64_t seekTimeUs = msec * 1000ll;
+
+    switch (mState) {
+        case UNINITIALIZED:
+            return INVALID_OPERATION;
+        case STOPPED:
+        {
+            mStartupSeekTimeUs = seekTimeUs;
+            break;
+        }
+        case PLAYING:
+        case PAUSED:
+        {
+            mPlayer->seekToAsync(seekTimeUs);
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+
+    return OK;
+}
+
+status_t NuPlayerDriver::getCurrentPosition(int *msec) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mPositionUs < 0) {
+        *msec = 0;
+    } else {
+        *msec = (mPositionUs + 500ll) / 1000;
+    }
+
+    return OK;
+}
+
+status_t NuPlayerDriver::getDuration(int *msec) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mDurationUs < 0) {
+        *msec = 0;
+    } else {
+        *msec = (mDurationUs + 500ll) / 1000;
+    }
+
+    return OK;
+}
+
+status_t NuPlayerDriver::reset() {
+    Mutex::Autolock autoLock(mLock);
+    mResetInProgress = true;
+
+    mPlayer->resetAsync();
+
+    while (mResetInProgress) {
+        mCondition.wait(mLock);
+    }
+
+    mDurationUs = -1;
+    mPositionUs = -1;
+    mState = UNINITIALIZED;
+    mStartupSeekTimeUs = -1;
+
+    return OK;
+}
+
+status_t NuPlayerDriver::setLooping(int loop) {
+    return INVALID_OPERATION;
+}
+
+player_type NuPlayerDriver::playerType() {
+    return NU_PLAYER;
+}
+
+status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) {
+    return INVALID_OPERATION;
+}
+
+void NuPlayerDriver::setAudioSink(const sp<AudioSink> &audioSink) {
+    mPlayer->setAudioSink(audioSink);
+}
+
+status_t NuPlayerDriver::getMetadata(
+        const media::Metadata::Filter& ids, Parcel *records) {
+    return INVALID_OPERATION;
+}
+
+void NuPlayerDriver::notifyResetComplete() {
+    Mutex::Autolock autoLock(mLock);
+    CHECK(mResetInProgress);
+    mResetInProgress = false;
+    mCondition.broadcast();
+}
+
+void NuPlayerDriver::notifyDuration(int64_t durationUs) {
+    Mutex::Autolock autoLock(mLock);
+    mDurationUs = durationUs;
+}
+
+void NuPlayerDriver::notifyPosition(int64_t positionUs) {
+    Mutex::Autolock autoLock(mLock);
+    mPositionUs = positionUs;
+}
+
+void NuPlayerDriver::notifySeekComplete() {
+    sendEvent(MEDIA_SEEK_COMPLETE);
+}
+
+}  // namespace android
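
reset() above is the one synchronous call in the driver: it flags the request, asks NuPlayer to reset asynchronously, and then blocks on mCondition until notifyResetComplete() fires from the looper thread. Stripped of the player specifics, the handshake is the standard mutex/condition pattern shown below; the class is illustrative and assumes <utils/threads.h> for the same Mutex and Condition types used above.

    #include <utils/threads.h>

    namespace android {

    // Illustrative only: the blocking request/acknowledge pattern used by
    // NuPlayerDriver::reset() and notifyResetComplete() above.
    struct BlockingRequest {
        Mutex mLock;
        Condition mCondition;
        bool mInProgress;

        BlockingRequest() : mInProgress(false) {}

        // Caller side: mark the request pending, kick off the async work
        // elsewhere, then sleep until the worker acknowledges.
        void waitForCompletion() {
            Mutex::Autolock autoLock(mLock);
            mInProgress = true;
            // ...post the asynchronous request to the worker thread here...
            while (mInProgress) {
                mCondition.wait(mLock);
            }
        }

        // Worker side: clear the flag and wake every waiter.
        void signalCompletion() {
            Mutex::Autolock autoLock(mLock);
            mInProgress = false;
            mCondition.broadcast();
        }
    };

    }  // namespace android

Posting the asynchronous request while still holding mLock is safe here because Condition::wait() releases the lock, so the looper thread can acquire it when it eventually delivers the completion callback.
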
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
new file mode 100644
index 0000000..67d0f3e
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/MediaPlayerInterface.h>
+
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct ALooper;
+struct NuPlayer;
+
+struct NuPlayerDriver : public MediaPlayerInterface {
+    NuPlayerDriver();
+
+    virtual status_t initCheck();
+
+    virtual status_t setDataSource(
+            const char *url, const KeyedVector<String8, String8> *headers);
+
+    virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
+
+    virtual status_t setDataSource(const sp<IStreamSource> &source);
+
+    virtual status_t setVideoSurface(const sp<Surface> &surface);
+    virtual status_t setVideoSurfaceTexture(
+            const sp<ISurfaceTexture> &surfaceTexture);
+    virtual status_t prepare();
+    virtual status_t prepareAsync();
+    virtual status_t start();
+    virtual status_t stop();
+    virtual status_t pause();
+    virtual bool isPlaying();
+    virtual status_t seekTo(int msec);
+    virtual status_t getCurrentPosition(int *msec);
+    virtual status_t getDuration(int *msec);
+    virtual status_t reset();
+    virtual status_t setLooping(int loop);
+    virtual player_type playerType();
+    virtual status_t invoke(const Parcel &request, Parcel *reply);
+    virtual void setAudioSink(const sp<AudioSink> &audioSink);
+
+    virtual status_t getMetadata(
+            const media::Metadata::Filter& ids, Parcel *records);
+
+    void notifyResetComplete();
+    void notifyDuration(int64_t durationUs);
+    void notifyPosition(int64_t positionUs);
+    void notifySeekComplete();
+
+protected:
+    virtual ~NuPlayerDriver();
+
+private:
+    Mutex mLock;
+    Condition mCondition;
+
+    // The following are protected by "mLock"
+    // >>>
+    bool mResetInProgress;
+    int64_t mDurationUs;
+    int64_t mPositionUs;
+    // <<<
+
+    sp<ALooper> mLooper;
+    sp<NuPlayer> mPlayer;
+
+    enum State {
+        UNINITIALIZED,
+        STOPPED,
+        PLAYING,
+        PAUSED
+    };
+
+    State mState;
+
+    int64_t mStartupSeekTimeUs;
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver);
+};
+
+}  // namespace android
+
+
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
new file mode 100644
index 0000000..369a3a8
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -0,0 +1,593 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerRenderer"
+#include <utils/Log.h>
+
+#include "NuPlayerRenderer.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+NuPlayer::Renderer::Renderer(
+        const sp<MediaPlayerBase::AudioSink> &sink,
+        const sp<AMessage> &notify)
+    : mAudioSink(sink),
+      mNotify(notify),
+      mNumFramesWritten(0),
+      mDrainAudioQueuePending(false),
+      mDrainVideoQueuePending(false),
+      mAudioQueueGeneration(0),
+      mVideoQueueGeneration(0),
+      mAnchorTimeMediaUs(-1),
+      mAnchorTimeRealUs(-1),
+      mFlushingAudio(false),
+      mFlushingVideo(false),
+      mHasAudio(false),
+      mHasVideo(false),
+      mSyncQueues(false),
+      mPaused(false) {
+}
+
+NuPlayer::Renderer::~Renderer() {
+}
+
+void NuPlayer::Renderer::queueBuffer(
+        bool audio,
+        const sp<ABuffer> &buffer,
+        const sp<AMessage> &notifyConsumed) {
+    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->setObject("buffer", buffer);
+    msg->setMessage("notifyConsumed", notifyConsumed);
+    msg->post();
+}
+
+void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->setInt32("finalResult", finalResult);
+    msg->post();
+}
+
+void NuPlayer::Renderer::flush(bool audio) {
+    {
+        Mutex::Autolock autoLock(mFlushLock);
+        if (audio) {
+            CHECK(!mFlushingAudio);
+            mFlushingAudio = true;
+        } else {
+            CHECK(!mFlushingVideo);
+            mFlushingVideo = true;
+        }
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatFlush, id());
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->post();
+}
+
+void NuPlayer::Renderer::signalTimeDiscontinuity() {
+    CHECK(mAudioQueue.empty());
+    CHECK(mVideoQueue.empty());
+    mAnchorTimeMediaUs = -1;
+    mAnchorTimeRealUs = -1;
+    mSyncQueues = mHasAudio && mHasVideo;
+}
+
+void NuPlayer::Renderer::pause() {
+    (new AMessage(kWhatPause, id()))->post();
+}
+
+void NuPlayer::Renderer::resume() {
+    (new AMessage(kWhatResume, id()))->post();
+}
+
+void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatDrainAudioQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+            if (generation != mAudioQueueGeneration) {
+                break;
+            }
+
+            mDrainAudioQueuePending = false;
+
+            onDrainAudioQueue();
+
+            postDrainAudioQueue();
+            break;
+        }
+
+        case kWhatDrainVideoQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+            if (generation != mVideoQueueGeneration) {
+                break;
+            }
+
+            mDrainVideoQueuePending = false;
+
+            onDrainVideoQueue();
+
+            postDrainVideoQueue();
+            break;
+        }
+
+        case kWhatQueueBuffer:
+        {
+            onQueueBuffer(msg);
+            break;
+        }
+
+        case kWhatQueueEOS:
+        {
+            onQueueEOS(msg);
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            onFlush(msg);
+            break;
+        }
+
+        case kWhatAudioSinkChanged:
+        {
+            onAudioSinkChanged();
+            break;
+        }
+
+        case kWhatPause:
+        {
+            onPause();
+            break;
+        }
+
+        case kWhatResume:
+        {
+            onResume();
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer::Renderer::postDrainAudioQueue() {
+    if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
+        return;
+    }
+
+    if (mAudioQueue.empty()) {
+        return;
+    }
+
+    mDrainAudioQueuePending = true;
+    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
+    msg->setInt32("generation", mAudioQueueGeneration);
+    msg->post(10000);
+}
+
+void NuPlayer::Renderer::signalAudioSinkChanged() {
+    (new AMessage(kWhatAudioSinkChanged, id()))->post();
+}
+
+void NuPlayer::Renderer::onDrainAudioQueue() {
+    uint32_t numFramesPlayed;
+    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+
+    ssize_t numFramesAvailableToWrite =
+        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
+
+    CHECK_GE(numFramesAvailableToWrite, 0);
+
+    size_t numBytesAvailableToWrite =
+        numFramesAvailableToWrite * mAudioSink->frameSize();
+
+    while (numBytesAvailableToWrite > 0) {
+        if (mAudioQueue.empty()) {
+            break;
+        }
+
+        QueueEntry *entry = &*mAudioQueue.begin();
+
+        if (entry->mBuffer == NULL) {
+            // EOS
+
+            notifyEOS(true /* audio */);
+
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+            return;
+        }
+
+        if (entry->mOffset == 0) {
+            int64_t mediaTimeUs;
+            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+            LOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+
+            mAnchorTimeMediaUs = mediaTimeUs;
+
+            uint32_t numFramesPlayed;
+            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+
+            uint32_t numFramesPendingPlayout =
+                mNumFramesWritten - numFramesPlayed;
+
+            int64_t realTimeOffsetUs =
+                (mAudioSink->latency() / 2  /* XXX */
+                    + numFramesPendingPlayout
+                        * mAudioSink->msecsPerFrame()) * 1000ll;
+
+            // LOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);
+
+            mAnchorTimeRealUs =
+                ALooper::GetNowUs() + realTimeOffsetUs;
+        }
+
+        size_t copy = entry->mBuffer->size() - entry->mOffset;
+        if (copy > numBytesAvailableToWrite) {
+            copy = numBytesAvailableToWrite;
+        }
+
+        CHECK_EQ(mAudioSink->write(
+                    entry->mBuffer->data() + entry->mOffset, copy),
+                 (ssize_t)copy);
+
+        entry->mOffset += copy;
+        if (entry->mOffset == entry->mBuffer->size()) {
+            entry->mNotifyConsumed->post();
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+        }
+
+        numBytesAvailableToWrite -= copy;
+        mNumFramesWritten += copy / mAudioSink->frameSize();
+    }
+
+    notifyPosition();
+}
+
+void NuPlayer::Renderer::postDrainVideoQueue() {
+    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
+        return;
+    }
+
+    if (mVideoQueue.empty()) {
+        return;
+    }
+
+    QueueEntry &entry = *mVideoQueue.begin();
+
+    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
+    msg->setInt32("generation", mVideoQueueGeneration);
+
+    int64_t delayUs;
+
+    if (entry.mBuffer == NULL) {
+        // EOS doesn't carry a timestamp.
+        delayUs = 0;
+    } else {
+        int64_t mediaTimeUs;
+        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+        if (mAnchorTimeMediaUs < 0) {
+            delayUs = 0;
+
+            if (!mHasAudio) {
+                mAnchorTimeMediaUs = mediaTimeUs;
+                mAnchorTimeRealUs = ALooper::GetNowUs();
+            }
+        } else {
+            int64_t realTimeUs =
+                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
+
+            delayUs = realTimeUs - ALooper::GetNowUs();
+        }
+    }
+
+    msg->post(delayUs);
+
+    mDrainVideoQueuePending = true;
+}
+
+void NuPlayer::Renderer::onDrainVideoQueue() {
+    if (mVideoQueue.empty()) {
+        return;
+    }
+
+    QueueEntry *entry = &*mVideoQueue.begin();
+
+    if (entry->mBuffer == NULL) {
+        // EOS
+
+        notifyEOS(false /* audio */);
+
+        mVideoQueue.erase(mVideoQueue.begin());
+        entry = NULL;
+        return;
+    }
+
+#if 0
+    int64_t mediaTimeUs;
+    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+    LOGI("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
+#endif
+
+    entry->mNotifyConsumed->setInt32("render", true);
+    entry->mNotifyConsumed->post();
+    mVideoQueue.erase(mVideoQueue.begin());
+    entry = NULL;
+
+    notifyPosition();
+}
+
+void NuPlayer::Renderer::notifyEOS(bool audio) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatEOS);
+    notify->setInt32("audio", static_cast<int32_t>(audio));
+    notify->post();
+}
+
+void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    if (audio) {
+        mHasAudio = true;
+    } else {
+        mHasVideo = true;
+    }
+
+    if (dropBufferWhileFlushing(audio, msg)) {
+        return;
+    }
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("buffer", &obj));
+    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+    sp<AMessage> notifyConsumed;
+    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
+
+    QueueEntry entry;
+    entry.mBuffer = buffer;
+    entry.mNotifyConsumed = notifyConsumed;
+    entry.mOffset = 0;
+    entry.mFinalResult = OK;
+
+    if (audio) {
+        mAudioQueue.push_back(entry);
+        postDrainAudioQueue();
+    } else {
+        mVideoQueue.push_back(entry);
+        postDrainVideoQueue();
+    }
+
+    if (mSyncQueues && !mAudioQueue.empty() && !mVideoQueue.empty()) {
+        int64_t firstAudioTimeUs;
+        int64_t firstVideoTimeUs;
+        CHECK((*mAudioQueue.begin()).mBuffer->meta()
+                ->findInt64("timeUs", &firstAudioTimeUs));
+        CHECK((*mVideoQueue.begin()).mBuffer->meta()
+                ->findInt64("timeUs", &firstVideoTimeUs));
+
+        int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
+
+        LOGV("queueDiff = %.2f secs", diff / 1E6);
+
+        if (diff > 100000ll) {
+            // Audio data starts more than 0.1 secs before video.
+            // Drop some audio.
+
+            (*mAudioQueue.begin()).mNotifyConsumed->post();
+            mAudioQueue.erase(mAudioQueue.begin());
+            return;
+        }
+
+        syncQueuesDone();
+    }
+}
+
+void NuPlayer::Renderer::syncQueuesDone() {
+    if (!mSyncQueues) {
+        return;
+    }
+
+    mSyncQueues = false;
+
+    if (!mAudioQueue.empty()) {
+        postDrainAudioQueue();
+    }
+
+    if (!mVideoQueue.empty()) {
+        postDrainVideoQueue();
+    }
+}
+
+void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    if (dropBufferWhileFlushing(audio, msg)) {
+        return;
+    }
+
+    int32_t finalResult;
+    CHECK(msg->findInt32("finalResult", &finalResult));
+
+    QueueEntry entry;
+    entry.mOffset = 0;
+    entry.mFinalResult = finalResult;
+
+    if (audio) {
+        mAudioQueue.push_back(entry);
+        postDrainAudioQueue();
+    } else {
+        mVideoQueue.push_back(entry);
+        postDrainVideoQueue();
+    }
+}
+
+void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    // If we're currently syncing the queues, i.e. dropping audio while
+    // aligning the first audio/video buffer times and only one of the
+    // two queues has data, we may starve that queue by not requesting
+    // more buffers from the decoder. If the other source then encounters
+    // a discontinuity that leads to flushing, we'll never find the
+    // corresponding discontinuity on the other queue.
+    // Therefore we'll stop syncing the queues if at least one of them
+    // is flushed.
+    syncQueuesDone();
+
+    if (audio) {
+        flushQueue(&mAudioQueue);
+
+        Mutex::Autolock autoLock(mFlushLock);
+        mFlushingAudio = false;
+
+        mDrainAudioQueuePending = false;
+        ++mAudioQueueGeneration;
+    } else {
+        flushQueue(&mVideoQueue);
+
+        Mutex::Autolock autoLock(mFlushLock);
+        mFlushingVideo = false;
+
+        mDrainVideoQueuePending = false;
+        ++mVideoQueueGeneration;
+    }
+
+    notifyFlushComplete(audio);
+}
+
+void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
+    while (!queue->empty()) {
+        QueueEntry *entry = &*queue->begin();
+
+        if (entry->mBuffer != NULL) {
+            entry->mNotifyConsumed->post();
+        }
+
+        queue->erase(queue->begin());
+        entry = NULL;
+    }
+}
+
+void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFlushComplete);
+    notify->setInt32("audio", static_cast<int32_t>(audio));
+    notify->post();
+}
+
+bool NuPlayer::Renderer::dropBufferWhileFlushing(
+        bool audio, const sp<AMessage> &msg) {
+    bool flushing = false;
+
+    {
+        Mutex::Autolock autoLock(mFlushLock);
+        if (audio) {
+            flushing = mFlushingAudio;
+        } else {
+            flushing = mFlushingVideo;
+        }
+    }
+
+    if (!flushing) {
+        return false;
+    }
+
+    sp<AMessage> notifyConsumed;
+    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
+        notifyConsumed->post();
+    }
+
+    return true;
+}
+
+void NuPlayer::Renderer::onAudioSinkChanged() {
+    CHECK(!mDrainAudioQueuePending);
+    mNumFramesWritten = 0;
+}
+
+void NuPlayer::Renderer::notifyPosition() {
+    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
+        return;
+    }
+
+    int64_t nowUs = ALooper::GetNowUs();
+    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatPosition);
+    notify->setInt64("positionUs", positionUs);
+    notify->post();
+}
+
+void NuPlayer::Renderer::onPause() {
+    CHECK(!mPaused);
+
+    mDrainAudioQueuePending = false;
+    ++mAudioQueueGeneration;
+
+    mDrainVideoQueuePending = false;
+    ++mVideoQueueGeneration;
+
+    if (mHasAudio) {
+        mAudioSink->pause();
+    }
+
+    mPaused = true;
+}
+
+void NuPlayer::Renderer::onResume() {
+    CHECK(mPaused);
+
+    if (mHasAudio) {
+        mAudioSink->start();
+    }
+
+    mPaused = false;
+
+    if (!mAudioQueue.empty()) {
+        postDrainAudioQueue();
+    }
+
+    if (!mVideoQueue.empty()) {
+        postDrainVideoQueue();
+    }
+}
+
+}  // namespace android
+
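
postDrainVideoQueue() above schedules each video frame by mapping its media timestamp onto the wall clock through the anchor pair (mAnchorTimeMediaUs, mAnchorTimeRealUs) that onDrainAudioQueue() keeps updated. A small worked example of that arithmetic, with made-up numbers, is:

    #include <stdio.h>
    #include <stdint.h>

    // realTimeUs = (mediaTimeUs - anchorMediaUs) + anchorRealUs, and the drain
    // message is deferred by realTimeUs - nowUs.
    static int64_t videoDelayUs(
            int64_t mediaTimeUs, int64_t anchorMediaUs,
            int64_t anchorRealUs, int64_t nowUs) {
        int64_t realTimeUs = (mediaTimeUs - anchorMediaUs) + anchorRealUs;
        return realTimeUs - nowUs;
    }

    int main() {
        // Anchor taken 2.0 s into the stream at wall-clock 100.0 s; a frame
        // stamped 2.5 s, evaluated at wall-clock 100.1 s, is posted with a
        // 400 ms (400000 us) delay.
        printf("delayUs = %lld\n",
               (long long)videoDelayUs(2500000LL, 2000000LL,
                                       100000000LL, 100100000LL));
        return 0;
    }
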
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
new file mode 100644
index 0000000..703e971
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_RENDERER_H_
+
+#define NUPLAYER_RENDERER_H_
+
+#include "NuPlayer.h"
+
+namespace android {
+
+struct ABuffer;
+
+struct NuPlayer::Renderer : public AHandler {
+    Renderer(const sp<MediaPlayerBase::AudioSink> &sink,
+             const sp<AMessage> &notify);
+
+    void queueBuffer(
+            bool audio,
+            const sp<ABuffer> &buffer,
+            const sp<AMessage> &notifyConsumed);
+
+    void queueEOS(bool audio, status_t finalResult);
+
+    void flush(bool audio);
+
+    void signalTimeDiscontinuity();
+
+    void signalAudioSinkChanged();
+
+    void pause();
+    void resume();
+
+    enum {
+        kWhatEOS,
+        kWhatFlushComplete,
+        kWhatPosition,
+    };
+
+protected:
+    virtual ~Renderer();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatDrainAudioQueue,
+        kWhatDrainVideoQueue,
+        kWhatQueueBuffer,
+        kWhatQueueEOS,
+        kWhatFlush,
+        kWhatAudioSinkChanged,
+        kWhatPause,
+        kWhatResume,
+    };
+
+    struct QueueEntry {
+        sp<ABuffer> mBuffer;
+        sp<AMessage> mNotifyConsumed;
+        size_t mOffset;
+        status_t mFinalResult;
+    };
+
+    sp<MediaPlayerBase::AudioSink> mAudioSink;
+    sp<AMessage> mNotify;
+    List<QueueEntry> mAudioQueue;
+    List<QueueEntry> mVideoQueue;
+    uint32_t mNumFramesWritten;
+
+    bool mDrainAudioQueuePending;
+    bool mDrainVideoQueuePending;
+    int32_t mAudioQueueGeneration;
+    int32_t mVideoQueueGeneration;
+
+    int64_t mAnchorTimeMediaUs;
+    int64_t mAnchorTimeRealUs;
+
+    Mutex mFlushLock;  // protects the following 2 member vars.
+    bool mFlushingAudio;
+    bool mFlushingVideo;
+
+    bool mHasAudio;
+    bool mHasVideo;
+    bool mSyncQueues;
+
+    bool mPaused;
+
+    void onDrainAudioQueue();
+    void postDrainAudioQueue();
+
+    void onDrainVideoQueue();
+    void postDrainVideoQueue();
+
+    void onQueueBuffer(const sp<AMessage> &msg);
+    void onQueueEOS(const sp<AMessage> &msg);
+    void onFlush(const sp<AMessage> &msg);
+    void onAudioSinkChanged();
+    void onPause();
+    void onResume();
+
+    void notifyEOS(bool audio);
+    void notifyFlushComplete(bool audio);
+    void notifyPosition();
+
+    void flushQueue(List<QueueEntry> *queue);
+    bool dropBufferWhileFlushing(bool audio, const sp<AMessage> &msg);
+    void syncQueuesDone();
+
+    DISALLOW_EVIL_CONSTRUCTORS(Renderer);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER_RENDERER_H_
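
One detail worth calling out in this header is the pair of queue generation counters. Every delayed kWhatDrainAudioQueue/kWhatDrainVideoQueue post carries the generation that was current when it was scheduled, and onMessageReceived() drops the message if the counter has since moved on; that is how onFlush() and onPause() cancel drain work already in flight. The idea in isolation (illustrative, not from the patch):

    #include <stdint.h>

    // A scheduled piece of work records current(); the handler only acts on it
    // if stillValid() agrees; invalidate() cancels everything already posted.
    struct GenerationGate {
        int32_t mGeneration;

        GenerationGate() : mGeneration(0) {}

        int32_t current() const { return mGeneration; }

        bool stillValid(int32_t scheduledGeneration) const {
            return scheduledGeneration == mGeneration;
        }

        void invalidate() { ++mGeneration; }
    };
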
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
new file mode 100644
index 0000000..5e55487
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_SOURCE_H_
+
+#define NUPLAYER_SOURCE_H_
+
+#include "NuPlayer.h"
+
+namespace android {
+
+struct ABuffer;
+
+struct NuPlayer::Source : public RefBase {
+    Source() {}
+
+    virtual void start() = 0;
+
+    // Returns true iff more data was available, false on EOS.
+    virtual bool feedMoreTSData() = 0;
+
+    virtual sp<MetaData> getFormat(bool audio) = 0;
+
+    virtual status_t dequeueAccessUnit(
+            bool audio, sp<ABuffer> *accessUnit) = 0;
+
+    virtual status_t getDuration(int64_t *durationUs) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t seekTo(int64_t seekTimeUs) {
+        return INVALID_OPERATION;
+    }
+
+    virtual bool isSeekable() {
+        return false;
+    }
+
+protected:
+    virtual ~Source() {}
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(Source);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER_SOURCE_H_
+
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp
new file mode 100644
index 0000000..885ebe4
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerStreamListener"
+#include <utils/Log.h>
+
+#include "NuPlayerStreamListener.h"
+
+#include <binder/MemoryDealer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+namespace android {
+
+NuPlayer::NuPlayerStreamListener::NuPlayerStreamListener(
+        const sp<IStreamSource> &source,
+        ALooper::handler_id id)
+    : mSource(source),
+      mTargetID(id),
+      mEOS(false),
+      mSendDataNotification(true) {
+    mSource->setListener(this);
+
+    mMemoryDealer = new MemoryDealer(kNumBuffers * kBufferSize);
+    for (size_t i = 0; i < kNumBuffers; ++i) {
+        sp<IMemory> mem = mMemoryDealer->allocate(kBufferSize);
+        CHECK(mem != NULL);
+
+        mBuffers.push(mem);
+    }
+    mSource->setBuffers(mBuffers);
+}
+
+void NuPlayer::NuPlayerStreamListener::start() {
+    for (size_t i = 0; i < kNumBuffers; ++i) {
+        mSource->onBufferAvailable(i);
+    }
+}
+
+void NuPlayer::NuPlayerStreamListener::queueBuffer(size_t index, size_t size) {
+    QueueEntry entry;
+    entry.mIsCommand = false;
+    entry.mIndex = index;
+    entry.mSize = size;
+    entry.mOffset = 0;
+
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(entry);
+
+    if (mSendDataNotification) {
+        mSendDataNotification = false;
+
+        if (mTargetID != 0) {
+            (new AMessage(kWhatMoreDataQueued, mTargetID))->post();
+        }
+    }
+}
+
+void NuPlayer::NuPlayerStreamListener::issueCommand(
+        Command cmd, bool synchronous, const sp<AMessage> &extra) {
+    CHECK(!synchronous);
+
+    QueueEntry entry;
+    entry.mIsCommand = true;
+    entry.mCommand = cmd;
+    entry.mExtra = extra;
+
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(entry);
+
+    if (mSendDataNotification) {
+        mSendDataNotification = false;
+
+        if (mTargetID != 0) {
+            (new AMessage(kWhatMoreDataQueued, mTargetID))->post();
+        }
+    }
+}
+
+ssize_t NuPlayer::NuPlayerStreamListener::read(
+        void *data, size_t size, sp<AMessage> *extra) {
+    CHECK_GT(size, 0u);
+
+    extra->clear();
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mEOS) {
+        return 0;
+    }
+
+    if (mQueue.empty()) {
+        mSendDataNotification = true;
+
+        return -EWOULDBLOCK;
+    }
+
+    QueueEntry *entry = &*mQueue.begin();
+
+    if (entry->mIsCommand) {
+        switch (entry->mCommand) {
+            case EOS:
+            {
+                mQueue.erase(mQueue.begin());
+                entry = NULL;
+
+                mEOS = true;
+                return 0;
+            }
+
+            case DISCONTINUITY:
+            {
+                *extra = entry->mExtra;
+
+                mQueue.erase(mQueue.begin());
+                entry = NULL;
+
+                return INFO_DISCONTINUITY;
+            }
+
+            default:
+                TRESPASS();
+                break;
+        }
+    }
+
+    size_t copy = entry->mSize;
+    if (copy > size) {
+        copy = size;
+    }
+
+    memcpy(data,
+           (const uint8_t *)mBuffers.editItemAt(entry->mIndex)->pointer()
+            + entry->mOffset,
+           copy);
+
+    entry->mOffset += copy;
+    entry->mSize -= copy;
+
+    if (entry->mSize == 0) {
+        mSource->onBufferAvailable(entry->mIndex);
+        mQueue.erase(mQueue.begin());
+        entry = NULL;
+    }
+
+    return copy;
+}
+
+}  // namespace android
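read() multiplexes data and control through its return value; the sketch below spells out that contract for a hypothetical caller holding the listener in a variable named listener (StreamingSource::feedMoreTSData, further down in this patch, is the real consumer):

    // Illustrative only: interpreting NuPlayerStreamListener::read().
    char packet[188];
    sp<AMessage> extra;
    ssize_t n = listener->read(packet, sizeof(packet), &extra);

    if (n > 0) {
        // n bytes were copied out of one of the shared-memory buffers.
    } else if (n == 0) {
        // The producer issued the EOS command; no more data will arrive.
    } else if (n == INFO_DISCONTINUITY) {
        // A DISCONTINUITY command; "extra" holds whatever payload was attached.
    } else if (n == -EWOULDBLOCK) {
        // Queue empty. mSendDataNotification has been re-armed, so a
        // kWhatMoreDataQueued message is posted to mTargetID (when non-zero)
        // as soon as the producer queues more data, and the caller can retry.
    }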
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h
new file mode 100644
index 0000000..df0935d
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_STREAM_LISTENER_H_
+
+#define NUPLAYER_STREAM_LISTENER_H_
+
+#include "NuPlayer.h"
+
+#include <media/IStreamSource.h>
+
+namespace android {
+
+struct MemoryDealer;
+
+struct NuPlayer::NuPlayerStreamListener : public BnStreamListener {
+    NuPlayerStreamListener(
+            const sp<IStreamSource> &source,
+            ALooper::handler_id targetID);
+
+    virtual void queueBuffer(size_t index, size_t size);
+
+    virtual void issueCommand(
+            Command cmd, bool synchronous, const sp<AMessage> &extra);
+
+    void start();
+    ssize_t read(void *data, size_t size, sp<AMessage> *extra);
+
+private:
+    enum {
+        kNumBuffers = 16,
+        kBufferSize = 188 * 20
+    };
+
+    struct QueueEntry {
+        bool mIsCommand;
+
+        size_t mIndex;
+        size_t mSize;
+        size_t mOffset;
+
+        Command mCommand;
+        sp<AMessage> mExtra;
+    };
+
+    Mutex mLock;
+
+    sp<IStreamSource> mSource;
+    ALooper::handler_id mTargetID;
+    sp<MemoryDealer> mMemoryDealer;
+    Vector<sp<IMemory> > mBuffers;
+    List<QueueEntry> mQueue;
+    bool mEOS;
+    bool mSendDataNotification;
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayerStreamListener);
+};
+
+}  // namespace android
+
+#endif // NUPLAYER_STREAM_LISTENER_H_
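For reference, the constants above fix the shared-memory budget: each buffer holds 20 MPEG-TS packets (188 * 20 = 3,760 bytes), so the MemoryDealer created in the constructor reserves 16 * 3,760 = 60,160 bytes up front.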
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
new file mode 100644
index 0000000..2016282
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -0,0 +1,123 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "StreamingSource"
+#include <utils/Log.h>
+
+#include "StreamingSource.h"
+
+#include "ATSParser.h"
+#include "AnotherPacketSource.h"
+#include "NuPlayerStreamListener.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+NuPlayer::StreamingSource::StreamingSource(const sp<IStreamSource> &source)
+    : mSource(source),
+      mEOS(false) {
+}
+
+NuPlayer::StreamingSource::~StreamingSource() {
+}
+
+void NuPlayer::StreamingSource::start() {
+    mStreamListener = new NuPlayerStreamListener(mSource, 0);
+    mTSParser = new ATSParser;
+
+    mStreamListener->start();
+}
+
+bool NuPlayer::StreamingSource::feedMoreTSData() {
+    if (mEOS) {
+        return false;
+    }
+
+    for (int32_t i = 0; i < 10; ++i) {
+        char buffer[188];
+        sp<AMessage> extra;
+        ssize_t n = mStreamListener->read(buffer, sizeof(buffer), &extra);
+
+        if (n == 0) {
+            LOGI("input data EOS reached.");
+            mTSParser->signalEOS(ERROR_END_OF_STREAM);
+            mEOS = true;
+            break;
+        } else if (n == INFO_DISCONTINUITY) {
+            mTSParser->signalDiscontinuity(
+                    ATSParser::DISCONTINUITY_SEEK, extra);
+        } else if (n < 0) {
+            CHECK_EQ(n, -EWOULDBLOCK);
+            break;
+        } else {
+            if (buffer[0] == 0x00) {
+                // XXX legacy
+                mTSParser->signalDiscontinuity(
+                        buffer[1] == 0x00
+                            ? ATSParser::DISCONTINUITY_SEEK
+                            : ATSParser::DISCONTINUITY_FORMATCHANGE,
+                        extra);
+            } else {
+                mTSParser->feedTSPacket(buffer, sizeof(buffer));
+            }
+        }
+    }
+
+    return true;
+}
+
+sp<MetaData> NuPlayer::StreamingSource::getFormat(bool audio) {
+    ATSParser::SourceType type =
+        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;
+
+    sp<AnotherPacketSource> source =
+        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+status_t NuPlayer::StreamingSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    ATSParser::SourceType type =
+        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;
+
+    sp<AnotherPacketSource> source =
+        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+
+    if (source == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        return finalResult == OK ? -EWOULDBLOCK : finalResult;
+    }
+
+    return source->dequeueAccessUnit(accessUnit);
+}
+
+}  // namespace android
+
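End to end, the pieces above are meant to be driven roughly as follows; this is a simplified, synchronous sketch (the player presumably pumps feedMoreTSData from its message loop rather than a tight loop, and streamSource stands for the binder-supplied IStreamSource):

    // Illustrative wiring, not part of this patch.
    sp<NuPlayer::Source> source = new NuPlayer::StreamingSource(streamSource);
    source->start();                    // creates the stream listener and the ATSParser

    while (source->feedMoreTSData()) {  // returns false once the input signals EOS
        sp<ABuffer> accessUnit;
        if (source->dequeueAccessUnit(false /* video */, &accessUnit) == OK) {
            // hand accessUnit to the video decoder; audio works the same way
        }
    }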
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h
new file mode 100644
index 0000000..7abce84
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STREAMING_SOURCE_H_
+
+#define STREAMING_SOURCE_H_
+
+#include "NuPlayer.h"
+#include "NuPlayerSource.h"
+
+namespace android {
+
+struct ABuffer;
+struct ATSParser;
+
+struct NuPlayer::StreamingSource : public NuPlayer::Source {
+    StreamingSource(const sp<IStreamSource> &source);
+
+    virtual void start();
+
+    // Returns true iff more data was available, false on EOS.
+    virtual bool feedMoreTSData();
+
+    virtual sp<MetaData> getFormat(bool audio);
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+protected:
+    virtual ~StreamingSource();
+
+private:
+    sp<IStreamSource> mSource;
+    bool mEOS;
+    sp<NuPlayerStreamListener> mStreamListener;
+    sp<ATSParser> mTSParser;
+
+    DISALLOW_EVIL_CONSTRUCTORS(StreamingSource);
+};
+
+}  // namespace android
+
+#endif  // STREAMING_SOURCE_H_
diff --git a/media/libstagefright/AACExtractor.cpp b/media/libstagefright/AACExtractor.cpp
new file mode 100644
index 0000000..4203b6e
--- /dev/null
+++ b/media/libstagefright/AACExtractor.cpp
@@ -0,0 +1,323 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AACExtractor"
+#include <utils/Log.h>
+
+#include "include/AACExtractor.h"
+#include "include/avc_utils.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <utils/String8.h>
+
+namespace android {
+
+#define ADTS_HEADER_LENGTH 7
+
+class AACSource : public MediaSource {
+public:
+    AACSource(const sp<DataSource> &source,
+              const sp<MetaData> &meta,
+              const Vector<uint64_t> &offset_vector,
+              int64_t frame_duration_us);
+
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+protected:
+    virtual ~AACSource();
+
+private:
+    static const size_t kMaxFrameSize;
+    sp<DataSource> mDataSource;
+    sp<MetaData> mMeta;
+
+    off64_t mOffset;
+    int64_t mCurrentTimeUs;
+    bool mStarted;
+    MediaBufferGroup *mGroup;
+
+    Vector<uint64_t> mOffsetVector;
+    int64_t mFrameDurationUs;
+
+    AACSource(const AACSource &);
+    AACSource &operator=(const AACSource &);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Returns the sample rate based on the sampling frequency index
+uint32_t get_sample_rate(const uint8_t sf_index)
+{
+    static const uint32_t sample_rates[] =
+    {
+        96000, 88200, 64000, 48000, 44100, 32000,
+        24000, 22050, 16000, 12000, 11025, 8000
+    };
+
+    if (sf_index < sizeof(sample_rates) / sizeof(sample_rates[0])) {
+        return sample_rates[sf_index];
+    }
+
+    return 0;
+}
+
+static size_t getFrameSize(const sp<DataSource> &source, off64_t offset) {
+    size_t frameSize = 0;
+
+    uint8_t syncword[2];
+    if (source->readAt(offset, &syncword, 2) != 2) {
+        return 0;
+    }
+    if ((syncword[0] != 0xff) || ((syncword[1] & 0xf6) != 0xf0)) {
+        return 0;
+    }
+
+    uint8_t protectionAbsent;
+    if (source->readAt(offset + 1, &protectionAbsent, 1) < 1) {
+        return 0;
+    }
+    protectionAbsent &= 0x1;
+
+    uint8_t header[3];
+    if (source->readAt(offset + 3, &header, 3) < 3) {
+        return 0;
+    }
+
+    frameSize = (header[0] & 0x3) << 11 | header[1] << 3 | header[2] >> 5;
+    // frame_length already includes the ADTS header (and the 2-byte CRC when
+    // protectionAbsent is 0), so no further adjustment is needed here.
+    if (frameSize < ADTS_HEADER_LENGTH + (protectionAbsent ? 0 : 2)) {
+        return 0;
+    }
+
+    return frameSize;
+}
+
+AACExtractor::AACExtractor(const sp<DataSource> &source)
+    : mDataSource(source),
+      mInitCheck(NO_INIT),
+      mFrameDurationUs(0) {
+    String8 mimeType;
+    float confidence;
+    if (!SniffAAC(mDataSource, &mimeType, &confidence, NULL)) {
+        return;
+    }
+
+    uint8_t profile, sf_index, channel, header[2];
+    if (mDataSource->readAt(2, &header, 2) < 2) {
+        return;
+    }
+
+    profile = (header[0] >> 6) & 0x3;
+    sf_index = (header[0] >> 2) & 0xf;
+    uint32_t sr = get_sample_rate(sf_index);
+    if (sr == 0) {
+        return;
+    }
+    channel = (header[0] & 0x1) << 2 | (header[1] >> 6);
+
+    mMeta = MakeAACCodecSpecificData(profile, sf_index, channel);
+
+    off64_t offset = 0;
+    off64_t streamSize, numFrames = 0;
+    size_t frameSize = 0;
+    int64_t duration = 0;
+
+    if (mDataSource->getSize(&streamSize) == OK) {
+        while (offset < streamSize) {
+            if ((frameSize = getFrameSize(source, offset)) == 0) {
+                return;
+            }
+
+            mOffsetVector.push(offset);
+
+            offset += frameSize;
+            numFrames++;
+        }
+
+        // Round up and get the duration
+        mFrameDurationUs = (1024 * 1000000ll + (sr - 1)) / sr;
+        duration = numFrames * mFrameDurationUs;
+        mMeta->setInt64(kKeyDuration, duration);
+    }
+
+    mInitCheck = OK;
+}
+
+AACExtractor::~AACExtractor() {
+}
+
+sp<MetaData> AACExtractor::getMetaData() {
+    sp<MetaData> meta = new MetaData;
+
+    if (mInitCheck != OK) {
+        return meta;
+    }
+
+    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC_ADTS);
+
+    return meta;
+}
+
+size_t AACExtractor::countTracks() {
+    return mInitCheck == OK ? 1 : 0;
+}
+
+sp<MediaSource> AACExtractor::getTrack(size_t index) {
+    if (mInitCheck != OK || index != 0) {
+        return NULL;
+    }
+
+    return new AACSource(mDataSource, mMeta, mOffsetVector, mFrameDurationUs);
+}
+
+sp<MetaData> AACExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+    if (mInitCheck != OK || index != 0) {
+        return NULL;
+    }
+
+    return mMeta;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// 8192 = 2^13, 13bit AAC frame size (in bytes)
+const size_t AACSource::kMaxFrameSize = 8192;
+
+AACSource::AACSource(
+        const sp<DataSource> &source, const sp<MetaData> &meta,
+        const Vector<uint64_t> &offset_vector,
+        int64_t frame_duration_us)
+    : mDataSource(source),
+      mMeta(meta),
+      mOffset(0),
+      mCurrentTimeUs(0),
+      mStarted(false),
+      mGroup(NULL),
+      mOffsetVector(offset_vector),
+      mFrameDurationUs(frame_duration_us) {
+}
+
+AACSource::~AACSource() {
+    if (mStarted) {
+        stop();
+    }
+}
+
+status_t AACSource::start(MetaData *params) {
+    CHECK(!mStarted);
+
+    mOffset = 0;
+    mCurrentTimeUs = 0;
+    mGroup = new MediaBufferGroup;
+    mGroup->add_buffer(new MediaBuffer(kMaxFrameSize));
+    mStarted = true;
+
+    return OK;
+}
+
+status_t AACSource::stop() {
+    CHECK(mStarted);
+
+    delete mGroup;
+    mGroup = NULL;
+
+    mStarted = false;
+    return OK;
+}
+
+sp<MetaData> AACSource::getFormat() {
+    return mMeta;
+}
+
+status_t AACSource::read(
+        MediaBuffer **out, const ReadOptions *options) {
+    *out = NULL;
+
+    int64_t seekTimeUs;
+    ReadOptions::SeekMode mode;
+    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+        if (mFrameDurationUs > 0) {
+            int64_t seekFrame = seekTimeUs / mFrameDurationUs;
+            mCurrentTimeUs = seekFrame * mFrameDurationUs;
+
+            mOffset = mOffsetVector.itemAt(seekFrame);
+        }
+    }
+
+    size_t frameSize, frameSizeWithoutHeader;
+    if ((frameSize = getFrameSize(mDataSource, mOffset)) == 0) {
+        return ERROR_END_OF_STREAM;
+    }
+
+    MediaBuffer *buffer;
+    status_t err = mGroup->acquire_buffer(&buffer);
+    if (err != OK) {
+        return err;
+    }
+
+    frameSizeWithoutHeader = frameSize - ADTS_HEADER_LENGTH;
+    if (mDataSource->readAt(mOffset + ADTS_HEADER_LENGTH, buffer->data(),
+                frameSizeWithoutHeader) != (ssize_t)frameSizeWithoutHeader) {
+        buffer->release();
+        buffer = NULL;
+
+        return ERROR_IO;
+    }
+
+    buffer->set_range(0, frameSizeWithoutHeader);
+    buffer->meta_data()->setInt64(kKeyTime, mCurrentTimeUs);
+    buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);
+
+    mOffset += frameSize;
+    mCurrentTimeUs += mFrameDurationUs;
+
+    *out = buffer;
+    return OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+bool SniffAAC(
+        const sp<DataSource> &source, String8 *mimeType, float *confidence,
+        sp<AMessage> *) {
+    uint8_t header[2];
+
+    if (source->readAt(0, &header, 2) != 2) {
+        return false;
+    }
+
+    // ADTS syncword
+    if ((header[0] == 0xff) && ((header[1] & 0xf6) == 0xf0)) {
+        *mimeType = MEDIA_MIMETYPE_AUDIO_AAC_ADTS;
+        *confidence = 0.2;
+        return true;
+    }
+
+    return false;
+}
+
+}  // namespace android
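The bit extraction in getFrameSize() is easiest to check against a concrete header. The bytes below form a hypothetical ADTS header (AAC-LC, 44.1 kHz, stereo, no CRC), not taken from any real stream:

    // ff f1 50 80 1c 7f ...                        hypothetical 7-byte ADTS header
    uint8_t b3 = 0x80, b4 = 0x1c, b5 = 0x7f;        // the bytes read at offset + 3
    size_t frameLength = (b3 & 0x3) << 11 | b4 << 3 | b5 >> 5;   // == 227

    // frame_length counts the whole frame, header included, so AACSource::read()
    // hands the decoder 227 - ADTS_HEADER_LENGTH = 220 payload bytes, while
    // get_sample_rate(4) == 44100 matches the sf_index packed into byte 2 (0x50).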
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
new file mode 100644
index 0000000..d590ab9
--- /dev/null
+++ b/media/libstagefright/ACodec.cpp
@@ -0,0 +1,2307 @@
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ACodec"
+
+#include <media/stagefright/ACodec.h>
+
+#include <binder/MemoryDealer.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/NativeWindowWrapper.h>
+#include <media/stagefright/OMXClient.h>
+
+#include <surfaceflinger/Surface.h>
+#include <gui/SurfaceTextureClient.h>
+
+#include <OMX_Component.h>
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+    params->nSize = sizeof(T);
+    params->nVersion.s.nVersionMajor = 1;
+    params->nVersion.s.nVersionMinor = 0;
+    params->nVersion.s.nRevision = 0;
+    params->nVersion.s.nStep = 0;
+}
+
+struct CodecObserver : public BnOMXObserver {
+    CodecObserver() {}
+
+    void setNotificationMessage(const sp<AMessage> &msg) {
+        mNotify = msg;
+    }
+
+    // from IOMXObserver
+    virtual void onMessage(const omx_message &omx_msg) {
+        sp<AMessage> msg = mNotify->dup();
+
+        msg->setInt32("type", omx_msg.type);
+        msg->setPointer("node", omx_msg.node);
+
+        switch (omx_msg.type) {
+            case omx_message::EVENT:
+            {
+                msg->setInt32("event", omx_msg.u.event_data.event);
+                msg->setInt32("data1", omx_msg.u.event_data.data1);
+                msg->setInt32("data2", omx_msg.u.event_data.data2);
+                break;
+            }
+
+            case omx_message::EMPTY_BUFFER_DONE:
+            {
+                msg->setPointer("buffer", omx_msg.u.buffer_data.buffer);
+                break;
+            }
+
+            case omx_message::FILL_BUFFER_DONE:
+            {
+                msg->setPointer(
+                        "buffer", omx_msg.u.extended_buffer_data.buffer);
+                msg->setInt32(
+                        "range_offset",
+                        omx_msg.u.extended_buffer_data.range_offset);
+                msg->setInt32(
+                        "range_length",
+                        omx_msg.u.extended_buffer_data.range_length);
+                msg->setInt32(
+                        "flags",
+                        omx_msg.u.extended_buffer_data.flags);
+                msg->setInt64(
+                        "timestamp",
+                        omx_msg.u.extended_buffer_data.timestamp);
+                msg->setPointer(
+                        "platform_private",
+                        omx_msg.u.extended_buffer_data.platform_private);
+                msg->setPointer(
+                        "data_ptr",
+                        omx_msg.u.extended_buffer_data.data_ptr);
+                break;
+            }
+
+            default:
+                TRESPASS();
+                break;
+        }
+
+        msg->post();
+    }
+
+protected:
+    virtual ~CodecObserver() {}
+
+private:
+    sp<AMessage> mNotify;
+
+    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
+};
+
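CodecObserver is the bridge from the OMX component's callbacks into ACodec's message loop. A sketch of how it is presumably wired up when the node is allocated, written as if inside one of the state handlers below (omx and componentName are assumed to be in scope; the actual setup lives in the component-allocation path of this file):

    sp<CodecObserver> observer = new CodecObserver;

    IOMX::node_id node = NULL;
    CHECK_EQ(omx->allocateNode(componentName.c_str(), observer, &node),
             (status_t)OK);

    // From here on, every omx_message the component emits is repackaged by
    // CodecObserver::onMessage() into an AMessage and posted to ACodec's looper,
    // where BaseState::onOMXMessage() dispatches it.
    sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id());
    observer->setNotificationMessage(notify);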
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::BaseState : public AState {
+    BaseState(ACodec *codec, const sp<AState> &parentState = NULL);
+
+protected:
+    enum PortMode {
+        KEEP_BUFFERS,
+        RESUBMIT_BUFFERS,
+        FREE_BUFFERS,
+    };
+
+    ACodec *mCodec;
+
+    virtual PortMode getPortMode(OMX_U32 portIndex);
+
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
+    virtual void onInputBufferFilled(const sp<AMessage> &msg);
+
+    void postFillThisBuffer(BufferInfo *info);
+
+private:
+    bool onOMXMessage(const sp<AMessage> &msg);
+
+    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID);
+
+    bool onOMXFillBufferDone(
+            IOMX::buffer_id bufferID,
+            size_t rangeOffset, size_t rangeLength,
+            OMX_U32 flags,
+            int64_t timeUs,
+            void *platformPrivate,
+            void *dataPtr);
+
+    void getMoreInputDataIfPossible();
+
+    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::UninitializedState : public ACodec::BaseState {
+    UninitializedState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    void onSetup(const sp<AMessage> &msg);
+
+    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::LoadedToIdleState : public ACodec::BaseState {
+    LoadedToIdleState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+    virtual void stateEntered();
+
+private:
+    status_t allocateBuffers();
+
+    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::IdleToExecutingState : public ACodec::BaseState {
+    IdleToExecutingState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+    virtual void stateEntered();
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::ExecutingState : public ACodec::BaseState {
+    ExecutingState(ACodec *codec);
+
+    void submitOutputBuffers();
+
+    // Submit output buffers to the decoder, submit input buffers to client
+    // to fill with data.
+    void resume();
+
+    // Returns true iff input and output buffers are in play.
+    bool active() const { return mActive; }
+
+protected:
+    virtual PortMode getPortMode(OMX_U32 portIndex);
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+private:
+    bool mActive;
+
+    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
+    OutputPortSettingsChangedState(ACodec *codec);
+
+protected:
+    virtual PortMode getPortMode(OMX_U32 portIndex);
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
+    ExecutingToIdleState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
+    virtual void onInputBufferFilled(const sp<AMessage> &msg);
+
+private:
+    void changeStateIfWeOwnAllBuffers();
+
+    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::IdleToLoadedState : public ACodec::BaseState {
+    IdleToLoadedState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::ErrorState : public ACodec::BaseState {
+    ErrorState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(ErrorState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::FlushingState : public ACodec::BaseState {
+    FlushingState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
+    virtual void onInputBufferFilled(const sp<AMessage> &msg);
+
+private:
+    bool mFlushComplete[2];
+
+    void changeStateIfWeOwnAllBuffers();
+
+    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::ACodec()
+    : mNode(NULL),
+      mSentFormat(false) {
+    mUninitializedState = new UninitializedState(this);
+    mLoadedToIdleState = new LoadedToIdleState(this);
+    mIdleToExecutingState = new IdleToExecutingState(this);
+    mExecutingState = new ExecutingState(this);
+
+    mOutputPortSettingsChangedState =
+        new OutputPortSettingsChangedState(this);
+
+    mExecutingToIdleState = new ExecutingToIdleState(this);
+    mIdleToLoadedState = new IdleToLoadedState(this);
+    mErrorState = new ErrorState(this);
+    mFlushingState = new FlushingState(this);
+
+    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
+
+    changeState(mUninitializedState);
+}
+
+ACodec::~ACodec() {
+}
+
+void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
+    mNotify = msg;
+}
+
+void ACodec::initiateSetup(const sp<AMessage> &msg) {
+    msg->setWhat(kWhatSetup);
+    msg->setTarget(id());
+    msg->post();
+}
+
+void ACodec::signalFlush() {
+    (new AMessage(kWhatFlush, id()))->post();
+}
+
+void ACodec::signalResume() {
+    (new AMessage(kWhatResume, id()))->post();
+}
+
+void ACodec::initiateShutdown() {
+    (new AMessage(kWhatShutdown, id()))->post();
+}
+
+status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+    CHECK(mDealer[portIndex] == NULL);
+    CHECK(mBuffers[portIndex].isEmpty());
+
+    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+        return allocateOutputBuffersFromNativeWindow();
+    }
+
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    LOGV("[%s] Allocating %lu buffers of size %lu on %s port",
+            mComponentName.c_str(),
+            def.nBufferCountActual, def.nBufferSize,
+            portIndex == kPortIndexInput ? "input" : "output");
+
+    size_t totalSize = def.nBufferCountActual * def.nBufferSize;
+    mDealer[portIndex] = new MemoryDealer(totalSize, "OMXCodec");
+
+    for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
+        sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
+        CHECK(mem.get() != NULL);
+
+        IOMX::buffer_id buffer;
+#if 0
+        err = mOMX->allocateBufferWithBackup(mNode, portIndex, mem, &buffer);
+#else
+        err = mOMX->useBuffer(mNode, portIndex, mem, &buffer);
+#endif
+
+        if (err != OK) {
+            return err;
+        }
+
+        BufferInfo info;
+        info.mBufferID = buffer;
+        info.mStatus = BufferInfo::OWNED_BY_US;
+        info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
+        mBuffers[portIndex].push(info);
+    }
+
+    return OK;
+}
+
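Each BufferInfo created here also carries an ownership state that the rest of the file keeps consistent through CHECKs. A condensed summary of the transitions (the input side is fully visible in the handlers below; the output-side drain path follows the same pattern later in the file):

    // Input port:
    //   OWNED_BY_US        --postFillThisBuffer()-->   OWNED_BY_UPSTREAM
    //   OWNED_BY_UPSTREAM  --onInputBufferFilled()-->  OWNED_BY_US, then
    //                        emptyBuffer()             OWNED_BY_COMPONENT
    //   OWNED_BY_COMPONENT --onOMXEmptyBufferDone()--> OWNED_BY_US
    //
    // Output port, native-window case:
    //   OWNED_BY_US            --cancelBufferToNativeWindow()-->    OWNED_BY_NATIVE_WINDOW
    //   OWNED_BY_NATIVE_WINDOW --dequeueBufferFromNativeWindow()--> OWNED_BY_US
    //   Buffers handed to the renderer are tracked as OWNED_BY_DOWNSTREAM until
    //   the matching kWhatOutputBufferDrained message returns them.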
+status_t ACodec::allocateOutputBuffersFromNativeWindow() {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = native_window_set_buffers_geometry(
+            mNativeWindow.get(),
+            def.format.video.nFrameWidth,
+            def.format.video.nFrameHeight,
+            def.format.video.eColorFormat);
+
+    if (err != 0) {
+        LOGE("native_window_set_buffers_geometry failed: %s (%d)",
+                strerror(-err), -err);
+        return err;
+    }
+
+    // Set up the native window.
+    OMX_U32 usage = 0;
+    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
+    if (err != 0) {
+        LOGW("querying usage flags from OMX IL component failed: %d", err);
+        // XXX: Currently this error is logged, but not fatal.
+        usage = 0;
+    }
+
+    err = native_window_set_usage(
+            mNativeWindow.get(),
+            usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+
+    if (err != 0) {
+        LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+        return err;
+    }
+
+    int minUndequeuedBufs = 0;
+    err = mNativeWindow->query(
+            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+            &minUndequeuedBufs);
+
+    if (err != 0) {
+        LOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
+                strerror(-err), -err);
+        return err;
+    }
+
+    // XXX: Is this the right logic to use?  It's not clear to me what the OMX
+    // buffer counts refer to - how do they account for the renderer holding on
+    // to buffers?
+    if (def.nBufferCountActual < def.nBufferCountMin + minUndequeuedBufs) {
+        OMX_U32 newBufferCount = def.nBufferCountMin + minUndequeuedBufs;
+        def.nBufferCountActual = newBufferCount;
+        err = mOMX->setParameter(
+                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+        if (err != OK) {
+            LOGE("[%s] setting nBufferCountActual to %lu failed: %d",
+                    mComponentName.c_str(), newBufferCount, err);
+            return err;
+        }
+    }
+
+    err = native_window_set_buffer_count(
+            mNativeWindow.get(), def.nBufferCountActual);
+
+    if (err != 0) {
+        LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+                -err);
+        return err;
+    }
+
+    LOGV("[%s] Allocating %lu buffers from a native window of size %lu on "
+         "output port",
+         mComponentName.c_str(), def.nBufferCountActual, def.nBufferSize);
+
+    // Dequeue buffers and send them to OMX
+    for (OMX_U32 i = 0; i < def.nBufferCountActual; i++) {
+        android_native_buffer_t *buf;
+        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+        if (err != 0) {
+            LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+            break;
+        }
+
+        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+        BufferInfo info;
+        info.mStatus = BufferInfo::OWNED_BY_US;
+        info.mData = new ABuffer(0);
+        info.mGraphicBuffer = graphicBuffer;
+        mBuffers[kPortIndexOutput].push(info);
+
+        IOMX::buffer_id bufferId;
+        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+                &bufferId);
+        if (err != 0) {
+            LOGE("registering GraphicBuffer %lu with OMX IL component failed: "
+                 "%d", i, err);
+            break;
+        }
+
+        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;
+
+        LOGV("[%s] Registered graphic buffer with ID %p (pointer = %p)",
+             mComponentName.c_str(),
+             bufferId, graphicBuffer.get());
+    }
+
+    OMX_U32 cancelStart;
+    OMX_U32 cancelEnd;
+
+    if (err != 0) {
+        // If an error occurred while dequeuing we need to cancel any buffers
+        // that were dequeued.
+        cancelStart = 0;
+        cancelEnd = mBuffers[kPortIndexOutput].size();
+    } else {
+        // Return the last two buffers to the native window.
+        cancelStart = def.nBufferCountActual - minUndequeuedBufs;
+        cancelEnd = def.nBufferCountActual;
+    }
+
+    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+        cancelBufferToNativeWindow(info);
+    }
+
+    return err;
+}
+
+status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+    LOGV("[%s] Calling cancelBuffer on buffer %p",
+         mComponentName.c_str(), info->mBufferID);
+
+    int err = mNativeWindow->cancelBuffer(
+        mNativeWindow.get(), info->mGraphicBuffer.get());
+
+    CHECK_EQ(err, 0);
+
+    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
+
+    return OK;
+}
+
+ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
+    android_native_buffer_t *buf;
+    CHECK_EQ(mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf), 0);
+
+    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
+        BufferInfo *info =
+            &mBuffers[kPortIndexOutput].editItemAt(i);
+
+        if (info->mGraphicBuffer->handle == buf->handle) {
+            CHECK_EQ((int)info->mStatus,
+                     (int)BufferInfo::OWNED_BY_NATIVE_WINDOW);
+
+            info->mStatus = BufferInfo::OWNED_BY_US;
+
+            return info;
+        }
+    }
+
+    TRESPASS();
+
+    return NULL;
+}
+
+status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
+    for (size_t i = mBuffers[portIndex].size(); i-- > 0;) {
+        CHECK_EQ((status_t)OK, freeBuffer(portIndex, i));
+    }
+
+    mDealer[portIndex].clear();
+
+    return OK;
+}
+
+status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
+    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
+        BufferInfo *info =
+            &mBuffers[kPortIndexOutput].editItemAt(i);
+
+        if (info->mStatus !=
+                BufferInfo::OWNED_BY_COMPONENT) {
+            // We shouldn't have sent out any buffers to the client at this
+            // point.
+            CHECK_NE((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM);
+
+            CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i));
+        }
+    }
+
+    return OK;
+}
+
+status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
+    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+    CHECK(info->mStatus == BufferInfo::OWNED_BY_US
+            || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);
+
+    if (portIndex == kPortIndexOutput && mNativeWindow != NULL
+            && info->mStatus == BufferInfo::OWNED_BY_US) {
+        CHECK_EQ((status_t)OK, cancelBufferToNativeWindow(info));
+    }
+
+    CHECK_EQ(mOMX->freeBuffer(
+                mNode, portIndex, info->mBufferID),
+             (status_t)OK);
+
+    mBuffers[portIndex].removeAt(i);
+
+    return OK;
+}
+
+ACodec::BufferInfo *ACodec::findBufferByID(
+        uint32_t portIndex, IOMX::buffer_id bufferID,
+        ssize_t *index) {
+    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+        if (info->mBufferID == bufferID) {
+            if (index != NULL) {
+                *index = i;
+            }
+            return info;
+        }
+    }
+
+    TRESPASS();
+
+    return NULL;
+}
+
+void ACodec::setComponentRole(
+        bool isEncoder, const char *mime) {
+    struct MimeToRole {
+        const char *mime;
+        const char *decoderRole;
+        const char *encoderRole;
+    };
+
+    static const MimeToRole kMimeToRole[] = {
+        { MEDIA_MIMETYPE_AUDIO_MPEG,
+            "audio_decoder.mp3", "audio_encoder.mp3" },
+        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
+            "audio_decoder.amrnb", "audio_encoder.amrnb" },
+        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
+            "audio_decoder.amrwb", "audio_encoder.amrwb" },
+        { MEDIA_MIMETYPE_AUDIO_AAC,
+            "audio_decoder.aac", "audio_encoder.aac" },
+        { MEDIA_MIMETYPE_VIDEO_AVC,
+            "video_decoder.avc", "video_encoder.avc" },
+        { MEDIA_MIMETYPE_VIDEO_MPEG4,
+            "video_decoder.mpeg4", "video_encoder.mpeg4" },
+        { MEDIA_MIMETYPE_VIDEO_H263,
+            "video_decoder.h263", "video_encoder.h263" },
+    };
+
+    static const size_t kNumMimeToRole =
+        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
+
+    size_t i;
+    for (i = 0; i < kNumMimeToRole; ++i) {
+        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
+            break;
+        }
+    }
+
+    if (i == kNumMimeToRole) {
+        return;
+    }
+
+    const char *role =
+        isEncoder ? kMimeToRole[i].encoderRole
+                  : kMimeToRole[i].decoderRole;
+
+    if (role != NULL) {
+        OMX_PARAM_COMPONENTROLETYPE roleParams;
+        InitOMXParams(&roleParams);
+
+        strncpy((char *)roleParams.cRole,
+                role, OMX_MAX_STRINGNAME_SIZE - 1);
+
+        roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+
+        status_t err = mOMX->setParameter(
+                mNode, OMX_IndexParamStandardComponentRole,
+                &roleParams, sizeof(roleParams));
+
+        if (err != OK) {
+            LOGW("[%s] Failed to set standard component role '%s'.",
+                 mComponentName.c_str(), role);
+        }
+    }
+}
+
+void ACodec::configureCodec(
+        const char *mime, const sp<AMessage> &msg) {
+    setComponentRole(false /* isEncoder */, mime);
+
+    if (!strncasecmp(mime, "video/", 6)) {
+        int32_t width, height;
+        CHECK(msg->findInt32("width", &width));
+        CHECK(msg->findInt32("height", &height));
+
+        CHECK_EQ(setupVideoDecoder(mime, width, height),
+                 (status_t)OK);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
+        int32_t numChannels, sampleRate;
+        CHECK(msg->findInt32("channel-count", &numChannels));
+        CHECK(msg->findInt32("sample-rate", &sampleRate));
+
+        CHECK_EQ(setupAACDecoder(numChannels, sampleRate), (status_t)OK);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
+    } else {
+        TRESPASS();
+    }
+
+    int32_t maxInputSize;
+    if (msg->findInt32("max-input-size", &maxInputSize)) {
+        CHECK_EQ(setMinBufferSize(kPortIndexInput, (size_t)maxInputSize),
+                 (status_t)OK);
+    } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
+        CHECK_EQ(setMinBufferSize(kPortIndexInput, 8192),  // XXX
+                 (status_t)OK);
+    }
+}
+
+status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (def.nBufferSize >= size) {
+        return OK;
+    }
+
+    def.nBufferSize = size;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    CHECK(def.nBufferSize >= size);
+
+    return OK;
+}
+
+status_t ACodec::setupAACDecoder(int32_t numChannels, int32_t sampleRate) {
+    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
+    InitOMXParams(&profile);
+    profile.nPortIndex = kPortIndexInput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+    if (err != OK) {
+        return err;
+    }
+
+    profile.nChannels = numChannels;
+    profile.nSampleRate = sampleRate;
+    profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4ADTS;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+    return err;
+}
+
+status_t ACodec::setVideoPortFormatType(
+        OMX_U32 portIndex,
+        OMX_VIDEO_CODINGTYPE compressionFormat,
+        OMX_COLOR_FORMATTYPE colorFormat) {
+    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+    InitOMXParams(&format);
+    format.nPortIndex = portIndex;
+    format.nIndex = 0;
+    bool found = false;
+
+    OMX_U32 index = 0;
+    for (;;) {
+        format.nIndex = index;
+        status_t err = mOMX->getParameter(
+                mNode, OMX_IndexParamVideoPortFormat,
+                &format, sizeof(format));
+
+        if (err != OK) {
+            return err;
+        }
+
+        // The following assertion is violated by TI's video decoder.
+        // CHECK_EQ(format.nIndex, index);
+
+        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
+            if (portIndex == kPortIndexInput
+                    && colorFormat == format.eColorFormat) {
+                // eCompressionFormat does not seem right.
+                found = true;
+                break;
+            }
+            if (portIndex == kPortIndexOutput
+                    && compressionFormat == format.eCompressionFormat) {
+                // eColorFormat does not seem right.
+                found = true;
+                break;
+            }
+        }
+
+        if (format.eCompressionFormat == compressionFormat
+            && format.eColorFormat == colorFormat) {
+            found = true;
+            break;
+        }
+
+        ++index;
+    }
+
+    if (!found) {
+        return UNKNOWN_ERROR;
+    }
+
+    status_t err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoPortFormat,
+            &format, sizeof(format));
+
+    return err;
+}
+
+status_t ACodec::setSupportedOutputFormat() {
+    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+    InitOMXParams(&format);
+    format.nPortIndex = kPortIndexOutput;
+    format.nIndex = 0;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoPortFormat,
+            &format, sizeof(format));
+    CHECK_EQ(err, (status_t)OK);
+    CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
+
+    static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
+
+    CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar
+           || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
+           || format.eColorFormat == OMX_COLOR_FormatCbYCrY
+           || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar);
+
+    return mOMX->setParameter(
+            mNode, OMX_IndexParamVideoPortFormat,
+            &format, sizeof(format));
+}
+
+status_t ACodec::setupVideoDecoder(
+        const char *mime, int32_t width, int32_t height) {
+    OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
+    if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
+        compressionFormat = OMX_VIDEO_CodingAVC;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
+        compressionFormat = OMX_VIDEO_CodingMPEG4;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
+        compressionFormat = OMX_VIDEO_CodingH263;
+    } else {
+        TRESPASS();
+    }
+
+    status_t err = setVideoPortFormatType(
+            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = setSupportedOutputFormat();
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = setVideoFormatOnPort(
+            kPortIndexInput, width, height, compressionFormat);
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = setVideoFormatOnPort(
+            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);
+
+    if (err != OK) {
+        return err;
+    }
+
+    return OK;
+}
+
+status_t ACodec::setVideoFormatOnPort(
+        OMX_U32 portIndex,
+        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat) {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    CHECK_EQ(err, (status_t)OK);
+
+    if (portIndex == kPortIndexInput) {
+        // XXX Need a (much) better heuristic to compute input buffer sizes.
+        const size_t X = 64 * 1024;
+        if (def.nBufferSize < X) {
+            def.nBufferSize = X;
+        }
+    }
+
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
+
+    video_def->nFrameWidth = width;
+    video_def->nFrameHeight = height;
+
+    if (portIndex == kPortIndexInput) {
+        video_def->eCompressionFormat = compressionFormat;
+        video_def->eColorFormat = OMX_COLOR_FormatUnused;
+    }
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    return err;
+}
+
+status_t ACodec::initNativeWindow() {
+    if (mNativeWindow != NULL) {
+        return mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+    }
+
+    mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE);
+    return OK;
+}
+
+bool ACodec::allYourBuffersAreBelongToUs(
+        OMX_U32 portIndex) {
+    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+        if (info->mStatus != BufferInfo::OWNED_BY_US
+                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+            LOGV("[%s] Buffer %p on port %ld still has status %d",
+                    mComponentName.c_str(),
+                    info->mBufferID, portIndex, info->mStatus);
+            return false;
+        }
+    }
+
+    return true;
+}
+
+bool ACodec::allYourBuffersAreBelongToUs() {
+    return allYourBuffersAreBelongToUs(kPortIndexInput)
+        && allYourBuffersAreBelongToUs(kPortIndexOutput);
+}
+
+void ACodec::deferMessage(const sp<AMessage> &msg) {
+    mDeferredQueue.push_back(msg);
+}
+
+void ACodec::processDeferredMessages() {
+    List<sp<AMessage> > queue = mDeferredQueue;
+    mDeferredQueue.clear();
+
+    List<sp<AMessage> >::iterator it = queue.begin();
+    while (it != queue.end()) {
+        onMessageReceived(*it++);
+    }
+}
+
+void ACodec::sendFormatChange() {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatOutputFormatChanged);
+
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
+
+    CHECK_EQ(mOMX->getParameter(
+                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)),
+             (status_t)OK);
+
+    CHECK_EQ((int)def.eDir, (int)OMX_DirOutput);
+
+    switch (def.eDomain) {
+        case OMX_PortDomainVideo:
+        {
+            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
+
+            notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
+            notify->setInt32("width", videoDef->nFrameWidth);
+            notify->setInt32("height", videoDef->nFrameHeight);
+
+            OMX_CONFIG_RECTTYPE rect;
+            InitOMXParams(&rect);
+            rect.nPortIndex = kPortIndexOutput;
+
+            if (mOMX->getConfig(
+                        mNode, OMX_IndexConfigCommonOutputCrop,
+                        &rect, sizeof(rect)) != OK) {
+                rect.nLeft = 0;
+                rect.nTop = 0;
+                rect.nWidth = videoDef->nFrameWidth;
+                rect.nHeight = videoDef->nFrameHeight;
+            }
+
+            CHECK_GE(rect.nLeft, 0);
+            CHECK_GE(rect.nTop, 0);
+            CHECK_GE(rect.nWidth, 0u);
+            CHECK_GE(rect.nHeight, 0u);
+            CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
+            CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
+
+            notify->setRect(
+                    "crop",
+                    rect.nLeft,
+                    rect.nTop,
+                    rect.nLeft + rect.nWidth - 1,
+                    rect.nTop + rect.nHeight - 1);
+
+            if (mNativeWindow != NULL) {
+                android_native_rect_t crop;
+                crop.left = rect.nLeft;
+                crop.top = rect.nTop;
+                crop.right = rect.nLeft + rect.nWidth - 1;
+                crop.bottom = rect.nTop + rect.nHeight - 1;
+
+                CHECK_EQ(0, native_window_set_crop(
+                            mNativeWindow.get(), &crop));
+            }
+            break;
+        }
+
+        case OMX_PortDomainAudio:
+        {
+            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
+            CHECK_EQ((int)audioDef->eEncoding, (int)OMX_AUDIO_CodingPCM);
+
+            OMX_AUDIO_PARAM_PCMMODETYPE params;
+            InitOMXParams(&params);
+            params.nPortIndex = kPortIndexOutput;
+
+            CHECK_EQ(mOMX->getParameter(
+                        mNode, OMX_IndexParamAudioPcm,
+                        &params, sizeof(params)),
+                     (status_t)OK);
+
+            CHECK(params.nChannels == 1 || params.bInterleaved);
+            CHECK_EQ(params.nBitPerSample, 16u);
+            CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
+            CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
+
+            notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
+            notify->setInt32("channel-count", params.nChannels);
+            notify->setInt32("sample-rate", params.nSamplingRate);
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+
+    notify->post();
+
+    mSentFormat = true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
+    : AState(parentState),
+      mCodec(codec) {
+}
+
+ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(OMX_U32 portIndex) {
+    return KEEP_BUFFERS;
+}
+
+bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatInputBufferFilled:
+        {
+            onInputBufferFilled(msg);
+            break;
+        }
+
+        case kWhatOutputBufferDrained:
+        {
+            onOutputBufferDrained(msg);
+            break;
+        }
+
+        case ACodec::kWhatOMXMessage:
+        {
+            return onOMXMessage(msg);
+        }
+
+        default:
+            return false;
+    }
+
+    return true;
+}
+
+bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
+    int32_t type;
+    CHECK(msg->findInt32("type", &type));
+
+    IOMX::node_id nodeID;
+    CHECK(msg->findPointer("node", &nodeID));
+    CHECK_EQ(nodeID, mCodec->mNode);
+
+    switch (type) {
+        case omx_message::EVENT:
+        {
+            int32_t event, data1, data2;
+            CHECK(msg->findInt32("event", &event));
+            CHECK(msg->findInt32("data1", &data1));
+            CHECK(msg->findInt32("data2", &data2));
+
+            return onOMXEvent(
+                    static_cast<OMX_EVENTTYPE>(event),
+                    static_cast<OMX_U32>(data1),
+                    static_cast<OMX_U32>(data2));
+        }
+
+        case omx_message::EMPTY_BUFFER_DONE:
+        {
+            IOMX::buffer_id bufferID;
+            CHECK(msg->findPointer("buffer", &bufferID));
+
+            return onOMXEmptyBufferDone(bufferID);
+        }
+
+        case omx_message::FILL_BUFFER_DONE:
+        {
+            IOMX::buffer_id bufferID;
+            CHECK(msg->findPointer("buffer", &bufferID));
+
+            int32_t rangeOffset, rangeLength, flags;
+            int64_t timeUs;
+            void *platformPrivate;
+            void *dataPtr;
+
+            CHECK(msg->findInt32("range_offset", &rangeOffset));
+            CHECK(msg->findInt32("range_length", &rangeLength));
+            CHECK(msg->findInt32("flags", &flags));
+            CHECK(msg->findInt64("timestamp", &timeUs));
+            CHECK(msg->findPointer("platform_private", &platformPrivate));
+            CHECK(msg->findPointer("data_ptr", &dataPtr));
+
+            return onOMXFillBufferDone(
+                    bufferID,
+                    (size_t)rangeOffset, (size_t)rangeLength,
+                    (OMX_U32)flags,
+                    timeUs,
+                    platformPrivate,
+                    dataPtr);
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+bool ACodec::BaseState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    if (event != OMX_EventError) {
+        LOGV("[%s] EVENT(%d, 0x%08lx, 0x%08lx)",
+             mCodec->mComponentName.c_str(), event, data1, data2);
+
+        return false;
+    }
+
+    LOGE("[%s] ERROR(0x%08lx, 0x%08lx)",
+         mCodec->mComponentName.c_str(), data1, data2);
+
+    mCodec->changeState(mCodec->mErrorState);
+
+    return true;
+}
+
+bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) {
+    LOGV("[%s] onOMXEmptyBufferDone %p",
+         mCodec->mComponentName.c_str(), bufferID);
+
+    BufferInfo *info =
+        mCodec->findBufferByID(kPortIndexInput, bufferID);
+
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
+    info->mStatus = BufferInfo::OWNED_BY_US;
+
+    PortMode mode = getPortMode(kPortIndexInput);
+
+    switch (mode) {
+        case KEEP_BUFFERS:
+            break;
+
+        case RESUBMIT_BUFFERS:
+            postFillThisBuffer(info);
+            break;
+
+        default:
+        {
+            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+            TRESPASS();  // Not currently used
+            break;
+        }
+    }
+
+    return true;
+}
+
+void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
+    if (mCodec->mPortEOS[kPortIndexInput]) {
+        return;
+    }
+
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+    sp<AMessage> notify = mCodec->mNotify->dup();
+    notify->setInt32("what", ACodec::kWhatFillThisBuffer);
+    notify->setPointer("buffer-id", info->mBufferID);
+
+    info->mData->meta()->clear();
+    notify->setObject("buffer", info->mData);
+
+    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id());
+    reply->setPointer("buffer-id", info->mBufferID);
+
+    notify->setMessage("reply", reply);
+
+    notify->post();
+
+    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
+}
+
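+// postFillThisBuffer() above asks the client for input data by posting
+// kWhatFillThisBuffer together with a "reply" message; the client fills the
+// ABuffer and posts that reply, which arrives here as kWhatInputBufferFilled.
+// Roughly (illustrative sketch only, the client side lives elsewhere):
+//
+//     sp<AMessage> reply;
+//     CHECK(notify->findMessage("reply", &reply));
+//     // ...copy the next access unit into the ABuffer, set "timeUs" meta...
+//     reply->setObject("buffer", accessUnit);
+//     reply->post();    // handled below; "err" may be set instead at EOS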
+void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
+    IOMX::buffer_id bufferID;
+    CHECK(msg->findPointer("buffer-id", &bufferID));
+
+    sp<RefBase> obj;
+    int32_t err = OK;
+    if (!msg->findObject("buffer", &obj)) {
+        CHECK(msg->findInt32("err", &err));
+
+        LOGV("[%s] saw error %d instead of an input buffer",
+             mCodec->mComponentName.c_str(), err);
+
+        obj.clear();
+    }
+
+    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM);
+
+    info->mStatus = BufferInfo::OWNED_BY_US;
+
+    PortMode mode = getPortMode(kPortIndexInput);
+
+    switch (mode) {
+        case KEEP_BUFFERS:
+        {
+            if (buffer == NULL) {
+                mCodec->mPortEOS[kPortIndexInput] = true;
+            }
+            break;
+        }
+
+        case RESUBMIT_BUFFERS:
+        {
+            if (buffer != NULL) {
+                CHECK(!mCodec->mPortEOS[kPortIndexInput]);
+
+                int64_t timeUs;
+                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;
+
+                int32_t isCSD;
+                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
+                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
+                }
+
+                if (buffer != info->mData) {
+                    if (0 && !(flags & OMX_BUFFERFLAG_CODECCONFIG)) {
+                        LOGV("[%s] Needs to copy input data.",
+                             mCodec->mComponentName.c_str());
+                    }
+
+                    CHECK_LE(buffer->size(), info->mData->capacity());
+                    memcpy(info->mData->data(), buffer->data(), buffer->size());
+                }
+
+                LOGV("[%s] calling emptyBuffer %p",
+                     mCodec->mComponentName.c_str(), bufferID);
+
+                CHECK_EQ(mCodec->mOMX->emptyBuffer(
+                            mCodec->mNode,
+                            bufferID,
+                            0,
+                            buffer->size(),
+                            flags,
+                            timeUs),
+                         (status_t)OK);
+
+                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+
+                getMoreInputDataIfPossible();
+            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
+                LOGV("[%s] Signalling EOS on the input port",
+                     mCodec->mComponentName.c_str());
+
+                LOGV("[%s] calling emptyBuffer %p",
+                     mCodec->mComponentName.c_str(), bufferID);
+
+                CHECK_EQ(mCodec->mOMX->emptyBuffer(
+                            mCodec->mNode,
+                            bufferID,
+                            0,
+                            0,
+                            OMX_BUFFERFLAG_EOS,
+                            0),
+                         (status_t)OK);
+
+                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+
+                mCodec->mPortEOS[kPortIndexInput] = true;
+            }
+            break;
+        }
+
+        default:
+            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+            break;
+    }
+}
+
+void ACodec::BaseState::getMoreInputDataIfPossible() {
+    if (mCodec->mPortEOS[kPortIndexInput]) {
+        return;
+    }
+
+    BufferInfo *eligible = NULL;
+
+    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
+        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+
+#if 0
+        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
+            // There's already a "read" pending.
+            return;
+        }
+#endif
+
+        if (info->mStatus == BufferInfo::OWNED_BY_US) {
+            eligible = info;
+        }
+    }
+
+    if (eligible == NULL) {
+        return;
+    }
+
+    postFillThisBuffer(eligible);
+}
+
+bool ACodec::BaseState::onOMXFillBufferDone(
+        IOMX::buffer_id bufferID,
+        size_t rangeOffset, size_t rangeLength,
+        OMX_U32 flags,
+        int64_t timeUs,
+        void *platformPrivate,
+        void *dataPtr) {
+    LOGV("[%s] onOMXFillBufferDone %p",
+         mCodec->mComponentName.c_str(), bufferID);
+
+    ssize_t index;
+    BufferInfo *info =
+        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
+
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
+
+    info->mStatus = BufferInfo::OWNED_BY_US;
+
+    PortMode mode = getPortMode(kPortIndexOutput);
+
+    switch (mode) {
+        case KEEP_BUFFERS:
+            break;
+
+        case RESUBMIT_BUFFERS:
+        {
+            if (rangeLength == 0) {
+                if (!(flags & OMX_BUFFERFLAG_EOS)) {
+                    LOGV("[%s] calling fillBuffer %p",
+                         mCodec->mComponentName.c_str(), info->mBufferID);
+
+                    CHECK_EQ(mCodec->mOMX->fillBuffer(
+                                mCodec->mNode, info->mBufferID),
+                             (status_t)OK);
+
+                    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+                }
+            } else {
+                if (!mCodec->mSentFormat) {
+                    mCodec->sendFormatChange();
+                }
+
+                if (mCodec->mNativeWindow == NULL) {
+                    info->mData->setRange(rangeOffset, rangeLength);
+                }
+
+                info->mData->meta()->setInt64("timeUs", timeUs);
+
+                sp<AMessage> notify = mCodec->mNotify->dup();
+                notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
+                notify->setPointer("buffer-id", info->mBufferID);
+                notify->setObject("buffer", info->mData);
+
+                sp<AMessage> reply =
+                    new AMessage(kWhatOutputBufferDrained, mCodec->id());
+
+                reply->setPointer("buffer-id", info->mBufferID);
+
+                notify->setMessage("reply", reply);
+
+                notify->post();
+
+                info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
+            }
+
+            if (flags & OMX_BUFFERFLAG_EOS) {
+                sp<AMessage> notify = mCodec->mNotify->dup();
+                notify->setInt32("what", ACodec::kWhatEOS);
+                notify->post();
+
+                mCodec->mPortEOS[kPortIndexOutput] = true;
+            }
+            break;
+        }
+
+        default:
+        {
+            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+
+            CHECK_EQ((status_t)OK,
+                     mCodec->freeBuffer(kPortIndexOutput, index));
+            break;
+        }
+    }
+
+    return true;
+}
+
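+// The kWhatDrainThisBuffer notification posted above carries a "reply"
+// message (kWhatOutputBufferDrained) that the client posts once it is done
+// with the output buffer; setting the int32 "render" field on that reply
+// before posting it asks us to queue the buffer to the native window below.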
+void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
+    IOMX::buffer_id bufferID;
+    CHECK(msg->findPointer("buffer-id", &bufferID));
+
+    ssize_t index;
+    BufferInfo *info =
+        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM);
+
+    int32_t render;
+    if (mCodec->mNativeWindow != NULL
+            && msg->findInt32("render", &render) && render != 0) {
+        // The client wants this buffer to be rendered.
+
+        CHECK_EQ(mCodec->mNativeWindow->queueBuffer(
+                    mCodec->mNativeWindow.get(),
+                    info->mGraphicBuffer.get()),
+                 0);
+
+        info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
+    } else {
+        info->mStatus = BufferInfo::OWNED_BY_US;
+    }
+
+    PortMode mode = getPortMode(kPortIndexOutput);
+
+    switch (mode) {
+        case KEEP_BUFFERS:
+        {
+            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?
+
+            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+                // We cannot resubmit the buffer we just rendered, dequeue
+                // the spare instead.
+
+                info = mCodec->dequeueBufferFromNativeWindow();
+            }
+            break;
+        }
+
+        case RESUBMIT_BUFFERS:
+        {
+            if (!mCodec->mPortEOS[kPortIndexOutput]) {
+                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+                    // We cannot resubmit the buffer we just rendered, dequeue
+                    // the spare instead.
+
+                    info = mCodec->dequeueBufferFromNativeWindow();
+                }
+
+                LOGV("[%s] calling fillBuffer %p",
+                     mCodec->mComponentName.c_str(), info->mBufferID);
+
+                CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
+                         (status_t)OK);
+
+                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+            }
+            break;
+        }
+
+        default:
+        {
+            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+
+            CHECK_EQ((status_t)OK,
+                     mCodec->freeBuffer(kPortIndexOutput, index));
+            break;
+        }
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::UninitializedState::UninitializedState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case ACodec::kWhatSetup:
+        {
+            onSetup(msg);
+
+            handled = true;
+            break;
+        }
+
+        case ACodec::kWhatShutdown:
+        {
+            sp<AMessage> notify = mCodec->mNotify->dup();
+            notify->setInt32("what", ACodec::kWhatShutdownCompleted);
+            notify->post();
+
+            handled = true;
+            break;
+        }
+
+        case ACodec::kWhatFlush:
+        {
+            sp<AMessage> notify = mCodec->mNotify->dup();
+            notify->setInt32("what", ACodec::kWhatFlushCompleted);
+            notify->post();
+
+            handled = true;
+            break;
+        }
+
+        default:
+            return BaseState::onMessageReceived(msg);
+    }
+
+    return handled;
+}
+
+void ACodec::UninitializedState::onSetup(
+        const sp<AMessage> &msg) {
+    OMXClient client;
+    CHECK_EQ(client.connect(), (status_t)OK);
+
+    sp<IOMX> omx = client.interface();
+
+    AString mime;
+    CHECK(msg->findString("mime", &mime));
+
+    AString componentName;
+
+    if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
+        componentName = "OMX.Nvidia.h264.decode";
+    } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+        componentName = "OMX.Nvidia.aac.decoder";
+    } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_MPEG)) {
+        componentName = "OMX.Nvidia.mp3.decoder";
+    } else {
+        TRESPASS();
+    }
+
+    sp<CodecObserver> observer = new CodecObserver;
+
+    IOMX::node_id node;
+    CHECK_EQ(omx->allocateNode(componentName.c_str(), observer, &node),
+             (status_t)OK);
+
+    sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id());
+    observer->setNotificationMessage(notify);
+
+    mCodec->mComponentName = componentName;
+    mCodec->mOMX = omx;
+    mCodec->mNode = node;
+
+    mCodec->mPortEOS[kPortIndexInput] =
+        mCodec->mPortEOS[kPortIndexOutput] = false;
+
+    mCodec->configureCodec(mime.c_str(), msg);
+
+    sp<RefBase> obj;
+    if (msg->findObject("native-window", &obj)) {
+        sp<NativeWindowWrapper> nativeWindow(
+                static_cast<NativeWindowWrapper *>(obj.get()));
+        CHECK(nativeWindow != NULL);
+        mCodec->mNativeWindow = nativeWindow->getNativeWindow();
+    }
+
+    CHECK_EQ((status_t)OK, mCodec->initNativeWindow());
+
+    CHECK_EQ(omx->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle),
+             (status_t)OK);
+
+    mCodec->changeState(mCodec->mLoadedToIdleState);
+}
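+
+// From here the startup sequence is driven by OMX callbacks:
+// UninitializedState -> LoadedToIdleState (buffers are allocated on both
+// ports, wait for OMX_StateIdle) -> IdleToExecutingState (wait for
+// OMX_StateExecuting) -> ExecutingState, whose resume() submits the output
+// buffers and requests the first input buffer from the client.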
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+void ACodec::LoadedToIdleState::stateEntered() {
+    LOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());
+
+    CHECK_EQ(allocateBuffers(), (status_t)OK);
+}
+
+status_t ACodec::LoadedToIdleState::allocateBuffers() {
+    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);
+
+    if (err != OK) {
+        return err;
+    }
+
+    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
+}
+
+bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            mCodec->deferMessage(msg);
+            return true;
+        }
+
+        default:
+            return BaseState::onMessageReceived(msg);
+    }
+}
+
+bool ACodec::LoadedToIdleState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
+            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);
+
+            CHECK_EQ(mCodec->mOMX->sendCommand(
+                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting),
+                     (status_t)OK);
+
+            mCodec->changeState(mCodec->mIdleToExecutingState);
+
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+void ACodec::IdleToExecutingState::stateEntered() {
+    LOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
+}
+
+bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            mCodec->deferMessage(msg);
+            return true;
+        }
+
+        default:
+            return BaseState::onMessageReceived(msg);
+    }
+}
+
+bool ACodec::IdleToExecutingState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
+            CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting);
+
+            mCodec->mExecutingState->resume();
+            mCodec->changeState(mCodec->mExecutingState);
+
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::ExecutingState::ExecutingState(ACodec *codec)
+    : BaseState(codec),
+      mActive(false) {
+}
+
+ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
+        OMX_U32 portIndex) {
+    return RESUBMIT_BUFFERS;
+}
+
+void ACodec::ExecutingState::submitOutputBuffers() {
+    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
+        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);
+
+        if (mCodec->mNativeWindow != NULL) {
+            CHECK(info->mStatus == BufferInfo::OWNED_BY_US
+                    || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);
+
+            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+                continue;
+            }
+
+            status_t err = mCodec->mNativeWindow->lockBuffer(
+                    mCodec->mNativeWindow.get(),
+                    info->mGraphicBuffer.get());
+            CHECK_EQ(err, (status_t)OK);
+        } else {
+            CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+        }
+
+        LOGV("[%s] calling fillBuffer %p",
+             mCodec->mComponentName.c_str(), info->mBufferID);
+
+        CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
+                 (status_t)OK);
+
+        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+    }
+}
+
+void ACodec::ExecutingState::resume() {
+    if (mActive) {
+        LOGV("[%s] We're already active, no need to resume.",
+             mCodec->mComponentName.c_str());
+
+        return;
+    }
+
+    submitOutputBuffers();
+
+    // Post the first input buffer.
+    CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u);
+    BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(0);
+
+    postFillThisBuffer(info);
+
+    mActive = true;
+}
+
+void ACodec::ExecutingState::stateEntered() {
+    LOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
+
+    mCodec->processDeferredMessages();
+}
+
+bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            mActive = false;
+
+            CHECK_EQ(mCodec->mOMX->sendCommand(
+                        mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle),
+                     (status_t)OK);
+
+            mCodec->changeState(mCodec->mExecutingToIdleState);
+
+            handled = true;
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            mActive = false;
+
+            CHECK_EQ(mCodec->mOMX->sendCommand(
+                        mCodec->mNode, OMX_CommandFlush, OMX_ALL),
+                     (status_t)OK);
+
+            mCodec->changeState(mCodec->mFlushingState);
+
+            handled = true;
+            break;
+        }
+
+        case kWhatResume:
+        {
+            resume();
+
+            handled = true;
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+bool ACodec::ExecutingState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventPortSettingsChanged:
+        {
+            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
+
+            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+                CHECK_EQ(mCodec->mOMX->sendCommand(
+                            mCodec->mNode,
+                            OMX_CommandPortDisable, kPortIndexOutput),
+                         (status_t)OK);
+
+                mCodec->freeOutputBuffersNotOwnedByComponent();
+
+                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
+            } else if (data2 == OMX_IndexConfigCommonOutputCrop) {
+                mCodec->mSentFormat = false;
+            } else {
+                LOGV("[%s] OMX_EventPortSettingsChanged 0x%08lx",
+                     mCodec->mComponentName.c_str(), data2);
+            }
+
+            return true;
+        }
+
+        case OMX_EventBufferFlag:
+        {
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
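+// Entered from ExecutingState when the component reports
+// OMX_EventPortSettingsChanged on its output port: by then the port has been
+// sent OMX_CommandPortDisable and the output buffers we still owned have been
+// freed. Once the disable completes the buffers are reallocated, the port is
+// re-enabled and we return to ExecutingState; flush/shutdown/resume requests
+// are deferred until then.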
+ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
+        ACodec *codec)
+    : BaseState(codec) {
+}
+
+ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
+        OMX_U32 portIndex) {
+    if (portIndex == kPortIndexOutput) {
+        return FREE_BUFFERS;
+    }
+
+    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);
+
+    return RESUBMIT_BUFFERS;
+}
+
+bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
+        const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatFlush:
+        case kWhatShutdown:
+        case kWhatResume:
+        {
+            if (msg->what() == kWhatResume) {
+                LOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
+            }
+
+            mCodec->deferMessage(msg);
+            handled = true;
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+void ACodec::OutputPortSettingsChangedState::stateEntered() {
+    LOGV("[%s] Now handling output port settings change",
+         mCodec->mComponentName.c_str());
+}
+
+bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
+                CHECK_EQ(data2, (OMX_U32)kPortIndexOutput);
+
+                LOGV("[%s] Output port now disabled.",
+                        mCodec->mComponentName.c_str());
+
+                CHECK(mCodec->mBuffers[kPortIndexOutput].isEmpty());
+                mCodec->mDealer[kPortIndexOutput].clear();
+
+                CHECK_EQ(mCodec->mOMX->sendCommand(
+                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput),
+                         (status_t)OK);
+
+                CHECK_EQ(mCodec->allocateBuffersOnPort(kPortIndexOutput),
+                         (status_t)OK);
+
+                return true;
+            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
+                CHECK_EQ(data2, (OMX_U32)kPortIndexOutput);
+
+                mCodec->mSentFormat = false;
+
+                LOGV("[%s] Output port now reenabled.",
+                        mCodec->mComponentName.c_str());
+
+                if (mCodec->mExecutingState->active()) {
+                    mCodec->mExecutingState->submitOutputBuffers();
+                }
+
+                mCodec->changeState(mCodec->mExecutingState);
+
+                return true;
+            }
+
+            return false;
+        }
+
+        default:
+            return false;
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
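+// Shutdown mirrors startup in reverse: ExecutingState requests OMX_StateIdle
+// and enters this state, which waits until the Idle transition has completed
+// and every buffer is back in our possession before freeing all buffers and
+// requesting OMX_StateLoaded; IdleToLoadedState then frees the OMX node,
+// returns to UninitializedState and posts kWhatShutdownCompleted.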
+ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatFlush:
+        {
+            // Don't send me a flush request if you previously wanted me
+            // to shut down.
+            TRESPASS();
+            break;
+        }
+
+        case kWhatShutdown:
+        {
+            // We're already doing that...
+
+            handled = true;
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+void ACodec::ExecutingToIdleState::stateEntered() {
+    LOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());
+
+    mCodec->mSentFormat = false;
+}
+
+bool ACodec::ExecutingToIdleState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
+            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);
+
+            changeStateIfWeOwnAllBuffers();
+
+            return true;
+        }
+
+        case OMX_EventPortSettingsChanged:
+        case OMX_EventBufferFlag:
+        {
+            // We're shutting down and don't care about this anymore.
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
+    if (mCodec->allYourBuffersAreBelongToUs()) {
+        CHECK_EQ(mCodec->mOMX->sendCommand(
+                    mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded),
+                 (status_t)OK);
+
+        CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK);
+        CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK);
+
+        mCodec->changeState(mCodec->mIdleToLoadedState);
+    }
+}
+
+void ACodec::ExecutingToIdleState::onInputBufferFilled(
+        const sp<AMessage> &msg) {
+    BaseState::onInputBufferFilled(msg);
+
+    changeStateIfWeOwnAllBuffers();
+}
+
+void ACodec::ExecutingToIdleState::onOutputBufferDrained(
+        const sp<AMessage> &msg) {
+    BaseState::onOutputBufferDrained(msg);
+
+    changeStateIfWeOwnAllBuffers();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            // We're already doing that...
+
+            handled = true;
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            // Don't send me a flush request if you previously wanted me
+            // to shut down.
+            TRESPASS();
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+void ACodec::IdleToLoadedState::stateEntered() {
+    LOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
+}
+
+bool ACodec::IdleToLoadedState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
+            CHECK_EQ(data2, (OMX_U32)OMX_StateLoaded);
+
+            LOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());
+
+            CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK);
+
+            mCodec->mNativeWindow.clear();
+            mCodec->mNode = NULL;
+            mCodec->mOMX.clear();
+            mCodec->mComponentName.clear();
+
+            mCodec->changeState(mCodec->mUninitializedState);
+
+            sp<AMessage> notify = mCodec->mNotify->dup();
+            notify->setInt32("what", ACodec::kWhatShutdownCompleted);
+            notify->post();
+
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::ErrorState::ErrorState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+bool ACodec::ErrorState::onMessageReceived(const sp<AMessage> &msg) {
+    return BaseState::onMessageReceived(msg);
+}
+
+void ACodec::ErrorState::stateEntered() {
+    LOGV("[%s] Now in ErrorState", mCodec->mComponentName.c_str());
+}
+
+bool ACodec::ErrorState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    LOGV("EVENT(%d, 0x%08lx, 0x%08lx)", event, data1, data2);
+    return true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::FlushingState::FlushingState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+void ACodec::FlushingState::stateEntered() {
+    LOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());
+
+    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
+}
+
+bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            mCodec->deferMessage(msg);
+            handled = true;
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            // We're already doing this right now.
+            handled = true;
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+bool ACodec::FlushingState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandFlush);
+
+            if (data2 == kPortIndexInput || data2 == kPortIndexOutput) {
+                CHECK(!mFlushComplete[data2]);
+                mFlushComplete[data2] = true;
+            } else {
+                CHECK_EQ(data2, OMX_ALL);
+                CHECK(mFlushComplete[kPortIndexInput]);
+                CHECK(mFlushComplete[kPortIndexOutput]);
+
+                changeStateIfWeOwnAllBuffers();
+            }
+
+            return true;
+        }
+
+        case OMX_EventPortSettingsChanged:
+        {
+            sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec->id());
+            msg->setInt32("type", omx_message::EVENT);
+            msg->setPointer("node", mCodec->mNode);
+            msg->setInt32("event", event);
+            msg->setInt32("data1", data1);
+            msg->setInt32("data2", data2);
+
+            LOGV("[%s] Deferring OMX_EventPortSettingsChanged",
+                 mCodec->mComponentName.c_str());
+
+            mCodec->deferMessage(msg);
+
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+
+    return true;
+}
+
+void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
+    BaseState::onOutputBufferDrained(msg);
+
+    changeStateIfWeOwnAllBuffers();
+}
+
+void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
+    BaseState::onInputBufferFilled(msg);
+
+    changeStateIfWeOwnAllBuffers();
+}
+
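+// A flush is only considered complete once the component has acknowledged
+// OMX_CommandFlush on both ports *and* every buffer is back in our
+// possession; only then is kWhatFlushCompleted posted and ExecutingState
+// re-entered, which replays any messages deferred while flushing.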
+void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
+    if (mFlushComplete[kPortIndexInput]
+            && mFlushComplete[kPortIndexOutput]
+            && mCodec->allYourBuffersAreBelongToUs()) {
+        sp<AMessage> notify = mCodec->mNotify->dup();
+        notify->setInt32("what", ACodec::kWhatFlushCompleted);
+        notify->post();
+
+        mCodec->mPortEOS[kPortIndexInput] =
+            mCodec->mPortEOS[kPortIndexOutput] = false;
+
+        mCodec->changeState(mCodec->mExecutingState);
+    }
+}
+
+}  // namespace android
diff --git a/media/libstagefright/AMRExtractor.cpp b/media/libstagefright/AMRExtractor.cpp
index 1b05528..7eca5e4 100644
--- a/media/libstagefright/AMRExtractor.cpp
+++ b/media/libstagefright/AMRExtractor.cpp
@@ -35,8 +35,9 @@
 public:
     AMRSource(const sp<DataSource> &source,
               const sp<MetaData> &meta,
-              size_t frameSize,
-              bool isWide);
+              bool isWide,
+              const off64_t *offset_table,
+              size_t offset_table_length);
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
@@ -52,14 +53,16 @@
 private:
     sp<DataSource> mDataSource;
     sp<MetaData> mMeta;
-    size_t mFrameSize;
     bool mIsWide;
 
-    off_t mOffset;
+    off64_t mOffset;
     int64_t mCurrentTimeUs;
     bool mStarted;
     MediaBufferGroup *mGroup;
 
+    off64_t mOffsetTable[OFFSET_TABLE_LEN];
+    size_t mOffsetTableLength;
+
     AMRSource(const AMRSource &);
     AMRSource &operator=(const AMRSource &);
 };
@@ -67,13 +70,25 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 static size_t getFrameSize(bool isWide, unsigned FT) {
-    static const size_t kFrameSizeNB[8] = {
-        95, 103, 118, 134, 148, 159, 204, 244
+    static const size_t kFrameSizeNB[16] = {
+        95, 103, 118, 134, 148, 159, 204, 244,
+        39, 43, 38, 37, // SID
+        0, 0, 0, // future use
+        0 // no data
     };
-    static const size_t kFrameSizeWB[9] = {
-        132, 177, 253, 285, 317, 365, 397, 461, 477
+    static const size_t kFrameSizeWB[16] = {
+        132, 177, 253, 285, 317, 365, 397, 461, 477,
+        40, // SID
+        0, 0, 0, 0, // future use
+        0, // speech lost
+        0 // no data
     };
 
+    if (FT > 15 || (isWide && FT > 9 && FT < 14) || (!isWide && FT > 11 && FT < 15)) {
+        LOGE("illegal AMR frame type %d", FT);
+        return 0;
+    }
+
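+    // The tables above hold payload sizes in bits; e.g. AMR-NB FT=7
+    // (the 12.2 kbps mode) is 244 bits, which becomes ceil(244 / 8) + 1 = 32
+    // bytes once the rounding and header byte below are applied.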
     size_t frameSize = isWide ? kFrameSizeWB[FT] : kFrameSizeNB[FT];
 
     // Round up bits to bytes and add 1 for the header byte.
@@ -82,9 +97,26 @@
     return frameSize;
 }
 
+static status_t getFrameSizeByOffset(const sp<DataSource> &source,
+        off64_t offset, bool isWide, size_t *frameSize) {
+    uint8_t header;
+    if (source->readAt(offset, &header, 1) < 1) {
+        return ERROR_IO;
+    }
+
+    unsigned FT = (header >> 3) & 0x0f;
+
+    *frameSize = getFrameSize(isWide, FT);
+    if (*frameSize == 0) {
+        return ERROR_MALFORMED;
+    }
+    return OK;
+}
+
 AMRExtractor::AMRExtractor(const sp<DataSource> &source)
     : mDataSource(source),
-      mInitCheck(NO_INIT) {
+      mInitCheck(NO_INIT),
+      mOffsetTableLength(0) {
     String8 mimeType;
     float confidence;
     if (!SniffAMR(mDataSource, &mimeType, &confidence, NULL)) {
@@ -101,25 +133,29 @@
     mMeta->setInt32(kKeyChannelCount, 1);
     mMeta->setInt32(kKeySampleRate, mIsWide ? 16000 : 8000);
 
-    size_t offset = mIsWide ? 9 : 6;
-    uint8_t header;
-    if (mDataSource->readAt(offset, &header, 1) != 1) {
-        return;
-    }
+    off64_t offset = mIsWide ? 9 : 6;
+    off64_t streamSize;
+    size_t frameSize, numFrames = 0;
+    int64_t duration = 0;
 
-    unsigned FT = (header >> 3) & 0x0f;
-
-    if (FT > 8 || (!mIsWide && FT > 7)) {
-        return;
-    }
-
-    mFrameSize = getFrameSize(mIsWide, FT);
-
-    off_t streamSize;
     if (mDataSource->getSize(&streamSize) == OK) {
-        off_t numFrames = streamSize / mFrameSize;
+        while (offset < streamSize) {
+            if (getFrameSizeByOffset(source, offset, mIsWide, &frameSize) != OK) {
+                return;
+            }
 
-        mMeta->setInt64(kKeyDuration, 20000ll * numFrames);
+            if ((numFrames % 50 == 0) && (numFrames / 50 < OFFSET_TABLE_LEN)) {
+                CHECK_EQ(mOffsetTableLength, numFrames / 50);
+                mOffsetTable[mOffsetTableLength] = offset - (mIsWide ? 9: 6);
+                mOffsetTableLength ++;
+            }
+
+            offset += frameSize;
+            duration += 20000;  // Each frame is 20ms
+            numFrames ++;
+        }
+
+        mMeta->setInt64(kKeyDuration, duration);
     }
 
     mInitCheck = OK;
@@ -149,7 +185,8 @@
         return NULL;
     }
 
-    return new AMRSource(mDataSource, mMeta, mFrameSize, mIsWide);
+    return new AMRSource(mDataSource, mMeta, mIsWide,
+            mOffsetTable, mOffsetTableLength);
 }
 
 sp<MetaData> AMRExtractor::getTrackMetaData(size_t index, uint32_t flags) {
@@ -164,15 +201,18 @@
 
 AMRSource::AMRSource(
         const sp<DataSource> &source, const sp<MetaData> &meta,
-        size_t frameSize, bool isWide)
+        bool isWide, const off64_t *offset_table, size_t offset_table_length)
     : mDataSource(source),
       mMeta(meta),
-      mFrameSize(frameSize),
       mIsWide(isWide),
       mOffset(mIsWide ? 9 : 6),
       mCurrentTimeUs(0),
       mStarted(false),
-      mGroup(NULL) {
+      mGroup(NULL),
+      mOffsetTableLength(offset_table_length) {
+    if (mOffsetTableLength > 0 && mOffsetTableLength <= OFFSET_TABLE_LEN) {
+        memcpy ((char*)mOffsetTable, (char*)offset_table, sizeof(off64_t) * mOffsetTableLength);
+    }
 }
 
 AMRSource::~AMRSource() {
@@ -214,9 +254,25 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+        size_t size;
         int64_t seekFrame = seekTimeUs / 20000ll;  // 20ms per frame.
         mCurrentTimeUs = seekFrame * 20000ll;
-        mOffset = seekFrame * mFrameSize + (mIsWide ? 9 : 6);
+
+        int index = seekFrame / 50;
+        if (index >= mOffsetTableLength) {
+            index = mOffsetTableLength - 1;
+        }
+
+        mOffset = mOffsetTable[index] + (mIsWide ? 9 : 6);
+
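+        // mOffsetTable holds the file offset of every 50th frame (recorded
+        // while scanning the file in the constructor), so a seek jumps to
+        // the nearest preceding entry and walks forward frame by frame from
+        // there. E.g. seeking to 2.5s: seekFrame = 125, index = 2, so start
+        // at the offset of frame 100 and step over the remaining 25 frames.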
+        for (int i = 0; i< seekFrame - index * 50; i++) {
+            status_t err;
+            if ((err = getFrameSizeByOffset(mDataSource, mOffset,
+                            mIsWide, &size)) != OK) {
+                return err;
+            }
+            mOffset += size;
+        }
     }
 
     uint8_t header;
@@ -236,16 +292,11 @@
 
     unsigned FT = (header >> 3) & 0x0f;
 
-    if (FT > 8 || (!mIsWide && FT > 7)) {
-
-        LOGE("illegal AMR frame type %d", FT);
-
+    size_t frameSize = getFrameSize(mIsWide, FT);
+    if (frameSize == 0) {
         return ERROR_MALFORMED;
     }
 
-    size_t frameSize = getFrameSize(mIsWide, FT);
-    CHECK_EQ(frameSize, mFrameSize);
-
     MediaBuffer *buffer;
     status_t err = mGroup->acquire_buffer(&buffer);
     if (err != OK) {
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index c0b1abe..0db3d1d 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -24,20 +24,28 @@
 #include <media/mediarecorder.h>
 #include <sys/prctl.h>
 #include <sys/resource.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
 
 namespace android {
 
 AMRWriter::AMRWriter(const char *filename)
-    : mFile(fopen(filename, "wb")),
-      mInitCheck(mFile != NULL ? OK : NO_INIT),
+    : mFd(-1),
+      mInitCheck(NO_INIT),
       mStarted(false),
       mPaused(false),
       mResumed(false) {
+
+    mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    if (mFd >= 0) {
+        mInitCheck = OK;
+    }
 }
 
 AMRWriter::AMRWriter(int fd)
-    : mFile(fdopen(fd, "wb")),
-      mInitCheck(mFile != NULL ? OK : NO_INIT),
+    : mFd(dup(fd)),
+      mInitCheck(mFd < 0? NO_INIT: OK),
       mStarted(false),
       mPaused(false),
       mResumed(false) {
@@ -48,9 +56,9 @@
         stop();
     }
 
-    if (mFile != NULL) {
-        fclose(mFile);
-        mFile = NULL;
+    if (mFd != -1) {
+        close(mFd);
+        mFd = -1;
     }
 }
 
@@ -90,8 +98,8 @@
     mSource = source;
 
     const char *kHeader = isWide ? "#!AMR-WB\n" : "#!AMR\n";
-    size_t n = strlen(kHeader);
-    if (fwrite(kHeader, 1, n, mFile) != n) {
+    ssize_t n = strlen(kHeader);
+    if (write(mFd, kHeader, n) != n) {
         return ERROR_IO;
     }
 
@@ -240,11 +248,9 @@
             notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
             break;
         }
-        ssize_t n = fwrite(
-                (const uint8_t *)buffer->data() + buffer->range_offset(),
-                1,
-                buffer->range_length(),
-                mFile);
+        ssize_t n = write(mFd,
+                        (const uint8_t *)buffer->data() + buffer->range_offset(),
+                        buffer->range_length());
 
         if (n < (ssize_t)buffer->range_length()) {
             buffer->release();
@@ -266,9 +272,8 @@
         notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
     }
 
-    fflush(mFile);
-    fclose(mFile);
-    mFile = NULL;
+    close(mFd);
+    mFd = -1;
     mReachedEOS = true;
     if (err == ERROR_END_OF_STREAM) {
         return OK;
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index d674547..88069e9 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -4,16 +4,21 @@
 include frameworks/base/media/libstagefright/codecs/common/Config.mk
 
 LOCAL_SRC_FILES:=                         \
+        ACodec.cpp                        \
+        AACExtractor.cpp                  \
         AMRExtractor.cpp                  \
         AMRWriter.cpp                     \
         AudioPlayer.cpp                   \
         AudioSource.cpp                   \
         AwesomePlayer.cpp                 \
         CameraSource.cpp                  \
+        CameraSourceTimeLapse.cpp         \
+        VideoSourceDownSampler.cpp        \
         DataSource.cpp                    \
         DRMExtractor.cpp                  \
         ESDS.cpp                          \
         FileSource.cpp                    \
+        FLACExtractor.cpp                 \
         HTTPStream.cpp                    \
         JPEGSource.cpp                    \
         MP3Extractor.cpp                  \
@@ -25,6 +30,7 @@
         MediaDefs.cpp                     \
         MediaExtractor.cpp                \
         MediaSource.cpp                   \
+        MediaSourceSplitter.cpp           \
         MetaData.cpp                      \
         NuCachedSource2.cpp               \
         NuHTTPDataSource.cpp              \
@@ -41,15 +47,19 @@
         TimeSource.cpp                    \
         TimedEventQueue.cpp               \
         Utils.cpp                         \
+        VBRISeeker.cpp                    \
         WAVExtractor.cpp                  \
+        WVMExtractor.cpp                  \
+        XINGSeeker.cpp                    \
         avc_utils.cpp                     \
-        string.cpp
 
 LOCAL_C_INCLUDES:= \
 	$(JNI_H_INCLUDE) \
         $(TOP)/frameworks/base/include/media/stagefright/openmax \
+        $(TOP)/external/flac/include \
         $(TOP)/external/tremolo \
-        $(TOP)/frameworks/base/media/libstagefright/rtsp
+        $(TOP)/frameworks/base/media/libstagefright/rtsp \
+        $(TOP)/external/openssl/include \
 
 LOCAL_SHARED_LIBRARIES := \
         libbinder         \
@@ -60,10 +70,15 @@
         libsonivox        \
         libvorbisidec     \
         libsurfaceflinger_client \
+        libstagefright_yuv \
         libcamera_client \
-        libdrmframework
+        libdrmframework  \
+        libcrypto        \
+        libssl           \
+        libgui
 
 LOCAL_STATIC_LIBRARIES := \
+        libstagefright_color_conversion \
         libstagefright_aacdec \
         libstagefright_aacenc \
         libstagefright_amrnbdec \
@@ -84,13 +99,13 @@
         libstagefright_rtsp \
         libstagefright_id3 \
         libstagefright_g711dec \
+        libFLAC \
 
 LOCAL_SHARED_LIBRARIES += \
         libstagefright_amrnb_common \
         libstagefright_enc_common \
         libstagefright_avc_common \
         libstagefright_foundation \
-        libstagefright_color_conversion
 
 ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
         LOCAL_LDLIBS += -lpthread -ldl
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index 5ff934d..e7c0299 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -286,7 +286,9 @@
     }
 
     if (mReachedEOS) {
-        return 0;
+        memset(data, 0, size);
+
+        return size;
     }
 
     size_t size_done = 0;
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 29f16d8..bbdec02 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -18,38 +18,54 @@
 #define LOG_TAG "AudioSource"
 #include <utils/Log.h>
 
-#include <media/stagefright/AudioSource.h>
-
 #include <media/AudioRecord.h>
-#include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/AudioSource.h>
+#include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <cutils/properties.h>
 #include <stdlib.h>
 
 namespace android {
 
+static void AudioRecordCallbackFunction(int event, void *user, void *info) {
+    AudioSource *source = (AudioSource *) user;
+    switch (event) {
+        case AudioRecord::EVENT_MORE_DATA: {
+            source->dataCallbackTimestamp(*((AudioRecord::Buffer *) info), systemTime() / 1000);
+            break;
+        }
+        case AudioRecord::EVENT_OVERRUN: {
+            LOGW("AudioRecord reported overrun!");
+            break;
+        }
+        default:
+            // does nothing
+            break;
+    }
+}
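+
+// The callback above runs on the AudioRecord callback thread: every
+// EVENT_MORE_DATA buffer is handed to dataCallbackTimestamp(), which wraps it
+// in a MediaBuffer, queues it on mBuffersReceived and signals
+// mFrameAvailableCondition. read() blocks on that condition and hands the
+// buffers to the encoder; signalBufferReturned() releases them (and wakes
+// stop()) once the encoder is done with them.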
+
 AudioSource::AudioSource(
         int inputSource, uint32_t sampleRate, uint32_t channels)
     : mStarted(false),
-      mCollectStats(false),
+      mSampleRate(sampleRate),
       mPrevSampleTimeUs(0),
-      mTotalLostFrames(0),
-      mPrevLostBytes(0),
-      mGroup(NULL) {
+      mNumFramesReceived(0),
+      mNumClientOwnedBuffers(0) {
 
     LOGV("sampleRate: %d, channels: %d", sampleRate, channels);
     CHECK(channels == 1 || channels == 2);
     uint32_t flags = AudioRecord::RECORD_AGC_ENABLE |
                      AudioRecord::RECORD_NS_ENABLE  |
                      AudioRecord::RECORD_IIR_ENABLE;
-
     mRecord = new AudioRecord(
                 inputSource, sampleRate, AudioSystem::PCM_16_BIT,
                 channels > 1? AudioSystem::CHANNEL_IN_STEREO: AudioSystem::CHANNEL_IN_MONO,
                 4 * kMaxBufferSize / sizeof(int16_t), /* Enable ping-pong buffers */
-                flags);
+                flags,
+                AudioRecordCallbackFunction,
+                this);
 
     mInitCheck = mRecord->initCheck();
 }
@@ -68,6 +84,7 @@
 }
 
 status_t AudioSource::start(MetaData *params) {
+    Mutex::Autolock autoLock(mLock);
     if (mStarted) {
         return UNKNOWN_ERROR;
     }
@@ -76,12 +93,6 @@
         return NO_INIT;
     }
 
-    char value[PROPERTY_VALUE_MAX];
-    if (property_get("media.stagefright.record-stats", value, NULL)
-        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
-        mCollectStats = true;
-    }
-
     mTrackMaxAmplitude = false;
     mMaxAmplitude = 0;
     mInitialReadTimeUs = 0;
@@ -91,18 +102,36 @@
         mStartTimeUs = startTimeUs;
     }
     status_t err = mRecord->start();
-
     if (err == OK) {
-        mGroup = new MediaBufferGroup;
-        mGroup->add_buffer(new MediaBuffer(kMaxBufferSize));
-
         mStarted = true;
+    } else {
+        delete mRecord;
+        mRecord = NULL;
     }
 
+
     return err;
 }
 
+void AudioSource::releaseQueuedFrames_l() {
+    LOGV("releaseQueuedFrames_l");
+    List<MediaBuffer *>::iterator it;
+    while (!mBuffersReceived.empty()) {
+        it = mBuffersReceived.begin();
+        (*it)->release();
+        mBuffersReceived.erase(it);
+    }
+}
+
+void AudioSource::waitOutstandingEncodingFrames_l() {
+    LOGV("waitOutstandingEncodingFrames_l: %lld", mNumClientOwnedBuffers);
+    while (mNumClientOwnedBuffers > 0) {
+        mFrameEncodingCompletionCondition.wait(mLock);
+    }
+}
+
 status_t AudioSource::stop() {
+    Mutex::Autolock autoLock(mLock);
     if (!mStarted) {
         return UNKNOWN_ERROR;
     }
@@ -111,67 +140,29 @@
         return NO_INIT;
     }
 
-    mRecord->stop();
-
-    delete mGroup;
-    mGroup = NULL;
-
     mStarted = false;
-
-    if (mCollectStats) {
-        LOGI("Total lost audio frames: %lld",
-            mTotalLostFrames + (mPrevLostBytes >> 1));
-    }
+    mRecord->stop();
+    waitOutstandingEncodingFrames_l();
+    releaseQueuedFrames_l();
 
     return OK;
 }
 
 sp<MetaData> AudioSource::getFormat() {
+    Mutex::Autolock autoLock(mLock);
     if (mInitCheck != OK) {
         return 0;
     }
 
     sp<MetaData> meta = new MetaData;
     meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
-    meta->setInt32(kKeySampleRate, mRecord->getSampleRate());
+    meta->setInt32(kKeySampleRate, mSampleRate);
     meta->setInt32(kKeyChannelCount, mRecord->channelCount());
     meta->setInt32(kKeyMaxInputSize, kMaxBufferSize);
 
     return meta;
 }
 
-/*
- * Returns -1 if frame skipping request is too long.
- * Returns  0 if there is no need to skip frames.
- * Returns  1 if we need to skip frames.
- */
-static int skipFrame(int64_t timestampUs,
-        const MediaSource::ReadOptions *options) {
-
-    int64_t skipFrameUs;
-    if (!options || !options->getSkipFrame(&skipFrameUs)) {
-        return 0;
-    }
-
-    if (skipFrameUs <= timestampUs) {
-        return 0;
-    }
-
-    // Safe guard against the abuse of the kSkipFrame_Option.
-    if (skipFrameUs - timestampUs >= 1E6) {
-        LOGE("Frame skipping requested is way too long: %lld us",
-            skipFrameUs - timestampUs);
-
-        return -1;
-    }
-
-    LOGV("skipFrame: %lld us > timestamp: %lld us",
-        skipFrameUs, timestampUs);
-
-    return 1;
-
-}
-
 void AudioSource::rampVolume(
         int32_t startFrame, int32_t rampDurationFrames,
         uint8_t *data,   size_t bytes) {
@@ -206,134 +197,131 @@
 
 status_t AudioSource::read(
         MediaBuffer **out, const ReadOptions *options) {
+    Mutex::Autolock autoLock(mLock);
+    *out = NULL;
 
     if (mInitCheck != OK) {
         return NO_INIT;
     }
 
-    int64_t readTimeUs = systemTime() / 1000;
-    *out = NULL;
-
-    MediaBuffer *buffer;
-    CHECK_EQ(mGroup->acquire_buffer(&buffer), OK);
-
-    int err = 0;
-    while (mStarted) {
-
-        uint32_t numFramesRecorded;
-        mRecord->getPosition(&numFramesRecorded);
-
-
-        if (numFramesRecorded == 0 && mPrevSampleTimeUs == 0) {
-            mInitialReadTimeUs = readTimeUs;
-            // Initial delay
-            if (mStartTimeUs > 0) {
-                mStartTimeUs = readTimeUs - mStartTimeUs;
-            } else {
-                // Assume latency is constant.
-                mStartTimeUs += mRecord->latency() * 1000;
-            }
-            mPrevSampleTimeUs = mStartTimeUs;
-        }
-
-        uint32_t sampleRate = mRecord->getSampleRate();
-
-        // Insert null frames when lost frames are detected.
-        int64_t timestampUs = mPrevSampleTimeUs;
-        uint32_t numLostBytes = mRecord->getInputFramesLost() << 1;
-        numLostBytes += mPrevLostBytes;
-#if 0
-        // Simulate lost frames
-        numLostBytes = ((rand() * 1.0 / RAND_MAX)) * 2 * kMaxBufferSize;
-        numLostBytes &= 0xFFFFFFFE; // Alignment requirement
-
-        // Reduce the chance to lose
-        if (rand() * 1.0 / RAND_MAX >= 0.05) {
-            numLostBytes = 0;
-        }
-#endif
-        if (numLostBytes > 0) {
-            if (numLostBytes > kMaxBufferSize) {
-                mPrevLostBytes = numLostBytes - kMaxBufferSize;
-                numLostBytes = kMaxBufferSize;
-            } else {
-                mPrevLostBytes = 0;
-            }
-
-            CHECK_EQ(numLostBytes & 1, 0);
-            timestampUs += ((1000000LL * (numLostBytes >> 1)) +
-                    (sampleRate >> 1)) / sampleRate;
-
-            CHECK(timestampUs > mPrevSampleTimeUs);
-            if (mCollectStats) {
-                mTotalLostFrames += (numLostBytes >> 1);
-            }
-            if ((err = skipFrame(timestampUs, options)) == -1) {
-                buffer->release();
-                return UNKNOWN_ERROR;
-            } else if (err != 0) {
-                continue;
-            }
-            memset(buffer->data(), 0, numLostBytes);
-            buffer->set_range(0, numLostBytes);
-            if (numFramesRecorded == 0) {
-                buffer->meta_data()->setInt64(kKeyAnchorTime, mStartTimeUs);
-            }
-            buffer->meta_data()->setInt64(kKeyTime, mStartTimeUs + mPrevSampleTimeUs);
-            buffer->meta_data()->setInt64(kKeyDriftTime, readTimeUs - mInitialReadTimeUs);
-            mPrevSampleTimeUs = timestampUs;
-            *out = buffer;
-            return OK;
-        }
-
-        ssize_t n = mRecord->read(buffer->data(), buffer->size());
-        if (n < 0) {
-            buffer->release();
-            return (status_t)n;
-        }
-
-        int64_t recordDurationUs = (1000000LL * n >> 1) / sampleRate;
-        timestampUs += recordDurationUs;
-        if ((err = skipFrame(timestampUs, options)) == -1) {
-            buffer->release();
-            return UNKNOWN_ERROR;
-        } else if (err != 0) {
-            continue;
-        }
-
-        if (mPrevSampleTimeUs - mStartTimeUs < kAutoRampStartUs) {
-            // Mute the initial video recording signal
-            memset((uint8_t *) buffer->data(), 0, n);
-        } else if (mPrevSampleTimeUs - mStartTimeUs < kAutoRampStartUs + kAutoRampDurationUs) {
-            int32_t autoRampDurationFrames =
-                    (kAutoRampDurationUs * sampleRate + 500000LL) / 1000000LL;
-
-            int32_t autoRampStartFrames =
-                    (kAutoRampStartUs * sampleRate + 500000LL) / 1000000LL;
-
-            int32_t nFrames = numFramesRecorded - autoRampStartFrames;
-            rampVolume(nFrames, autoRampDurationFrames, (uint8_t *) buffer->data(), n);
-        }
-        if (mTrackMaxAmplitude) {
-            trackMaxAmplitude((int16_t *) buffer->data(), n >> 1);
-        }
-
-        if (numFramesRecorded == 0) {
-            buffer->meta_data()->setInt64(kKeyAnchorTime, mStartTimeUs);
-        }
-
-        buffer->meta_data()->setInt64(kKeyTime, mStartTimeUs + mPrevSampleTimeUs);
-        buffer->meta_data()->setInt64(kKeyDriftTime, readTimeUs - mInitialReadTimeUs);
-        CHECK(timestampUs > mPrevSampleTimeUs);
-        mPrevSampleTimeUs = timestampUs;
-        LOGV("initial delay: %lld, sample rate: %d, timestamp: %lld",
-                mStartTimeUs, sampleRate, timestampUs);
-
-        buffer->set_range(0, n);
-
-        *out = buffer;
+    while (mStarted && mBuffersReceived.empty()) {
+        mFrameAvailableCondition.wait(mLock);
+    }
+    if (!mStarted) {
         return OK;
     }
+    MediaBuffer *buffer = *mBuffersReceived.begin();
+    mBuffersReceived.erase(mBuffersReceived.begin());
+    ++mNumClientOwnedBuffers;
+    buffer->setObserver(this);
+    buffer->add_ref();
+
+    // Mute/suppress the recording sound
+    int64_t timeUs;
+    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
+    int64_t elapsedTimeUs = timeUs - mStartTimeUs;
+    if (elapsedTimeUs < kAutoRampStartUs) {
+        memset((uint8_t *) buffer->data(), 0, buffer->range_length());
+    } else if (elapsedTimeUs < kAutoRampStartUs + kAutoRampDurationUs) {
+        int32_t autoRampDurationFrames =
+                    (kAutoRampDurationUs * mSampleRate + 500000LL) / 1000000LL;
+
+        int32_t autoRampStartFrames =
+                    (kAutoRampStartUs * mSampleRate + 500000LL) / 1000000LL;
+
+        int32_t nFrames = mNumFramesReceived - autoRampStartFrames;
+        rampVolume(nFrames, autoRampDurationFrames,
+                (uint8_t *) buffer->data(), buffer->range_length());
+    }
+
+    // Track the max recording signal amplitude.
+    if (mTrackMaxAmplitude) {
+        trackMaxAmplitude(
+            (int16_t *) buffer->data(), buffer->range_length() >> 1);
+    }
+
+    *out = buffer;
+    return OK;
+}
+
+void AudioSource::signalBufferReturned(MediaBuffer *buffer) {
+    LOGV("signalBufferReturned: %p", buffer->data());
+    Mutex::Autolock autoLock(mLock);
+    --mNumClientOwnedBuffers;
+    buffer->setObserver(0);
+    buffer->release();
+    mFrameEncodingCompletionCondition.signal();
+    return;
+}
+
+status_t AudioSource::dataCallbackTimestamp(
+        const AudioRecord::Buffer& audioBuffer, int64_t timeUs) {
+    LOGV("dataCallbackTimestamp: %lld us", timeUs);
+    Mutex::Autolock autoLock(mLock);
+    if (!mStarted) {
+        LOGW("Spurious callback from AudioRecord. Drop the audio data.");
+        return OK;
+    }
+
+    // Drop retrieved and previously lost audio data.
+    if (mNumFramesReceived == 0 && timeUs < mStartTimeUs) {
+        mRecord->getInputFramesLost();
+        LOGV("Drop audio data at %lld/%lld us", timeUs, mStartTimeUs);
+        return OK;
+    }
+
+    if (mNumFramesReceived == 0 && mPrevSampleTimeUs == 0) {
+        mInitialReadTimeUs = timeUs;
+        // Initial delay
+        if (mStartTimeUs > 0) {
+            mStartTimeUs = timeUs - mStartTimeUs;
+        } else {
+            // Assume latency is constant.
+            mStartTimeUs += mRecord->latency() * 1000;
+        }
+        mPrevSampleTimeUs = mStartTimeUs;
+    }
+
+    int64_t timestampUs = mPrevSampleTimeUs;
+
+    size_t numLostBytes = 0;
+    if (mNumFramesReceived > 0) {  // Ignore frames lost before the first received frame
+        // getInputFramesLost() returns the number of lost frames.
+        // Convert number of frames lost to number of bytes lost.
+        numLostBytes = mRecord->getInputFramesLost() * mRecord->frameSize();
+    }
+
+    CHECK_EQ(numLostBytes & 1, 0u);
+    CHECK_EQ(audioBuffer.size & 1, 0u);
+    size_t bufferSize = numLostBytes + audioBuffer.size;
+    MediaBuffer *buffer = new MediaBuffer(bufferSize);
+    if (numLostBytes > 0) {
+        memset(buffer->data(), 0, numLostBytes);
+        memcpy((uint8_t *) buffer->data() + numLostBytes,
+                    audioBuffer.i16, audioBuffer.size);
+    } else {
+        if (audioBuffer.size == 0) {
+            LOGW("Nothing is available from AudioRecord callback buffer");
+            buffer->release();
+            return OK;
+        }
+        memcpy((uint8_t *) buffer->data(),
+                audioBuffer.i16, audioBuffer.size);
+    }
+
+    buffer->set_range(0, bufferSize);
+    timestampUs += ((1000000LL * (bufferSize >> 1)) +
+                    (mSampleRate >> 1)) / mSampleRate;
+
+    if (mNumFramesReceived == 0) {
+        buffer->meta_data()->setInt64(kKeyAnchorTime, mStartTimeUs);
+    }
+    buffer->meta_data()->setInt64(kKeyTime, mPrevSampleTimeUs);
+    buffer->meta_data()->setInt64(kKeyDriftTime, timeUs - mInitialReadTimeUs);
+    mPrevSampleTimeUs = timestampUs;
+    mNumFramesReceived += buffer->range_length() / sizeof(int16_t);
+    mBuffersReceived.push_back(buffer);
+    mFrameAvailableCondition.signal();
 
     return OK;
 }
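
// A minimal standalone sketch of the microsecond/frame arithmetic used by the
// anti-pop ramp and the timestamp bookkeeping above, assuming 16-bit PCM so
// that one sample occupies two bytes. Helper names here are illustrative.
#include <stdint.h>
#include <stddef.h>

// Convert a duration in microseconds to a sample count, rounding to nearest.
static int64_t usToSamples(int64_t durationUs, int32_t sampleRate) {
    return (durationUs * sampleRate + 500000LL) / 1000000LL;
}

// Advance a timestamp by the duration of bufferSize bytes of 16-bit samples,
// rounding to the nearest microsecond (the same +sampleRate/2 trick as above).
static int64_t advanceTimestampUs(
        int64_t timestampUs, size_t bufferSize, int32_t sampleRate) {
    int64_t numSamples = bufferSize >> 1;
    return timestampUs + (1000000LL * numSamples + (sampleRate >> 1)) / sampleRate;
}
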
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 6dc61c7..5734c7e 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -22,37 +22,42 @@
 
 #include "include/ARTSPController.h"
 #include "include/AwesomePlayer.h"
-#include "include/LiveSource.h"
 #include "include/SoftwareRenderer.h"
 #include "include/NuCachedSource2.h"
 #include "include/ThrottledSource.h"
 #include "include/MPEG2TSExtractor.h"
 
-#include "ARTPSession.h"
-#include "APacketSource.h"
-#include "ASessionDescription.h"
-#include "UDPPusher.h"
-
 #include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/AudioPlayer.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/OMXCodec.h>
 
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
+#include <gui/ISurfaceTexture.h>
+#include <gui/SurfaceTextureClient.h>
+#include <surfaceflinger/ISurfaceComposer.h>
 
 #include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#define USE_SURFACE_ALLOC 1
+#define FRAME_DROP_FREQ 0
 
 namespace android {
 
 static int64_t kLowWaterMarkUs = 2000000ll;  // 2secs
 static int64_t kHighWaterMarkUs = 10000000ll;  // 10secs
+static int64_t kHighWaterMarkRTSPUs = 4000000ll;  // 4secs
 static const size_t kLowWaterMarkBytes = 40000;
 static const size_t kHighWaterMarkBytes = 200000;
 
@@ -79,49 +84,10 @@
     AwesomeEvent &operator=(const AwesomeEvent &);
 };
 
-struct AwesomeRemoteRenderer : public AwesomeRenderer {
-    AwesomeRemoteRenderer(const sp<IOMXRenderer> &target)
-        : mTarget(target) {
-    }
-
-    virtual status_t initCheck() const {
-        return OK;
-    }
-
-    virtual void render(MediaBuffer *buffer) {
-        void *id;
-        if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) {
-            mTarget->render((IOMX::buffer_id)id);
-        }
-    }
-
-private:
-    sp<IOMXRenderer> mTarget;
-
-    AwesomeRemoteRenderer(const AwesomeRemoteRenderer &);
-    AwesomeRemoteRenderer &operator=(const AwesomeRemoteRenderer &);
-};
-
 struct AwesomeLocalRenderer : public AwesomeRenderer {
     AwesomeLocalRenderer(
-            bool previewOnly,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            const sp<ISurface> &surface,
-            size_t displayWidth, size_t displayHeight,
-            size_t decodedWidth, size_t decodedHeight,
-            int32_t rotationDegrees)
-        : mInitCheck(NO_INIT),
-          mTarget(NULL),
-          mLibHandle(NULL) {
-            mInitCheck = init(previewOnly, componentName,
-                 colorFormat, surface, displayWidth,
-                 displayHeight, decodedWidth, decodedHeight,
-                 rotationDegrees);
-    }
-
-    virtual status_t initCheck() const {
-        return mInitCheck;
+            const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta)
+        : mTarget(new SoftwareRenderer(nativeWindow, meta)) {
     }
 
     virtual void render(MediaBuffer *buffer) {
@@ -137,119 +103,86 @@
     virtual ~AwesomeLocalRenderer() {
         delete mTarget;
         mTarget = NULL;
-
-        if (mLibHandle) {
-            dlclose(mLibHandle);
-            mLibHandle = NULL;
-        }
     }
 
 private:
-    status_t mInitCheck;
-    VideoRenderer *mTarget;
-    void *mLibHandle;
-
-    status_t init(
-            bool previewOnly,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            const sp<ISurface> &surface,
-            size_t displayWidth, size_t displayHeight,
-            size_t decodedWidth, size_t decodedHeight,
-            int32_t rotationDegrees);
+    SoftwareRenderer *mTarget;
 
     AwesomeLocalRenderer(const AwesomeLocalRenderer &);
     AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);
 };
 
-status_t AwesomeLocalRenderer::init(
-        bool previewOnly,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        const sp<ISurface> &surface,
-        size_t displayWidth, size_t displayHeight,
-        size_t decodedWidth, size_t decodedHeight,
-        int32_t rotationDegrees) {
-    if (!previewOnly) {
-        // We will stick to the vanilla software-color-converting renderer
-        // for "previewOnly" mode, to avoid unneccessarily switching overlays
-        // more often than necessary.
+struct AwesomeNativeWindowRenderer : public AwesomeRenderer {
+    AwesomeNativeWindowRenderer(
+            const sp<ANativeWindow> &nativeWindow,
+            int32_t rotationDegrees)
+        : mNativeWindow(nativeWindow) {
+        applyRotation(rotationDegrees);
+    }
 
-        mLibHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
+    virtual void render(MediaBuffer *buffer) {
+        status_t err = mNativeWindow->queueBuffer(
+                mNativeWindow.get(), buffer->graphicBuffer().get());
+        if (err != 0) {
+            LOGE("queueBuffer failed with error %s (%d)", strerror(-err),
+                    -err);
+            return;
+        }
 
-        if (mLibHandle) {
-            typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
-                    const sp<ISurface> &surface,
-                    const char *componentName,
-                    OMX_COLOR_FORMATTYPE colorFormat,
-                    size_t displayWidth, size_t displayHeight,
-                    size_t decodedWidth, size_t decodedHeight,
-                    int32_t rotationDegrees);
+        sp<MetaData> metaData = buffer->meta_data();
+        metaData->setInt32(kKeyRendered, 1);
+    }
 
-            typedef VideoRenderer *(*CreateRendererFunc)(
-                    const sp<ISurface> &surface,
-                    const char *componentName,
-                    OMX_COLOR_FORMATTYPE colorFormat,
-                    size_t displayWidth, size_t displayHeight,
-                    size_t decodedWidth, size_t decodedHeight);
+protected:
+    virtual ~AwesomeNativeWindowRenderer() {}
 
-            CreateRendererWithRotationFunc funcWithRotation =
-                (CreateRendererWithRotationFunc)dlsym(
-                        mLibHandle,
-                        "_Z26createRendererWithRotationRKN7android2spINS_8"
-                        "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");
+private:
+    sp<ANativeWindow> mNativeWindow;
 
-            if (funcWithRotation) {
-                mTarget =
-                    (*funcWithRotation)(
-                            surface, componentName, colorFormat,
-                            displayWidth, displayHeight,
-                            decodedWidth, decodedHeight,
-                            rotationDegrees);
-            } else {
-                if (rotationDegrees != 0) {
-                    LOGW("renderer does not support rotation.");
-                }
+    void applyRotation(int32_t rotationDegrees) {
+        uint32_t transform;
+        switch (rotationDegrees) {
+            case 0: transform = 0; break;
+            case 90: transform = HAL_TRANSFORM_ROT_90; break;
+            case 180: transform = HAL_TRANSFORM_ROT_180; break;
+            case 270: transform = HAL_TRANSFORM_ROT_270; break;
+            default: transform = 0; break;
+        }
 
-                CreateRendererFunc func =
-                    (CreateRendererFunc)dlsym(
-                            mLibHandle,
-                            "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
-                            "OMX_COLOR_FORMATTYPEjjjj");
-
-                if (func) {
-                    mTarget =
-                        (*func)(surface, componentName, colorFormat,
-                            displayWidth, displayHeight,
-                            decodedWidth, decodedHeight);
-                }
-            }
+        if (transform) {
+            CHECK_EQ(0, native_window_set_buffers_transform(
+                        mNativeWindow.get(), transform));
         }
     }
 
-    if (mTarget != NULL) {
-        return OK;
-    }
+    AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &);
+    AwesomeNativeWindowRenderer &operator=(
+            const AwesomeNativeWindowRenderer &);
+};
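
// A small sketch of the mapping performed by applyRotation() above, using the
// same HAL_TRANSFORM_ROT_* constants referenced there; 0 means "no transform".
static uint32_t rotationToTransform(int32_t rotationDegrees) {
    switch (rotationDegrees) {
        case 90:  return HAL_TRANSFORM_ROT_90;
        case 180: return HAL_TRANSFORM_ROT_180;
        case 270: return HAL_TRANSFORM_ROT_270;
        default:  return 0;  // 0 degrees, or anything unexpected
    }
}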
 
-    mTarget = new SoftwareRenderer(
-            colorFormat, surface, displayWidth, displayHeight,
-            decodedWidth, decodedHeight, rotationDegrees);
+// To collect the decoder usage
+void addBatteryData(uint32_t params) {
+    sp<IBinder> binder =
+        defaultServiceManager()->getService(String16("media.player"));
+    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+    CHECK(service.get() != NULL);
 
-    return ((SoftwareRenderer *)mTarget)->initCheck();
+    service->addBatteryData(params);
 }
 
+////////////////////////////////////////////////////////////////////////////////
 AwesomePlayer::AwesomePlayer()
     : mQueueStarted(false),
       mTimeSource(NULL),
       mVideoRendererIsPreview(false),
       mAudioPlayer(NULL),
+      mDisplayWidth(0),
+      mDisplayHeight(0),
       mFlags(0),
       mExtractorFlags(0),
-      mLastVideoBuffer(NULL),
       mVideoBuffer(NULL),
-      mSuspensionState(NULL),
       mDecryptHandle(NULL) {
-    CHECK_EQ(mClient.connect(), OK);
+    CHECK_EQ(mClient.connect(), (status_t)OK);
 
     DataSource::RegisterDefaultSniffers();
 
@@ -259,6 +192,8 @@
     mStreamDoneEventPending = false;
     mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate);
     mBufferingEventPending = false;
+    mVideoLagEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoLagUpdate);
+    mVideoEventPending = false;
 
     mCheckAudioStatusEvent = new AwesomeEvent(
             this, &AwesomePlayer::onCheckAudioStatus);
@@ -285,6 +220,8 @@
     mStreamDoneEventPending = false;
     mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
     mAudioStatusEventPending = false;
+    mQueue.cancelEvent(mVideoLagEvent->eventID());
+    mVideoLagEventPending = false;
 
     if (!keepBufferingGoing) {
         mQueue.cancelEvent(mBufferingEvent->eventID());
@@ -311,6 +248,22 @@
 
     if (headers) {
         mUriHeaders = *headers;
+
+        ssize_t index = mUriHeaders.indexOfKey(String8("x-hide-urls-from-log"));
+        if (index >= 0) {
+            // Browser is in "incognito" mode, suppress logging URLs.
+
+            // This isn't something that should be passed to the server.
+            mUriHeaders.removeItemsAt(index);
+
+            mFlags |= INCOGNITO;
+        }
+    }
+
+    if (!(mFlags & INCOGNITO)) {
+        LOGI("setDataSource_l('%s')", mUri.string());
+    } else {
+        LOGI("setDataSource_l(URL suppressed)");
     }
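
// Caller-side sketch of the URL suppression handled above: only the header's
// presence matters, its value is ignored. Assumes <utils/KeyedVector.h> and
// <utils/String8.h>; the helper name is illustrative.
static void addIncognitoHeader(KeyedVector<String8, String8> *headers) {
    headers->add(String8("x-hide-urls-from-log"), String8("true"));
}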
 
     // The actual work will be done during preparation in the call to
@@ -339,6 +292,10 @@
     return setDataSource_l(dataSource);
 }
 
+status_t AwesomePlayer::setDataSource(const sp<IStreamSource> &source) {
+    return INVALID_OPERATION;
+}
+
 status_t AwesomePlayer::setDataSource_l(
         const sp<DataSource> &dataSource) {
     sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
@@ -348,9 +305,11 @@
     }
 
     dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient);
-    if (mDecryptHandle != NULL
-            && RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
-        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+    if (mDecryptHandle != NULL) {
+        CHECK(mDrmManagerClient);
+        if (RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
+            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+        }
     }
 
     return setDataSource_l(extractor);
@@ -390,6 +349,18 @@
         if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
             setVideoSource(extractor->getTrack(i));
             haveVideo = true;
+
+            // Set the presentation/display size
+            int32_t displayWidth, displayHeight;
+            bool success = meta->findInt32(kKeyDisplayWidth, &displayWidth);
+            if (success) {
+                success = meta->findInt32(kKeyDisplayHeight, &displayHeight);
+            }
+            if (success) {
+                mDisplayWidth = displayWidth;
+                mDisplayHeight = displayHeight;
+            }
+
         } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
             setAudioSource(extractor->getTrack(i));
             haveAudio = true;
@@ -428,6 +399,9 @@
 }
 
 void AwesomePlayer::reset_l() {
+    mDisplayWidth = 0;
+    mDisplayHeight = 0;
+
     if (mDecryptHandle != NULL) {
             mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                     Playback::STOP, 0);
@@ -435,11 +409,31 @@
             mDrmManagerClient = NULL;
     }
 
+    if (mFlags & PLAYING) {
+        uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder;
+        if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
+            params |= IMediaPlayerService::kBatteryDataTrackAudio;
+        }
+        if (mVideoSource != NULL) {
+            params |= IMediaPlayerService::kBatteryDataTrackVideo;
+        }
+        addBatteryData(params);
+    }
+
     if (mFlags & PREPARING) {
         mFlags |= PREPARE_CANCELLED;
         if (mConnectingDataSource != NULL) {
             LOGI("interrupting the connection process");
             mConnectingDataSource->disconnect();
+        } else if (mConnectingRTSPController != NULL) {
+            LOGI("interrupting the connection process");
+            mConnectingRTSPController->disconnect();
+        }
+
+        if (mFlags & PREPARING_CONNECTED) {
+            // We are basically done preparing; we're just buffering enough
+            // data to start playback, so we can safely interrupt that.
+            finishAsyncPrepare_l();
         }
     }
 
@@ -473,11 +467,6 @@
 
     mVideoRenderer.clear();
 
-    if (mLastVideoBuffer) {
-        mLastVideoBuffer->release();
-        mLastVideoBuffer = NULL;
-    }
-
     if (mVideoBuffer) {
         mVideoBuffer->release();
         mVideoBuffer = NULL;
@@ -488,10 +477,6 @@
         mRTSPController.clear();
     }
 
-    mRTPPusher.clear();
-    mRTCPPusher.clear();
-    mRTPSession.clear();
-
     if (mVideoSource != NULL) {
         mVideoSource->stop();
 
@@ -509,11 +494,10 @@
     mDurationUs = -1;
     mFlags = 0;
     mExtractorFlags = 0;
-    mVideoWidth = mVideoHeight = -1;
     mTimeSourceDeltaUs = 0;
     mVideoTimeUs = 0;
 
-    mSeeking = false;
+    mSeeking = NO_SEEK;
     mSeekNotificationSent = false;
     mSeekTimeUs = 0;
 
@@ -522,9 +506,6 @@
 
     mFileSource.clear();
 
-    delete mSuspensionState;
-    mSuspensionState = NULL;
-
     mBitrate = -1;
 }
 
@@ -539,7 +520,7 @@
 }
 
 bool AwesomePlayer::getBitrate(int64_t *bitrate) {
-    off_t size;
+    off64_t size;
     if (mDurationUs >= 0 && mCachedSource != NULL
             && mCachedSource->getSize(&size) == OK) {
         *bitrate = size * 8000000ll / mDurationUs;  // in bits/sec
@@ -564,14 +545,44 @@
         *durationUs = mRTSPController->getQueueDurationUs(eos);
         return true;
     } else if (mCachedSource != NULL && getBitrate(&bitrate)) {
-        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(eos);
+        status_t finalStatus;
+        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
         *durationUs = cachedDataRemaining * 8000000ll / bitrate;
+        *eos = (finalStatus != OK);
         return true;
     }
 
     return false;
 }
 
+void AwesomePlayer::ensureCacheIsFetching_l() {
+    if (mCachedSource != NULL) {
+        mCachedSource->resumeFetchingIfNecessary();
+    }
+}
+
+void AwesomePlayer::onVideoLagUpdate() {
+    Mutex::Autolock autoLock(mLock);
+    if (!mVideoLagEventPending) {
+        return;
+    }
+    mVideoLagEventPending = false;
+
+    int64_t audioTimeUs = mAudioPlayer->getMediaTimeUs();
+    int64_t videoLateByUs = audioTimeUs - mVideoTimeUs;
+
+    if (videoLateByUs > 300000ll) {
+        LOGV("video late by %lld ms.", videoLateByUs / 1000ll);
+
+        notifyListener_l(
+                MEDIA_INFO,
+                MEDIA_INFO_VIDEO_TRACK_LAGGING,
+                videoLateByUs / 1000ll);
+    }
+
+    postVideoLagEvent_l();
+}
+
 void AwesomePlayer::onBufferingUpdate() {
     Mutex::Autolock autoLock(mLock);
     if (!mBufferingEventPending) {
@@ -580,11 +591,14 @@
     mBufferingEventPending = false;
 
     if (mCachedSource != NULL) {
-        bool eos;
-        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&eos);
+        status_t finalStatus;
+        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
+        bool eos = (finalStatus != OK);
 
         if (eos) {
-            notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
+            if (finalStatus == ERROR_END_OF_STREAM) {
+                notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
+            }
             if (mFlags & PREPARING) {
                 LOGV("cache has reached EOS, prepare is done.");
                 finishAsyncPrepare_l();
@@ -611,6 +625,7 @@
                          kLowWaterMarkBytes);
                     mFlags |= CACHE_UNDERRUN;
                     pause_l();
+                    ensureCacheIsFetching_l();
                     notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
                 } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
                     if (mFlags & CACHE_UNDERRUN) {
@@ -632,14 +647,21 @@
     int64_t cachedDurationUs;
     bool eos;
     if (getCachedDuration_l(&cachedDurationUs, &eos)) {
+        LOGV("cachedDurationUs = %.2f secs, eos=%d",
+             cachedDurationUs / 1E6, eos);
+
+        int64_t highWaterMarkUs =
+            (mRTSPController != NULL) ? kHighWaterMarkRTSPUs : kHighWaterMarkUs;
+
         if ((mFlags & PLAYING) && !eos
                 && (cachedDurationUs < kLowWaterMarkUs)) {
             LOGI("cache is running low (%.2f secs) , pausing.",
                  cachedDurationUs / 1E6);
             mFlags |= CACHE_UNDERRUN;
             pause_l();
+            ensureCacheIsFetching_l();
             notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
-        } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
+        } else if (eos || cachedDurationUs > highWaterMarkUs) {
             if (mFlags & CACHE_UNDERRUN) {
                 LOGI("cache has filled up (%.2f secs), resuming.",
                      cachedDurationUs / 1E6);
@@ -657,39 +679,6 @@
     postBufferingEvent_l();
 }
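
// A standalone sketch of the duration-based watermark policy above: pause when
// buffered data drops below kLowWaterMarkUs while playing, resume once the
// cache reaches the high watermark (lower for RTSP) or the stream ends.
enum BufferingAction { DO_NOTHING, PAUSE_FOR_BUFFERING, RESUME_PLAYBACK };

static BufferingAction bufferingAction(
        int64_t cachedDurationUs, bool eos, bool isRtsp,
        bool playing, bool underrun) {
    const int64_t kLowUs  = 2000000ll;                        // kLowWaterMarkUs
    const int64_t kHighUs = isRtsp ? 4000000ll : 10000000ll;  // kHighWaterMark(RTSP)Us

    if (playing && !eos && cachedDurationUs < kLowUs) {
        return PAUSE_FOR_BUFFERING;
    }
    if (underrun && (eos || cachedDurationUs > kHighUs)) {
        return RESUME_PLAYBACK;
    }
    return DO_NOTHING;
}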
 
-void AwesomePlayer::partial_reset_l() {
-    // Only reset the video renderer and shut down the video decoder.
-    // Then instantiate a new video decoder and resume video playback.
-
-    mVideoRenderer.clear();
-
-    if (mLastVideoBuffer) {
-        mLastVideoBuffer->release();
-        mLastVideoBuffer = NULL;
-    }
-
-    if (mVideoBuffer) {
-        mVideoBuffer->release();
-        mVideoBuffer = NULL;
-    }
-
-    {
-        mVideoSource->stop();
-
-        // The following hack is necessary to ensure that the OMX
-        // component is completely released by the time we may try
-        // to instantiate it again.
-        wp<MediaSource> tmp = mVideoSource;
-        mVideoSource.clear();
-        while (tmp.promote() != NULL) {
-            usleep(1000);
-        }
-        IPCThreadState::self()->flushCommands();
-    }
-
-    CHECK_EQ(OK, initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData));
-}
-
 void AwesomePlayer::onStreamDone() {
     // Posted whenever any stream finishes playing.
 
@@ -699,21 +688,7 @@
     }
     mStreamDoneEventPending = false;
 
-    if (mStreamDoneStatus == INFO_DISCONTINUITY) {
-        // This special status is returned because an http live stream's
-        // video stream switched to a different bandwidth at this point
-        // and future data may have been encoded using different parameters.
-        // This requires us to shutdown the video decoder and reinstantiate
-        // a fresh one.
-
-        LOGV("INFO_DISCONTINUITY");
-
-        CHECK(mVideoSource != NULL);
-
-        partial_reset_l();
-        postVideoEvent_l();
-        return;
-    } else if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
+    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
         LOGV("MEDIA_ERROR %d", mStreamDoneStatus);
 
         notifyListener_l(
@@ -758,6 +733,8 @@
 }
 
 status_t AwesomePlayer::play_l() {
+    mFlags &= ~SEEK_PREVIEW;
+
     if (mFlags & PLAYING) {
         return OK;
     }
@@ -788,25 +765,6 @@
                 mAudioPlayer = new AudioPlayer(mAudioSink, this);
                 mAudioPlayer->setSource(mAudioSource);
 
-                // We've already started the MediaSource in order to enable
-                // the prefetcher to read its data.
-                status_t err = mAudioPlayer->start(
-                        true /* sourceAlreadyStarted */);
-
-                if (err != OK) {
-                    delete mAudioPlayer;
-                    mAudioPlayer = NULL;
-
-                    mFlags &= ~(PLAYING | FIRST_FRAME);
-
-                    if (mDecryptHandle != NULL) {
-                        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                                 Playback::STOP, 0);
-                    }
-
-                    return err;
-                }
-
                 mTimeSource = mAudioPlayer;
 
                 deferredAudioSeek = true;
@@ -814,8 +772,26 @@
                 mWatchForAudioSeekComplete = false;
                 mWatchForAudioEOS = true;
             }
-        } else {
-            mAudioPlayer->resume();
+        }
+
+        CHECK(!(mFlags & AUDIO_RUNNING));
+
+        if (mVideoSource == NULL) {
+            status_t err = startAudioPlayer_l();
+
+            if (err != OK) {
+                delete mAudioPlayer;
+                mAudioPlayer = NULL;
+
+                mFlags &= ~(PLAYING | FIRST_FRAME);
+
+                if (mDecryptHandle != NULL) {
+                    mDrmManagerClient->setPlaybackStatus(
+                            mDecryptHandle, Playback::STOP, 0);
+                }
+
+                return err;
+            }
         }
     }
 
@@ -826,6 +802,10 @@
     if (mVideoSource != NULL) {
         // Kick off video playback
         postVideoEvent_l();
+
+        if (mAudioSource != NULL && mVideoSource != NULL) {
+            postVideoLagEvent_l();
+        }
     }
 
     if (deferredAudioSeek) {
@@ -840,14 +820,98 @@
         seekTo_l(0);
     }
 
+    uint32_t params = IMediaPlayerService::kBatteryDataCodecStarted
+        | IMediaPlayerService::kBatteryDataTrackDecoder;
+    if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
+        params |= IMediaPlayerService::kBatteryDataTrackAudio;
+    }
+    if (mVideoSource != NULL) {
+        params |= IMediaPlayerService::kBatteryDataTrackVideo;
+    }
+    addBatteryData(params);
+
     return OK;
 }
 
-status_t AwesomePlayer::initRenderer_l() {
-    if (mISurface == NULL) {
+status_t AwesomePlayer::startAudioPlayer_l() {
+    CHECK(!(mFlags & AUDIO_RUNNING));
+
+    if (mAudioSource == NULL || mAudioPlayer == NULL) {
         return OK;
     }
 
+    if (!(mFlags & AUDIOPLAYER_STARTED)) {
+        mFlags |= AUDIOPLAYER_STARTED;
+
+        // We've already started the MediaSource in order to enable
+        // the prefetcher to read its data.
+        status_t err = mAudioPlayer->start(
+                true /* sourceAlreadyStarted */);
+
+        if (err != OK) {
+            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+            return err;
+        }
+    } else {
+        mAudioPlayer->resume();
+    }
+
+    mFlags |= AUDIO_RUNNING;
+
+    mWatchForAudioEOS = true;
+
+    return OK;
+}
+
+void AwesomePlayer::notifyVideoSize_l() {
+    sp<MetaData> meta = mVideoSource->getFormat();
+
+    int32_t cropLeft, cropTop, cropRight, cropBottom;
+    if (!meta->findRect(
+                kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
+        int32_t width, height;
+        CHECK(meta->findInt32(kKeyWidth, &width));
+        CHECK(meta->findInt32(kKeyHeight, &height));
+
+        cropLeft = cropTop = 0;
+        cropRight = width - 1;
+        cropBottom = height - 1;
+
+        LOGV("got dimensions only %d x %d", width, height);
+    } else {
+        LOGV("got crop rect %d, %d, %d, %d",
+             cropLeft, cropTop, cropRight, cropBottom);
+    }
+
+    int32_t usableWidth = cropRight - cropLeft + 1;
+    int32_t usableHeight = cropBottom - cropTop + 1;
+    if (mDisplayWidth != 0) {
+        usableWidth = mDisplayWidth;
+    }
+    if (mDisplayHeight != 0) {
+        usableHeight = mDisplayHeight;
+    }
+
+    int32_t rotationDegrees;
+    if (!mVideoTrack->getFormat()->findInt32(
+                kKeyRotation, &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
+
+    if (rotationDegrees == 90 || rotationDegrees == 270) {
+        notifyListener_l(
+                MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth);
+    } else {
+        notifyListener_l(
+                MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight);
+    }
+}
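
// Sketch of the size computation above: the usable area comes from the
// decoder's crop rect (inclusive coordinates), may be overridden by the
// container's display size, and is swapped for 90/270 degree rotation.
static void usableVideoSize(
        int32_t cropLeft, int32_t cropTop, int32_t cropRight, int32_t cropBottom,
        int32_t displayWidth, int32_t displayHeight, int32_t rotationDegrees,
        int32_t *outWidth, int32_t *outHeight) {
    int32_t w = cropRight - cropLeft + 1;
    int32_t h = cropBottom - cropTop + 1;
    if (displayWidth != 0)  w = displayWidth;
    if (displayHeight != 0) h = displayHeight;

    const bool swap = (rotationDegrees == 90 || rotationDegrees == 270);
    *outWidth  = swap ? h : w;
    *outHeight = swap ? w : h;
}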
+
+void AwesomePlayer::initRenderer_l() {
+    if (mNativeWindow == NULL) {
+        return;
+    }
+
     sp<MetaData> meta = mVideoSource->getFormat();
 
     int32_t format;
@@ -870,37 +934,19 @@
     // before creating a new one.
     IPCThreadState::self()->flushCommands();
 
-    if (!strncmp("OMX.", component, 4)) {
-        // Our OMX codecs allocate buffers on the media_server side
-        // therefore they require a remote IOMXRenderer that knows how
-        // to display them.
-
-        sp<IOMXRenderer> native =
-            mClient.interface()->createRenderer(
-                    mISurface, component,
-                    (OMX_COLOR_FORMATTYPE)format,
-                    decodedWidth, decodedHeight,
-                    mVideoWidth, mVideoHeight,
-                    rotationDegrees);
-
-        if (native == NULL) {
-            return NO_INIT;
-        }
-
-        mVideoRenderer = new AwesomeRemoteRenderer(native);
+    if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) {
+        // Hardware decoders avoid the CPU color conversion by decoding
+        // directly to ANativeBuffers, so we must use a renderer that
+        // just pushes those buffers to the ANativeWindow.
+        mVideoRenderer =
+            new AwesomeNativeWindowRenderer(mNativeWindow, rotationDegrees);
     } else {
         // Other decoders are instantiated locally and as a consequence
-        // allocate their buffers in local address space.
-        mVideoRenderer = new AwesomeLocalRenderer(
-            false,  // previewOnly
-            component,
-            (OMX_COLOR_FORMATTYPE)format,
-            mISurface,
-            mVideoWidth, mVideoHeight,
-            decodedWidth, decodedHeight, rotationDegrees);
+        // allocate their buffers in local address space.  This renderer
+        // then performs a color conversion and copy to get the data
+        // into the ANativeBuffer.
+        mVideoRenderer = new AwesomeLocalRenderer(mNativeWindow, meta);
     }
-
-    return mVideoRenderer->initCheck();
 }
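
// Sketch of the renderer selection above: OMX.* (typically hardware) decoders
// output ANativeBuffers that can be queued straight to the window; everything
// else decodes into CPU memory and goes through the color-converting renderer.
static bool usesNativeWindowRenderer(const char *componentName) {
    return USE_SURFACE_ALLOC && !strncmp(componentName, "OMX.", 4);
}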
 
 status_t AwesomePlayer::pause() {
@@ -918,7 +964,7 @@
 
     cancelPlayerEvents(true /* keepBufferingGoing */);
 
-    if (mAudioPlayer != NULL) {
+    if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
         if (at_eos) {
             // If we played the audio stream to completion we
             // want to make sure that all samples remaining in the audio
@@ -927,6 +973,8 @@
         } else {
             mAudioPlayer->pause();
         }
+
+        mFlags &= ~AUDIO_RUNNING;
     }
 
     mFlags &= ~PLAYING;
@@ -936,6 +984,16 @@
                 Playback::PAUSE, 0);
     }
 
+    uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder;
+    if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
+        params |= IMediaPlayerService::kBatteryDataTrackAudio;
+    }
+    if (mVideoSource != NULL) {
+        params |= IMediaPlayerService::kBatteryDataTrackVideo;
+    }
+
+    addBatteryData(params);
+
     return OK;
 }
 
@@ -943,10 +1001,21 @@
     return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
 }
 
-void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
+void AwesomePlayer::setSurface(const sp<Surface> &surface) {
     Mutex::Autolock autoLock(mLock);
 
-    mISurface = isurface;
+    mSurface = surface;
+    mNativeWindow = surface;
+}
+
+void AwesomePlayer::setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
+    Mutex::Autolock autoLock(mLock);
+
+    mSurface.clear();
+    if (surfaceTexture != NULL) {
+        mNativeWindow = new SurfaceTextureClient(surfaceTexture);
+    }
+
 }
 
 void AwesomePlayer::setAudioSink(
@@ -984,7 +1053,7 @@
     if (mRTSPController != NULL) {
         *positionUs = mRTSPController->getNormalPlayTimeUs();
     }
-    else if (mSeeking) {
+    else if (mSeeking != NO_SEEK) {
         *positionUs = mSeekTimeUs;
     } else if (mVideoSource != NULL) {
         Mutex::Autolock autoLock(mMiscStateLock);
@@ -1028,7 +1097,7 @@
         play_l();
     }
 
-    mSeeking = true;
+    mSeeking = SEEK;
     mSeekNotificationSent = false;
     mSeekTimeUs = timeUs;
     mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
@@ -1041,13 +1110,18 @@
 
         notifyListener_l(MEDIA_SEEK_COMPLETE);
         mSeekNotificationSent = true;
+
+        if ((mFlags & PREPARED) && mVideoSource != NULL) {
+            mFlags |= SEEK_PREVIEW;
+            postVideoEvent_l();
+        }
     }
 
     return OK;
 }
 
 void AwesomePlayer::seekAudioIfNecessary_l() {
-    if (mSeeking && mVideoSource == NULL && mAudioPlayer != NULL) {
+    if (mSeeking != NO_SEEK && mVideoSource == NULL && mAudioPlayer != NULL) {
         mAudioPlayer->seekTo(mSeekTimeUs);
 
         mWatchForAudioSeekComplete = true;
@@ -1063,20 +1137,6 @@
     }
 }
 
-status_t AwesomePlayer::getVideoDimensions(
-        int32_t *width, int32_t *height) const {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mVideoWidth < 0 || mVideoHeight < 0) {
-        return UNKNOWN_ERROR;
-    }
-
-    *width = mVideoWidth;
-    *height = mVideoHeight;
-
-    return OK;
-}
-
 void AwesomePlayer::setAudioSource(sp<MediaSource> source) {
     CHECK(source != NULL);
 
@@ -1130,11 +1190,24 @@
 }
 
 status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
+
+    // Either the application or the DRM system can independently say
+    // that there must be a hardware-protected path to an external video sink.
+    // For now we always require a hardware-protected path to an external video sink
+    // if content is DRMed, but eventually this could be optional per DRM agent.
+    // When the application wants protection, then
+    //   (USE_SURFACE_ALLOC && (mSurface != 0) &&
+    //   (mSurface->getFlags() & ISurfaceComposer::eProtectedByApp))
+    // will be true, but that part is already handled by SurfaceFlinger.
+    if (mDecryptHandle != NULL) {
+        flags |= OMXCodec::kEnableGrallocUsageProtected;
+    }
+    LOGV("initVideoDecoder flags=0x%x", flags);
     mVideoSource = OMXCodec::Create(
             mClient.interface(), mVideoTrack->getFormat(),
             false, // createEncoder
             mVideoTrack,
-            NULL, flags);
+            NULL, flags, USE_SURFACE_ALLOC ? mNativeWindow : NULL);
 
     if (mVideoSource != NULL) {
         int64_t durationUs;
@@ -1145,9 +1218,6 @@
             }
         }
 
-        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
-        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));
-
         status_t err = mVideoSource->start();
 
         if (err != OK) {
@@ -1160,20 +1230,23 @@
 }
 
 void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
-    if (!mSeeking) {
+    if (mSeeking == SEEK_VIDEO_ONLY) {
+        mSeeking = NO_SEEK;
+        return;
+    }
+
+    if (mSeeking == NO_SEEK || (mFlags & SEEK_PREVIEW)) {
         return;
     }
 
     if (mAudioPlayer != NULL) {
-        LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6);
+        LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
 
         // If we don't have a video time, seek audio to the originally
         // requested seek time instead.
 
         mAudioPlayer->seekTo(videoTimeUs < 0 ? mSeekTimeUs : videoTimeUs);
-        mAudioPlayer->resume();
         mWatchForAudioSeekComplete = true;
-        mWatchForAudioEOS = true;
     } else if (!mSeekNotificationSent) {
         // If we're playing video only, report seek complete now,
         // otherwise audio player will notify us later.
@@ -1181,8 +1254,15 @@
     }
 
     mFlags |= FIRST_FRAME;
-    mSeeking = false;
+    mSeeking = NO_SEEK;
     mSeekNotificationSent = false;
+
+    if (mDecryptHandle != NULL) {
+        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                Playback::PAUSE, 0);
+        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                Playback::START, videoTimeUs / 1000);
+    }
 }
 
 void AwesomePlayer::onVideoEvent() {
@@ -1194,18 +1274,14 @@
     }
     mVideoEventPending = false;
 
-    if (mSeeking) {
-        if (mLastVideoBuffer) {
-            mLastVideoBuffer->release();
-            mLastVideoBuffer = NULL;
-        }
-
+    if (mSeeking != NO_SEEK) {
         if (mVideoBuffer) {
             mVideoBuffer->release();
             mVideoBuffer = NULL;
         }
 
-        if (mCachedSource != NULL && mAudioSource != NULL) {
+        if (mSeeking == SEEK && mCachedSource != NULL && mAudioSource != NULL
+                && !(mFlags & SEEK_PREVIEW)) {
             // We're going to seek the video source first, followed by
             // the audio source.
             // In order to avoid jumps in the DataSource offset caused by
@@ -1214,8 +1290,10 @@
             // locations, we'll "pause" the audio source, causing it to
             // stop reading input data until a subsequent seek.
 
-            if (mAudioPlayer != NULL) {
+            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
                 mAudioPlayer->pause();
+
+                mFlags &= ~AUDIO_RUNNING;
             }
             mAudioSource->pause();
         }
@@ -1223,40 +1301,38 @@
 
     if (!mVideoBuffer) {
         MediaSource::ReadOptions options;
-        if (mSeeking) {
+        if (mSeeking != NO_SEEK) {
             LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);
 
             options.setSeekTo(
-                    mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
+                    mSeekTimeUs,
+                    mSeeking == SEEK_VIDEO_ONLY
+                        ? MediaSource::ReadOptions::SEEK_NEXT_SYNC
+                        : MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
         }
         for (;;) {
             status_t err = mVideoSource->read(&mVideoBuffer, &options);
             options.clearSeekTo();
 
             if (err != OK) {
-                CHECK_EQ(mVideoBuffer, NULL);
+                CHECK(mVideoBuffer == NULL);
 
                 if (err == INFO_FORMAT_CHANGED) {
                     LOGV("VideoSource signalled format change.");
 
+                    notifyVideoSize_l();
+
                     if (mVideoRenderer != NULL) {
                         mVideoRendererIsPreview = false;
-                        err = initRenderer_l();
-
-                        if (err == OK) {
-                            continue;
-                        }
-
-                        // fall through
-                    } else {
-                        continue;
+                        initRenderer_l();
                     }
+                    continue;
                 }
 
                 // So video playback is complete, but we may still have
                 // a seek request pending that needs to be applied
                 // to the audio track.
-                if (mSeeking) {
+                if (mSeeking != NO_SEEK) {
                     LOGV("video stream ended while seeking!");
                 }
                 finishSeekIfNecessary(-1);
@@ -1282,26 +1358,34 @@
     int64_t timeUs;
     CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
 
+    if (mSeeking == SEEK_VIDEO_ONLY) {
+        if (mSeekTimeUs > timeUs) {
+            LOGI("XXX mSeekTimeUs = %lld us, timeUs = %lld us",
+                 mSeekTimeUs, timeUs);
+        }
+    }
+
     {
         Mutex::Autolock autoLock(mMiscStateLock);
         mVideoTimeUs = timeUs;
     }
 
-    bool wasSeeking = mSeeking;
+    SeekType wasSeeking = mSeeking;
     finishSeekIfNecessary(timeUs);
 
+    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
+        status_t err = startAudioPlayer_l();
+        if (err != OK) {
+            LOGE("Startung the audio player failed w/ err %d", err);
+            return;
+        }
+    }
+
     TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
 
-    if (mDecryptHandle != NULL) {
-        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                Playback::PAUSE, 0);
-        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                Playback::START, timeUs / 1000);
-    }
-
     if (mFlags & FIRST_FRAME) {
         mFlags &= ~FIRST_FRAME;
-
+        mSinceLastDropped = 0;
         mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
     }
 
@@ -1311,64 +1395,83 @@
         mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
     }
 
-    int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
+    if (wasSeeking == SEEK_VIDEO_ONLY) {
+        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
 
-    int64_t latenessUs = nowUs - timeUs;
+        int64_t latenessUs = nowUs - timeUs;
 
-    if (wasSeeking) {
+        if (latenessUs > 0) {
+            LOGI("after SEEK_VIDEO_ONLY we're late by %.2f secs", latenessUs / 1E6);
+        }
+    }
+
+    if (wasSeeking == NO_SEEK) {
         // Let's display the first frame after seeking right away.
-        latenessUs = 0;
-    }
 
-    if (mRTPSession != NULL) {
-        // We'll completely ignore timestamps for gtalk videochat
-        // and we'll play incoming video as fast as we get it.
-        latenessUs = 0;
-    }
+        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
 
-    if (latenessUs > 40000) {
-        // We're more than 40ms late.
-        LOGV("we're late by %lld us (%.2f secs)", latenessUs, latenessUs / 1E6);
+        int64_t latenessUs = nowUs - timeUs;
 
-        mVideoBuffer->release();
-        mVideoBuffer = NULL;
+        if (latenessUs > 500000ll
+                && mRTSPController == NULL
+                && mAudioPlayer != NULL
+                && mAudioPlayer->getMediaTimeMapping(
+                    &realTimeUs, &mediaTimeUs)) {
+            LOGI("we're much too late (%.2f secs), video skipping ahead",
+                 latenessUs / 1E6);
 
-        postVideoEvent_l();
-        return;
-    }
+            mVideoBuffer->release();
+            mVideoBuffer = NULL;
 
-    if (latenessUs < -10000) {
-        // We're more than 10ms early.
+            mSeeking = SEEK_VIDEO_ONLY;
+            mSeekTimeUs = mediaTimeUs;
 
-        postVideoEvent_l(10000);
-        return;
+            postVideoEvent_l();
+            return;
+        }
+
+        if (latenessUs > 40000) {
+            // We're more than 40ms late.
+            LOGV("we're late by %lld us (%.2f secs)", latenessUs, latenessUs / 1E6);
+            if (mSinceLastDropped > FRAME_DROP_FREQ) {
+                LOGV("we're late by %lld us (%.2f secs) dropping one after %d frames",
+                     latenessUs, latenessUs / 1E6, mSinceLastDropped);
+                mSinceLastDropped = 0;
+                mVideoBuffer->release();
+                mVideoBuffer = NULL;
+
+                postVideoEvent_l();
+                return;
+            }
+        }
+
+        if (latenessUs < -10000) {
+            // We're more than 10ms early.
+
+            postVideoEvent_l(10000);
+            return;
+        }
     }
 
     if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
         mVideoRendererIsPreview = false;
 
-        status_t err = initRenderer_l();
-
-        if (err != OK) {
-            finishSeekIfNecessary(-1);
-
-            mFlags |= VIDEO_AT_EOS;
-            postStreamDoneEvent_l(err);
-            return;
-        }
+        initRenderer_l();
     }
 
     if (mVideoRenderer != NULL) {
+        mSinceLastDropped++;
         mVideoRenderer->render(mVideoBuffer);
     }
 
-    if (mLastVideoBuffer) {
-        mLastVideoBuffer->release();
-        mLastVideoBuffer = NULL;
-    }
-    mLastVideoBuffer = mVideoBuffer;
+    mVideoBuffer->release();
     mVideoBuffer = NULL;
 
+    if (wasSeeking != NO_SEEK && (mFlags & SEEK_PREVIEW)) {
+        mFlags &= ~SEEK_PREVIEW;
+        return;
+    }
+
     postVideoEvent_l();
 }
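
// Standalone sketch of the per-frame lateness policy applied above. Thresholds
// mirror the 500 ms "skip ahead", 40 ms "drop" and 10 ms "too early" cases;
// FRAME_DROP_FREQ rate-limits how often frames are dropped.
enum FrameAction { RENDER_NOW, DROP_FRAME, SKIP_AHEAD, WAIT_10MS };

static FrameAction frameAction(
        int64_t latenessUs, bool canResyncToAudio,
        int32_t framesSinceLastDrop, int32_t frameDropFreq) {
    if (latenessUs > 500000ll && canResyncToAudio) {
        return SKIP_AHEAD;   // seek video only, to the audio clock position
    }
    if (latenessUs > 40000ll && framesSinceLastDrop > frameDropFreq) {
        return DROP_FRAME;   // more than 40 ms late
    }
    if (latenessUs < -10000ll) {
        return WAIT_10MS;    // more than 10 ms early, repost the event
    }
    return RENDER_NOW;
}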
 
@@ -1399,6 +1502,14 @@
     mQueue.postEventWithDelay(mBufferingEvent, 1000000ll);
 }
 
+void AwesomePlayer::postVideoLagEvent_l() {
+    if (mVideoLagEventPending) {
+        return;
+    }
+    mVideoLagEventPending = true;
+    mQueue.postEventWithDelay(mVideoLagEvent, 1000000ll);
+}
+
 void AwesomePlayer::postCheckAudioStatusEvent_l() {
     if (mAudioStatusEventPending) {
         return;
@@ -1425,7 +1536,7 @@
             mSeekNotificationSent = true;
         }
 
-        mSeeking = false;
+        mSeeking = NO_SEEK;
     }
 
     status_t finalStatus;
@@ -1498,8 +1609,10 @@
 status_t AwesomePlayer::finishSetDataSource_l() {
     sp<DataSource> dataSource;
 
-    if (!strncasecmp("http://", mUri.string(), 7)) {
-        mConnectingDataSource = new NuHTTPDataSource;
+    if (!strncasecmp("http://", mUri.string(), 7)
+            || !strncasecmp("https://", mUri.string(), 8)) {
+        mConnectingDataSource = new NuHTTPDataSource(
+                (mFlags & INCOGNITO) ? NuHTTPDataSource::kFlagIncognito : 0);
 
         mLock.unlock();
         status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders);
@@ -1532,11 +1645,11 @@
         mLock.unlock();
 
         for (;;) {
-            bool eos;
+            status_t finalStatus;
             size_t cachedDataRemaining =
-                mCachedSource->approxDataRemaining(&eos);
+                mCachedSource->approxDataRemaining(&finalStatus);
 
-            if (eos || cachedDataRemaining >= kHighWaterMarkBytes
+            if (finalStatus != OK || cachedDataRemaining >= kHighWaterMarkBytes
                     || (mFlags & PREPARE_CANCELLED)) {
                 break;
             }
@@ -1550,134 +1663,6 @@
             LOGI("Prepare cancelled while waiting for initial cache fill.");
             return UNKNOWN_ERROR;
         }
-    } else if (!strncasecmp(mUri.string(), "httplive://", 11)) {
-        String8 uri("http://");
-        uri.append(mUri.string() + 11);
-
-        sp<LiveSource> liveSource = new LiveSource(uri.string());
-
-        mCachedSource = new NuCachedSource2(liveSource);
-        dataSource = mCachedSource;
-
-        sp<MediaExtractor> extractor =
-            MediaExtractor::Create(dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);
-
-        static_cast<MPEG2TSExtractor *>(extractor.get())
-            ->setLiveSource(liveSource);
-
-        return setDataSource_l(extractor);
-    } else if (!strncmp("rtsp://gtalk/", mUri.string(), 13)) {
-        if (mLooper == NULL) {
-            mLooper = new ALooper;
-            mLooper->setName("gtalk rtp");
-            mLooper->start(
-                    false /* runOnCallingThread */,
-                    false /* canCallJava */,
-                    PRIORITY_HIGHEST);
-        }
-
-        const char *startOfCodecString = &mUri.string()[13];
-        const char *startOfSlash1 = strchr(startOfCodecString, '/');
-        if (startOfSlash1 == NULL) {
-            return BAD_VALUE;
-        }
-        const char *startOfWidthString = &startOfSlash1[1];
-        const char *startOfSlash2 = strchr(startOfWidthString, '/');
-        if (startOfSlash2 == NULL) {
-            return BAD_VALUE;
-        }
-        const char *startOfHeightString = &startOfSlash2[1];
-
-        String8 codecString(startOfCodecString, startOfSlash1 - startOfCodecString);
-        String8 widthString(startOfWidthString, startOfSlash2 - startOfWidthString);
-        String8 heightString(startOfHeightString);
-
-#if 0
-        mRTPPusher = new UDPPusher("/data/misc/rtpout.bin", 5434);
-        mLooper->registerHandler(mRTPPusher);
-
-        mRTCPPusher = new UDPPusher("/data/misc/rtcpout.bin", 5435);
-        mLooper->registerHandler(mRTCPPusher);
-#endif
-
-        mRTPSession = new ARTPSession;
-        mLooper->registerHandler(mRTPSession);
-
-#if 0
-        // My AMR SDP
-        static const char *raw =
-            "v=0\r\n"
-            "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
-            "s=QuickTime\r\n"
-            "t=0 0\r\n"
-            "a=range:npt=0-315\r\n"
-            "a=isma-compliance:2,2.0,2\r\n"
-            "m=audio 5434 RTP/AVP 97\r\n"
-            "c=IN IP4 127.0.0.1\r\n"
-            "b=AS:30\r\n"
-            "a=rtpmap:97 AMR/8000/1\r\n"
-            "a=fmtp:97 octet-align\r\n";
-#elif 1
-        String8 sdp;
-        sdp.appendFormat(
-            "v=0\r\n"
-            "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
-            "s=QuickTime\r\n"
-            "t=0 0\r\n"
-            "a=range:npt=0-315\r\n"
-            "a=isma-compliance:2,2.0,2\r\n"
-            "m=video 5434 RTP/AVP 97\r\n"
-            "c=IN IP4 127.0.0.1\r\n"
-            "b=AS:30\r\n"
-            "a=rtpmap:97 %s/90000\r\n"
-            "a=cliprect:0,0,%s,%s\r\n"
-            "a=framesize:97 %s-%s\r\n",
-
-            codecString.string(),
-            heightString.string(), widthString.string(),
-            widthString.string(), heightString.string()
-            );
-        const char *raw = sdp.string();
-
-#endif
-
-        sp<ASessionDescription> desc = new ASessionDescription;
-        CHECK(desc->setTo(raw, strlen(raw)));
-
-        CHECK_EQ(mRTPSession->setup(desc), (status_t)OK);
-
-        if (mRTPPusher != NULL) {
-            mRTPPusher->start();
-        }
-
-        if (mRTCPPusher != NULL) {
-            mRTCPPusher->start();
-        }
-
-        CHECK_EQ(mRTPSession->countTracks(), 1u);
-        sp<MediaSource> source = mRTPSession->trackAt(0);
-
-#if 0
-        bool eos;
-        while (((APacketSource *)source.get())
-                ->getQueuedDuration(&eos) < 5000000ll && !eos) {
-            usleep(100000ll);
-        }
-#endif
-
-        const char *mime;
-        CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime));
-
-        if (!strncasecmp("video/", mime, 6)) {
-            setVideoSource(source);
-        } else {
-            CHECK(!strncasecmp("audio/", mime, 6));
-            setAudioSource(source);
-        }
-
-        mExtractorFlags = MediaExtractor::CAN_PAUSE;
-
-        return OK;
     } else if (!strncasecmp("rtsp://", mUri.string(), 7)) {
         if (mLooper == NULL) {
             mLooper = new ALooper;
@@ -1685,7 +1670,13 @@
             mLooper->start();
         }
         mRTSPController = new ARTSPController(mLooper);
+        mConnectingRTSPController = mRTSPController;
+
+        mLock.unlock();
         status_t err = mRTSPController->connect(mUri.string());
+        mLock.lock();
+
+        mConnectingRTSPController.clear();
 
         LOGI("ARTSPController::connect returned %d", err);
 
@@ -1711,9 +1702,16 @@
     }
 
     dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient);
-    if (mDecryptHandle != NULL
-            && RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
-        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+    if (mDecryptHandle != NULL) {
+        CHECK(mDrmManagerClient);
+        if (RightsStatus::RIGHTS_VALID == mDecryptHandle->status) {
+            if (DecryptApiType::WV_BASED == mDecryptHandle->decryptApiType) {
+                LOGD("Setting mCachedSource to NULL for WVM\n");
+                mCachedSource.clear();
+            }
+        } else {
+            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+        }
     }
 
     return setDataSource_l(extractor);
@@ -1727,7 +1725,7 @@
     }
 
     mPrepareResult = err;
-    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
     mAsyncPrepareEvent = NULL;
     mPreparedCondition.broadcast();
 }
@@ -1775,6 +1773,8 @@
         }
     }
 
+    mFlags |= PREPARING_CONNECTED;
+
     if (mCachedSource != NULL || mRTSPController != NULL) {
         postBufferingEvent_l();
     } else {
@@ -1784,175 +1784,22 @@
 
 void AwesomePlayer::finishAsyncPrepare_l() {
     if (mIsAsyncPrepare) {
-        if (mVideoWidth < 0 || mVideoHeight < 0) {
+        if (mVideoSource == NULL) {
             notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
         } else {
-            int32_t rotationDegrees;
-            if (!mVideoTrack->getFormat()->findInt32(
-                        kKeyRotation, &rotationDegrees)) {
-                rotationDegrees = 0;
-            }
-
-#if 1
-            if (rotationDegrees == 90 || rotationDegrees == 270) {
-                notifyListener_l(
-                        MEDIA_SET_VIDEO_SIZE, mVideoHeight, mVideoWidth);
-            } else
-#endif
-            {
-                notifyListener_l(
-                        MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight);
-            }
+            notifyVideoSize_l();
         }
 
         notifyListener_l(MEDIA_PREPARED);
     }
 
     mPrepareResult = OK;
-    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
     mFlags |= PREPARED;
     mAsyncPrepareEvent = NULL;
     mPreparedCondition.broadcast();
 }
 
-status_t AwesomePlayer::suspend() {
-    LOGV("suspend");
-    Mutex::Autolock autoLock(mLock);
-
-    if (mSuspensionState != NULL) {
-        if (mLastVideoBuffer == NULL) {
-            //go into here if video is suspended again
-            //after resuming without being played between
-            //them
-            SuspensionState *state = mSuspensionState;
-            mSuspensionState = NULL;
-            reset_l();
-            mSuspensionState = state;
-            return OK;
-        }
-
-        delete mSuspensionState;
-        mSuspensionState = NULL;
-    }
-
-    if (mFlags & PREPARING) {
-        mFlags |= PREPARE_CANCELLED;
-        if (mConnectingDataSource != NULL) {
-            LOGI("interrupting the connection process");
-            mConnectingDataSource->disconnect();
-        }
-    }
-
-    while (mFlags & PREPARING) {
-        mPreparedCondition.wait(mLock);
-    }
-
-    SuspensionState *state = new SuspensionState;
-    state->mUri = mUri;
-    state->mUriHeaders = mUriHeaders;
-    state->mFileSource = mFileSource;
-
-    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
-    getPosition(&state->mPositionUs);
-
-    if (mLastVideoBuffer) {
-        size_t size = mLastVideoBuffer->range_length();
-
-        if (size) {
-            int32_t unreadable;
-            if (!mLastVideoBuffer->meta_data()->findInt32(
-                        kKeyIsUnreadable, &unreadable)
-                    || unreadable == 0) {
-                state->mLastVideoFrameSize = size;
-                state->mLastVideoFrame = malloc(size);
-                memcpy(state->mLastVideoFrame,
-                       (const uint8_t *)mLastVideoBuffer->data()
-                            + mLastVideoBuffer->range_offset(),
-                       size);
-
-                state->mVideoWidth = mVideoWidth;
-                state->mVideoHeight = mVideoHeight;
-
-                sp<MetaData> meta = mVideoSource->getFormat();
-                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
-                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
-                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
-            } else {
-                LOGV("Unable to save last video frame, we have no access to "
-                     "the decoded video data.");
-            }
-        }
-    }
-
-    reset_l();
-
-    mSuspensionState = state;
-
-    return OK;
-}
-
-status_t AwesomePlayer::resume() {
-    LOGV("resume");
-    Mutex::Autolock autoLock(mLock);
-
-    if (mSuspensionState == NULL) {
-        return INVALID_OPERATION;
-    }
-
-    SuspensionState *state = mSuspensionState;
-    mSuspensionState = NULL;
-
-    status_t err;
-    if (state->mFileSource != NULL) {
-        err = setDataSource_l(state->mFileSource);
-
-        if (err == OK) {
-            mFileSource = state->mFileSource;
-        }
-    } else {
-        err = setDataSource_l(state->mUri, &state->mUriHeaders);
-    }
-
-    if (err != OK) {
-        delete state;
-        state = NULL;
-
-        return err;
-    }
-
-    seekTo_l(state->mPositionUs);
-
-    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
-
-    if (state->mLastVideoFrame && mISurface != NULL) {
-        mVideoRenderer =
-            new AwesomeLocalRenderer(
-                    true,  // previewOnly
-                    "",
-                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
-                    mISurface,
-                    state->mVideoWidth,
-                    state->mVideoHeight,
-                    state->mDecodedWidth,
-                    state->mDecodedHeight,
-                    0);
-
-        mVideoRendererIsPreview = true;
-
-        ((AwesomeLocalRenderer *)mVideoRenderer.get())->render(
-                state->mLastVideoFrame, state->mLastVideoFrameSize);
-    }
-
-    if (state->mFlags & PLAYING) {
-        play_l();
-    }
-
-    mSuspensionState = state;
-    state = NULL;
-
-    return OK;
-}
-
 uint32_t AwesomePlayer::flags() const {
     return mExtractorFlags;
 }
@@ -1966,4 +1813,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 89cb135..8a24bc4 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -27,6 +27,7 @@
 #include <media/stagefright/MetaData.h>
 #include <camera/Camera.h>
 #include <camera/CameraParameters.h>
+#include <surfaceflinger/Surface.h>
 #include <utils/String8.h>
 #include <cutils/properties.h>
 
@@ -65,6 +66,11 @@
 void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
     LOGV("postData(%d, ptr:%p, size:%d)",
          msgType, dataPtr->pointer(), dataPtr->size());
+
+    sp<CameraSource> source = mSource.promote();
+    if (source.get() != NULL) {
+        source->dataCallback(msgType, dataPtr);
+    }
 }
 
 void CameraSourceListener::postDataTimestamp(
@@ -77,6 +83,10 @@
 }
 
 static int32_t getColorFormat(const char* colorFormat) {
+    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
+       return OMX_COLOR_FormatYUV420Planar;
+    }
+
     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
     }
@@ -99,72 +109,409 @@
     CHECK_EQ(0, "Unknown color format");
 }
 
-// static
 CameraSource *CameraSource::Create() {
-    sp<Camera> camera = Camera::connect(0);
+    Size size;
+    size.width = -1;
+    size.height = -1;
 
-    if (camera.get() == NULL) {
-        return NULL;
-    }
-
-    return new CameraSource(camera);
+    sp<ICamera> camera;
+    return new CameraSource(camera, 0, size, -1, NULL, false);
 }
 
 // static
-CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
-    if (camera.get() == NULL) {
-        return NULL;
-    }
+CameraSource *CameraSource::CreateFromCamera(
+    const sp<ICamera>& camera,
+    int32_t cameraId,
+    Size videoSize,
+    int32_t frameRate,
+    const sp<Surface>& surface,
+    bool storeMetaDataInVideoBuffers) {
 
-    return new CameraSource(camera);
+    CameraSource *source = new CameraSource(camera, cameraId,
+                    videoSize, frameRate, surface,
+                    storeMetaDataInVideoBuffers);
+    return source;
 }
 
-CameraSource::CameraSource(const sp<Camera> &camera)
-    : mCamera(camera),
-      mFirstFrameTimeUs(0),
-      mLastFrameTimestampUs(0),
+CameraSource::CameraSource(
+    const sp<ICamera>& camera,
+    int32_t cameraId,
+    Size videoSize,
+    int32_t frameRate,
+    const sp<Surface>& surface,
+    bool storeMetaDataInVideoBuffers)
+    : mCameraFlags(0),
+      mVideoFrameRate(-1),
+      mCamera(0),
+      mSurface(surface),
       mNumFramesReceived(0),
+      mLastFrameTimestampUs(0),
+      mStarted(false),
       mNumFramesEncoded(0),
+      mFirstFrameTimeUs(0),
       mNumFramesDropped(0),
       mNumGlitches(0),
       mGlitchDurationThresholdUs(200000),
-      mCollectStats(false),
-      mStarted(false) {
+      mCollectStats(false) {
 
+    mVideoSize.width  = -1;
+    mVideoSize.height = -1;
+
+    mInitCheck = init(camera, cameraId,
+                    videoSize, frameRate,
+                    storeMetaDataInVideoBuffers);
+}
+
+status_t CameraSource::initCheck() const {
+    return mInitCheck;
+}
+
+status_t CameraSource::isCameraAvailable(
+    const sp<ICamera>& camera, int32_t cameraId) {
+
+    if (camera == 0) {
+        mCamera = Camera::connect(cameraId);
+        mCameraFlags &= ~FLAGS_HOT_CAMERA;
+    } else {
+        mCamera = Camera::create(camera);
+        mCameraFlags |= FLAGS_HOT_CAMERA;
+    }
+
+    // Is camera available?
+    if (mCamera == 0) {
+        LOGE("Camera connection could not be established.");
+        return -EBUSY;
+    }
+    if (!(mCameraFlags & FLAGS_HOT_CAMERA)) {
+        mCamera->lock();
+    }
+    return OK;
+}
+
+
+/*
+ * Check whether the requested video width and height are one
+ * of the supported sizes.
+ * @param width the video frame width in pixels
+ * @param height the video frame height in pixels
+ * @param supportedSizes the vector of sizes that we check against
+ * @return true if the dimension (width and height) is supported.
+ */
+static bool isVideoSizeSupported(
+    int32_t width, int32_t height,
+    const Vector<Size>& supportedSizes) {
+
+    LOGV("isVideoSizeSupported");
+    for (size_t i = 0; i < supportedSizes.size(); ++i) {
+        if (width  == supportedSizes[i].width &&
+            height == supportedSizes[i].height) {
+            return true;
+        }
+    }
+    return false;
+}
+
+/*
+ * If the preview and video outputs are separate, we only set the
+ * video size, and applications should set the preview size to a
+ * proper value; the recording framework will not change the preview
+ * size. Otherwise, if the video and preview outputs are the same,
+ * we need to set the preview size to be the same as the requested
+ * video size.
+ *
+ */
+/*
+ * Query the camera to retrieve the supported video frame sizes
+ * and also to see whether CameraParameters::setVideoSize()
+ * is supported or not.
+ * @param params CameraParameters to retrieve the information
+ * @param isSetVideoSizeSupported returns whether method
+ *      CameraParameters::setVideoSize() is supported or not.
+ * @param sizes returns the vector of Size objects for the
+ *      supported video frame sizes advertised by the camera.
+ */
+static void getSupportedVideoSizes(
+    const CameraParameters& params,
+    bool *isSetVideoSizeSupported,
+    Vector<Size>& sizes) {
+
+    *isSetVideoSizeSupported = true;
+    params.getSupportedVideoSizes(sizes);
+    if (sizes.size() == 0) {
+        LOGD("Camera does not support setVideoSize()");
+        params.getSupportedPreviewSizes(sizes);
+        *isSetVideoSizeSupported = false;
+    }
+}
+
+/*
+ * Check whether the camera has the supported color format
+ * @param params CameraParameters to retrieve the information
+ * @return OK if no error.
+ */
+status_t CameraSource::isCameraColorFormatSupported(
+        const CameraParameters& params) {
+    mColorFormat = getColorFormat(params.get(
+            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+    if (mColorFormat == -1) {
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+/*
+ * Configure the camera to use the requested video size
+ * (width and height) and/or frame rate. If both width and
+ * height are -1, configuration of the video size is skipped.
+ * If frameRate is -1, configuration of the frame rate
+ * is skipped. Skipping the configuration allows one to
+ * use the current camera setting without the need to
+ * actually know the specific values (see Create() method).
+ *
+ * @param params the CameraParameters to be configured
+ * @param width the target video frame width in pixels
+ * @param height the target video frame height in pixels
+ * @param frameRate the target frame rate in frames per second.
+ * @return OK if no error.
+ */
+status_t CameraSource::configureCamera(
+        CameraParameters* params,
+        int32_t width, int32_t height,
+        int32_t frameRate) {
+
+    Vector<Size> sizes;
+    bool isSetVideoSizeSupportedByCamera = true;
+    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
+    bool isCameraParamChanged = false;
+    if (width != -1 && height != -1) {
+        if (!isVideoSizeSupported(width, height, sizes)) {
+            LOGE("Video dimension (%dx%d) is unsupported", width, height);
+            releaseCamera();
+            return BAD_VALUE;
+        }
+        if (isSetVideoSizeSupportedByCamera) {
+            params->setVideoSize(width, height);
+        } else {
+            params->setPreviewSize(width, height);
+        }
+        isCameraParamChanged = true;
+    } else if ((width == -1 && height != -1) ||
+               (width != -1 && height == -1)) {
+        // If one and only one of the width and height is -1
+        // we reject such a request.
+        LOGE("Requested video size (%dx%d) is not supported", width, height);
+        releaseCamera();
+        return BAD_VALUE;
+    } else {  // width == -1 && height == -1
+        // Do not configure the camera.
+        // Use the current width and height value setting from the camera.
+    }
+
+    if (frameRate != -1) {
+        CHECK(frameRate > 0 && frameRate <= 120);
+        const char* supportedFrameRates =
+                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+        CHECK(supportedFrameRates != NULL);
+        LOGV("Supported frame rates: %s", supportedFrameRates);
+        char buf[4];
+        snprintf(buf, 4, "%d", frameRate);
+        if (strstr(supportedFrameRates, buf) == NULL) {
+            LOGE("Requested frame rate (%d) is not supported: %s",
+                frameRate, supportedFrameRates);
+            releaseCamera();
+            return BAD_VALUE;
+        }
+
+        // The frame rate is supported, set the camera to the requested value.
+        params->setPreviewFrameRate(frameRate);
+        isCameraParamChanged = true;
+    } else {  // frameRate == -1
+        // Do not configure the camera.
+        // Use the current frame rate value setting from the camera
+    }
+
+    if (isCameraParamChanged) {
+        // Either frame rate or frame size needs to be changed.
+        String8 s = params->flatten();
+        if (OK != mCamera->setParameters(s)) {
+            LOGE("Could not change settings."
+                 " Someone else is using camera %p?", mCamera.get());
+            return -EBUSY;
+        }
+    }
+    return OK;
+}
+
+/*
+ * Check whether the requested video frame size
+ * has been successfully configured or not. If both width and height
+ * are -1, the check is performed on the current width and height
+ * settings instead.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param width the target video frame width in pixels to check against
+ * @param height the target video frame height in pixels to check against
+ * @return OK if no error
+ */
+status_t CameraSource::checkVideoSize(
+        const CameraParameters& params,
+        int32_t width, int32_t height) {
+
+    // The actual video size is the same as the preview size
+    // if the camera hal does not support separate video and
+    // preview output. In this case, we retrieve the video
+    // size from preview.
+    int32_t frameWidthActual = -1;
+    int32_t frameHeightActual = -1;
+    Vector<Size> sizes;
+    params.getSupportedVideoSizes(sizes);
+    if (sizes.size() == 0) {
+        // video size is the same as preview size
+        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
+    } else {
+        // video size may not be the same as preview
+        params.getVideoSize(&frameWidthActual, &frameHeightActual);
+    }
+    if (frameWidthActual < 0 || frameHeightActual < 0) {
+        LOGE("Failed to retrieve video frame size (%dx%d)",
+                frameWidthActual, frameHeightActual);
+        return UNKNOWN_ERROR;
+    }
+
+    // Check the actual video frame size against the target/requested
+    // video frame size.
+    if (width != -1 && height != -1) {
+        if (frameWidthActual != width || frameHeightActual != height) {
+            LOGE("Failed to set video frame size to %dx%d. "
+                    "The actual video size is %dx%d ", width, height,
+                    frameWidthActual, frameHeightActual);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    // Good now.
+    mVideoSize.width = frameWidthActual;
+    mVideoSize.height = frameHeightActual;
+    return OK;
+}
+
+/*
+ * Check whether the requested frame rate has been successfully configured.
+ * If the target frameRate is -1, the check is performed on the current
+ * frame rate setting instead.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param frameRate the target video frame rate to check against
+ * @return OK if no error.
+ */
+status_t CameraSource::checkFrameRate(
+        const CameraParameters& params,
+        int32_t frameRate) {
+
+    int32_t frameRateActual = params.getPreviewFrameRate();
+    if (frameRateActual < 0) {
+        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
+        return UNKNOWN_ERROR;
+    }
+
+    // Check the actual video frame rate against the target/requested
+    // video frame rate.
+    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
+        LOGE("Failed to set preview frame rate to %d fps. The actual "
+                "frame rate is %d", frameRate, frameRateActual);
+        return UNKNOWN_ERROR;
+    }
+
+    // Good now.
+    mVideoFrameRate = frameRateActual;
+    return OK;
+}
+
+/*
+ * Initialize the CameraSource so that it becomes
+ * ready to provide the video input streams as requested.
+ * @param camera the camera object used for the video source
+ * @param cameraId if camera == 0, use camera with this id
+ *      as the video source
+ * @param videoSize the target video frame size. If both
+ *      width and height in videoSize are -1, use the current
+ *      width and height settings of the camera
+ * @param frameRate the target frame rate in frames per second.
+ *      If it is -1, use the current camera frame rate setting.
+ * @param storeMetaDataInVideoBuffers request to store meta
+ *      data or real YUV data in video buffers. Request to
+ *      store meta data in video buffers may not be honored
+ *      if the source does not support this feature.
+ *
+ * @return OK if no error.
+ */
+status_t CameraSource::init(
+        const sp<ICamera>& camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t frameRate,
+        bool storeMetaDataInVideoBuffers) {
+
+    status_t err = OK;
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    String8 s = mCamera->getParameters();
+
+    if ((err  = isCameraAvailable(camera, cameraId)) != OK) {
+        return err;
+    }
+    CameraParameters params(mCamera->getParameters());
+    if ((err = isCameraColorFormatSupported(params)) != OK) {
+        return err;
+    }
+
+    // Set the camera to use the requested video frame size
+    // and/or frame rate.
+    if ((err = configureCamera(&params,
+                    videoSize.width, videoSize.height,
+                    frameRate))) {
+        return err;
+    }
+
+    // Check on video frame size and frame rate.
+    CameraParameters newCameraParams(mCamera->getParameters());
+    if ((err = checkVideoSize(newCameraParams,
+                videoSize.width, videoSize.height)) != OK) {
+        return err;
+    }
+    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
+        return err;
+    }
+
+    // This CHECK is good, since we just passed the lock/unlock
+    // check earlier by calling mCamera->setParameters().
+    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
+
+    // By default, do not store metadata in video buffers
+    mIsMetaDataStoredInVideoBuffers = false;
+    mCamera->storeMetaDataInBuffers(false);
+    if (storeMetaDataInVideoBuffers) {
+        if (OK == mCamera->storeMetaDataInBuffers(true)) {
+            mIsMetaDataStoredInVideoBuffers = true;
+        }
+    }
+
     IPCThreadState::self()->restoreCallingIdentity(token);
 
-    printf("params: \"%s\"\n", s.string());
-
-    int32_t width, height, stride, sliceHeight;
-    CameraParameters params(s);
-    params.getPreviewSize(&width, &height);
-
-    // Calculate glitch duraton threshold based on frame rate
-    int32_t frameRate = params.getPreviewFrameRate();
-    int64_t glitchDurationUs = (1000000LL / frameRate);
+    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
     if (glitchDurationUs > mGlitchDurationThresholdUs) {
         mGlitchDurationThresholdUs = glitchDurationUs;
     }
 
-    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
-    CHECK(colorFormatStr != NULL);
-    int32_t colorFormat = getColorFormat(colorFormatStr);
-
     // XXX: query camera for the stride and slice height
     // when the capability becomes available.
-    stride = width;
-    sliceHeight = height;
-
     mMeta = new MetaData;
-    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
-    mMeta->setInt32(kKeyColorFormat, colorFormat);
-    mMeta->setInt32(kKeyWidth, width);
-    mMeta->setInt32(kKeyHeight, height);
-    mMeta->setInt32(kKeyStride, stride);
-    mMeta->setInt32(kKeySliceHeight, sliceHeight);
-
+    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
+    mMeta->setInt32(kKeyColorFormat, mColorFormat);
+    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
+    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
+    mMeta->setInt32(kKeyStride,      mVideoSize.width);
+    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
+    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
+    return OK;
 }
 
 CameraSource::~CameraSource() {
@@ -173,8 +520,17 @@
     }
 }
 
+void CameraSource::startCameraRecording() {
+    CHECK_EQ(OK, mCamera->startRecording());
+    CHECK(mCamera->recordingEnabled());
+}
+
 status_t CameraSource::start(MetaData *meta) {
     CHECK(!mStarted);
+    if (mInitCheck != OK) {
+        LOGE("CameraSource is not initialized yet");
+        return mInitCheck;
+    }
 
     char value[PROPERTY_VALUE_MAX];
     if (property_get("media.stagefright.record-stats", value, NULL)
@@ -188,31 +544,51 @@
         mStartTimeUs = startTimeUs;
     }
 
+    // Call setListener() before startCameraRecording()
+    // so that no recording frames are dropped.
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     mCamera->setListener(new CameraSourceListener(this));
-    CHECK_EQ(OK, mCamera->startRecording());
+    startCameraRecording();
     IPCThreadState::self()->restoreCallingIdentity(token);
 
     mStarted = true;
     return OK;
 }
 
+void CameraSource::stopCameraRecording() {
+    mCamera->setListener(NULL);
+    mCamera->stopRecording();
+}
+
+void CameraSource::releaseCamera() {
+    LOGV("releaseCamera");
+    if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+        LOGV("Camera was cold when we started, stopping preview");
+        mCamera->stopPreview();
+    }
+    mCamera->unlock();
+    mCamera.clear();
+    mCamera = 0;
+    mCameraFlags = 0;
+}
+
 status_t CameraSource::stop() {
-    LOGV("stop");
+    LOGD("stop: E");
     Mutex::Autolock autoLock(mLock);
     mStarted = false;
     mFrameAvailableCondition.signal();
 
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    mCamera->setListener(NULL);
-    mCamera->stopRecording();
     releaseQueuedFrames();
     while (!mFramesBeingEncoded.empty()) {
-        LOGI("Waiting for outstanding frames being encoded: %d",
+        if (NO_ERROR !=
+            mFrameCompleteCondition.waitRelative(mLock, 3000000000LL)) {
+            LOGW("Timed out waiting for outstanding frames being encoded: %d",
                 mFramesBeingEncoded.size());
-        mFrameCompleteCondition.wait(mLock);
+        }
     }
-    mCamera = NULL;
+    stopCameraRecording();
+    releaseCamera();
     IPCThreadState::self()->restoreCallingIdentity(token);
 
     if (mCollectStats) {
@@ -221,15 +597,26 @@
                 mLastFrameTimestampUs - mFirstFrameTimeUs);
     }
 
+    if (mNumGlitches > 0) {
+        LOGW("%d long delays between neighboring video frames", mNumGlitches);
+    }
+
     CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
+    LOGD("stop: X");
     return OK;
 }
 
+void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+    if (mCamera != NULL) {
+        mCamera->releaseRecordingFrame(frame);
+    }
+}
+
 void CameraSource::releaseQueuedFrames() {
     List<sp<IMemory> >::iterator it;
     while (!mFramesReceived.empty()) {
         it = mFramesReceived.begin();
-        mCamera->releaseRecordingFrame(*it);
+        releaseRecordingFrame(*it);
         mFramesReceived.erase(it);
         ++mNumFramesDropped;
     }
@@ -241,7 +628,7 @@
 
 void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    mCamera->releaseRecordingFrame(frame);
+    releaseRecordingFrame(frame);
     IPCThreadState::self()->restoreCallingIdentity(token);
 }
 
@@ -251,7 +638,6 @@
     for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
          it != mFramesBeingEncoded.end(); ++it) {
         if ((*it)->pointer() ==  buffer->data()) {
-
             releaseOneRecordingFrame((*it));
             mFramesBeingEncoded.erase(it);
             ++mNumFramesEncoded;
@@ -281,45 +667,26 @@
 
     {
         Mutex::Autolock autoLock(mLock);
-        while (mStarted) {
-            while(mFramesReceived.empty()) {
-                mFrameAvailableCondition.wait(mLock);
-            }
-
-            if (!mStarted) {
-                return OK;
-            }
-
-            frame = *mFramesReceived.begin();
-            mFramesReceived.erase(mFramesReceived.begin());
-
-            frameTime = *mFrameTimes.begin();
-            mFrameTimes.erase(mFrameTimes.begin());
-            int64_t skipTimeUs;
-            if (!options || !options->getSkipFrame(&skipTimeUs)) {
-                skipTimeUs = frameTime;
-            }
-            if (skipTimeUs > frameTime) {
-                LOGV("skipTimeUs: %lld us > frameTime: %lld us",
-                    skipTimeUs, frameTime);
-                releaseOneRecordingFrame(frame);
-                ++mNumFramesDropped;
-                // Safeguard against the abuse of the kSkipFrame_Option.
-                if (skipTimeUs - frameTime >= 1E6) {
-                    LOGE("Frame skipping requested is way too long: %lld us",
-                        skipTimeUs - frameTime);
-                    return UNKNOWN_ERROR;
-                }
-            } else {
-                mFramesBeingEncoded.push_back(frame);
-                *buffer = new MediaBuffer(frame->pointer(), frame->size());
-                (*buffer)->setObserver(this);
-                (*buffer)->add_ref();
-                (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
-
-                return OK;
+        while (mStarted && mFramesReceived.empty()) {
+            if (NO_ERROR !=
+                mFrameAvailableCondition.waitRelative(mLock, 3000000000LL)) {
+                LOGW("Timed out waiting for incoming camera video frames: %lld us",
+                    mLastFrameTimestampUs);
             }
         }
+        if (!mStarted) {
+            return OK;
+        }
+        frame = *mFramesReceived.begin();
+        mFramesReceived.erase(mFramesReceived.begin());
+
+        frameTime = *mFrameTimes.begin();
+        mFrameTimes.erase(mFrameTimes.begin());
+        mFramesBeingEncoded.push_back(frame);
+        *buffer = new MediaBuffer(frame->pointer(), frame->size());
+        (*buffer)->setObserver(this);
+        (*buffer)->add_ref();
+        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
     }
     return OK;
 }
@@ -328,21 +695,27 @@
         int32_t msgType, const sp<IMemory> &data) {
     LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
     Mutex::Autolock autoLock(mLock);
-    if (!mStarted) {
+    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
+        LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
         releaseOneRecordingFrame(data);
-        ++mNumFramesReceived;
-        ++mNumFramesDropped;
         return;
     }
 
     if (mNumFramesReceived > 0 &&
         timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
         if (mNumGlitches % 10 == 0) {  // Don't spam the log
-            LOGW("Long delay detected in video recording");
+            LOGV("Long delay detected in video recording");
         }
         ++mNumGlitches;
     }
 
+    // May need to skip frame or modify timestamp. Currently implemented
+    // by the subclass CameraSourceTimeLapse.
+    if (skipCurrentFrame(timestampUs)) {
+        releaseOneRecordingFrame(data);
+        return;
+    }
+
     mLastFrameTimestampUs = timestampUs;
     if (mNumFramesReceived == 0) {
         mFirstFrameTimeUs = timestampUs;
@@ -367,4 +740,31 @@
     mFrameAvailableCondition.signal();
 }
 
+size_t CameraSource::getNumberOfVideoBuffers() const {
+    LOGV("getNumberOfVideoBuffers");
+    size_t nBuffers = 0;
+    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+    if (mInitCheck == OK && mCamera != 0) {
+        nBuffers = mCamera->getNumberOfVideoBuffers();
+    }
+    IPCThreadState::self()->restoreCallingIdentity(token);
+    return nBuffers;
+}
+
+sp<IMemory> CameraSource::getVideoBuffer(size_t index) const {
+    LOGV("getVideoBuffer: %d", index);
+    sp<IMemory> buffer = 0;
+    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+    if (mInitCheck == OK && mCamera != 0) {
+        buffer = mCamera->getVideoBuffer(index);
+    }
+    IPCThreadState::self()->restoreCallingIdentity(token);
+    return buffer;
+}
+
+bool CameraSource::isMetaDataStoredInVideoBuffers() const {
+    LOGV("isMetaDataStoredInVideoBuffers");
+    return mIsMetaDataStoredInVideoBuffers;
+}
+
 }  // namespace android
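
A minimal usage sketch of the reworked CameraSource factory above (illustrative only, not part of
the patch). It assumes the usual stagefright headers and an sp<Surface> named "previewSurface"
obtained elsewhere, and shows the initCheck()-before-start() pattern that the new start() path
now enforces.

    // Illustrative sketch only -- "previewSurface" is an assumed placeholder.
    sp<ICamera> camera;           // NULL: let CameraSource connect to cameraId itself
    Size videoSize;
    videoSize.width  = 640;
    videoSize.height = 480;

    sp<CameraSource> source = CameraSource::CreateFromCamera(
            camera, 0 /* cameraId */, videoSize, 30 /* frameRate */,
            previewSurface, false /* storeMetaDataInVideoBuffers */);

    if (source == NULL || source->initCheck() != OK) {
        // Camera busy, unsupported size/frame rate, or unknown color format.
        return;
    }

    if (source->start(NULL) == OK) {
        MediaBuffer *buffer;
        while (source->read(&buffer) == OK && buffer != NULL) {
            // Hand the frame to an encoder here, then release it so the
            // recording frame can be returned via signalBufferReturned().
            buffer->release();
        }
        source->stop();
    }
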
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
new file mode 100644
index 0000000..3689557
--- /dev/null
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -0,0 +1,539 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraSourceTimeLapse"
+
+#include <binder/IPCThreadState.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <ui/Rect.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+#include "OMX_Video.h"
+#include <limits.h>
+
+namespace android {
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
+        const sp<ICamera> &camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t videoFrameRate,
+        const sp<Surface>& surface,
+        int64_t timeBetweenTimeLapseFrameCaptureUs) {
+
+    CameraSourceTimeLapse *source = new
+            CameraSourceTimeLapse(camera, cameraId,
+                videoSize, videoFrameRate, surface,
+                timeBetweenTimeLapseFrameCaptureUs);
+
+    if (source != NULL) {
+        if (source->initCheck() != OK) {
+            delete source;
+            return NULL;
+        }
+    }
+    return source;
+}
+
+CameraSourceTimeLapse::CameraSourceTimeLapse(
+        const sp<ICamera>& camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t videoFrameRate,
+        const sp<Surface>& surface,
+        int64_t timeBetweenTimeLapseFrameCaptureUs)
+    : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, true),
+      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
+      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
+      mLastTimeLapseFrameRealTimestampUs(0),
+      mSkipCurrentFrame(false) {
+
+    LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs);
+    mVideoWidth = videoSize.width;
+    mVideoHeight = videoSize.height;
+
+    if (trySettingVideoSize(videoSize.width, videoSize.height)) {
+        mUseStillCameraForTimeLapse = false;
+    } else {
+        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
+        // than the minimum time between pictures that the still camera supports.
+        mUseStillCameraForTimeLapse = true;
+        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
+        mNeedCropping = computeCropRectangleOffset();
+        mMeta->setInt32(kKeyWidth, videoSize.width);
+        mMeta->setInt32(kKeyHeight, videoSize.height);
+    }
+
+    // Initialize quick stop variables.
+    mQuickStop = false;
+    mForceRead = false;
+    mLastReadBufferCopy = NULL;
+    mStopWaitingForIdleCamera = false;
+}
+
+CameraSourceTimeLapse::~CameraSourceTimeLapse() {
+}
+
+void CameraSourceTimeLapse::startQuickReadReturns() {
+    Mutex::Autolock autoLock(mQuickStopLock);
+    LOGV("Enabling quick read returns");
+
+    // Enable quick stop mode.
+    mQuickStop = true;
+
+    if (mUseStillCameraForTimeLapse) {
+        // wake up the thread right away.
+        mTakePictureCondition.signal();
+    } else {
+        // Force dataCallbackTimestamp() coming from the video camera to not skip the
+        // next frame, as we want read() to get a frame right away.
+        mForceRead = true;
+    }
+}
+
+bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
+    LOGV("trySettingVideoSize: %dx%d", width, height);
+    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+    String8 s = mCamera->getParameters();
+
+    CameraParameters params(s);
+    Vector<Size> supportedSizes;
+    params.getSupportedVideoSizes(supportedSizes);
+    bool videoOutputSupported = false;
+    if (supportedSizes.size() == 0) {
+        params.getSupportedPreviewSizes(supportedSizes);
+    } else {
+        videoOutputSupported = true;
+    }
+
+    bool videoSizeSupported = false;
+    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+        int32_t pictureWidth = supportedSizes[i].width;
+        int32_t pictureHeight = supportedSizes[i].height;
+
+        if ((pictureWidth == width) && (pictureHeight == height)) {
+            videoSizeSupported = true;
+        }
+    }
+
+    bool isSuccessful = false;
+    if (videoSizeSupported) {
+        LOGV("Video size (%d, %d) is supported", width, height);
+        if (videoOutputSupported) {
+            params.setVideoSize(width, height);
+        } else {
+            params.setPreviewSize(width, height);
+        }
+        if (mCamera->setParameters(params.flatten()) == OK) {
+            isSuccessful = true;
+        } else {
+            LOGE("Failed to set preview size to %dx%d", width, height);
+            isSuccessful = false;
+        }
+    }
+
+    IPCThreadState::self()->restoreCallingIdentity(token);
+    return isSuccessful;
+}
+
+bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
+    LOGV("setPictureSizeToClosestSupported: %dx%d", width, height);
+    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+    String8 s = mCamera->getParameters();
+    IPCThreadState::self()->restoreCallingIdentity(token);
+
+    CameraParameters params(s);
+    Vector<Size> supportedSizes;
+    params.getSupportedPictureSizes(supportedSizes);
+
+    int32_t minPictureSize = INT_MAX;
+    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+        int32_t pictureWidth = supportedSizes[i].width;
+        int32_t pictureHeight = supportedSizes[i].height;
+
+        if ((pictureWidth >= width) && (pictureHeight >= height)) {
+            int32_t pictureSize = pictureWidth*pictureHeight;
+            if (pictureSize < minPictureSize) {
+                minPictureSize = pictureSize;
+                mPictureWidth = pictureWidth;
+                mPictureHeight = pictureHeight;
+            }
+        }
+    }
+    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
+    return (minPictureSize != INT_MAX);
+}
+
+bool CameraSourceTimeLapse::computeCropRectangleOffset() {
+    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
+        return false;
+    }
+
+    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
+
+    int32_t widthDifference = mPictureWidth - mVideoWidth;
+    int32_t heightDifference = mPictureHeight - mVideoHeight;
+
+    mCropRectStartX = widthDifference/2;
+    mCropRectStartY = heightDifference/2;
+
+    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
+
+    return true;
+}
+
+void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
+    Mutex::Autolock autoLock(mQuickStopLock);
+    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
+        buffer->setObserver(NULL);
+        buffer->release();
+    } else {
+        return CameraSource::signalBufferReturned(buffer);
+    }
+}
+
+void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
+    size_t sourceSize = sourceBuffer.size();
+    void* sourcePointer = sourceBuffer.data();
+
+    (*newBuffer) = new MediaBuffer(sourceSize);
+    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);
+
+    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
+}
+
+void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
+    int64_t frameTime;
+    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
+    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
+    mLastReadBufferCopy->add_ref();
+    mLastReadBufferCopy->setObserver(this);
+}
+
+status_t CameraSourceTimeLapse::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    if (mLastReadBufferCopy == NULL) {
+        mLastReadStatus = CameraSource::read(buffer, options);
+
+        // mQuickStop may have been set to true while read() was blocked.
+        // Make a copy of the buffer in that case.
+        Mutex::Autolock autoLock(mQuickStopLock);
+        if (mQuickStop && *buffer) {
+            fillLastReadBufferCopy(**buffer);
+        }
+        return mLastReadStatus;
+    } else {
+        (*buffer) = mLastReadBufferCopy;
+        (*buffer)->add_ref();
+        return mLastReadStatus;
+    }
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
+    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+    source->threadTimeLapseEntry();
+    return NULL;
+}
+
+void CameraSourceTimeLapse::threadTimeLapseEntry() {
+    while (mStarted) {
+        {
+            Mutex::Autolock autoLock(mCameraIdleLock);
+            if (!mCameraIdle) {
+                mCameraIdleCondition.wait(mCameraIdleLock);
+            }
+            CHECK(mCameraIdle);
+            mCameraIdle = false;
+        }
+
+        // Even if mQuickStop == true we need to take one more picture
+        // as a read() may be blocked, waiting for a frame to become available.
+        // After this takePicture, if mQuickStop == true, we can safely exit
+        // this thread as read() will make a copy of this last frame and keep
+        // returning it in the quick stop mode.
+        Mutex::Autolock autoLock(mQuickStopLock);
+        CHECK_EQ(OK, mCamera->takePicture(CAMERA_MSG_RAW_IMAGE));
+        if (mQuickStop) {
+            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
+            return;
+        }
+        mTakePictureCondition.waitRelative(mQuickStopLock,
+                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
+    }
+    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
+}
+
+void CameraSourceTimeLapse::startCameraRecording() {
+    if (mUseStillCameraForTimeLapse) {
+        LOGV("start time lapse recording using still camera");
+
+        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+        String8 s = mCamera->getParameters();
+
+        CameraParameters params(s);
+        params.setPictureSize(mPictureWidth, mPictureHeight);
+        mCamera->setParameters(params.flatten());
+        mCameraIdle = true;
+        mStopWaitingForIdleCamera = false;
+
+        // disable shutter sound and play the recording sound.
+        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
+        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+        IPCThreadState::self()->restoreCallingIdentity(token);
+
+        // create a thread which takes pictures in a loop
+        pthread_attr_t attr;
+        pthread_attr_init(&attr);
+        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
+        pthread_attr_destroy(&attr);
+    } else {
+        LOGV("start time lapse recording using video camera");
+        CHECK_EQ(OK, mCamera->startRecording());
+    }
+}
+
+void CameraSourceTimeLapse::stopCameraRecording() {
+    if (mUseStillCameraForTimeLapse) {
+        void *dummy;
+        pthread_join(mThreadTimeLapse, &dummy);
+
+        // Last takePicture may still be underway. Wait for the camera to get
+        // idle.
+        Mutex::Autolock autoLock(mCameraIdleLock);
+        mStopWaitingForIdleCamera = true;
+        if (!mCameraIdle) {
+            mCameraIdleCondition.wait(mCameraIdleLock);
+        }
+        CHECK(mCameraIdle);
+        mCamera->setListener(NULL);
+
+        // play the recording sound.
+        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+    } else {
+        mCamera->setListener(NULL);
+        mCamera->stopRecording();
+    }
+    if (mLastReadBufferCopy) {
+        mLastReadBufferCopy->release();
+        mLastReadBufferCopy = NULL;
+    }
+}
+
+void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
+    if (!mUseStillCameraForTimeLapse &&
+        mCamera != NULL) {
+        mCamera->releaseRecordingFrame(frame);
+    }
+}
+
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+    size_t source_size = source_data->size();
+    void* source_pointer = source_data->pointer();
+
+    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
+    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
+    memcpy(newMemory->pointer(), source_pointer, source_size);
+    return newMemory;
+}
+
+// Allocates IMemory of final type MemoryBase with the given size.
+sp<IMemory> allocateIMemory(size_t size) {
+    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
+    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
+    return newMemory;
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
+    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+    source->threadStartPreview();
+    return NULL;
+}
+
+void CameraSourceTimeLapse::threadStartPreview() {
+    CHECK_EQ(OK, mCamera->startPreview());
+    Mutex::Autolock autoLock(mCameraIdleLock);
+    mCameraIdle = true;
+    mCameraIdleCondition.signal();
+}
+
+void CameraSourceTimeLapse::restartPreview() {
+    // Start this in a different thread, so that the dataCallback can return
+    LOGV("restartPreview");
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+
+    pthread_t threadPreview;
+    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
+    pthread_attr_destroy(&attr);
+}
+
+sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
+    // find the YUV format
+    int32_t srcFormat;
+    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+    YUVImage::YUVFormat yuvFormat;
+    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+        yuvFormat = YUVImage::YUV420SemiPlanar;
+    } else {
+        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
+        yuvFormat = YUVImage::YUV420Planar;
+    }
+
+    // allocate memory for cropped image and setup a canvas using it.
+    sp<IMemory> croppedImageMemory = allocateIMemory(
+            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
+    YUVImage yuvImageCropped(yuvFormat,
+            mVideoWidth, mVideoHeight,
+            (uint8_t *)croppedImageMemory->pointer());
+    YUVCanvas yuvCanvasCrop(yuvImageCropped);
+
+    YUVImage yuvImageSource(yuvFormat,
+            mPictureWidth, mPictureHeight,
+            (uint8_t *)source_data->pointer());
+    yuvCanvasCrop.CopyImageRect(
+            Rect(mCropRectStartX, mCropRectStartY,
+                mCropRectStartX + mVideoWidth,
+                mCropRectStartY + mVideoHeight),
+            0, 0,
+            yuvImageSource);
+
+    return croppedImageMemory;
+}
+
+void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
+    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
+        // takePicture will complete after this callback, so restart preview.
+        restartPreview();
+        return;
+    }
+    if (msgType != CAMERA_MSG_RAW_IMAGE) {
+        return;
+    }
+
+    LOGV("dataCallback for timelapse still frame");
+    CHECK_EQ(true, mUseStillCameraForTimeLapse);
+
+    int64_t timestampUs;
+    if (mNumFramesReceived == 0) {
+        timestampUs = mStartTimeUs;
+    } else {
+        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+    }
+
+    if (mNeedCropping) {
+        sp<IMemory> croppedImageData = cropYUVImage(data);
+        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
+    } else {
+        sp<IMemory> dataCopy = createIMemoryCopy(data);
+        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+    }
+}
+
+bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+    if (mSkipCurrentFrame) {
+        mSkipCurrentFrame = false;
+        return true;
+    } else {
+        return false;
+    }
+}
+
+bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
+    if (!mUseStillCameraForTimeLapse) {
+        if (mLastTimeLapseFrameRealTimestampUs == 0) {
+            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+            // to current time (timestampUs) and save frame data.
+            LOGV("dataCallbackTimestamp timelapse: initial frame");
+
+            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+            return false;
+        }
+
+        {
+            Mutex::Autolock autoLock(mQuickStopLock);
+
+            // mForceRead may be set to true by startQuickReadReturns(). In that
+            // case don't skip this frame.
+            if (mForceRead) {
+                LOGV("dataCallbackTimestamp timelapse: forced read");
+                mForceRead = false;
+                *timestampUs = mLastFrameTimestampUs;
+                return false;
+            }
+        }
+
+        // Workaround to exempt the first 2 input frames from skipping.
+        // The first 2 output frames from the encoder are: decoder specific info and
+        // the compressed video frame data for the first input video frame.
+        if (mNumFramesEncoded >= 1 && *timestampUs <
+                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+            // Skip all frames from last encoded frame until
+            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+            // Tell the camera to release its recording frame and return.
+            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+            return true;
+        } else {
+            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+            // of the last encoded frame's time stamp.
+            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+            return false;
+        }
+    }
+    return false;
+}
+
+void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+            const sp<IMemory> &data) {
+    if (!mUseStillCameraForTimeLapse) {
+        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
+    } else {
+        Mutex::Autolock autoLock(mCameraIdleLock);
+        // If we are using the still camera and stop() has been called, it may
+        // be waiting for the camera to get idle. In that case return
+        // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
+        // to a deadlock since it tries to access CameraSource::mLock which in
+        // this case is held by CameraSource::stop() currently waiting for the
+        // camera to get idle. And camera will not get idle until this call
+        // returns.
+        if (mStopWaitingForIdleCamera) {
+            return;
+        }
+    }
+    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
+}
+
+}  // namespace android
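
For orientation, a hedged sketch of how the new time-lapse source might be created; "previewSurface"
is a placeholder, not part of the patch. The capture interval and the output frame rate together set
the speed-up: one frame every 5 seconds encoded at 30 fps plays back 150x faster than real time
(5 s x 30 fps).

    // Illustrative sketch only -- not part of this change.
    Size videoSize;
    videoSize.width  = 720;
    videoSize.height = 480;

    // One captured frame every 5 seconds, encoded as a 30 fps stream
    // => 150x speed-up relative to real time.
    sp<CameraSourceTimeLapse> source = CameraSourceTimeLapse::CreateFromCamera(
            NULL /* camera: connect by cameraId */, 0 /* cameraId */,
            videoSize, 30 /* videoFrameRate */, previewSurface,
            5000000LL /* timeBetweenTimeLapseFrameCaptureUs */);

    if (source == NULL) {
        // CreateFromCamera() returns NULL (after deleting the object) when
        // initCheck() fails, unlike the plain CameraSource factory.
        return;
    }

    // When the application asks to stop, unblock a pending read() with a
    // copy of the last frame instead of waiting for the next capture.
    source->startQuickReadReturns();
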
diff --git a/media/libstagefright/DRMExtractor.cpp b/media/libstagefright/DRMExtractor.cpp
index aa9ad23..2809df5 100644
--- a/media/libstagefright/DRMExtractor.cpp
+++ b/media/libstagefright/DRMExtractor.cpp
@@ -38,12 +38,12 @@
 
 namespace android {
 
-DrmManagerClient* gDrmManagerClient = NULL;
-
 class DRMSource : public MediaSource {
 public:
     DRMSource(const sp<MediaSource> &mediaSource,
-            DecryptHandle* decryptHandle, int32_t trackId, DrmBuffer* ipmpBox);
+            DecryptHandle *decryptHandle,
+            DrmManagerClient *managerClient,
+            int32_t trackId, DrmBuffer *ipmpBox);
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
@@ -57,6 +57,7 @@
 private:
     sp<MediaSource> mOriginalMediaSource;
     DecryptHandle* mDecryptHandle;
+    DrmManagerClient* mDrmManagerClient;
     size_t mTrackId;
     mutable Mutex mDRMLock;
     size_t mNALLengthSize;
@@ -69,13 +70,17 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 DRMSource::DRMSource(const sp<MediaSource> &mediaSource,
-        DecryptHandle* decryptHandle, int32_t trackId, DrmBuffer* ipmpBox)
+        DecryptHandle *decryptHandle,
+        DrmManagerClient *managerClient,
+        int32_t trackId, DrmBuffer *ipmpBox)
     : mOriginalMediaSource(mediaSource),
       mDecryptHandle(decryptHandle),
+      mDrmManagerClient(managerClient),
       mTrackId(trackId),
       mNALLengthSize(0),
       mWantsNALFragments(false) {
-    gDrmManagerClient->initializeDecryptUnit(
+    CHECK(mDrmManagerClient);
+    mDrmManagerClient->initializeDecryptUnit(
             mDecryptHandle, trackId, ipmpBox);
 
     const char *mime;
@@ -100,7 +105,7 @@
 
 DRMSource::~DRMSource() {
     Mutex::Autolock autoLock(mDRMLock);
-    gDrmManagerClient->finalizeDecryptUnit(mDecryptHandle, mTrackId);
+    mDrmManagerClient->finalizeDecryptUnit(mDecryptHandle, mTrackId);
 }
 
 status_t DRMSource::start(MetaData *params) {
@@ -140,7 +145,7 @@
     decryptedDrmBuffer.data = new char[len];
     DrmBuffer *pDecryptedDrmBuffer = &decryptedDrmBuffer;
 
-    if ((err = gDrmManagerClient->decrypt(mDecryptHandle, mTrackId,
+    if ((err = mDrmManagerClient->decrypt(mDecryptHandle, mTrackId,
             &encryptedDrmBuffer, &pDecryptedDrmBuffer)) != DRM_NO_ERROR) {
 
         if (decryptedDrmBuffer.data) {
@@ -234,12 +239,13 @@
 
 DRMExtractor::DRMExtractor(const sp<DataSource> &source, const char* mime)
     : mDataSource(source),
-      mDecryptHandle(NULL) {
+      mDecryptHandle(NULL),
+      mDrmManagerClient(NULL) {
     mOriginalExtractor = MediaExtractor::Create(source, mime);
     mOriginalExtractor->setDrmFlag(true);
+    mOriginalExtractor->getMetaData()->setInt32(kKeyIsDRM, 1);
 
-    DrmManagerClient *client;
-    source->getDrmInfo(&mDecryptHandle, &client);
+    source->getDrmInfo(&mDecryptHandle, &mDrmManagerClient);
 }
 
 DRMExtractor::~DRMExtractor() {
@@ -260,7 +266,8 @@
     ipmpBox.data = mOriginalExtractor->getDrmTrackInfo(trackID, &(ipmpBox.length));
     CHECK(ipmpBox.length > 0);
 
-    return new DRMSource(originalMediaSource, mDecryptHandle, trackID, &ipmpBox);
+    return new DRMSource(originalMediaSource, mDecryptHandle, mDrmManagerClient,
+            trackID, &ipmpBox);
 }
 
 sp<MetaData> DRMExtractor::getTrackMetaData(size_t index, uint32_t flags) {
@@ -271,27 +278,20 @@
     return mOriginalExtractor->getMetaData();
 }
 
-static Mutex gDRMSnifferMutex;
 bool SniffDRM(
     const sp<DataSource> &source, String8 *mimeType, float *confidence,
         sp<AMessage> *) {
-    {
-        Mutex::Autolock autoLock(gDRMSnifferMutex);
-        if (gDrmManagerClient == NULL) {
-            gDrmManagerClient = new DrmManagerClient();
-        }
-    }
-
-    DecryptHandle *decryptHandle = source->DrmInitialization(gDrmManagerClient);
+    DecryptHandle *decryptHandle = source->DrmInitialization();
 
     if (decryptHandle != NULL) {
         if (decryptHandle->decryptApiType == DecryptApiType::CONTAINER_BASED) {
-            *mimeType = String8("drm+container_based+");
+            *mimeType = String8("drm+container_based+") + decryptHandle->mimeType;
         } else if (decryptHandle->decryptApiType == DecryptApiType::ELEMENTARY_STREAM_BASED) {
-            *mimeType = String8("drm+es_based+");
+            *mimeType = String8("drm+es_based+") + decryptHandle->mimeType;
+        } else if (decryptHandle->decryptApiType == DecryptApiType::WV_BASED) {
+            *mimeType = MEDIA_MIMETYPE_CONTAINER_WVM;
+            LOGW("SniffWVM: found match\n");
         }
-
-        *mimeType += decryptHandle->mimeType;
         *confidence = 10.0f;
 
         return true;
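
A brief illustration of the sniffing change above: SniffDRM now appends the wrapped container's own
MIME type to the reported type. The concrete value "audio/mp4" below is an assumed example, not
taken from the patch.

    // Illustrative sketch only -- the handle's mimeType value is assumed.
    String8 mimeType = String8("drm+es_based+") + String8("audio/mp4");
    // mimeType is now "drm+es_based+audio/mp4"; SniffDRM reports it with
    // confidence 10.0f.
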
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index 0b8997c..3b38208 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -23,6 +23,8 @@
 #include "include/NuCachedSource2.h"
 #include "include/NuHTTPDataSource.h"
 #include "include/DRMExtractor.h"
+#include "include/FLACExtractor.h"
+#include "include/AACExtractor.h"
 
 #include "matroska/MatroskaExtractor.h"
 
@@ -36,7 +38,7 @@
 
 namespace android {
 
-bool DataSource::getUInt16(off_t offset, uint16_t *x) {
+bool DataSource::getUInt16(off64_t offset, uint16_t *x) {
     *x = 0;
 
     uint8_t byte[2];
@@ -49,7 +51,7 @@
     return true;
 }
 
-status_t DataSource::getSize(off_t *size) {
+status_t DataSource::getSize(off64_t *size) {
     *size = 0;
 
     return ERROR_UNSUPPORTED;
@@ -104,9 +106,11 @@
     RegisterSniffer(SniffMatroska);
     RegisterSniffer(SniffOgg);
     RegisterSniffer(SniffWAV);
+    RegisterSniffer(SniffFLAC);
     RegisterSniffer(SniffAMR);
     RegisterSniffer(SniffMPEG2TS);
     RegisterSniffer(SniffMP3);
+    RegisterSniffer(SniffAAC);
 
     char value[PROPERTY_VALUE_MAX];
     if (property_get("drm.service.enabled", value, NULL)
@@ -121,7 +125,8 @@
     sp<DataSource> source;
     if (!strncasecmp("file://", uri, 7)) {
         source = new FileSource(uri + 7);
-    } else if (!strncasecmp("http://", uri, 7)) {
+    } else if (!strncasecmp("http://", uri, 7)
+            || !strncasecmp("https://", uri, 8)) {
         sp<NuHTTPDataSource> httpSource = new NuHTTPDataSource;
         if (httpSource->connect(uri, headers) != OK) {
             return NULL;
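
Since the hunk above registers the new FLAC and AAC sniffers through the same mechanism, here is a
hedged sketch of the shape such a sniffer takes, mirroring the SniffDRM signature shown earlier in
this patch. "SniffMyFormat", the "MYFT" magic bytes, and the MIME string are made-up placeholders.

    // Illustrative sketch only -- names and magic bytes are hypothetical.
    static bool SniffMyFormat(
            const sp<DataSource> &source, String8 *mimeType,
            float *confidence, sp<AMessage> *) {
        uint8_t header[4];
        if (source->readAt(0, header, sizeof(header)) != (ssize_t)sizeof(header)) {
            return false;
        }
        if (memcmp(header, "MYFT", 4) != 0) {
            return false;
        }
        *mimeType = String8("application/x-myformat");
        *confidence = 0.3f;   // keep it modest so stronger matches win
        return true;
    }

    // Registered once at startup, next to the built-in ones:
    //     RegisterSniffer(SniffMyFormat);
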
diff --git a/media/libstagefright/FLACExtractor.cpp b/media/libstagefright/FLACExtractor.cpp
new file mode 100644
index 0000000..8ba5a2d
--- /dev/null
+++ b/media/libstagefright/FLACExtractor.cpp
@@ -0,0 +1,813 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FLACExtractor"
+#include <utils/Log.h>
+
+#include "include/FLACExtractor.h"
+// Vorbis comments
+#include "include/OggExtractor.h"
+// libFLAC parser
+#include "FLAC/stream_decoder.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+
+namespace android {
+
+class FLACParser;
+
+class FLACSource : public MediaSource {
+
+public:
+    FLACSource(
+            const sp<DataSource> &dataSource,
+            const sp<MetaData> &trackMetadata);
+
+    virtual status_t start(MetaData *params);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+protected:
+    virtual ~FLACSource();
+
+private:
+    sp<DataSource> mDataSource;
+    sp<MetaData> mTrackMetadata;
+    sp<FLACParser> mParser;
+    bool mInitCheck;
+    bool mStarted;
+
+    status_t init();
+
+    // no copy constructor or assignment
+    FLACSource(const FLACSource &);
+    FLACSource &operator=(const FLACSource &);
+
+};
+
+// FLACParser wraps a C libFLAC parser aka stream decoder
+
+class FLACParser : public RefBase {
+
+public:
+    FLACParser(
+        const sp<DataSource> &dataSource,
+        // If metadata pointers aren't provided, we don't fill them
+        const sp<MetaData> &fileMetadata = 0,
+        const sp<MetaData> &trackMetadata = 0);
+
+    status_t initCheck() const {
+        return mInitCheck;
+    }
+
+    // stream properties
+    unsigned getMaxBlockSize() const {
+        return mStreamInfo.max_blocksize;
+    }
+    unsigned getSampleRate() const {
+        return mStreamInfo.sample_rate;
+    }
+    unsigned getChannels() const {
+        return mStreamInfo.channels;
+    }
+    unsigned getBitsPerSample() const {
+        return mStreamInfo.bits_per_sample;
+    }
+    FLAC__uint64 getTotalSamples() const {
+        return mStreamInfo.total_samples;
+    }
+
+    // media buffers
+    void allocateBuffers();
+    void releaseBuffers();
+    MediaBuffer *readBuffer() {
+        return readBuffer(false, 0LL);
+    }
+    MediaBuffer *readBuffer(FLAC__uint64 sample) {
+        return readBuffer(true, sample);
+    }
+
+protected:
+    virtual ~FLACParser();
+
+private:
+    sp<DataSource> mDataSource;
+    sp<MetaData> mFileMetadata;
+    sp<MetaData> mTrackMetadata;
+    bool mInitCheck;
+
+    // media buffers
+    size_t mMaxBufferSize;
+    MediaBufferGroup *mGroup;
+    void (*mCopy)(short *dst, const int *const *src, unsigned nSamples);
+
+    // handle to underlying libFLAC parser
+    FLAC__StreamDecoder *mDecoder;
+
+    // current position within the data source
+    off64_t mCurrentPos;
+    bool mEOF;
+
+    // cached when the STREAMINFO metadata is parsed by libFLAC
+    FLAC__StreamMetadata_StreamInfo mStreamInfo;
+    bool mStreamInfoValid;
+
+    // cached when a decoded PCM block is "written" by libFLAC parser
+    bool mWriteRequested;
+    bool mWriteCompleted;
+    FLAC__FrameHeader mWriteHeader;
+    const FLAC__int32 * const *mWriteBuffer;
+
+    // most recent error reported by libFLAC parser
+    FLAC__StreamDecoderErrorStatus mErrorStatus;
+
+    status_t init();
+    MediaBuffer *readBuffer(bool doSeek, FLAC__uint64 sample);
+
+    // no copy constructor or assignment
+    FLACParser(const FLACParser &);
+    FLACParser &operator=(const FLACParser &);
+
+    // FLAC parser callbacks as C++ instance methods
+    FLAC__StreamDecoderReadStatus readCallback(
+            FLAC__byte buffer[], size_t *bytes);
+    FLAC__StreamDecoderSeekStatus seekCallback(
+            FLAC__uint64 absolute_byte_offset);
+    FLAC__StreamDecoderTellStatus tellCallback(
+            FLAC__uint64 *absolute_byte_offset);
+    FLAC__StreamDecoderLengthStatus lengthCallback(
+            FLAC__uint64 *stream_length);
+    FLAC__bool eofCallback();
+    FLAC__StreamDecoderWriteStatus writeCallback(
+            const FLAC__Frame *frame, const FLAC__int32 * const buffer[]);
+    void metadataCallback(const FLAC__StreamMetadata *metadata);
+    void errorCallback(FLAC__StreamDecoderErrorStatus status);
+
+    // FLAC parser callbacks as C-callable functions
+    static FLAC__StreamDecoderReadStatus read_callback(
+            const FLAC__StreamDecoder *decoder,
+            FLAC__byte buffer[], size_t *bytes,
+            void *client_data);
+    static FLAC__StreamDecoderSeekStatus seek_callback(
+            const FLAC__StreamDecoder *decoder,
+            FLAC__uint64 absolute_byte_offset,
+            void *client_data);
+    static FLAC__StreamDecoderTellStatus tell_callback(
+            const FLAC__StreamDecoder *decoder,
+            FLAC__uint64 *absolute_byte_offset,
+            void *client_data);
+    static FLAC__StreamDecoderLengthStatus length_callback(
+            const FLAC__StreamDecoder *decoder,
+            FLAC__uint64 *stream_length,
+            void *client_data);
+    static FLAC__bool eof_callback(
+            const FLAC__StreamDecoder *decoder,
+            void *client_data);
+    static FLAC__StreamDecoderWriteStatus write_callback(
+            const FLAC__StreamDecoder *decoder,
+            const FLAC__Frame *frame, const FLAC__int32 * const buffer[],
+            void *client_data);
+    static void metadata_callback(
+            const FLAC__StreamDecoder *decoder,
+            const FLAC__StreamMetadata *metadata,
+            void *client_data);
+    static void error_callback(
+            const FLAC__StreamDecoder *decoder,
+            FLAC__StreamDecoderErrorStatus status,
+            void *client_data);
+
+};
+
+// The FLAC parser calls our C++ static callbacks using C calling conventions,
+// inside FLAC__stream_decoder_process_until_end_of_metadata
+// and FLAC__stream_decoder_process_single.
+// We then immediately call the corresponding C++ instance methods
+// with the same parameter list, discarding the redundant decoder argument.
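
For readers unfamiliar with this idiom, here is a minimal self-contained sketch of the same static-trampoline pattern in isolation (hypothetical names, illustration only, not part of this patch): a C API accepts only a plain function pointer plus an opaque client_data pointer, so a static shim casts client_data back to the object and forwards to an instance method.

#include <cstdio>

typedef int (*c_callback_t)(int value, void *client_data);

// Stand-in for the C library: it only knows about plain function pointers
// plus an opaque client_data pointer.
static int invoke_from_c(c_callback_t cb, void *client_data) {
    return cb(42, client_data);
}

class Wrapper {
public:
    int run() {
        // Register the C-callable static shim and pass 'this' as client_data.
        return invoke_from_c(&Wrapper::trampoline, this);
    }
private:
    int onValue(int value) {               // the C++ instance method
        std::printf("got %d\n", value);
        return value;
    }
    static int trampoline(int value, void *client_data) {
        // Recover the object from client_data and forward to the member function.
        return static_cast<Wrapper *>(client_data)->onValue(value);
    }
};

int main() {
    Wrapper w;
    return w.run() == 42 ? 0 : 1;
}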
+
+FLAC__StreamDecoderReadStatus FLACParser::read_callback(
+        const FLAC__StreamDecoder *decoder, FLAC__byte buffer[],
+        size_t *bytes, void *client_data)
+{
+    return ((FLACParser *) client_data)->readCallback(buffer, bytes);
+}
+
+FLAC__StreamDecoderSeekStatus FLACParser::seek_callback(
+        const FLAC__StreamDecoder *decoder,
+        FLAC__uint64 absolute_byte_offset, void *client_data)
+{
+    return ((FLACParser *) client_data)->seekCallback(absolute_byte_offset);
+}
+
+FLAC__StreamDecoderTellStatus FLACParser::tell_callback(
+        const FLAC__StreamDecoder *decoder,
+        FLAC__uint64 *absolute_byte_offset, void *client_data)
+{
+    return ((FLACParser *) client_data)->tellCallback(absolute_byte_offset);
+}
+
+FLAC__StreamDecoderLengthStatus FLACParser::length_callback(
+        const FLAC__StreamDecoder *decoder,
+        FLAC__uint64 *stream_length, void *client_data)
+{
+    return ((FLACParser *) client_data)->lengthCallback(stream_length);
+}
+
+FLAC__bool FLACParser::eof_callback(
+        const FLAC__StreamDecoder *decoder, void *client_data)
+{
+    return ((FLACParser *) client_data)->eofCallback();
+}
+
+FLAC__StreamDecoderWriteStatus FLACParser::write_callback(
+        const FLAC__StreamDecoder *decoder, const FLAC__Frame *frame,
+        const FLAC__int32 * const buffer[], void *client_data)
+{
+    return ((FLACParser *) client_data)->writeCallback(frame, buffer);
+}
+
+void FLACParser::metadata_callback(
+        const FLAC__StreamDecoder *decoder,
+        const FLAC__StreamMetadata *metadata, void *client_data)
+{
+    ((FLACParser *) client_data)->metadataCallback(metadata);
+}
+
+void FLACParser::error_callback(
+        const FLAC__StreamDecoder *decoder,
+        FLAC__StreamDecoderErrorStatus status, void *client_data)
+{
+    ((FLACParser *) client_data)->errorCallback(status);
+}
+
+// These are the corresponding callbacks with C++ calling conventions
+
+FLAC__StreamDecoderReadStatus FLACParser::readCallback(
+        FLAC__byte buffer[], size_t *bytes)
+{
+    size_t requested = *bytes;
+    ssize_t actual = mDataSource->readAt(mCurrentPos, buffer, requested);
+    if (0 > actual) {
+        *bytes = 0;
+        return FLAC__STREAM_DECODER_READ_STATUS_ABORT;
+    } else if (0 == actual) {
+        *bytes = 0;
+        mEOF = true;
+        return FLAC__STREAM_DECODER_READ_STATUS_END_OF_STREAM;
+    } else {
+        assert(actual <= requested);
+        *bytes = actual;
+        mCurrentPos += actual;
+        return FLAC__STREAM_DECODER_READ_STATUS_CONTINUE;
+    }
+}
+
+FLAC__StreamDecoderSeekStatus FLACParser::seekCallback(
+        FLAC__uint64 absolute_byte_offset)
+{
+    mCurrentPos = absolute_byte_offset;
+    mEOF = false;
+    return FLAC__STREAM_DECODER_SEEK_STATUS_OK;
+}
+
+FLAC__StreamDecoderTellStatus FLACParser::tellCallback(
+        FLAC__uint64 *absolute_byte_offset)
+{
+    *absolute_byte_offset = mCurrentPos;
+    return FLAC__STREAM_DECODER_TELL_STATUS_OK;
+}
+
+FLAC__StreamDecoderLengthStatus FLACParser::lengthCallback(
+        FLAC__uint64 *stream_length)
+{
+    off64_t size;
+    if (OK == mDataSource->getSize(&size)) {
+        *stream_length = size;
+        return FLAC__STREAM_DECODER_LENGTH_STATUS_OK;
+    } else {
+        return FLAC__STREAM_DECODER_LENGTH_STATUS_UNSUPPORTED;
+    }
+}
+
+FLAC__bool FLACParser::eofCallback()
+{
+    return mEOF;
+}
+
+FLAC__StreamDecoderWriteStatus FLACParser::writeCallback(
+        const FLAC__Frame *frame, const FLAC__int32 * const buffer[])
+{
+    if (mWriteRequested) {
+        mWriteRequested = false;
+        // FLAC parser doesn't free or realloc buffer until next frame or finish
+        mWriteHeader = frame->header;
+        mWriteBuffer = buffer;
+        mWriteCompleted = true;
+        return FLAC__STREAM_DECODER_WRITE_STATUS_CONTINUE;
+    } else {
+        LOGE("FLACParser::writeCallback unexpected");
+        return FLAC__STREAM_DECODER_WRITE_STATUS_ABORT;
+    }
+}
+
+void FLACParser::metadataCallback(const FLAC__StreamMetadata *metadata)
+{
+    switch (metadata->type) {
+    case FLAC__METADATA_TYPE_STREAMINFO:
+        if (!mStreamInfoValid) {
+            mStreamInfo = metadata->data.stream_info;
+            mStreamInfoValid = true;
+        } else {
+            LOGE("FLACParser::metadataCallback unexpected STREAMINFO");
+        }
+        break;
+    case FLAC__METADATA_TYPE_VORBIS_COMMENT:
+        {
+        const FLAC__StreamMetadata_VorbisComment *vc;
+        vc = &metadata->data.vorbis_comment;
+        for (FLAC__uint32 i = 0; i < vc->num_comments; ++i) {
+            FLAC__StreamMetadata_VorbisComment_Entry *vce;
+            vce = &vc->comments[i];
+            if (mFileMetadata != 0) {
+                parseVorbisComment(mFileMetadata, (const char *) vce->entry,
+                        vce->length);
+            }
+        }
+        }
+        break;
+    case FLAC__METADATA_TYPE_PICTURE:
+        if (mFileMetadata != 0) {
+            const FLAC__StreamMetadata_Picture *p = &metadata->data.picture;
+            mFileMetadata->setData(kKeyAlbumArt,
+                    MetaData::TYPE_NONE, p->data, p->data_length);
+            mFileMetadata->setCString(kKeyAlbumArtMIME, p->mime_type);
+        }
+        break;
+    default:
+        LOGW("FLACParser::metadataCallback unexpected type %u", metadata->type);
+        break;
+    }
+}
+
+void FLACParser::errorCallback(FLAC__StreamDecoderErrorStatus status)
+{
+    LOGE("FLACParser::errorCallback status=%d", status);
+    mErrorStatus = status;
+}
+
+// Copy samples from FLAC native 32-bit non-interleaved to 16-bit interleaved.
+// These are candidates for optimization if needed.
+
+static void copyMono8(short *dst, const int *const *src, unsigned nSamples)
+{
+    for (unsigned i = 0; i < nSamples; ++i) {
+        *dst++ = src[0][i] << 8;
+    }
+}
+
+static void copyStereo8(short *dst, const int *const *src, unsigned nSamples)
+{
+    for (unsigned i = 0; i < nSamples; ++i) {
+        *dst++ = src[0][i] << 8;
+        *dst++ = src[1][i] << 8;
+    }
+}
+
+static void copyMono16(short *dst, const int *const *src, unsigned nSamples)
+{
+    for (unsigned i = 0; i < nSamples; ++i) {
+        *dst++ = src[0][i];
+    }
+}
+
+static void copyStereo16(short *dst, const int *const *src, unsigned nSamples)
+{
+    for (unsigned i = 0; i < nSamples; ++i) {
+        *dst++ = src[0][i];
+        *dst++ = src[1][i];
+    }
+}
+
+// The 24-bit versions should do dithering or noise-shaping, here or in
+// AudioFlinger (a dither sketch follows copyStereo24 below).
+
+static void copyMono24(short *dst, const int *const *src, unsigned nSamples)
+{
+    for (unsigned i = 0; i < nSamples; ++i) {
+        *dst++ = src[0][i] >> 8;
+    }
+}
+
+static void copyStereo24(short *dst, const int *const *src, unsigned nSamples)
+{
+    for (unsigned i = 0; i < nSamples; ++i) {
+        *dst++ = src[0][i] >> 8;
+        *dst++ = src[1][i] >> 8;
+    }
+}
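
The 24-bit copy functions above simply truncate the low 8 bits. A minimal sketch of what TPDF dithering could look like for that reduction, with a rand()-based noise source chosen purely for illustration (this is not part of the patch):

#include <cstdlib>

// Illustrative only: reduce one 24-bit sample (sign-extended into an int)
// to 16 bits with TPDF dither instead of plain truncation.
static short dither24to16(int sample24)
{
    // Sum of two uniform values in [0, 255] minus 255 gives a triangular
    // distribution spanning roughly +/- one 16-bit LSB (256) at 24-bit scale.
    int tpdf = (std::rand() & 0xFF) + (std::rand() & 0xFF) - 255;
    int dithered = sample24 + tpdf;
    int out = dithered >> 8;          // drop the low 8 bits
    if (out > 32767) out = 32767;     // clamp to the 16-bit range
    if (out < -32768) out = -32768;
    return (short) out;
}

int main()
{
    // Full-scale positive input must still clamp cleanly.
    return dither24to16(0x7FFFFF) == 32767 ? 0 : 1;
}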
+
+static void copyTrespass(short *dst, const int *const *src, unsigned nSamples)
+{
+    TRESPASS();
+}
+
+// FLACParser
+
+FLACParser::FLACParser(
+        const sp<DataSource> &dataSource,
+        const sp<MetaData> &fileMetadata,
+        const sp<MetaData> &trackMetadata)
+    : mDataSource(dataSource),
+      mFileMetadata(fileMetadata),
+      mTrackMetadata(trackMetadata),
+      mInitCheck(false),
+      mMaxBufferSize(0),
+      mGroup(NULL),
+      mCopy(copyTrespass),
+      mDecoder(NULL),
+      mCurrentPos(0LL),
+      mEOF(false),
+      mStreamInfoValid(false),
+      mWriteRequested(false),
+      mWriteCompleted(false),
+      mWriteBuffer(NULL),
+      mErrorStatus((FLAC__StreamDecoderErrorStatus) -1)
+{
+    LOGV("FLACParser::FLACParser");
+    memset(&mStreamInfo, 0, sizeof(mStreamInfo));
+    memset(&mWriteHeader, 0, sizeof(mWriteHeader));
+    mInitCheck = init();
+}
+
+FLACParser::~FLACParser()
+{
+    LOGV("FLACParser::~FLACParser");
+    if (mDecoder != NULL) {
+        FLAC__stream_decoder_delete(mDecoder);
+        mDecoder = NULL;
+    }
+}
+
+status_t FLACParser::init()
+{
+    // setup libFLAC parser
+    mDecoder = FLAC__stream_decoder_new();
+    if (mDecoder == NULL) {
+        // FLAC__stream_decoder_new should succeed, since it probably only does
+        // a malloc, which in practice does not fail on Android.  But to avoid
+        // depending on libFLAC internals, we check and log here.
+        LOGE("new failed");
+        return NO_INIT;
+    }
+    FLAC__stream_decoder_set_md5_checking(mDecoder, false);
+    FLAC__stream_decoder_set_metadata_ignore_all(mDecoder);
+    FLAC__stream_decoder_set_metadata_respond(
+            mDecoder, FLAC__METADATA_TYPE_STREAMINFO);
+    FLAC__stream_decoder_set_metadata_respond(
+            mDecoder, FLAC__METADATA_TYPE_PICTURE);
+    FLAC__stream_decoder_set_metadata_respond(
+            mDecoder, FLAC__METADATA_TYPE_VORBIS_COMMENT);
+    FLAC__StreamDecoderInitStatus initStatus;
+    initStatus = FLAC__stream_decoder_init_stream(
+            mDecoder,
+            read_callback, seek_callback, tell_callback,
+            length_callback, eof_callback, write_callback,
+            metadata_callback, error_callback, (void *) this);
+    if (initStatus != FLAC__STREAM_DECODER_INIT_STATUS_OK) {
+        // A failure here probably indicates a programming error and so is
+        // unlikely to happen. But we check and log here similarly to above.
+        LOGE("init_stream failed %d", initStatus);
+        return NO_INIT;
+    }
+    // parse all metadata
+    if (!FLAC__stream_decoder_process_until_end_of_metadata(mDecoder)) {
+        LOGE("end_of_metadata failed");
+        return NO_INIT;
+    }
+    if (mStreamInfoValid) {
+        // check channel count
+        switch (getChannels()) {
+        case 1:
+        case 2:
+            break;
+        default:
+            LOGE("unsupported channel count %u", getChannels());
+            return NO_INIT;
+        }
+        // check bit depth
+        switch (getBitsPerSample()) {
+        case 8:
+        case 16:
+        case 24:
+            break;
+        default:
+            LOGE("unsupported bits per sample %u", getBitsPerSample());
+            return NO_INIT;
+        }
+        // check sample rate
+        switch (getSampleRate()) {
+        case  8000:
+        case 11025:
+        case 12000:
+        case 16000:
+        case 22050:
+        case 24000:
+        case 32000:
+        case 44100:
+        case 48000:
+            break;
+        default:
+            // 96000 would require a proper downsampler in AudioFlinger
+            LOGE("unsupported sample rate %u", getSampleRate());
+            return NO_INIT;
+        }
+        // configure the appropriate copy function, defaulting to trespass
+        static const struct {
+            unsigned mChannels;
+            unsigned mBitsPerSample;
+            void (*mCopy)(short *dst, const int *const *src, unsigned nSamples);
+        } table[] = {
+            { 1,  8, copyMono8    },
+            { 2,  8, copyStereo8  },
+            { 1, 16, copyMono16   },
+            { 2, 16, copyStereo16 },
+            { 1, 24, copyMono24   },
+            { 2, 24, copyStereo24 },
+        };
+        for (unsigned i = 0; i < sizeof(table)/sizeof(table[0]); ++i) {
+            if (table[i].mChannels == getChannels() &&
+                    table[i].mBitsPerSample == getBitsPerSample()) {
+                mCopy = table[i].mCopy;
+                break;
+            }
+        }
+        // populate track metadata
+        if (mTrackMetadata != 0) {
+            mTrackMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
+            mTrackMetadata->setInt32(kKeyChannelCount, getChannels());
+            mTrackMetadata->setInt32(kKeySampleRate, getSampleRate());
+            // sample rate is non-zero, so division by zero not possible
+            mTrackMetadata->setInt64(kKeyDuration,
+                    (getTotalSamples() * 1000000LL) / getSampleRate());
+        }
+    } else {
+        LOGE("missing STREAMINFO");
+        return NO_INIT;
+    }
+    if (mFileMetadata != 0) {
+        mFileMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_FLAC);
+    }
+    return OK;
+}
+
+void FLACParser::allocateBuffers()
+{
+    CHECK(mGroup == NULL);
+    mGroup = new MediaBufferGroup;
+    mMaxBufferSize = getMaxBlockSize() * getChannels() * sizeof(short);
+    mGroup->add_buffer(new MediaBuffer(mMaxBufferSize));
+}
+
+void FLACParser::releaseBuffers()
+{
+    CHECK(mGroup != NULL);
+    delete mGroup;
+    mGroup = NULL;
+}
+
+MediaBuffer *FLACParser::readBuffer(bool doSeek, FLAC__uint64 sample)
+{
+    mWriteRequested = true;
+    mWriteCompleted = false;
+    if (doSeek) {
+        // We implement the seek callback, so this works without explicit flush
+        if (!FLAC__stream_decoder_seek_absolute(mDecoder, sample)) {
+            LOGE("FLACParser::readBuffer seek to sample %llu failed", sample);
+            return NULL;
+        }
+        LOGV("FLACParser::readBuffer seek to sample %llu succeeded", sample);
+    } else {
+        if (!FLAC__stream_decoder_process_single(mDecoder)) {
+            LOGE("FLACParser::readBuffer process_single failed");
+            return NULL;
+        }
+    }
+    if (!mWriteCompleted) {
+        LOGV("FLACParser::readBuffer write did not complete");
+        return NULL;
+    }
+    // verify that block header keeps the promises made by STREAMINFO
+    unsigned blocksize = mWriteHeader.blocksize;
+    if (blocksize == 0 || blocksize > getMaxBlockSize()) {
+        LOGE("FLACParser::readBuffer write invalid blocksize %u", blocksize);
+        return NULL;
+    }
+    if (mWriteHeader.sample_rate != getSampleRate() ||
+        mWriteHeader.channels != getChannels() ||
+        mWriteHeader.bits_per_sample != getBitsPerSample()) {
+        LOGE("FLACParser::readBuffer write changed parameters mid-stream");
+    }
+    // acquire a media buffer
+    CHECK(mGroup != NULL);
+    MediaBuffer *buffer;
+    status_t err = mGroup->acquire_buffer(&buffer);
+    if (err != OK) {
+        return NULL;
+    }
+    size_t bufferSize = blocksize * getChannels() * sizeof(short);
+    CHECK(bufferSize <= mMaxBufferSize);
+    short *data = (short *) buffer->data();
+    buffer->set_range(0, bufferSize);
+    // copy PCM from FLAC write buffer to our media buffer, with interleaving
+    (*mCopy)(data, mWriteBuffer, blocksize);
+    // fill in buffer metadata
+    CHECK(mWriteHeader.number_type == FLAC__FRAME_NUMBER_TYPE_SAMPLE_NUMBER);
+    FLAC__uint64 sampleNumber = mWriteHeader.number.sample_number;
+    int64_t timeUs = (1000000LL * sampleNumber) / getSampleRate();
+    buffer->meta_data()->setInt64(kKeyTime, timeUs);
+    buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);
+    return buffer;
+}
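
For concreteness, with illustrative numbers only (a common STREAMINFO max block size of 4096, stereo, 44.1 kHz), the buffer sizing done in allocateBuffers() and the timestamp math above work out as in this standalone sketch:

#include <cassert>

int main()
{
    const unsigned maxBlockSize = 4096;     // common STREAMINFO max_blocksize
    const unsigned channels = 2;
    const unsigned sampleRate = 44100;

    // mMaxBufferSize: one 16-bit sample per channel per block sample.
    size_t maxBufferSize = maxBlockSize * channels * sizeof(short);
    assert(maxBufferSize == 16384);

    // kKeyTime for a frame starting at sample 441000: exactly 10 seconds.
    long long sampleNumber = 441000;
    long long timeUs = (1000000LL * sampleNumber) / sampleRate;
    assert(timeUs == 10000000LL);
    return 0;
}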
+
+// FLACSource
+
+FLACSource::FLACSource(
+        const sp<DataSource> &dataSource,
+        const sp<MetaData> &trackMetadata)
+    : mDataSource(dataSource),
+      mTrackMetadata(trackMetadata),
+      mParser(0),
+      mInitCheck(false),
+      mStarted(false)
+{
+    LOGV("FLACSource::FLACSource");
+    mInitCheck = init();
+}
+
+FLACSource::~FLACSource()
+{
+    LOGV("~FLACSource::FLACSource");
+    if (mStarted) {
+        stop();
+    }
+}
+
+status_t FLACSource::start(MetaData *params)
+{
+    LOGV("FLACSource::start");
+
+    CHECK(!mStarted);
+    mParser->allocateBuffers();
+    mStarted = true;
+
+    return OK;
+}
+
+status_t FLACSource::stop()
+{
+    LOGV("FLACSource::stop");
+
+    CHECK(mStarted);
+    mParser->releaseBuffers();
+    mStarted = false;
+
+    return OK;
+}
+
+sp<MetaData> FLACSource::getFormat()
+{
+    return mTrackMetadata;
+}
+
+status_t FLACSource::read(
+        MediaBuffer **outBuffer, const ReadOptions *options)
+{
+    MediaBuffer *buffer;
+    // process an optional seek request
+    int64_t seekTimeUs;
+    ReadOptions::SeekMode mode;
+    if ((NULL != options) && options->getSeekTo(&seekTimeUs, &mode)) {
+        FLAC__uint64 sample;
+        if (seekTimeUs <= 0LL) {
+            sample = 0LL;
+        } else {
+            // sample and total samples are both zero-based, and a seek to
+            // EOF (sample == total samples) is ok
+            sample = (seekTimeUs * mParser->getSampleRate()) / 1000000LL;
+            if (sample >= mParser->getTotalSamples()) {
+                sample = mParser->getTotalSamples();
+            }
+        }
+        buffer = mParser->readBuffer(sample);
+    // otherwise read sequentially
+    } else {
+        buffer = mParser->readBuffer();
+    }
+    *outBuffer = buffer;
+    return buffer != NULL ? (status_t) OK : (status_t) ERROR_END_OF_STREAM;
+}
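
A standalone sketch of the seek conversion above, assuming a 44.1 kHz stream with 1,000,000 total samples (numbers chosen for illustration): the requested time in microseconds maps to a zero-based sample index, and a request past the end is clamped to the total sample count, which readBuffer() then treats as a seek to EOF.

#include <cassert>

int main()
{
    const unsigned long long sampleRate = 44100;
    const unsigned long long totalSamples = 1000000;   // assumed for illustration

    long long seekTimeUs = 2000000;                    // 2 seconds
    unsigned long long sample = (seekTimeUs * sampleRate) / 1000000LL;
    assert(sample == 88200);

    seekTimeUs = 60LL * 1000000;                       // 60 s, past the end
    sample = (seekTimeUs * sampleRate) / 1000000LL;    // 2646000
    if (sample >= totalSamples) {
        sample = totalSamples;                         // clamp: seek to EOF is ok
    }
    assert(sample == totalSamples);
    return 0;
}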
+
+status_t FLACSource::init()
+{
+    LOGV("FLACSource::init");
+    // re-use the track metadata passed into the constructor by FLACExtractor
+    mParser = new FLACParser(mDataSource);
+    return mParser->initCheck();
+}
+
+// FLACExtractor
+
+FLACExtractor::FLACExtractor(
+        const sp<DataSource> &dataSource)
+    : mDataSource(dataSource),
+      mInitCheck(false)
+{
+    LOGV("FLACExtractor::FLACExtractor");
+    mInitCheck = init();
+}
+
+FLACExtractor::~FLACExtractor()
+{
+    LOGV("~FLACExtractor::FLACExtractor");
+}
+
+size_t FLACExtractor::countTracks()
+{
+    return mInitCheck == OK ? 1 : 0;
+}
+
+sp<MediaSource> FLACExtractor::getTrack(size_t index)
+{
+    if (mInitCheck != OK || index > 0) {
+        return NULL;
+    }
+    return new FLACSource(mDataSource, mTrackMetadata);
+}
+
+sp<MetaData> FLACExtractor::getTrackMetaData(
+        size_t index, uint32_t flags)
+{
+    if (mInitCheck != OK || index > 0) {
+        return NULL;
+    }
+    return mTrackMetadata;
+}
+
+status_t FLACExtractor::init()
+{
+    mFileMetadata = new MetaData;
+    mTrackMetadata = new MetaData;
+    // FLACParser will fill in the metadata for us
+    mParser = new FLACParser(mDataSource, mFileMetadata, mTrackMetadata);
+    return mParser->initCheck();
+}
+
+sp<MetaData> FLACExtractor::getMetaData()
+{
+    return mFileMetadata;
+}
+
+// Sniffer
+
+bool SniffFLAC(
+        const sp<DataSource> &source, String8 *mimeType, float *confidence,
+        sp<AMessage> *)
+{
+    // first 4 bytes are the "fLaC" signature word
+    // the next 4 bytes are the header of the mandatory first STREAMINFO
+    // metadata block, whose length field is 042 (34), the sizeof STREAMINFO
+    // no need to read the rest of the header, as a premature EOF will be caught later
+    // (a byte-level sketch of this signature follows the function)
+    uint8_t header[4+4];
+    if (source->readAt(0, header, sizeof(header)) != sizeof(header)
+            || memcmp("fLaC\0\0\0\042", header, 4+4))
+    {
+        return false;
+    }
+
+    *mimeType = MEDIA_MIMETYPE_AUDIO_FLAC;
+    *confidence = 0.5;
+
+    return true;
+}
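
For reference, the 8 bytes compared above are how every FLAC file begins: the "fLaC" marker, then the header of the mandatory first STREAMINFO metadata block with a 24-bit big-endian length of 34 (042). A tiny standalone check, for illustration only:

#include <cstring>
#include <cassert>

int main()
{
    // 'f' 'L' 'a' 'C', then the metadata block header:
    // byte 4: is_last flag (bit 7) = 0, block type (bits 0-6) = 0 (STREAMINFO)
    // bytes 5-7: big-endian block length = 34
    const unsigned char start[8] =
            { 'f', 'L', 'a', 'C', 0x00, 0x00, 0x00, 0x22 };
    assert(std::memcmp("fLaC\0\0\0\042", start, 8) == 0);

    unsigned blockLength = (start[5] << 16) | (start[6] << 8) | start[7];
    assert(blockLength == 34);
    return 0;
}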
+
+}  // namespace android
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index b46d8d0..02a78c9 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -16,12 +16,16 @@
 
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaDebug.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
 
 namespace android {
 
 FileSource::FileSource(const char *filename)
-    : mFile(fopen(filename, "rb")),
-      mFd(fileno(mFile)),
+    : mFd(-1),
       mOffset(0),
       mLength(-1),
       mDecryptHandle(NULL),
@@ -29,11 +33,12 @@
       mDrmBufOffset(0),
       mDrmBufSize(0),
       mDrmBuf(NULL){
+
+    mFd = open(filename, O_LARGEFILE | O_RDONLY);
 }
 
 FileSource::FileSource(int fd, int64_t offset, int64_t length)
-    : mFile(fdopen(fd, "rb")),
-      mFd(fd),
+    : mFd(fd),
       mOffset(offset),
       mLength(length),
       mDecryptHandle(NULL),
@@ -46,26 +51,35 @@
 }
 
 FileSource::~FileSource() {
-    if (mFile != NULL) {
-        fclose(mFile);
-        mFile = NULL;
+    if (mFd >= 0) {
+        close(mFd);
+        mFd = -1;
     }
 
     if (mDrmBuf != NULL) {
         delete[] mDrmBuf;
         mDrmBuf = NULL;
     }
+
     if (mDecryptHandle != NULL) {
+        // To release mDecryptHandle
+        CHECK(mDrmManagerClient);
         mDrmManagerClient->closeDecryptSession(mDecryptHandle);
+        mDecryptHandle = NULL;
+    }
+
+    if (mDrmManagerClient != NULL) {
+        delete mDrmManagerClient;
+        mDrmManagerClient = NULL;
     }
 }
 
 status_t FileSource::initCheck() const {
-    return mFile != NULL ? OK : NO_INIT;
+    return mFd >= 0 ? OK : NO_INIT;
 }
 
-ssize_t FileSource::readAt(off_t offset, void *data, size_t size) {
-    if (mFile == NULL) {
+ssize_t FileSource::readAt(off64_t offset, void *data, size_t size) {
+    if (mFd < 0) {
         return NO_INIT;
     }
 
@@ -85,18 +99,18 @@
             == mDecryptHandle->decryptApiType) {
         return readAtDRM(offset, data, size);
    } else {
-        int err = fseeko(mFile, offset + mOffset, SEEK_SET);
-        if (err < 0) {
+        off64_t result = lseek64(mFd, offset + mOffset, SEEK_SET);
+        if (result == -1) {
             LOGE("seek to %lld failed", offset + mOffset);
             return UNKNOWN_ERROR;
         }
 
-        return fread(data, 1, size, mFile);
+        return ::read(mFd, data, size);
     }
 }
 
-status_t FileSource::getSize(off_t *size) {
-    if (mFile == NULL) {
+status_t FileSource::getSize(off64_t *size) {
+    if (mFd < 0) {
         return NO_INIT;
     }
 
@@ -106,20 +120,27 @@
         return OK;
     }
 
-    fseek(mFile, 0, SEEK_END);
-    *size = ftello(mFile);
+    *size = lseek64(mFd, 0, SEEK_END);
 
     return OK;
 }
 
-DecryptHandle* FileSource::DrmInitialization(DrmManagerClient* client) {
-    mDrmManagerClient = client;
+DecryptHandle* FileSource::DrmInitialization() {
+    if (mDrmManagerClient == NULL) {
+        mDrmManagerClient = new DrmManagerClient();
+    }
+
+    if (mDrmManagerClient == NULL) {
+        return NULL;
+    }
+
     if (mDecryptHandle == NULL) {
         mDecryptHandle = mDrmManagerClient->openDecryptSession(
                 mFd, mOffset, mLength);
     }
 
     if (mDecryptHandle == NULL) {
+        delete mDrmManagerClient;
         mDrmManagerClient = NULL;
     }
 
@@ -132,7 +153,7 @@
     *client = mDrmManagerClient;
 }
 
-ssize_t FileSource::readAtDRM(off_t offset, void *data, size_t size) {
+ssize_t FileSource::readAtDRM(off64_t offset, void *data, size_t size) {
     size_t DRM_CACHE_SIZE = 1024;
     if (mDrmBuf == NULL) {
         mDrmBuf = new unsigned char[DRM_CACHE_SIZE];
diff --git a/media/libstagefright/HTTPStream.cpp b/media/libstagefright/HTTPStream.cpp
index 4ad1c12..2caf211 100644
--- a/media/libstagefright/HTTPStream.cpp
+++ b/media/libstagefright/HTTPStream.cpp
@@ -34,18 +34,27 @@
 
 #include <media/stagefright/foundation/ADebug.h>
 
+#include <openssl/ssl.h>
+
 namespace android {
 
 // static
-const char *HTTPStream::kStatusKey = ":status:";
+const char *HTTPStream::kStatusKey = ":status:";  // MUST be lowercase.
 
 HTTPStream::HTTPStream()
     : mState(READY),
-      mSocket(-1) {
+      mSocket(-1),
+      mSSLContext(NULL),
+      mSSL(NULL) {
 }
 
 HTTPStream::~HTTPStream() {
     disconnect();
+
+    if (mSSLContext != NULL) {
+        SSL_CTX_free((SSL_CTX *)mSSLContext);
+        mSSLContext = NULL;
+    }
 }
 
 static bool MakeSocketBlocking(int s, bool blocking) {
@@ -124,7 +133,85 @@
     return result;
 }
 
-status_t HTTPStream::connect(const char *server, int port) {
+// Apparently under our Linux kernel, closing a socket descriptor from one thread
+// will not unblock a pending send/recv on that socket in another thread.
+static ssize_t MySendReceive(
+        int s, void *data, size_t size, int flags, bool sendData) {
+    ssize_t result = 0;
+
+    while (size > 0) {
+        fd_set rs, ws, es;
+        FD_ZERO(&rs);
+        FD_ZERO(&ws);
+        FD_ZERO(&es);
+        FD_SET(s, sendData ? &ws : &rs);
+        FD_SET(s, &es);
+
+        struct timeval tv;
+        tv.tv_sec = 0;
+        tv.tv_usec = 100000ll;
+
+        int nfds = ::select(
+                s + 1,
+                sendData ? NULL : &rs,
+                sendData ? &ws : NULL,
+                &es,
+                &tv);
+
+        if (nfds < 0) {
+            if (errno == EINTR) {
+                continue;
+            }
+
+            result = -errno;
+            break;
+        } else if (nfds == 0) {
+            // timeout
+
+            continue;
+        }
+
+        CHECK_EQ(nfds, 1);
+
+        ssize_t nbytes =
+            sendData ? send(s, data, size, flags) : recv(s, data, size, flags);
+
+        if (nbytes < 0) {
+            if (errno == EINTR) {
+                continue;
+            }
+
+            result = -errno;
+            break;
+        } else if (nbytes == 0) {
+            result = 0;
+            break;
+        }
+
+        data = (uint8_t *)data + nbytes;
+        size -= nbytes;
+
+        result = nbytes;
+        break;
+    }
+
+    return result;
+}
+
+static ssize_t MySend(int s, const void *data, size_t size, int flags) {
+    return MySendReceive(
+            s, const_cast<void *>(data), size, flags, true /* sendData */);
+}
+
+static ssize_t MyReceive(int s, void *data, size_t size, int flags) {
+    return MySendReceive(s, data, size, flags, false /* sendData */);
+}
+
+status_t HTTPStream::connect(const char *server, int port, bool https) {
+    if (port < 0) {
+        port = https ? 443 : 80;
+    }
+
     Mutex::Autolock autoLock(mLock);
 
     status_t err = OK;
@@ -193,6 +280,47 @@
         return res;
     }
 
+    if (https) {
+        CHECK(mSSL == NULL);
+
+        if (mSSLContext == NULL) {
+            SSL_library_init();
+
+            mSSLContext = SSL_CTX_new(TLSv1_client_method());
+
+            if (mSSLContext == NULL) {
+                LOGE("failed to create SSL context");
+                mState = READY;
+                return ERROR_IO;
+            }
+        }
+
+        mSSL = SSL_new((SSL_CTX *)mSSLContext);
+
+        if (mSSL == NULL) {
+            LOGE("failed to create SSL session");
+
+            mState = READY;
+            return ERROR_IO;
+        }
+
+        int res = SSL_set_fd((SSL *)mSSL, mSocket);
+
+        if (res == 1) {
+            res = SSL_connect((SSL *)mSSL);
+        }
+
+        if (res != 1) {
+            SSL_free((SSL *)mSSL);
+            mSSL = NULL;
+
+            LOGE("failed to connect over SSL");
+            mState = READY;
+
+            return ERROR_IO;
+        }
+    }
+
     mState = CONNECTED;
 
     return OK;
@@ -205,6 +333,13 @@
         return ERROR_NOT_CONNECTED;
     }
 
+    if (mSSL != NULL) {
+        SSL_shutdown((SSL *)mSSL);
+
+        SSL_free((SSL *)mSSL);
+        mSSL = NULL;
+    }
+
     CHECK(mSocket >= 0);
     close(mSocket);
     mSocket = -1;
@@ -220,16 +355,21 @@
     }
 
     while (size > 0) {
-        ssize_t n = ::send(mSocket, data, size, 0);
+        ssize_t n;
+        if (mSSL != NULL) {
+            n = SSL_write((SSL *)mSSL, data, size);
+
+            if (n < 0) {
+                n = -SSL_get_error((SSL *)mSSL, n);
+            }
+        } else {
+            n = MySend(mSocket, data, size, 0);
+        }
 
         if (n < 0) {
-            if (errno == EINTR) {
-                continue;
-            }
-
             disconnect();
 
-            return ERROR_IO;
+            return n;
         } else if (n == 0) {
             disconnect();
 
@@ -265,12 +405,18 @@
 
     for (;;) {
         char c;
-        ssize_t n = recv(mSocket, &c, 1, 0);
-        if (n < 0) {
-            if (errno == EINTR) {
-                continue;
-            }
+        ssize_t n;
+        if (mSSL != NULL) {
+            n = SSL_read((SSL *)mSSL, &c, 1);
 
+            if (n < 0) {
+                n = -SSL_get_error((SSL *)mSSL, n);
+            }
+        } else {
+            n = MyReceive(mSocket, &c, 1, 0);
+        }
+
+        if (n < 0) {
             disconnect();
 
             return ERROR_IO;
@@ -315,7 +461,7 @@
         return err;
     }
 
-    mHeaders.add(string(kStatusKey), string(line));
+    mHeaders.add(AString(kStatusKey), AString(line));
 
     char *spacePos = strchr(line, ' ');
     if (spacePos == NULL) {
@@ -359,7 +505,10 @@
 
         char *colonPos = strchr(line, ':');
         if (colonPos == NULL) {
-            mHeaders.add(string(line), string());
+            AString key = line;
+            key.tolower();
+
+            mHeaders.add(key, AString());
         } else {
             char *end_of_key = colonPos;
             while (end_of_key > line && isspace(end_of_key[-1])) {
@@ -373,7 +522,10 @@
 
             *end_of_key = '\0';
 
-            mHeaders.add(string(line), string(start_of_value));
+            AString key = line;
+            key.tolower();
+
+            mHeaders.add(key, AString(start_of_value));
         }
     }
 
@@ -383,14 +535,19 @@
 ssize_t HTTPStream::receive(void *data, size_t size) {
     size_t total = 0;
     while (total < size) {
-        ssize_t n = recv(mSocket, (char *)data + total, size - total, 0);
+        ssize_t n;
+        if (mSSL != NULL) {
+            n = SSL_read((SSL *)mSSL, (char *)data + total, size - total);
+
+            if (n < 0) {
+                n = -SSL_get_error((SSL *)mSSL, n);
+            }
+        } else {
+            n = MyReceive(mSocket, (char *)data + total, size - total, 0);
+        }
 
         if (n < 0) {
-            if (errno == EINTR) {
-                continue;
-            }
-
-            LOGE("recv failed, errno = %d (%s)", errno, strerror(errno));
+            LOGE("recv failed, errno = %d (%s)", (int)n, strerror(-n));
 
             disconnect();
             return (ssize_t)ERROR_IO;
@@ -409,8 +566,11 @@
     return (ssize_t)total;
 }
 
-bool HTTPStream::find_header_value(const string &key, string *value) const {
-    ssize_t index = mHeaders.indexOfKey(key);
+bool HTTPStream::find_header_value(const AString &key, AString *value) const {
+    AString key_lower = key;
+    key_lower.tolower();
+
+    ssize_t index = mHeaders.indexOfKey(key_lower);
     if (index < 0) {
         value->clear();
         return false;
diff --git a/media/libstagefright/JPEGSource.cpp b/media/libstagefright/JPEGSource.cpp
index ec81097..e818115 100644
--- a/media/libstagefright/JPEGSource.cpp
+++ b/media/libstagefright/JPEGSource.cpp
@@ -142,7 +142,7 @@
     mWidth = 0;
     mHeight = 0;
 
-    off_t i = 0;
+    off64_t i = 0;
 
     uint16_t soi;
     if (!mSource->getUInt16(i, &soi)) {
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
index ed14a4b..03ce202 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -21,6 +21,8 @@
 #include "include/MP3Extractor.h"
 
 #include "include/ID3.h"
+#include "include/VBRISeeker.h"
+#include "include/XINGSeeker.h"
 
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/DataSource.h>
@@ -42,10 +44,11 @@
 // Yes ... there are things that must indeed match...
 static const uint32_t kMask = 0xfffe0c00;
 
-static bool get_mp3_frame_size(
+// static
+bool MP3Extractor::get_mp3_frame_size(
         uint32_t header, size_t *frame_size,
-        int *out_sampling_rate = NULL, int *out_channels = NULL,
-        int *out_bitrate = NULL) {
+        int *out_sampling_rate, int *out_channels,
+        int *out_bitrate) {
     *frame_size = 0;
 
     if (out_sampling_rate) {
@@ -178,136 +181,13 @@
     return true;
 }
 
-static bool parse_xing_header(
-        const sp<DataSource> &source, off_t first_frame_pos,
-        int32_t *frame_number = NULL, int32_t *byte_number = NULL,
-        char *table_of_contents = NULL, int32_t *quality_indicator = NULL,
-        int64_t *duration = NULL) {
-
-    if (frame_number) {
-        *frame_number = 0;
-    }
-    if (byte_number) {
-        *byte_number = 0;
-    }
-    if (table_of_contents) {
-        table_of_contents[0] = 0;
-    }
-    if (quality_indicator) {
-        *quality_indicator = 0;
-    }
-    if (duration) {
-        *duration = 0;
-    }
-
-    uint8_t buffer[4];
-    int offset = first_frame_pos;
-    if (source->readAt(offset, &buffer, 4) < 4) { // get header
-        return false;
-    }
-    offset += 4;
-
-    uint8_t id, layer, sr_index, mode;
-    layer = (buffer[1] >> 1) & 3;
-    id = (buffer[1] >> 3) & 3;
-    sr_index = (buffer[2] >> 2) & 3;
-    mode = (buffer[3] >> 6) & 3;
-    if (layer == 0) {
-        return false;
-    }
-    if (id == 1) {
-        return false;
-    }
-    if (sr_index == 3) {
-        return false;
-    }
-    // determine offset of XING header
-    if(id&1) { // mpeg1
-        if (mode != 3) offset += 32;
-        else offset += 17;
-    } else { // mpeg2
-        if (mode != 3) offset += 17;
-        else offset += 9;
-    }
-
-    if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID
-        return false;
-    }
-    offset += 4;
-    // Check XING ID
-    if ((buffer[0] != 'X') || (buffer[1] != 'i')
-                || (buffer[2] != 'n') || (buffer[3] != 'g')) {
-        if ((buffer[0] != 'I') || (buffer[1] != 'n')
-                    || (buffer[2] != 'f') || (buffer[3] != 'o')) {
-            return false;
-        }
-    }
-
-    if (source->readAt(offset, &buffer, 4) < 4) { // flags
-        return false;
-    }
-    offset += 4;
-    uint32_t flags = U32_AT(buffer);
-
-    if (flags & 0x0001) {  // Frames field is present
-        if (source->readAt(offset, buffer, 4) < 4) {
-             return false;
-        }
-        if (frame_number) {
-           *frame_number = U32_AT(buffer);
-        }
-        int32_t frame = U32_AT(buffer);
-        // Samples per Frame: 1. index = MPEG Version ID, 2. index = Layer
-        const int samplesPerFrames[2][3] =
-        {
-            { 384, 1152, 576  }, // MPEG 2, 2.5: layer1, layer2, layer3
-            { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3
-        };
-        // sampling rates in hertz: 1. index = MPEG Version ID, 2. index = sampling rate index
-        const int samplingRates[4][3] =
-        {
-            { 11025, 12000, 8000,  },    // MPEG 2.5
-            { 0,     0,     0,     },    // reserved
-            { 22050, 24000, 16000, },    // MPEG 2
-            { 44100, 48000, 32000, }     // MPEG 1
-        };
-        if (duration) {
-            *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL
-                / samplingRates[id][sr_index];
-        }
-        offset += 4;
-    }
-    if (flags & 0x0002) {  // Bytes field is present
-        if (byte_number) {
-            if (source->readAt(offset, buffer, 4) < 4) {
-                return false;
-            }
-            *byte_number = U32_AT(buffer);
-        }
-        offset += 4;
-    }
-    if (flags & 0x0004) {  // TOC field is present
-       if (table_of_contents) {
-            if (source->readAt(offset + 1, table_of_contents, 99) < 99) {
-                return false;
-            }
-        }
-        offset += 100;
-    }
-    if (flags & 0x0008) {  // Quality indicator field is present
-        if (quality_indicator) {
-            if (source->readAt(offset, buffer, 4) < 4) {
-                return false;
-            }
-            *quality_indicator = U32_AT(buffer);
-        }
-    }
-    return true;
-}
-
 static bool Resync(
         const sp<DataSource> &source, uint32_t match_header,
-        off_t *inout_pos, uint32_t *out_header) {
+        off64_t *inout_pos, off64_t *post_id3_pos, uint32_t *out_header) {
+    if (post_id3_pos != NULL) {
+        *post_id3_pos = 0;
+    }
+
     if (*inout_pos == 0) {
         // Skip an optional ID3 header if syncing at the very beginning
         // of the datasource.
@@ -337,16 +217,20 @@
 
             *inout_pos += len;
 
-            LOGV("skipped ID3 tag, new starting offset is %ld (0x%08lx)",
+            LOGV("skipped ID3 tag, new starting offset is %lld (0x%016llx)",
                  *inout_pos, *inout_pos);
         }
+
+        if (post_id3_pos != NULL) {
+            *post_id3_pos = *inout_pos;
+        }
     }
 
-    off_t pos = *inout_pos;
+    off64_t pos = *inout_pos;
     bool valid = false;
 
     const size_t kMaxReadBytes = 1024;
-    const off_t kMaxBytesChecked = 128 * 1024;
+    const size_t kMaxBytesChecked = 128 * 1024;
     uint8_t buf[kMaxReadBytes];
     ssize_t bytesToRead = kMaxReadBytes;
     ssize_t totalBytesRead = 0;
@@ -357,7 +241,7 @@
     do {
         if (pos >= *inout_pos + kMaxBytesChecked) {
             // Don't scan forever.
-            LOGV("giving up at offset %ld", pos);
+            LOGV("giving up at offset %lld", pos);
             break;
         }
 
@@ -398,20 +282,21 @@
 
         size_t frame_size;
         int sample_rate, num_channels, bitrate;
-        if (!get_mp3_frame_size(header, &frame_size,
-                               &sample_rate, &num_channels, &bitrate)) {
+        if (!MP3Extractor::get_mp3_frame_size(
+                    header, &frame_size,
+                    &sample_rate, &num_channels, &bitrate)) {
             ++pos;
             ++tmp;
             --remainingBytes;
             continue;
         }
 
-        LOGV("found possible 1st frame at %ld (header = 0x%08x)", pos, header);
+        LOGV("found possible 1st frame at %lld (header = 0x%08x)", pos, header);
 
         // We found what looks like a valid frame,
         // now find its successors.
 
-        off_t test_pos = pos + frame_size;
+        off64_t test_pos = pos + frame_size;
 
         valid = true;
         for (int j = 0; j < 3; ++j) {
@@ -431,12 +316,13 @@
             }
 
             size_t test_frame_size;
-            if (!get_mp3_frame_size(test_header, &test_frame_size)) {
+            if (!MP3Extractor::get_mp3_frame_size(
+                        test_header, &test_frame_size)) {
                 valid = false;
                 break;
             }
 
-            LOGV("found subsequent frame #%d at %ld", j + 2, test_pos);
+            LOGV("found subsequent frame #%d at %lld", j + 2, test_pos);
 
             test_pos += test_frame_size;
         }
@@ -463,8 +349,8 @@
 public:
     MP3Source(
             const sp<MetaData> &meta, const sp<DataSource> &source,
-            off_t first_frame_pos, uint32_t fixed_header,
-            int32_t byte_number, const char *table_of_contents);
+            off64_t first_frame_pos, uint32_t fixed_header,
+            const sp<MP3Seeker> &seeker);
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
@@ -480,14 +366,12 @@
 private:
     sp<MetaData> mMeta;
     sp<DataSource> mDataSource;
-    off_t mFirstFramePos;
+    off64_t mFirstFramePos;
     uint32_t mFixedHeader;
-    off_t mCurrentPos;
+    off64_t mCurrentPos;
     int64_t mCurrentTimeUs;
     bool mStarted;
-    int32_t mByteNumber; // total number of bytes in this MP3
-    // TOC entries in XING header. Skip the first one since it's always 0.
-    char mTableOfContents[99];
+    sp<MP3Seeker> mSeeker;
     MediaBufferGroup *mGroup;
 
     MP3Source(const MP3Source &);
@@ -499,25 +383,28 @@
     : mInitCheck(NO_INIT),
       mDataSource(source),
       mFirstFramePos(-1),
-      mFixedHeader(0),
-      mByteNumber(0) {
-    off_t pos = 0;
+      mFixedHeader(0) {
+    off64_t pos = 0;
+    off64_t post_id3_pos;
     uint32_t header;
     bool success;
 
     int64_t meta_offset;
     uint32_t meta_header;
+    int64_t meta_post_id3_offset;
     if (meta != NULL
             && meta->findInt64("offset", &meta_offset)
-            && meta->findInt32("header", (int32_t *)&meta_header)) {
+            && meta->findInt32("header", (int32_t *)&meta_header)
+            && meta->findInt64("post-id3-offset", &meta_post_id3_offset)) {
         // The sniffer has already done all the hard work for us, simply
         // accept its judgement.
-        pos = (off_t)meta_offset;
+        pos = (off64_t)meta_offset;
         header = meta_header;
+        post_id3_pos = (off64_t)meta_post_id3_offset;
 
         success = true;
     } else {
-        success = Resync(mDataSource, 0, &pos, &header);
+        success = Resync(mDataSource, 0, &pos, &post_id3_pos, &header);
     }
 
     if (!success) {
@@ -542,21 +429,27 @@
     mMeta->setInt32(kKeyBitRate, bitrate * 1000);
     mMeta->setInt32(kKeyChannelCount, num_channels);
 
-    int64_t duration;
-    parse_xing_header(
-            mDataSource, mFirstFramePos, NULL, &mByteNumber,
-            mTableOfContents, NULL, &duration);
-    if (duration > 0) {
-        mMeta->setInt64(kKeyDuration, duration);
-    } else {
-        off_t fileSize;
+    mSeeker = XINGSeeker::CreateFromSource(mDataSource, mFirstFramePos);
+
+    if (mSeeker == NULL) {
+        mSeeker = VBRISeeker::CreateFromSource(mDataSource, post_id3_pos);
+    }
+
+    int64_t durationUs;
+
+    if (mSeeker == NULL || !mSeeker->getDuration(&durationUs)) {
+        off64_t fileSize;
         if (mDataSource->getSize(&fileSize) == OK) {
-            mMeta->setInt64(
-                    kKeyDuration,
-                    8000LL * (fileSize - mFirstFramePos) / bitrate);
+            durationUs = 8000LL * (fileSize - mFirstFramePos) / bitrate;
+        } else {
+            durationUs = -1;
         }
     }
 
+    if (durationUs >= 0) {
+        mMeta->setInt64(kKeyDuration, durationUs);
+    }
+
     mInitCheck = OK;
 }
 
@@ -571,7 +464,7 @@
 
     return new MP3Source(
             mMeta, mDataSource, mFirstFramePos, mFixedHeader,
-            mByteNumber, mTableOfContents);
+            mSeeker);
 }
 
 sp<MetaData> MP3Extractor::getTrackMetaData(size_t index, uint32_t flags) {
@@ -586,8 +479,8 @@
 
 MP3Source::MP3Source(
         const sp<MetaData> &meta, const sp<DataSource> &source,
-        off_t first_frame_pos, uint32_t fixed_header,
-        int32_t byte_number, const char *table_of_contents)
+        off64_t first_frame_pos, uint32_t fixed_header,
+        const sp<MP3Seeker> &seeker)
     : mMeta(meta),
       mDataSource(source),
       mFirstFramePos(first_frame_pos),
@@ -595,9 +488,8 @@
       mCurrentPos(0),
       mCurrentTimeUs(0),
       mStarted(false),
-      mByteNumber(byte_number),
+      mSeeker(seeker),
       mGroup(NULL) {
-    memcpy (mTableOfContents, table_of_contents, sizeof(mTableOfContents));
 }
 
 MP3Source::~MP3Source() {
@@ -644,43 +536,21 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) {
-        int32_t bitrate;
-        if (!mMeta->findInt32(kKeyBitRate, &bitrate)) {
-            // bitrate is in bits/sec.
-            LOGI("no bitrate");
+        int64_t actualSeekTimeUs = seekTimeUs;
+        if (mSeeker == NULL
+                || !mSeeker->getOffsetForTime(&actualSeekTimeUs, &mCurrentPos)) {
+            int32_t bitrate;
+            if (!mMeta->findInt32(kKeyBitRate, &bitrate)) {
+                // bitrate is in bits/sec.
+                LOGI("no bitrate");
 
-            return ERROR_UNSUPPORTED;
-        }
-
-        mCurrentTimeUs = seekTimeUs;
-        // interpolate in TOC to get file seek point in bytes
-        int64_t duration;
-        if ((mByteNumber > 0) && (mTableOfContents[0] > 0)
-            && mMeta->findInt64(kKeyDuration, &duration)) {
-            float percent = (float)seekTimeUs * 100 / duration;
-            float fx;
-            if( percent <= 0.0f ) {
-                fx = 0.0f;
-            } else if( percent >= 100.0f ) {
-                fx = 256.0f;
-            } else {
-                int a = (int)percent;
-                float fa, fb;
-                if ( a == 0 ) {
-                    fa = 0.0f;
-                } else {
-                    fa = (float)mTableOfContents[a-1];
-                }
-                if ( a < 99 ) {
-                    fb = (float)mTableOfContents[a];
-                } else {
-                    fb = 256.0f;
-                }
-                fx = fa + (fb-fa)*(percent-a);
+                return ERROR_UNSUPPORTED;
             }
-            mCurrentPos = mFirstFramePos + (int)((1.0f/256.0f)*fx*mByteNumber);
-        } else {
+
+            mCurrentTimeUs = seekTimeUs;
             mCurrentPos = mFirstFramePos + seekTimeUs * bitrate / 8000000;
+        } else {
+            mCurrentTimeUs = actualSeekTimeUs;
         }
     }
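
The fallback path above assumes a constant bitrate: kKeyBitRate is stored in bits per second, so seekTimeUs * bitrate / 8000000 converts microseconds into a byte offset past the first frame. A worked example with illustrative numbers (the 417-byte first-frame offset is hypothetical):

#include <cassert>

int main()
{
    const long long bitrate = 128000;        // bits per second (kKeyBitRate)
    const long long firstFramePos = 417;     // illustrative, e.g. just past an ID3v2 tag

    long long seekTimeUs = 10LL * 1000000;   // seek to 10 seconds
    // 10 s at 128 kbps = 160000 bytes into the audio data.
    long long pos = firstFramePos + seekTimeUs * bitrate / 8000000;
    assert(pos == firstFramePos + 160000);
    return 0;
}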
 
@@ -704,15 +574,16 @@
         uint32_t header = U32_AT((const uint8_t *)buffer->data());
 
         if ((header & kMask) == (mFixedHeader & kMask)
-            && get_mp3_frame_size(header, &frame_size, NULL, NULL, &bitrate)) {
+            && MP3Extractor::get_mp3_frame_size(
+                header, &frame_size, NULL, NULL, &bitrate)) {
             break;
         }
 
         // Lost sync.
         LOGV("lost sync! header = 0x%08x, old header = 0x%08x\n", header, mFixedHeader);
 
-        off_t pos = mCurrentPos;
-        if (!Resync(mDataSource, mFixedHeader, &pos, NULL)) {
+        off64_t pos = mCurrentPos;
+        if (!Resync(mDataSource, mFixedHeader, &pos, NULL, NULL)) {
             LOGE("Unable to resync. Signalling end of stream.");
 
             buffer->release();
@@ -818,15 +689,17 @@
 bool SniffMP3(
         const sp<DataSource> &source, String8 *mimeType,
         float *confidence, sp<AMessage> *meta) {
-    off_t pos = 0;
+    off64_t pos = 0;
+    off64_t post_id3_pos;
     uint32_t header;
-    if (!Resync(source, 0, &pos, &header)) {
+    if (!Resync(source, 0, &pos, &post_id3_pos, &header)) {
         return false;
     }
 
     *meta = new AMessage;
     (*meta)->setInt64("offset", pos);
     (*meta)->setInt32("header", header);
+    (*meta)->setInt64("post-id3-offset", post_id3_pos);
 
     *mimeType = MEDIA_MIMETYPE_AUDIO_MPEG;
     *confidence = 0.2f;
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 81a2b0d..4e4f289 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -42,12 +42,21 @@
     unsigned streamType() const;
     unsigned incrementContinuityCounter();
 
+    void readMore();
+
     enum {
         kNotifyStartFailed,
         kNotifyBuffer,
         kNotifyReachedEOS,
     };
 
+    sp<ABuffer> lastAccessUnit();
+    int64_t lastAccessUnitTimeUs();
+    void setLastAccessUnit(const sp<ABuffer> &accessUnit);
+
+    void setEOSReceived();
+    bool eosReceived() const;
+
 protected:
     virtual void onMessageReceived(const sp<AMessage> &msg);
 
@@ -67,13 +76,16 @@
 
     sp<ABuffer> mAACBuffer;
 
+    sp<ABuffer> mLastAccessUnit;
+    bool mEOSReceived;
+
     unsigned mStreamType;
     unsigned mContinuityCounter;
 
     void extractCodecSpecificData();
 
-    void appendAACFrames(MediaBuffer *buffer);
-    void flushAACFrames();
+    bool appendAACFrames(MediaBuffer *buffer);
+    bool flushAACFrames();
 
     void postAVCFrame(MediaBuffer *buffer);
 
@@ -83,6 +95,7 @@
 MPEG2TSWriter::SourceInfo::SourceInfo(const sp<MediaSource> &source)
     : mSource(source),
       mLooper(new ALooper),
+      mEOSReceived(false),
       mStreamType(0),
       mContinuityCounter(0) {
     mLooper->setName("MPEG2TSWriter source");
@@ -232,6 +245,7 @@
     sp<AMessage> notify = mNotify->dup();
     notify->setInt32("what", kNotifyBuffer);
     notify->setObject("buffer", out);
+    notify->setInt32("oob", true);
     notify->post();
 }
 
@@ -260,11 +274,13 @@
     notify->post();
 }
 
-void MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) {
+bool MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) {
+    bool accessUnitPosted = false;
+
     if (mAACBuffer != NULL
             && mAACBuffer->size() + 7 + buffer->range_length()
                     > mAACBuffer->capacity()) {
-        flushAACFrames();
+        accessUnitPosted = flushAACFrames();
     }
 
     if (mAACBuffer == NULL) {
@@ -324,11 +340,13 @@
     ptr += buffer->range_length();
 
     mAACBuffer->setRange(0, ptr - mAACBuffer->data());
+
+    return accessUnitPosted;
 }
 
-void MPEG2TSWriter::SourceInfo::flushAACFrames() {
+bool MPEG2TSWriter::SourceInfo::flushAACFrames() {
     if (mAACBuffer == NULL) {
-        return;
+        return false;
     }
 
     sp<AMessage> notify = mNotify->dup();
@@ -337,6 +355,12 @@
     notify->post();
 
     mAACBuffer.clear();
+
+    return true;
+}
+
+void MPEG2TSWriter::SourceInfo::readMore() {
+    (new AMessage(kWhatRead, id()))->post();
 }
 
 void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) {
@@ -353,7 +377,7 @@
 
             extractCodecSpecificData();
 
-            (new AMessage(kWhatRead, id()))->post();
+            readMore();
             break;
         }
 
@@ -388,7 +412,9 @@
                            buffer->range_length());
                 } else if (buffer->range_length() > 0) {
                     if (mStreamType == 0x0f) {
-                        appendAACFrames(buffer);
+                        if (!appendAACFrames(buffer)) {
+                            msg->post();
+                        }
                     } else {
                         postAVCFrame(buffer);
                     }
@@ -398,7 +424,7 @@
                 buffer = NULL;
             }
 
-            msg->post();
+            // Do not read more data until told to.
             break;
         }
 
@@ -407,10 +433,39 @@
     }
 }
 
+sp<ABuffer> MPEG2TSWriter::SourceInfo::lastAccessUnit() {
+    return mLastAccessUnit;
+}
+
+void MPEG2TSWriter::SourceInfo::setLastAccessUnit(
+        const sp<ABuffer> &accessUnit) {
+    mLastAccessUnit = accessUnit;
+}
+
+int64_t MPEG2TSWriter::SourceInfo::lastAccessUnitTimeUs() {
+    if (mLastAccessUnit == NULL) {
+        return -1;
+    }
+
+    int64_t timeUs;
+    CHECK(mLastAccessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    return timeUs;
+}
+
+void MPEG2TSWriter::SourceInfo::setEOSReceived() {
+    CHECK(!mEOSReceived);
+    mEOSReceived = true;
+}
+
+bool MPEG2TSWriter::SourceInfo::eosReceived() const {
+    return mEOSReceived;
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 MPEG2TSWriter::MPEG2TSWriter(int fd)
-    : mFile(fdopen(fd, "wb")),
+    : mFile(fdopen(dup(fd), "wb")),
       mStarted(false),
       mNumSourcesDone(0),
       mNumTSPacketsWritten(0),
@@ -527,15 +582,89 @@
 
             if (what == SourceInfo::kNotifyReachedEOS
                     || what == SourceInfo::kNotifyStartFailed) {
+                sp<SourceInfo> source = mSources.editItemAt(sourceIndex);
+                source->setEOSReceived();
+
+                sp<ABuffer> buffer = source->lastAccessUnit();
+                source->setLastAccessUnit(NULL);
+
+                if (buffer != NULL) {
+                    writeTS();
+                    writeAccessUnit(sourceIndex, buffer);
+                }
+
                 ++mNumSourcesDone;
             } else if (what == SourceInfo::kNotifyBuffer) {
                 sp<RefBase> obj;
                 CHECK(msg->findObject("buffer", &obj));
 
-                writeTS();
-
                 sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
-                writeAccessUnit(sourceIndex, buffer);
+
+                int32_t oob;
+                if (msg->findInt32("oob", &oob) && oob) {
+                    // This is codec specific data delivered out of band.
+                    // It can be written out immediately.
+                    writeTS();
+                    writeAccessUnit(sourceIndex, buffer);
+                    break;
+                }
+
+                // We don't just write out data as we receive it from
+                // the various sources. That would essentially write them
+                // out in random order (as the thread scheduler determines
+                // how the messages are dispatched).
+                // Instead we gather an access unit for all tracks and
+                // write out the one with the smallest timestamp, then
+                // request more data for the written out track.
+                // Rinse, repeat.
+                // If any track is still missing data we don't write
+                // anything just yet.
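
Stripped of the message plumbing, the selection logic that follows is a min-by-timestamp merge across tracks. A standalone sketch with hypothetical types (illustration only, not the writer's actual classes):

#include <vector>
#include <cstdio>

// Each track either has a pending access unit (pendingTimeUs >= 0) or is done.
struct Track {
    long long pendingTimeUs;   // -1 means no pending access unit yet
    bool eos;
};

// Returns the index of the track whose pending unit should be written next,
// or -1 if some non-finished track has nothing pending (so we must wait).
static int pickNextTrack(const std::vector<Track> &tracks)
{
    long long minTimeUs = -1;
    int minIndex = -1;
    for (size_t i = 0; i < tracks.size(); ++i) {
        if (tracks[i].eos) {
            continue;
        }
        if (tracks[i].pendingTimeUs < 0) {
            return -1;                        // a live track has no data yet
        }
        if (minTimeUs < 0 || tracks[i].pendingTimeUs < minTimeUs) {
            minTimeUs = tracks[i].pendingTimeUs;
            minIndex = (int) i;
        }
    }
    return minIndex;
}

int main()
{
    std::vector<Track> tracks(2);
    tracks[0].pendingTimeUs = 33000; tracks[0].eos = false;
    tracks[1].pendingTimeUs = 21000; tracks[1].eos = false;
    std::printf("write track %d first\n", pickNextTrack(tracks));  // prints 1
    return 0;
}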
+
+                sp<SourceInfo> source = mSources.editItemAt(sourceIndex);
+
+                CHECK(source->lastAccessUnit() == NULL);
+                source->setLastAccessUnit(buffer);
+
+                LOGV("lastAccessUnitTimeUs[%d] = %.2f secs",
+                     sourceIndex, source->lastAccessUnitTimeUs() / 1E6);
+
+                int64_t minTimeUs = -1;
+                size_t minIndex = 0;
+
+                for (size_t i = 0; i < mSources.size(); ++i) {
+                    const sp<SourceInfo> &source = mSources.editItemAt(i);
+
+                    if (source->eosReceived()) {
+                        continue;
+                    }
+
+                    int64_t timeUs = source->lastAccessUnitTimeUs();
+                    if (timeUs < 0) {
+                        minTimeUs = -1;
+                        break;
+                    } else if (minTimeUs < 0 || timeUs < minTimeUs) {
+                        minTimeUs = timeUs;
+                        minIndex = i;
+                    }
+                }
+
+                if (minTimeUs < 0) {
+                    LOGV("not all tracks have valid data.");
+                    break;
+                }
+
+                LOGV("writing access unit at time %.2f secs (index %d)",
+                     minTimeUs / 1E6, minIndex);
+
+                source = mSources.editItemAt(minIndex);
+
+                buffer = source->lastAccessUnit();
+                source->setLastAccessUnit(NULL);
+
+                writeTS();
+                writeAccessUnit(minIndex, buffer);
+
+                source->readMore();
             }
             break;
         }
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 5497322..7b96d01 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -98,11 +98,11 @@
     MPEG4DataSource(const sp<DataSource> &source);
 
     virtual status_t initCheck() const;
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
-    virtual status_t getSize(off_t *size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+    virtual status_t getSize(off64_t *size);
     virtual uint32_t flags();
 
-    status_t setCachedRange(off_t offset, size_t size);
+    status_t setCachedRange(off64_t offset, size_t size);
 
 protected:
     virtual ~MPEG4DataSource();
@@ -111,7 +111,7 @@
     Mutex mLock;
 
     sp<DataSource> mSource;
-    off_t mCachedOffset;
+    off64_t mCachedOffset;
     size_t mCachedSize;
     uint8_t *mCache;
 
@@ -146,7 +146,7 @@
     return mSource->initCheck();
 }
 
-ssize_t MPEG4DataSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t MPEG4DataSource::readAt(off64_t offset, void *data, size_t size) {
     Mutex::Autolock autoLock(mLock);
 
     if (offset >= mCachedOffset
@@ -158,7 +158,7 @@
     return mSource->readAt(offset, data, size);
 }
 
-status_t MPEG4DataSource::getSize(off_t *size) {
+status_t MPEG4DataSource::getSize(off64_t *size) {
     return mSource->getSize(size);
 }
 
@@ -166,7 +166,7 @@
     return mSource->flags();
 }
 
-status_t MPEG4DataSource::setCachedRange(off_t offset, size_t size) {
+status_t MPEG4DataSource::setCachedRange(off64_t offset, size_t size) {
     Mutex::Autolock autoLock(mLock);
 
     clearCache();
@@ -247,6 +247,8 @@
             return MEDIA_MIMETYPE_VIDEO_MPEG4;
 
         case FOURCC('s', '2', '6', '3'):
+        case FOURCC('h', '2', '6', '3'):
+        case FOURCC('H', '2', '6', '3'):
             return MEDIA_MIMETYPE_VIDEO_H263;
 
         case FOURCC('a', 'v', 'c', '1'):
@@ -363,7 +365,7 @@
         return OK;
     }
 
-    off_t offset = 0;
+    off64_t offset = 0;
     status_t err;
     while ((err = parseChunk(&offset, 0)) == OK) {
     }
@@ -404,7 +406,7 @@
 }
 
 // Reads an encoded integer 7 bits at a time until it encounters a byte with the high bit clear.
-int32_t readSize(off_t offset,
+int32_t readSize(off64_t offset,
         const sp<DataSource> DataSource, uint8_t *numOfBytes) {
     uint32_t size = 0;
     uint8_t data;
@@ -424,7 +426,7 @@
     return size;
 }
 
-status_t MPEG4Extractor::parseDrmSINF(off_t *offset, off_t data_offset) {
+status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) {
     uint8_t updateIdTag;
     if (mDataSource->readAt(data_offset, &updateIdTag, 1) < 1) {
         return ERROR_IO;
@@ -596,14 +598,14 @@
     s->setTo(tmp);
 }
 
-status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
+status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
     uint32_t hdr[2];
     if (mDataSource->readAt(*offset, hdr, 8) < 8) {
         return ERROR_IO;
     }
     uint64_t chunk_size = ntohl(hdr[0]);
     uint32_t chunk_type = ntohl(hdr[1]);
-    off_t data_offset = *offset + 8;
+    off64_t data_offset = *offset + 8;
 
     if (chunk_size == 1) {
         if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) {
@@ -644,11 +646,11 @@
 
     PathAdder autoAdder(&mPath, chunk_type);
 
-    off_t chunk_data_size = *offset + chunk_size - data_offset;
+    off64_t chunk_data_size = *offset + chunk_size - data_offset;
 
     if (chunk_type != FOURCC('c', 'p', 'r', 't')
             && mPath.size() == 5 && underMetaDataPath(mPath)) {
-        off_t stop_offset = *offset + chunk_size;
+        off64_t stop_offset = *offset + chunk_size;
         *offset = data_offset;
         while (*offset < stop_offset) {
             status_t err = parseChunk(offset, depth + 1);
@@ -715,7 +717,7 @@
                 track->meta->setCString(kKeyMIMEType, "application/octet-stream");
             }
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset;
             while (*offset < stop_offset) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -788,7 +790,7 @@
                 return ERROR_IO;
             }
 
-            off_t timescale_offset;
+            off64_t timescale_offset;
 
             if (version == 1) {
                 timescale_offset = data_offset + 4 + 16;
@@ -838,7 +840,7 @@
             }
 
             uint8_t buffer[8];
-            if (chunk_data_size < (off_t)sizeof(buffer)) {
+            if (chunk_data_size < (off64_t)sizeof(buffer)) {
                 return ERROR_MALFORMED;
             }
 
@@ -862,7 +864,7 @@
                 break;
             }
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset + 8;
             for (uint32_t i = 0; i < entry_count; ++i) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -919,7 +921,7 @@
             mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
             mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset + sizeof(buffer);
             while (*offset < stop_offset) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -936,6 +938,8 @@
 
         case FOURCC('m', 'p', '4', 'v'):
         case FOURCC('s', '2', '6', '3'):
+        case FOURCC('H', '2', '6', '3'):
+        case FOURCC('h', '2', '6', '3'):
         case FOURCC('a', 'v', 'c', '1'):
         {
             mHasVideo = true;
@@ -955,6 +959,13 @@
             uint16_t width = U16_AT(&buffer[6 + 18]);
             uint16_t height = U16_AT(&buffer[6 + 20]);
 
+            // The video sample is not standard-compliant if it has invalid
+            // dimensions. Use default width and height values and let the
+            // decoder figure out the actual dimensions (and thus be prepared
+            // for an INFO_FORMAT_CHANGED event).
+            if (width == 0)  width  = 352;
+            if (height == 0) height = 288;
+
             // printf("*** coding='%s' width=%d height=%d\n",
             //        chunk, width, height);
 
@@ -962,7 +973,7 @@
             mLastTrack->meta->setInt32(kKeyWidth, width);
             mLastTrack->meta->setInt32(kKeyHeight, height);
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset + sizeof(buffer);
             while (*offset < stop_offset) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -1029,8 +1040,23 @@
             // have a 4 byte header (0x00 0x00 0x00 0x01) after conversion,
             // and thus will grow by 2 bytes per fragment.
             mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size + 10 * 2);
-
             *offset += chunk_size;
+
+            // Calculate average frame rate.
+            const char *mime;
+            CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime));
+            if (!strncasecmp("video/", mime, 6)) {
+                size_t nSamples = mLastTrack->sampleTable->countSamples();
+                int64_t durationUs;
+                if (mLastTrack->meta->findInt64(kKeyDuration, &durationUs)) {
+                    if (durationUs > 0) {
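+                        // Adding durationUs/2 before the division rounds the
+                        // frame rate to the nearest integer.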
+                        int32_t frameRate = (nSamples * 1000000LL +
+                                    (durationUs >> 1)) / durationUs;
+                        mLastTrack->meta->setInt32(kKeyFrameRate, frameRate);
+                    }
+                }
+            }
+
             break;
         }
 
@@ -1048,6 +1074,20 @@
             break;
         }
 
+        case FOURCC('c', 't', 't', 's'):
+        {
+            status_t err =
+                mLastTrack->sampleTable->setCompositionTimeToSampleParams(
+                        data_offset, chunk_data_size);
+
+            if (err != OK) {
+                return err;
+            }
+
+            *offset += chunk_size;
+            break;
+        }
+
         case FOURCC('s', 't', 's', 's'):
         {
             status_t err =
@@ -1069,7 +1109,7 @@
             }
 
             uint8_t buffer[256];
-            if (chunk_data_size > (off_t)sizeof(buffer)) {
+            if (chunk_data_size > (off64_t)sizeof(buffer)) {
                 return ERROR_BUFFER_TOO_SMALL;
             }
 
@@ -1108,7 +1148,7 @@
         case FOURCC('a', 'v', 'c', 'C'):
         {
             char buffer[256];
-            if (chunk_data_size > (off_t)sizeof(buffer)) {
+            if (chunk_data_size > (off64_t)sizeof(buffer)) {
                 return ERROR_BUFFER_TOO_SMALL;
             }
 
@@ -1124,10 +1164,41 @@
             break;
         }
 
+        case FOURCC('d', '2', '6', '3'):
+        {
+            /*
+             * The d263 box contains a fixed 7-byte part:
+             *   vendor - 4 bytes
+             *   version - 1 byte
+             *   level - 1 byte
+             *   profile - 1 byte
+             * Optionally, the "d263" box itself may contain a 16-byte
+             * bit rate box ("bitr"):
+             *   average bit rate - 4 bytes
+             *   max bit rate - 4 bytes
+             */
+            char buffer[23];
+            if (chunk_data_size != 7 &&
+                chunk_data_size != 23) {
+                LOGE("Incorrect D263 box size %lld", chunk_data_size);
+                return ERROR_MALFORMED;
+            }
+
+            if (mDataSource->readAt(
+                    data_offset, buffer, chunk_data_size) < chunk_data_size) {
+                return ERROR_IO;
+            }
+
+            mLastTrack->meta->setData(kKeyD263, kTypeD263, buffer, chunk_data_size);
+
+            *offset += chunk_size;
+            break;
+        }
+
         case FOURCC('m', 'e', 't', 'a'):
         {
             uint8_t buffer[4];
-            if (chunk_data_size < (off_t)sizeof(buffer)) {
+            if (chunk_data_size < (off64_t)sizeof(buffer)) {
                 return ERROR_MALFORMED;
             }
 
@@ -1147,7 +1218,7 @@
                 return OK;
             }
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset + sizeof(buffer);
             while (*offset < stop_offset) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -1232,7 +1303,7 @@
 }
 
 status_t MPEG4Extractor::parseTrackHeader(
-        off_t data_offset, off_t data_size) {
+        off64_t data_offset, off64_t data_size) {
     if (data_size < 4) {
         return ERROR_MALFORMED;
     }
@@ -1246,7 +1317,7 @@
 
     uint8_t buffer[36 + 60];
 
-    if (data_size != (off_t)dynSize + 60) {
+    if (data_size != (off64_t)dynSize + 60) {
         return ERROR_MALFORMED;
     }
 
@@ -1263,7 +1334,9 @@
         mtime = U64_AT(&buffer[12]);
         id = U32_AT(&buffer[20]);
         duration = U64_AT(&buffer[28]);
-    } else if (version == 0) {
+    } else {
+        CHECK_EQ((unsigned)version, 0u);
+
         ctime = U32_AT(&buffer[4]);
         mtime = U32_AT(&buffer[8]);
         id = U32_AT(&buffer[12]);
@@ -1308,15 +1381,17 @@
         mLastTrack->meta->setInt32(kKeyRotation, rotationDegrees);
     }
 
-#if 0
+    // Handle presentation display size, which could be different
+    // from the image size indicated by kKeyWidth and kKeyHeight.
     uint32_t width = U32_AT(&buffer[dynSize + 52]);
     uint32_t height = U32_AT(&buffer[dynSize + 56]);
-#endif
+    mLastTrack->meta->setInt32(kKeyDisplayWidth, width >> 16);
+    mLastTrack->meta->setInt32(kKeyDisplayHeight, height >> 16);
 
     return OK;
 }
 
-status_t MPEG4Extractor::parseMetaData(off_t offset, size_t size) {
+status_t MPEG4Extractor::parseMetaData(off64_t offset, size_t size) {
     if (size < 4) {
         return ERROR_MALFORMED;
     }
@@ -1521,6 +1596,14 @@
         return OK;
     }
 
+    if (objectTypeIndication == 0x6b) {
+        // The media subtype is MP3 audio.
+        // Our software MP3 audio decoder may not be able to handle
+        // packetized MP3 audio; for now, let's just return ERROR_UNSUPPORTED.
+        LOGE("MP3 track in MP4/3GPP file is not supported");
+        return ERROR_UNSUPPORTED;
+    }
+
     const uint8_t *csd;
     size_t csd_size;
     if (esds.getCodecSpecificInfo(
@@ -1816,7 +1899,7 @@
         // fall through
     }
 
-    off_t offset;
+    off64_t offset;
     size_t size;
     uint32_t dts;
     bool isSyncSample;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 7eb7d46..5d6ea7c 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -33,6 +33,10 @@
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/Utils.h>
 #include <media/mediarecorder.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
 
 #include "include/ESDS.h"
 
@@ -64,7 +68,7 @@
     bool isAvc() const { return mIsAvc; }
     bool isAudio() const { return mIsAudio; }
     bool isMPEG4() const { return mIsMPEG4; }
-    void addChunkOffset(off_t offset);
+    void addChunkOffset(off64_t offset);
     status_t dump(int fd, const Vector<String16>& args) const;
 
 private:
@@ -74,6 +78,7 @@
     volatile bool mDone;
     volatile bool mPaused;
     volatile bool mResumed;
+    volatile bool mStarted;
     bool mIsAvc;
     bool mIsAudio;
     bool mIsMPEG4;
@@ -99,7 +104,7 @@
     List<MediaBuffer *> mChunkSamples;
 
     size_t              mNumStcoTableEntries;
-    List<off_t>         mChunkOffsets;
+    List<off64_t>         mChunkOffsets;
 
     size_t              mNumStscTableEntries;
     struct StscTableEntry {
@@ -214,7 +219,8 @@
 };
 
 MPEG4Writer::MPEG4Writer(const char *filename)
-    : mFile(fopen(filename, "wb")),
+    : mFd(-1),
+      mInitCheck(NO_INIT),
       mUse4ByteNalLength(true),
       mUse32BitOffset(true),
       mIsFileSizeLimitExplicitlyRequested(false),
@@ -224,11 +230,16 @@
       mMdatOffset(0),
       mEstimatedMoovBoxSize(0),
       mInterleaveDurationUs(1000000) {
-    CHECK(mFile != NULL);
+
+    mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    if (mFd >= 0) {
+        mInitCheck = OK;
+    }
 }
 
 MPEG4Writer::MPEG4Writer(int fd)
-    : mFile(fdopen(fd, "wb")),
+    : mFd(dup(fd)),
+      mInitCheck(mFd < 0? NO_INIT: OK),
       mUse4ByteNalLength(true),
       mUse32BitOffset(true),
       mIsFileSizeLimitExplicitlyRequested(false),
@@ -238,7 +249,6 @@
       mMdatOffset(0),
       mEstimatedMoovBoxSize(0),
       mInterleaveDurationUs(1000000) {
-    CHECK(mFile != NULL);
 }
 
 MPEG4Writer::~MPEG4Writer() {
@@ -368,7 +378,7 @@
 }
 
 status_t MPEG4Writer::start(MetaData *param) {
-    if (mFile == NULL) {
+    if (mInitCheck != OK) {
         return UNKNOWN_ERROR;
     }
 
@@ -459,13 +469,13 @@
         mEstimatedMoovBoxSize = estimateMoovBoxSize(bitRate);
     }
     CHECK(mEstimatedMoovBoxSize >= 8);
-    fseeko(mFile, mFreeBoxOffset, SEEK_SET);
+    lseek64(mFd, mFreeBoxOffset, SEEK_SET);
     writeInt32(mEstimatedMoovBoxSize);
     write("free", 4);
 
     mMdatOffset = mFreeBoxOffset + mEstimatedMoovBoxSize;
     mOffset = mMdatOffset;
-    fseeko(mFile, mMdatOffset, SEEK_SET);
+    lseek64(mFd, mMdatOffset, SEEK_SET);
     if (mUse32BitOffset) {
         write("????mdat", 8);
     } else {
@@ -491,7 +501,7 @@
 }
 
 status_t MPEG4Writer::pause() {
-    if (mFile == NULL) {
+    if (mInitCheck != OK) {
         return OK;
     }
     mPaused = true;
@@ -507,7 +517,7 @@
 }
 
 void MPEG4Writer::stopWriterThread() {
-    LOGV("stopWriterThread");
+    LOGD("Stopping writer thread");
 
     {
         Mutex::Autolock autolock(mLock);
@@ -518,6 +528,7 @@
 
     void *dummy;
     pthread_join(mThread, &dummy);
+    LOGD("Writer thread stopped");
 }
 
 /*
@@ -572,13 +583,15 @@
     writeInt32(0x40000000);  // w
 }
 
+
 status_t MPEG4Writer::stop() {
-    if (mFile == NULL) {
+    if (mInitCheck != OK) {
         return OK;
     }
 
     status_t err = OK;
     int64_t maxDurationUs = 0;
+    int64_t minDurationUs = 0x7fffffffffffffffLL;
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
         status_t status = (*it)->stop();
@@ -590,34 +603,42 @@
         if (durationUs > maxDurationUs) {
             maxDurationUs = durationUs;
         }
+        if (durationUs < minDurationUs) {
+            minDurationUs = durationUs;
+        }
+    }
+
+    if (mTracks.size() > 1) {
+        LOGD("Track durations are in the range [%lld, %lld] us",
+            minDurationUs, maxDurationUs);
     }
 
     stopWriterThread();
 
     // Do not write out movie header on error.
     if (err != OK) {
-        fflush(mFile);
-        fclose(mFile);
-        mFile = NULL;
+        close(mFd);
+        mFd = -1;
+        mInitCheck = NO_INIT;
         mStarted = false;
         return err;
     }
 
     // Fix up the size of the 'mdat' chunk.
     if (mUse32BitOffset) {
-        fseeko(mFile, mMdatOffset, SEEK_SET);
+        lseek64(mFd, mMdatOffset, SEEK_SET);
         int32_t size = htonl(static_cast<int32_t>(mOffset - mMdatOffset));
-        fwrite(&size, 1, 4, mFile);
+        ::write(mFd, &size, 4);
     } else {
-        fseeko(mFile, mMdatOffset + 8, SEEK_SET);
+        lseek64(mFd, mMdatOffset + 8, SEEK_SET);
         int64_t size = mOffset - mMdatOffset;
         size = hton64(size);
-        fwrite(&size, 1, 8, mFile);
+        ::write(mFd, &size, 8);
     }
-    fseeko(mFile, mOffset, SEEK_SET);
+    lseek64(mFd, mOffset, SEEK_SET);
 
     time_t now = time(NULL);
-    const off_t moovOffset = mOffset;
+    const off64_t moovOffset = mOffset;
     mWriteMoovBoxToMemory = true;
     mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
     mMoovBoxBufferOffset = 0;
@@ -637,7 +658,7 @@
         writeInt16(0);             // reserved
         writeInt32(0);             // reserved
         writeInt32(0);             // reserved
-        writeCompositionMatrix(0);
+        writeCompositionMatrix(0); // matrix
         writeInt32(0);             // predefined
         writeInt32(0);             // predefined
         writeInt32(0);             // predefined
@@ -659,12 +680,12 @@
         CHECK(mMoovBoxBufferOffset + 8 <= mEstimatedMoovBoxSize);
 
         // Moov box
-        fseeko(mFile, mFreeBoxOffset, SEEK_SET);
+        lseek64(mFd, mFreeBoxOffset, SEEK_SET);
         mOffset = mFreeBoxOffset;
-        write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, mFile);
+        write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset);
 
         // Free box
-        fseeko(mFile, mOffset, SEEK_SET);
+        lseek64(mFd, mOffset, SEEK_SET);
         writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset);
         write("free", 4);
 
@@ -678,9 +699,9 @@
 
     CHECK(mBoxes.empty());
 
-    fflush(mFile);
-    fclose(mFile);
-    mFile = NULL;
+    close(mFd);
+    mFd = -1;
+    mInitCheck = NO_INIT;
     mStarted = false;
     return err;
 }
@@ -698,11 +719,12 @@
     mLock.unlock();
 }
 
-off_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
-    off_t old_offset = mOffset;
+off64_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
+    off64_t old_offset = mOffset;
 
-    fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
-           1, buffer->range_length(), mFile);
+    ::write(mFd,
+          (const uint8_t *)buffer->data() + buffer->range_offset(),
+          buffer->range_length());
 
     mOffset += buffer->range_length();
 
@@ -723,33 +745,34 @@
     }
 }
 
-off_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
-    off_t old_offset = mOffset;
+off64_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
+    off64_t old_offset = mOffset;
 
     size_t length = buffer->range_length();
 
     if (mUse4ByteNalLength) {
         uint8_t x = length >> 24;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
         x = (length >> 16) & 0xff;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
         x = (length >> 8) & 0xff;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
         x = length & 0xff;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
 
-        fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
-                1, length, mFile);
+        ::write(mFd,
+              (const uint8_t *)buffer->data() + buffer->range_offset(),
+              length);
+
         mOffset += length + 4;
     } else {
         CHECK(length < 65536);
 
         uint8_t x = length >> 8;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
         x = length & 0xff;
-        fwrite(&x, 1, 1, mFile);
-        fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
-                1, length, mFile);
+        ::write(mFd, &x, 1);
+        ::write(mFd, (const uint8_t *)buffer->data() + buffer->range_offset(), length);
         mOffset += length + 2;
     }
 
@@ -757,19 +780,21 @@
 }
 
 size_t MPEG4Writer::write(
-        const void *ptr, size_t size, size_t nmemb, FILE *stream) {
+        const void *ptr, size_t size, size_t nmemb) {
 
     const size_t bytes = size * nmemb;
     if (mWriteMoovBoxToMemory) {
-        off_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
+        // This happens only when we write the moov box at the end of
+        // recording, not for each output video/audio frame we receive.
+        off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
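+        // If the moov box would outgrow the reserved free box, flush the
+        // buffered moov data (followed by the new bytes) to the file at
+        // mOffset and release the in-memory buffer.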
         if (moovBoxSize > mEstimatedMoovBoxSize) {
-            for (List<off_t>::iterator it = mBoxes.begin();
+            for (List<off64_t>::iterator it = mBoxes.begin();
                  it != mBoxes.end(); ++it) {
                 (*it) += mOffset;
             }
-            fseeko(mFile, mOffset, SEEK_SET);
-            fwrite(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, stream);
-            fwrite(ptr, size, nmemb, stream);
+            lseek64(mFd, mOffset, SEEK_SET);
+            ::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset);
+            ::write(mFd, ptr, size * nmemb);
             mOffset += (bytes + mMoovBoxBufferOffset);
             free(mMoovBoxBuffer);
             mMoovBoxBuffer = NULL;
@@ -781,7 +806,7 @@
             mMoovBoxBufferOffset += bytes;
         }
     } else {
-        fwrite(ptr, size, nmemb, stream);
+        ::write(mFd, ptr, size * nmemb);
         mOffset += bytes;
     }
     return bytes;
@@ -800,51 +825,51 @@
 void MPEG4Writer::endBox() {
     CHECK(!mBoxes.empty());
 
-    off_t offset = *--mBoxes.end();
+    off64_t offset = *--mBoxes.end();
     mBoxes.erase(--mBoxes.end());
 
     if (mWriteMoovBoxToMemory) {
        int32_t x = htonl(mMoovBoxBufferOffset - offset);
        memcpy(mMoovBoxBuffer + offset, &x, 4);
     } else {
-        fseeko(mFile, offset, SEEK_SET);
+        lseek64(mFd, offset, SEEK_SET);
         writeInt32(mOffset - offset);
         mOffset -= 4;
-        fseeko(mFile, mOffset, SEEK_SET);
+        lseek64(mFd, mOffset, SEEK_SET);
     }
 }
 
 void MPEG4Writer::writeInt8(int8_t x) {
-    write(&x, 1, 1, mFile);
+    write(&x, 1, 1);
 }
 
 void MPEG4Writer::writeInt16(int16_t x) {
     x = htons(x);
-    write(&x, 1, 2, mFile);
+    write(&x, 1, 2);
 }
 
 void MPEG4Writer::writeInt32(int32_t x) {
     x = htonl(x);
-    write(&x, 1, 4, mFile);
+    write(&x, 1, 4);
 }
 
 void MPEG4Writer::writeInt64(int64_t x) {
     x = hton64(x);
-    write(&x, 1, 8, mFile);
+    write(&x, 1, 8);
 }
 
 void MPEG4Writer::writeCString(const char *s) {
     size_t n = strlen(s);
-    write(s, 1, n + 1, mFile);
+    write(s, 1, n + 1);
 }
 
 void MPEG4Writer::writeFourcc(const char *s) {
     CHECK_EQ(strlen(s), 4);
-    write(s, 1, 4, mFile);
+    write(s, 1, 4);
 }
 
 void MPEG4Writer::write(const void *data, size_t size) {
-    write(data, 1, size, mFile);
+    write(data, 1, size);
 }
 
 bool MPEG4Writer::isFileStreamable() const {
@@ -927,6 +952,7 @@
       mDone(false),
       mPaused(false),
       mResumed(false),
+      mStarted(false),
       mTrackDurationUs(0),
       mEstimatedTrackSizeBytes(0),
       mSamplesHaveSameSize(true),
@@ -988,7 +1014,7 @@
     ++mNumSttsTableEntries;
 }
 
-void MPEG4Writer::Track::addChunkOffset(off_t offset) {
+void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
     ++mNumStcoTableEntries;
     mChunkOffsets.push_back(offset);
 }
@@ -1096,41 +1122,41 @@
     CHECK("Received a chunk for an unknown track" == 0);
 }
 
-void MPEG4Writer::writeFirstChunk(ChunkInfo* info) {
-    LOGV("writeFirstChunk: %p", info->mTrack);
+void MPEG4Writer::writeChunkToFile(Chunk* chunk) {
+    LOGV("writeChunkToFile: %lld from %s track",
+        chunk->mTimestampUs, chunk->mTrack->isAudio()? "audio": "video");
 
-    List<Chunk>::iterator chunkIt = info->mChunks.begin();
-    for (List<MediaBuffer *>::iterator it = chunkIt->mSamples.begin();
-         it != chunkIt->mSamples.end(); ++it) {
+    int32_t isFirstSample = true;
+    while (!chunk->mSamples.empty()) {
+        List<MediaBuffer *>::iterator it = chunk->mSamples.begin();
 
-        off_t offset = info->mTrack->isAvc()
-                            ? addLengthPrefixedSample_l(*it)
-                            : addSample_l(*it);
-        if (it == chunkIt->mSamples.begin()) {
-            info->mTrack->addChunkOffset(offset);
+        off64_t offset = chunk->mTrack->isAvc()
+                                ? addLengthPrefixedSample_l(*it)
+                                : addSample_l(*it);
+
+        if (isFirstSample) {
+            chunk->mTrack->addChunkOffset(offset);
+            isFirstSample = false;
         }
-    }
 
-    // Done with the current chunk.
-    // Release all the samples in this chunk.
-    while (!chunkIt->mSamples.empty()) {
-        List<MediaBuffer *>::iterator it = chunkIt->mSamples.begin();
         (*it)->release();
         (*it) = NULL;
-        chunkIt->mSamples.erase(it);
+        chunk->mSamples.erase(it);
     }
-    chunkIt->mSamples.clear();
-    info->mChunks.erase(chunkIt);
+    chunk->mSamples.clear();
 }
 
-void MPEG4Writer::writeChunks() {
-    LOGV("writeChunks");
+void MPEG4Writer::writeAllChunks() {
+    LOGV("writeAllChunks");
     size_t outstandingChunks = 0;
     while (!mChunkInfos.empty()) {
         List<ChunkInfo>::iterator it = mChunkInfos.begin();
         while (!it->mChunks.empty()) {
-            CHECK_EQ(OK, writeOneChunk());
-            ++outstandingChunks;
+            Chunk chunk;
+            if (findChunkToWrite(&chunk)) {
+                writeChunkToFile(&chunk);
+                ++outstandingChunks;
+            }
         }
         it->mTrack = NULL;
         mChunkInfos.erase(it);
@@ -1139,8 +1165,8 @@
     LOGD("%d chunks are written in the last batch", outstandingChunks);
 }
 
-status_t MPEG4Writer::writeOneChunk() {
-    LOGV("writeOneChunk");
+bool MPEG4Writer::findChunkToWrite(Chunk *chunk) {
+    LOGV("findChunkToWrite");
 
     // Find the smallest timestamp, and write that chunk out
     // XXX: What if some track is just too slow?
@@ -1159,38 +1185,50 @@
 
     if (track == NULL) {
         LOGV("Nothing to be written after all");
-        return OK;
+        return false;
     }
 
     if (mIsFirstChunk) {
         mIsFirstChunk = false;
     }
+
     for (List<ChunkInfo>::iterator it = mChunkInfos.begin();
          it != mChunkInfos.end(); ++it) {
         if (it->mTrack == track) {
-            writeFirstChunk(&(*it));
+            *chunk = *(it->mChunks.begin());
+            it->mChunks.erase(it->mChunks.begin());
+            CHECK_EQ(chunk->mTrack, track);
+            return true;
         }
     }
-    return OK;
+
+    return false;
 }
 
 void MPEG4Writer::threadFunc() {
     LOGV("threadFunc");
 
     prctl(PR_SET_NAME, (unsigned long)"MPEG4Writer", 0, 0, 0);
+
+    Mutex::Autolock autoLock(mLock);
     while (!mDone) {
-        {
-            Mutex::Autolock autolock(mLock);
+        Chunk chunk;
+        bool chunkFound = false;
+
+        while (!mDone && !(chunkFound = findChunkToWrite(&chunk))) {
             mChunkReadyCondition.wait(mLock);
-            CHECK_EQ(writeOneChunk(), OK);
+        }
+
+        // Actual write without holding the lock in order to
+        // reduce the blocking time for media track threads.
+        if (chunkFound) {
+            mLock.unlock();
+            writeChunkToFile(&chunk);
+            mLock.lock();
         }
     }
 
-    {
-        // Write ALL samples
-        Mutex::Autolock autolock(mLock);
-        writeChunks();
-    }
+    writeAllChunks();
 }
 
 status_t MPEG4Writer::startWriterThread() {
@@ -1228,7 +1266,7 @@
     }
 
     int32_t rotationDegrees;
-    if (!mIsAudio && params && params->findInt32(kKeyRotationDegree, &rotationDegrees)) {
+    if (!mIsAudio && params && params->findInt32(kKeyRotation, &rotationDegrees)) {
         mRotation = rotationDegrees;
     }
 
@@ -1243,7 +1281,21 @@
     initTrackingProgressStatus(params);
 
     sp<MetaData> meta = new MetaData;
+    if (mIsRealTimeRecording && mOwner->numTracks() > 1) {
+        /*
+         * This extra delay in accepting incoming audio/video signals
+         * helps to align the A/V start times at the beginning of a
+         * recording session, and it also helps eliminate the "recording"
+         * sound for camcorder applications.
+         *
+         * Ideally, this platform-specific value should be defined
+         * in the media_profiles.xml file.
+         */
+        startTimeUs += 700000;
+    }
+
     meta->setInt64(kKeyTime, startTimeUs);
+
     status_t err = mSource->start(meta.get());
     if (err != OK) {
         mDone = mReachedEOS = true;
@@ -1255,6 +1307,7 @@
     pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
 
     mDone = false;
+    mStarted = true;
     mTrackDurationUs = 0;
     mReachedEOS = false;
     mEstimatedTrackSizeBytes = 0;
@@ -1282,10 +1335,15 @@
 }
 
 status_t MPEG4Writer::Track::stop() {
+    LOGD("Stopping %s track", mIsAudio? "Audio": "Video");
+    if (!mStarted) {
+        LOGE("Stop() called but track is not started");
+        return ERROR_END_OF_STREAM;
+    }
+
     if (mDone) {
         return OK;
     }
-
     mDone = true;
 
     void *dummy;
@@ -1293,6 +1351,7 @@
 
     status_t err = (status_t) dummy;
 
+    LOGD("Stopping %s track source", mIsAudio? "Audio": "Video");
     {
         status_t status = mSource->stop();
         if (err == OK && status != OK && status != ERROR_END_OF_STREAM) {
@@ -1300,6 +1359,7 @@
         }
     }
 
+    LOGD("%s track stopped", mIsAudio? "Audio": "Video");
     return err;
 }
 
@@ -1716,6 +1776,8 @@
     } else {
         prctl(PR_SET_NAME, (unsigned long)"VideoTrackEncoding", 0, 0, 0);
     }
+    setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_AUDIO);
+
     sp<MetaData> meta_data;
 
     mNumSamples = 0;
@@ -1896,7 +1958,11 @@
                      ((timestampUs * mTimeScale + 500000LL) / 1000000LL -
                      (lastTimestampUs * mTimeScale + 500000LL) / 1000000LL);
 
-            if (currDurationTicks != lastDurationTicks) {
+            // Force the first sample to have its own stts entry so that
+            // we can adjust its value later to maintain the A/V sync.
+            if (mNumSamples == 3 || currDurationTicks != lastDurationTicks) {
+                LOGV("%s lastDurationUs: %lld us, currDurationTicks: %lld",
+                        mIsAudio? "Audio": "Video", lastDurationUs, currDurationTicks);
                 addOneSttsTableEntry(sampleCount, lastDurationUs);
                 sampleCount = 1;
             } else {
@@ -1909,6 +1975,8 @@
             }
             previousSampleSize = sampleSize;
         }
+        LOGV("%s timestampUs/lastTimestampUs: %lld/%lld",
+                mIsAudio? "Audio": "Video", timestampUs, lastTimestampUs);
         lastDurationUs = timestampUs - lastTimestampUs;
         lastDurationTicks = currDurationTicks;
         lastTimestampUs = timestampUs;
@@ -1924,7 +1992,7 @@
             trackProgressStatus(timestampUs);
         }
         if (mOwner->numTracks() == 1) {
-            off_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy)
+            off64_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy)
                                  : mOwner->addSample_l(copy);
             if (mChunkOffsets.empty()) {
                 addChunkOffset(offset);
@@ -1980,7 +2048,16 @@
     } else {
         ++sampleCount;  // Count for the last sample
     }
-    addOneSttsTableEntry(sampleCount, lastDurationUs);
+
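+    // Mirror the in-loop special case above: give the first sample its own
+    // stts entry even for very short tracks, so that its duration can still
+    // be adjusted to maintain A/V sync.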
+    if (mNumSamples <= 2) {
+        addOneSttsTableEntry(1, lastDurationUs);
+        if (sampleCount - 1 > 0) {
+            addOneSttsTableEntry(sampleCount - 1, lastDurationUs);
+        }
+    } else {
+        addOneSttsTableEntry(sampleCount, lastDurationUs);
+    }
+
     mTrackDurationUs += lastDurationUs;
     mReachedEOS = true;
     LOGI("Received total/0-length (%d/%d) buffers and encoded %d frames. - %s",
@@ -2105,6 +2182,9 @@
     int32_t mvhdTimeScale = mOwner->getTimeScale();
     int64_t trakDurationUs = getDurationUs();
 
+    // Compensate for small start time differences between media tracks.
+    int64_t trackStartTimeOffsetUs = 0;
+
     mOwner->beginBox("trak");
 
       mOwner->beginBox("tkhd");
@@ -2125,7 +2205,7 @@
         mOwner->writeInt16(mIsAudio ? 0x100 : 0);  // volume
         mOwner->writeInt16(0);             // reserved
 
-        mOwner->writeCompositionMatrix(mRotation);
+        mOwner->writeCompositionMatrix(mRotation);       // matrix
 
         if (mIsAudio) {
             mOwner->writeInt32(0);
@@ -2143,26 +2223,8 @@
 
       int64_t moovStartTimeUs = mOwner->getStartTimestampUs();
       if (mStartTimestampUs != moovStartTimeUs) {
-        mOwner->beginBox("edts");
-          mOwner->beginBox("elst");
-            mOwner->writeInt32(0);           // version=0, flags=0: 32-bit time
-            mOwner->writeInt32(2);           // never ends with an empty list
-
-            // First elst entry: specify the starting time offset
-            int64_t offsetUs = mStartTimestampUs - moovStartTimeUs;
-            LOGV("OffsetUs: %lld", offsetUs);
-            int32_t seg = (offsetUs * mvhdTimeScale + 5E5) / 1E6;
-            mOwner->writeInt32(seg);         // in mvhd timecale
-            mOwner->writeInt32(-1);          // starting time offset
-            mOwner->writeInt32(1 << 16);     // rate = 1.0
-
-            // Second elst entry: specify the track duration
-            seg = (trakDurationUs * mvhdTimeScale + 5E5) / 1E6;
-            mOwner->writeInt32(seg);         // in mvhd timescale
-            mOwner->writeInt32(0);
-            mOwner->writeInt32(1 << 16);
-          mOwner->endBox();
-        mOwner->endBox();
+          CHECK(mStartTimestampUs > moovStartTimeUs);
+          trackStartTimeOffsetUs = mStartTimestampUs - moovStartTimeUs;
       }
 
       mOwner->beginBox("mdia");
@@ -2263,6 +2325,9 @@
                         CHECK(mCodecSpecificData);
                         CHECK(mCodecSpecificDataSize > 0);
 
+                        // Make sure all sizes encode to a single byte.
+                        CHECK(mCodecSpecificDataSize + 23 < 128);
+
                         mOwner->writeInt32(0);     // version=0, flags=0
                         mOwner->writeInt8(0x03);   // ES_DescrTag
                         mOwner->writeInt8(23 + mCodecSpecificDataSize);
@@ -2415,7 +2480,7 @@
           mOwner->beginBox("stts");
             mOwner->writeInt32(0);  // version=0, flags=0
             mOwner->writeInt32(mNumSttsTableEntries);
-            int64_t prevTimestampUs = 0;
+            int64_t prevTimestampUs = trackStartTimeOffsetUs;
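+            // trackStartTimeOffsetUs (computed above when this track starts
+            // later than the movie) now enters the stts timing here, in place
+            // of the removed edts/elst box.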
             for (List<SttsTableEntry>::iterator it = mSttsTableEntries.begin();
                  it != mSttsTableEntries.end(); ++it) {
                 mOwner->writeInt32(it->sampleCount);
@@ -2472,7 +2537,7 @@
           mOwner->beginBox(use32BitOffset? "stco": "co64");
             mOwner->writeInt32(0);  // version=0, flags=0
             mOwner->writeInt32(mNumStcoTableEntries);
-            for (List<off_t>::iterator it = mChunkOffsets.begin();
+            for (List<off64_t>::iterator it = mChunkOffsets.begin();
                  it != mChunkOffsets.end(); ++it) {
                 if (use32BitOffset) {
                     mOwner->writeInt32(static_cast<int32_t>(*it));
diff --git a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp
index b973745..a8fadf2 100644
--- a/media/libstagefright/MediaBuffer.cpp
+++ b/media/libstagefright/MediaBuffer.cpp
@@ -25,16 +25,11 @@
 #include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MetaData.h>
 
+#include <ui/GraphicBuffer.h>
+#include <sys/atomics.h>
+
 namespace android {
 
-// XXX make this truly atomic.
-static int atomic_add(int *value, int delta) {
-    int prev_value = *value;
-    *value += delta;
-
-    return prev_value;
-}
-
 MediaBuffer::MediaBuffer(void *data, size_t size)
     : mObserver(NULL),
       mNextBuffer(NULL),
@@ -61,6 +56,20 @@
       mOriginal(NULL) {
 }
 
+MediaBuffer::MediaBuffer(const sp<GraphicBuffer>& graphicBuffer)
+    : mObserver(NULL),
+      mNextBuffer(NULL),
+      mRefCount(0),
+      mData(NULL),
+      mSize(1),
+      mRangeOffset(0),
+      mRangeLength(mSize),
+      mGraphicBuffer(graphicBuffer),
+      mOwnsData(false),
+      mMetaData(new MetaData),
+      mOriginal(NULL) {
+}
+
 void MediaBuffer::release() {
     if (mObserver == NULL) {
         CHECK_EQ(mRefCount, 0);
@@ -68,7 +77,7 @@
         return;
     }
 
-    int prevCount = atomic_add(&mRefCount, -1);
+    int prevCount = __atomic_dec(&mRefCount);
     if (prevCount == 1) {
         if (mObserver == NULL) {
             delete this;
@@ -88,14 +97,16 @@
 }
 
 void MediaBuffer::add_ref() {
-    atomic_add(&mRefCount, 1);
+    (void) __atomic_inc(&mRefCount);
 }
 
 void *MediaBuffer::data() const {
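+    // GraphicBuffer-backed MediaBuffers carry no CPU-accessible payload;
+    // data() and size() must only be called when mGraphicBuffer is NULL.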
+    CHECK(mGraphicBuffer == NULL);
     return mData;
 }
 
 size_t MediaBuffer::size() const {
+    CHECK(mGraphicBuffer == NULL);
     return mSize;
 }
 
@@ -108,15 +119,19 @@
 }
 
 void MediaBuffer::set_range(size_t offset, size_t length) {
-    if (offset + length > mSize) {
+    if ((mGraphicBuffer == NULL) && (offset + length > mSize)) {
         LOGE("offset = %d, length = %d, mSize = %d", offset, length, mSize);
     }
-    CHECK(offset + length <= mSize);
+    CHECK((mGraphicBuffer != NULL) || (offset + length <= mSize));
 
     mRangeOffset = offset;
     mRangeLength = length;
 }
 
+sp<GraphicBuffer> MediaBuffer::graphicBuffer() const {
+    return mGraphicBuffer;
+}
+
 sp<MetaData> MediaBuffer::meta_data() {
     return mMetaData;
 }
@@ -158,6 +173,8 @@
 }
 
 MediaBuffer *MediaBuffer::clone() {
+    CHECK_EQ(mGraphicBuffer, NULL);
+
     MediaBuffer *buffer = new MediaBuffer(mData, mSize);
     buffer->set_range(mRangeOffset, mRangeLength);
     buffer->mMetaData = new MetaData(*mMetaData.get());
@@ -169,4 +186,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index 7648d42..0be7261 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -35,6 +35,8 @@
 const char *MEDIA_MIMETYPE_AUDIO_G711_ALAW = "audio/g711-alaw";
 const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW = "audio/g711-mlaw";
 const char *MEDIA_MIMETYPE_AUDIO_RAW = "audio/raw";
+const char *MEDIA_MIMETYPE_AUDIO_FLAC = "audio/flac";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS = "audio/aac-adts";
 
 const char *MEDIA_MIMETYPE_CONTAINER_MPEG4 = "video/mpeg4";
 const char *MEDIA_MIMETYPE_CONTAINER_WAV = "audio/wav";
@@ -42,4 +44,6 @@
 const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA = "video/x-matroska";
 const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS = "video/mp2ts";
 
+const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm";
+
 }  // namespace android
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index 965c370..23bad5b 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -25,6 +25,9 @@
 #include "include/OggExtractor.h"
 #include "include/MPEG2TSExtractor.h"
 #include "include/DRMExtractor.h"
+#include "include/WVMExtractor.h"
+#include "include/FLACExtractor.h"
+#include "include/AACExtractor.h"
 
 #include "matroska/MatroskaExtractor.h"
 
@@ -64,37 +67,57 @@
              mime, confidence);
     }
 
-    if (!strncmp(mime, "drm", 3)) {
-        const char *originalMime = strrchr(mime, '+') + 1;
-
-        if (!strncmp(mime, "drm+es_based", 12)) {
+    bool isDrm = false;
+    // DRM MIME type syntax is "drm+type+original" where
+    // type is "es_based" or "container_based" and
+    // original is the content's cleartext MIME type
+    if (!strncmp(mime, "drm+", 4)) {
+        const char *originalMime = strchr(mime+4, '+');
+        if (originalMime == NULL) {
+            // second + not found
+            return NULL;
+        }
+        ++originalMime;
+        if (!strncmp(mime, "drm+es_based+", 13)) {
+            // DRMExtractor sets container metadata kKeyIsDRM to 1
             return new DRMExtractor(source, originalMime);
-        } else if (!strncmp(mime, "drm+container_based", 19)) {
+        } else if (!strncmp(mime, "drm+container_based+", 20)) {
             mime = originalMime;
+            isDrm = true;
         } else {
             return NULL;
         }
     }
 
+    MediaExtractor *ret = NULL;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG4)
             || !strcasecmp(mime, "audio/mp4")) {
-        return new MPEG4Extractor(source);
+        ret = new MPEG4Extractor(source);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
-        return new MP3Extractor(source, meta);
+        ret = new MP3Extractor(source, meta);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)
             || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
-        return new AMRExtractor(source);
+        ret = new AMRExtractor(source);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
+        ret = new FLACExtractor(source);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WAV)) {
-        return new WAVExtractor(source);
+        ret = new WAVExtractor(source);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_OGG)) {
-        return new OggExtractor(source);
+        ret = new OggExtractor(source);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MATROSKA)) {
-        return new MatroskaExtractor(source);
+        ret = new MatroskaExtractor(source);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
-        return new MPEG2TSExtractor(source);
+        ret = new MPEG2TSExtractor(source);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WVM)) {
+        ret = new WVMExtractor(source);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC_ADTS)) {
+        ret = new AACExtractor(source);
+    }
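+    // For container-based DRM the extractor above is created from the
+    // cleartext MIME type, so tag its metadata as DRM content here.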
+    if (ret != NULL && isDrm) {
+        ret->getMetaData()->setInt32(kKeyIsDRM, 1);
     }
 
-    return NULL;
+    return ret;
 }
 
 }  // namespace android
diff --git a/media/libstagefright/MediaSource.cpp b/media/libstagefright/MediaSource.cpp
index b4ef338..fd0e79c 100644
--- a/media/libstagefright/MediaSource.cpp
+++ b/media/libstagefright/MediaSource.cpp
@@ -32,7 +32,6 @@
     mOptions = 0;
     mSeekTimeUs = 0;
     mLatenessUs = 0;
-    mSkipFrameUntilTimeUs = 0;
 }
 
 void MediaSource::ReadOptions::setSeekTo(int64_t time_us, SeekMode mode) {
@@ -54,21 +53,6 @@
     return (mOptions & kSeekTo_Option) != 0;
 }
 
-void MediaSource::ReadOptions::clearSkipFrame() {
-    mOptions &= ~kSkipFrame_Option;
-    mSkipFrameUntilTimeUs = 0;
-}
-
-void MediaSource::ReadOptions::setSkipFrame(int64_t timeUs) {
-    mOptions |= kSkipFrame_Option;
-    mSkipFrameUntilTimeUs = timeUs;
-}
-
-bool MediaSource::ReadOptions::getSkipFrame(int64_t *timeUs) const {
-    *timeUs = mSkipFrameUntilTimeUs;
-    return (mOptions & kSkipFrame_Option) != 0;
-}
-
 void MediaSource::ReadOptions::setLateBy(int64_t lateness_us) {
     mLatenessUs = lateness_us;
 }
diff --git a/media/libstagefright/MediaSourceSplitter.cpp b/media/libstagefright/MediaSourceSplitter.cpp
new file mode 100644
index 0000000..abc7012
--- /dev/null
+++ b/media/libstagefright/MediaSourceSplitter.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSourceSplitter"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaSourceSplitter.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+MediaSourceSplitter::MediaSourceSplitter(sp<MediaSource> mediaSource) {
+    mNumberOfClients = 0;
+    mSource = mediaSource;
+    mSourceStarted = false;
+
+    mNumberOfClientsStarted = 0;
+    mNumberOfCurrentReads = 0;
+    mCurrentReadBit = 0;
+    mLastReadCompleted = true;
+}
+
+MediaSourceSplitter::~MediaSourceSplitter() {
+}
+
+sp<MediaSource> MediaSourceSplitter::createClient() {
+    Mutex::Autolock autoLock(mLock);
+
+    sp<MediaSource> client = new Client(this, mNumberOfClients++);
+    mClientsStarted.push(false);
+    mClientsDesiredReadBit.push(0);
+    return client;
+}
+
+status_t MediaSourceSplitter::start(int clientId, MetaData *params) {
+    Mutex::Autolock autoLock(mLock);
+
+    LOGV("start client (%d)", clientId);
+    if (mClientsStarted[clientId]) {
+        return OK;
+    }
+
+    mNumberOfClientsStarted++;
+
+    if (!mSourceStarted) {
+        LOGV("Starting real source from client (%d)", clientId);
+        status_t err = mSource->start(params);
+
+        if (err == OK) {
+            mSourceStarted = true;
+            mClientsStarted.editItemAt(clientId) = true;
+            mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+        }
+
+        return err;
+    } else {
+        mClientsStarted.editItemAt(clientId) = true;
+        if (mLastReadCompleted) {
+            // Last read was completed. So join in the threads for the next read.
+            mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+        } else {
+            // Last read is ongoing. So join in the threads for the current read.
+            mClientsDesiredReadBit.editItemAt(clientId) = mCurrentReadBit;
+        }
+        return OK;
+    }
+}
+
+status_t MediaSourceSplitter::stop(int clientId) {
+    Mutex::Autolock autoLock(mLock);
+
+    LOGV("stop client (%d)", clientId);
+    CHECK(clientId >= 0 && clientId < mNumberOfClients);
+    CHECK(mClientsStarted[clientId]);
+
+    if (--mNumberOfClientsStarted == 0) {
+        LOGV("Stopping real source from client (%d)", clientId);
+        status_t err = mSource->stop();
+        mSourceStarted = false;
+        mClientsStarted.editItemAt(clientId) = false;
+        return err;
+    } else {
+        mClientsStarted.editItemAt(clientId) = false;
+        if (!mLastReadCompleted && (mClientsDesiredReadBit[clientId] == mCurrentReadBit)) {
+            // !mLastReadCompleted implies that buffer has been read from source, but all
+            // clients haven't read it.
+            // mClientsDesiredReadBit[clientId] == mCurrentReadBit implies that this
+            // client would have wanted to read from this buffer. (i.e. it has not yet
+            // called read() for the current read buffer.)
+            // Since other threads may be waiting for all the clients' reads to complete,
+            // signal that this read has been aborted.
+            signalReadComplete_lock(true);
+        }
+        return OK;
+    }
+}
+
+sp<MetaData> MediaSourceSplitter::getFormat(int clientId) {
+    Mutex::Autolock autoLock(mLock);
+
+    LOGV("getFormat client (%d)", clientId);
+    return mSource->getFormat();
+}
+
+status_t MediaSourceSplitter::read(int clientId,
+        MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK(clientId >= 0 && clientId < mNumberOfClients);
+
+    LOGV("read client (%d)", clientId);
+    *buffer = NULL;
+
+    if (!mClientsStarted[clientId]) {
+        return OK;
+    }
+
+    if (mCurrentReadBit != mClientsDesiredReadBit[clientId]) {
+        // Desired buffer has not been read from source yet.
+
+        // If the current client is the special client with clientId = 0,
+        // then read from the source; otherwise wait until client 0 has
+        // finished reading from the source.
+        if (clientId == 0) {
+            // Wait for all clients' last reads to complete first so as not to
+            // corrupt the buffer at mLastReadMediaBuffer.
+            waitForAllClientsLastRead_lock(clientId);
+
+            readFromSource_lock(options);
+            *buffer = mLastReadMediaBuffer;
+        } else {
+            waitForReadFromSource_lock(clientId);
+
+            *buffer = mLastReadMediaBuffer;
+            (*buffer)->add_ref();
+        }
+        CHECK(mCurrentReadBit == mClientsDesiredReadBit[clientId]);
+    } else {
+        // Desired buffer has already been read from source. Use the cached data.
+        CHECK(clientId != 0);
+
+        *buffer = mLastReadMediaBuffer;
+        (*buffer)->add_ref();
+    }
+
+    mClientsDesiredReadBit.editItemAt(clientId) = !mClientsDesiredReadBit[clientId];
+    signalReadComplete_lock(false);
+
+    return mLastReadStatus;
+}
+
+void MediaSourceSplitter::readFromSource_lock(const MediaSource::ReadOptions *options) {
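+    // Pull the next buffer from the real source and flip mCurrentReadBit so
+    // that clients whose desired bit now matches it will pick this buffer up
+    // in read().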
+    mLastReadStatus = mSource->read(&mLastReadMediaBuffer, options);
+
+    mCurrentReadBit = !mCurrentReadBit;
+    mLastReadCompleted = false;
+    mReadFromSourceCondition.broadcast();
+}
+
+void MediaSourceSplitter::waitForReadFromSource_lock(int32_t clientId) {
+    mReadFromSourceCondition.wait(mLock);
+}
+
+void MediaSourceSplitter::waitForAllClientsLastRead_lock(int32_t clientId) {
+    if (mLastReadCompleted) {
+        return;
+    }
+    mAllReadsCompleteCondition.wait(mLock);
+    CHECK(mLastReadCompleted);
+}
+
+void MediaSourceSplitter::signalReadComplete_lock(bool readAborted) {
+    if (!readAborted) {
+        mNumberOfCurrentReads++;
+    }
+
+    if (mNumberOfCurrentReads == mNumberOfClientsStarted) {
+        mLastReadCompleted = true;
+        mNumberOfCurrentReads = 0;
+        mAllReadsCompleteCondition.broadcast();
+    }
+}
+
+status_t MediaSourceSplitter::pause(int clientId) {
+    return ERROR_UNSUPPORTED;
+}
+
+// Client
+
+MediaSourceSplitter::Client::Client(
+        sp<MediaSourceSplitter> splitter,
+        int32_t clientId) {
+    mSplitter = splitter;
+    mClientId = clientId;
+}
+
+status_t MediaSourceSplitter::Client::start(MetaData *params) {
+    return mSplitter->start(mClientId, params);
+}
+
+status_t MediaSourceSplitter::Client::stop() {
+    return mSplitter->stop(mClientId);
+}
+
+sp<MetaData> MediaSourceSplitter::Client::getFormat() {
+    return mSplitter->getFormat(mClientId);
+}
+
+status_t MediaSourceSplitter::Client::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    return mSplitter->read(mClientId, buffer, options);
+}
+
+status_t MediaSourceSplitter::Client::pause() {
+    return mSplitter->pause(mClientId);
+}
+
+}  // namespace android
diff --git a/media/libstagefright/MetaData.cpp b/media/libstagefright/MetaData.cpp
index 63b476e..884f3b4 100644
--- a/media/libstagefright/MetaData.cpp
+++ b/media/libstagefright/MetaData.cpp
@@ -70,6 +70,19 @@
     return setData(key, TYPE_POINTER, &value, sizeof(value));
 }
 
+bool MetaData::setRect(
+        uint32_t key,
+        int32_t left, int32_t top,
+        int32_t right, int32_t bottom) {
+    Rect r;
+    r.mLeft = left;
+    r.mTop = top;
+    r.mRight = right;
+    r.mBottom = bottom;
+
+    return setData(key, TYPE_RECT, &r, sizeof(r));
+}
+
 bool MetaData::findCString(uint32_t key, const char **value) {
     uint32_t type;
     const void *data;
@@ -143,6 +156,28 @@
     return true;
 }
 
+bool MetaData::findRect(
+        uint32_t key,
+        int32_t *left, int32_t *top,
+        int32_t *right, int32_t *bottom) {
+    uint32_t type;
+    const void *data;
+    size_t size;
+    if (!findData(key, &type, &data, &size) || type != TYPE_RECT) {
+        return false;
+    }
+
+    CHECK_EQ(size, sizeof(Rect));
+
+    const Rect *r = (const Rect *)data;
+    *left = r->mLeft;
+    *top = r->mTop;
+    *right = r->mRight;
+    *bottom = r->mBottom;
+
+    return true;
+}
+
 bool MetaData::setData(
         uint32_t key, uint32_t type, const void *data, size_t size) {
     bool overwrote_existing = true;
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index 5b0168b..c7b99b9 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -180,8 +180,7 @@
       mFinalStatus(OK),
       mLastAccessPos(0),
       mFetching(true),
-      mLastFetchTimeUs(-1),
-      mSuspended(false) {
+      mLastFetchTimeUs(-1) {
     mLooper->setName("NuCachedSource2");
     mLooper->registerHandler(mReflector);
     mLooper->start();
@@ -202,7 +201,7 @@
     return mSource->initCheck();
 }
 
-status_t NuCachedSource2::getSize(off_t *size) {
+status_t NuCachedSource2::getSize(off64_t *size) {
     return mSource->getSize(size);
 }
 
@@ -224,12 +223,6 @@
             break;
         }
 
-        case kWhatSuspend:
-        {
-            onSuspend();
-            break;
-        }
-
         default:
             TRESPASS();
     }
@@ -271,7 +264,6 @@
 
     bool keepAlive =
         !mFetching
-            && !mSuspended
             && mFinalStatus == OK
             && ALooper::GetNowUs() >= mLastFetchTimeUs + kKeepAliveIntervalUs;
 
@@ -288,7 +280,7 @@
             LOGI("Cache full, done prefetching for now");
             mFetching = false;
         }
-    } else if (!mSuspended) {
+    } else {
         Mutex::Autolock autoLock(mLock);
         restartPrefetcherIfNecessary_l();
     }
@@ -326,15 +318,17 @@
     mCondition.signal();
 }
 
-void NuCachedSource2::restartPrefetcherIfNecessary_l() {
-    static const size_t kGrayArea = 256 * 1024;
+void NuCachedSource2::restartPrefetcherIfNecessary_l(
+        bool ignoreLowWaterThreshold) {
+    static const size_t kGrayArea = 1024 * 1024;
 
     if (mFetching || mFinalStatus != OK) {
         return;
     }
 
-    if (mCacheOffset + mCache->totalSize() - mLastAccessPos
-            >= kLowWaterThreshold) {
+    if (!ignoreLowWaterThreshold
+            && mCacheOffset + mCache->totalSize() - mLastAccessPos
+                >= kLowWaterThreshold) {
         return;
     }
 
@@ -352,10 +346,10 @@
     mFetching = true;
 }
 
-ssize_t NuCachedSource2::readAt(off_t offset, void *data, size_t size) {
+ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) {
     Mutex::Autolock autoSerializer(mSerializer);
 
-    LOGV("readAt offset %ld, size %d", offset, size);
+    LOGV("readAt offset %lld, size %d", offset, size);
 
     Mutex::Autolock autoLock(mLock);
 
@@ -400,34 +394,34 @@
     return mCacheOffset + mCache->totalSize();
 }
 
-size_t NuCachedSource2::approxDataRemaining(bool *eos) {
+size_t NuCachedSource2::approxDataRemaining(status_t *finalStatus) {
     Mutex::Autolock autoLock(mLock);
-    return approxDataRemaining_l(eos);
+    return approxDataRemaining_l(finalStatus);
 }
 
-size_t NuCachedSource2::approxDataRemaining_l(bool *eos) {
-    *eos = (mFinalStatus != OK);
-    off_t lastBytePosCached = mCacheOffset + mCache->totalSize();
+size_t NuCachedSource2::approxDataRemaining_l(status_t *finalStatus) {
+    *finalStatus = mFinalStatus;
+    off64_t lastBytePosCached = mCacheOffset + mCache->totalSize();
     if (mLastAccessPos < lastBytePosCached) {
         return lastBytePosCached - mLastAccessPos;
     }
     return 0;
 }
 
-ssize_t NuCachedSource2::readInternal(off_t offset, void *data, size_t size) {
-    LOGV("readInternal offset %ld size %d", offset, size);
+ssize_t NuCachedSource2::readInternal(off64_t offset, void *data, size_t size) {
+    LOGV("readInternal offset %lld size %d", offset, size);
 
     Mutex::Autolock autoLock(mLock);
 
     if (offset < mCacheOffset
-            || offset >= (off_t)(mCacheOffset + mCache->totalSize())) {
-        static const off_t kPadding = 32768;
+            || offset >= (off64_t)(mCacheOffset + mCache->totalSize())) {
+        static const off64_t kPadding = 256 * 1024;
 
         // In the presence of multiple decoded streams, one of them will
         // trigger this seek request; the other one will request data "nearby"
         // soon. Adjust the seek position so that the subsequent request
         // does not trigger another seek.
-        off_t seekOffset = (offset > kPadding) ? offset - kPadding : 0;
+        off64_t seekOffset = (offset > kPadding) ? offset - kPadding : 0;
 
         seekInternal_l(seekOffset);
     }
@@ -456,15 +450,15 @@
     return -EAGAIN;
 }
 
-status_t NuCachedSource2::seekInternal_l(off_t offset) {
+status_t NuCachedSource2::seekInternal_l(off64_t offset) {
     mLastAccessPos = offset;
 
     if (offset >= mCacheOffset
-            && offset <= (off_t)(mCacheOffset + mCache->totalSize())) {
+            && offset <= (off64_t)(mCacheOffset + mCache->totalSize())) {
         return OK;
     }
 
-    LOGI("new range: offset= %ld", offset);
+    LOGI("new range: offset= %lld", offset);
 
     mCacheOffset = offset;
 
@@ -477,39 +471,22 @@
     return OK;
 }
 
-void NuCachedSource2::clearCacheAndResume() {
-    LOGV("clearCacheAndResume");
-
+void NuCachedSource2::resumeFetchingIfNecessary() {
     Mutex::Autolock autoLock(mLock);
 
-    CHECK(mSuspended);
-
-    mCacheOffset = 0;
-    mFinalStatus = OK;
-    mLastAccessPos = 0;
-    mLastFetchTimeUs = -1;
-
-    size_t totalSize = mCache->totalSize();
-    CHECK_EQ(mCache->releaseFromStart(totalSize), totalSize);
-
-    mFetching = true;
-    mSuspended = false;
+    restartPrefetcherIfNecessary_l(true /* ignore low water threshold */);
 }
 
-void NuCachedSource2::suspend() {
-    (new AMessage(kWhatSuspend, mReflector->id()))->post();
-
-    while (!mSuspended) {
-        usleep(10000);
-    }
+DecryptHandle* NuCachedSource2::DrmInitialization() {
+    return mSource->DrmInitialization();
 }
 
-void NuCachedSource2::onSuspend() {
-    Mutex::Autolock autoLock(mLock);
+void NuCachedSource2::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {
+    mSource->getDrmInfo(handle, client);
+}
 
-    mFetching = false;
-    mSuspended = true;
+String8 NuCachedSource2::getUri() {
+    return mSource->getUri();
 }
 
 }  // namespace android
-
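
The NuCachedSource2 changes above replace the boolean end-of-stream out-parameter with a status_t, so callers can tell a clean EOS apart from a network error, and expose resumeFetchingIfNecessary() in place of the old suspend/resume pair. Below is a minimal caller-side sketch of the new approxDataRemaining(status_t *) signature; the helper name, the resume threshold, and the surrounding player logic are assumptions for illustration, not part of this patch.

    // Hypothetical caller inside namespace android; only
    // approxDataRemaining(status_t *) comes from the patch above.
    static bool haveEnoughCachedData(const sp<NuCachedSource2> &cache) {
        status_t finalStatus = OK;
        size_t cachedBytes = cache->approxDataRemaining(&finalStatus);

        if (finalStatus != OK) {
            // EOS or a network error: no more data is coming, so whatever
            // is already cached is all we will ever get.
            return true;
        }

        static const size_t kMinBytesToResume = 256 * 1024;  // assumed threshold
        return cachedBytes >= kMinBytesToResume;
    }
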
diff --git a/media/libstagefright/NuHTTPDataSource.cpp b/media/libstagefright/NuHTTPDataSource.cpp
index af247d5..b24343f 100644
--- a/media/libstagefright/NuHTTPDataSource.cpp
+++ b/media/libstagefright/NuHTTPDataSource.cpp
@@ -21,6 +21,7 @@
 #include "include/NuHTTPDataSource.h"
 
 #include <cutils/properties.h>
+#include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaErrors.h>
 
@@ -39,22 +40,30 @@
 }
 
 static bool ParseURL(
-        const char *url, String8 *host, unsigned *port, String8 *path) {
+        const char *url, String8 *host, unsigned *port,
+        String8 *path, bool *https) {
     host->setTo("");
     *port = 0;
     path->setTo("");
 
-    if (strncasecmp("http://", url, 7)) {
+    size_t hostStart;
+    if (!strncasecmp("http://", url, 7)) {
+        hostStart = 7;
+        *https = false;
+    } else if (!strncasecmp("https://", url, 8)) {
+        hostStart = 8;
+        *https = true;
+    } else {
         return false;
     }
 
-    const char *slashPos = strchr(&url[7], '/');
+    const char *slashPos = strchr(&url[hostStart], '/');
 
     if (slashPos == NULL) {
-        host->setTo(&url[7]);
+        host->setTo(&url[hostStart]);
         path->setTo("/");
     } else {
-        host->setTo(&url[7], slashPos - &url[7]);
+        host->setTo(&url[hostStart], slashPos - &url[hostStart]);
         path->setTo(slashPos);
     }
 
@@ -72,29 +81,47 @@
         String8 tmp(host->string(), colonOffset);
         *host = tmp;
     } else {
-        *port = 80;
+        *port = (*https) ? 443 : 80;
     }
 
     return true;
 }
 
-NuHTTPDataSource::NuHTTPDataSource()
-    : mState(DISCONNECTED),
+NuHTTPDataSource::NuHTTPDataSource(uint32_t flags)
+    : mFlags(flags),
+      mState(DISCONNECTED),
       mPort(0),
+      mHTTPS(false),
       mOffset(0),
       mContentLength(0),
       mContentLengthValid(false),
       mHasChunkedTransferEncoding(false),
-      mChunkDataBytesLeft(0) {
+      mChunkDataBytesLeft(0),
+      mNumBandwidthHistoryItems(0),
+      mTotalTransferTimeUs(0),
+      mTotalTransferBytes(0),
+      mDecryptHandle(NULL),
+      mDrmManagerClient(NULL) {
 }
 
 NuHTTPDataSource::~NuHTTPDataSource() {
+    if (mDecryptHandle != NULL) {
+        // To release mDecryptHandle
+        CHECK(mDrmManagerClient);
+        mDrmManagerClient->closeDecryptSession(mDecryptHandle);
+        mDecryptHandle = NULL;
+    }
+
+    if (mDrmManagerClient != NULL) {
+        delete mDrmManagerClient;
+        mDrmManagerClient = NULL;
+    }
 }
 
 status_t NuHTTPDataSource::connect(
         const char *uri,
         const KeyedVector<String8, String8> *overrides,
-        off_t offset) {
+        off64_t offset) {
     String8 headers;
     MakeFullHeaders(overrides, &headers);
 
@@ -104,14 +131,18 @@
 status_t NuHTTPDataSource::connect(
         const char *uri,
         const String8 &headers,
-        off_t offset) {
+        off64_t offset) {
     String8 host, path;
     unsigned port;
-    if (!ParseURL(uri, &host, &port, &path)) {
+
+    mUri = uri;
+
+    bool https;
+    if (!ParseURL(uri, &host, &port, &path, &https)) {
         return ERROR_MALFORMED;
     }
 
-    return connect(host, port, path, headers, offset);
+    return connect(host, port, path, https, headers, offset);
 }
 
 static bool IsRedirectStatusCode(int httpStatus) {
@@ -121,14 +152,19 @@
 
 status_t NuHTTPDataSource::connect(
         const char *host, unsigned port, const char *path,
+        bool https,
         const String8 &headers,
-        off_t offset) {
-    LOGI("connect to %s:%u%s @%ld", host, port, path, offset);
+        off64_t offset) {
+    if (!(mFlags & kFlagIncognito)) {
+        LOGI("connect to %s:%u%s @%lld", host, port, path, offset);
+    } else {
+        LOGI("connect to <URL suppressed> @%lld", offset);
+    }
 
     bool needsToReconnect = true;
 
     if (mState == CONNECTED && host == mHost && port == mPort
-            && offset == mOffset) {
+            && https == mHTTPS && offset == mOffset) {
         if (mContentLengthValid && mOffset == mContentLength) {
             LOGI("Didn't have to reconnect, old one's still good.");
             needsToReconnect = false;
@@ -138,6 +174,7 @@
     mHost = host;
     mPort = port;
     mPath = path;
+    mHTTPS = https;
     mHeaders = headers;
 
     status_t err = OK;
@@ -146,7 +183,7 @@
 
     if (needsToReconnect) {
         mHTTP.disconnect();
-        err = mHTTP.connect(host, port);
+        err = mHTTP.connect(host, port, https);
     }
 
     if (err != OK) {
@@ -165,11 +202,14 @@
         request.append(" HTTP/1.1\r\n");
         request.append("Host: ");
         request.append(mHost);
+        if (mPort != 80) {
+            request.append(StringPrintf(":%u", mPort).c_str());
+        }
         request.append("\r\n");
 
         if (offset != 0) {
             char rangeHeader[128];
-            sprintf(rangeHeader, "Range: bytes=%ld-\r\n", offset);
+            sprintf(rangeHeader, "Range: bytes=%lld-\r\n", offset);
             request.append(rangeHeader);
         }
 
@@ -185,7 +225,7 @@
         }
 
         if (IsRedirectStatusCode(httpStatus)) {
-            string value;
+            AString value;
             CHECK(mHTTP.find_header_value("Location", &value));
 
             mState = DISCONNECTED;
@@ -205,9 +245,8 @@
         mHasChunkedTransferEncoding = false;
 
         {
-            string value;
-            if (mHTTP.find_header_value("Transfer-Encoding", &value)
-                    || mHTTP.find_header_value("Transfer-encoding", &value)) {
+            AString value;
+            if (mHTTP.find_header_value("Transfer-Encoding", &value)) {
                 // We don't currently support any transfer encodings but
                 // chunked.
 
@@ -229,11 +268,11 @@
         applyTimeoutResponse();
 
         if (offset == 0) {
-            string value;
+            AString value;
             unsigned long x;
-            if (mHTTP.find_header_value(string("Content-Length"), &value)
+            if (mHTTP.find_header_value(AString("Content-Length"), &value)
                     && ParseSingleUnsignedLong(value.c_str(), &x)) {
-                mContentLength = (off_t)x;
+                mContentLength = (off64_t)x;
                 mContentLengthValid = true;
             } else {
                 LOGW("Server did not give us the content length!");
@@ -246,9 +285,9 @@
                 return ERROR_UNSUPPORTED;
             }
 
-            string value;
+            AString value;
             unsigned long x;
-            if (mHTTP.find_header_value(string("Content-Range"), &value)) {
+            if (mHTTP.find_header_value(AString("Content-Range"), &value)) {
                 const char *slashPos = strchr(value.c_str(), '/');
                 if (slashPos != NULL
                         && ParseSingleUnsignedLong(slashPos + 1, &x)) {
@@ -338,7 +377,7 @@
     return n;
 }
 
-ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t NuHTTPDataSource::readAt(off64_t offset, void *data, size_t size) {
     LOGV("readAt offset %ld, size %d", offset, size);
 
     Mutex::Autolock autoLock(mLock);
@@ -347,7 +386,7 @@
         String8 host = mHost;
         String8 path = mPath;
         String8 headers = mHeaders;
-        status_t err = connect(host, mPort, path, headers, offset);
+        status_t err = connect(host, mPort, path, mHTTPS, headers, offset);
 
         if (err != OK) {
             return err;
@@ -365,6 +404,8 @@
 
     size_t numBytesRead = 0;
     while (numBytesRead < size) {
+        int64_t startTimeUs = ALooper::GetNowUs();
+
         ssize_t n =
             internalRead((uint8_t *)data + numBytesRead, size - numBytesRead);
 
@@ -372,6 +413,9 @@
             return n;
         }
 
+        int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
+        addBandwidthMeasurement_l(n, delayUs);
+
         numBytesRead += (size_t)n;
 
         if (n == 0) {
@@ -390,7 +434,7 @@
     return numBytesRead;
 }
 
-status_t NuHTTPDataSource::getSize(off_t *size) {
+status_t NuHTTPDataSource::getSize(off64_t *size) {
     *size = 0;
 
     if (mState != CONNECTED) {
@@ -441,7 +485,7 @@
 }
 
 void NuHTTPDataSource::applyTimeoutResponse() {
-    string timeout;
+    AString timeout;
     if (mHTTP.find_header_value("X-SocketTimeout", &timeout)) {
         const char *s = timeout.c_str();
         char *end;
@@ -456,4 +500,68 @@
     }
 }
 
+bool NuHTTPDataSource::estimateBandwidth(int32_t *bandwidth_bps) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mNumBandwidthHistoryItems < 2) {
+        return false;
+    }
+
+    *bandwidth_bps = ((double)mTotalTransferBytes * 8E6 / mTotalTransferTimeUs);
+
+    return true;
+}
+
+void NuHTTPDataSource::addBandwidthMeasurement_l(
+        size_t numBytes, int64_t delayUs) {
+    BandwidthEntry entry;
+    entry.mDelayUs = delayUs;
+    entry.mNumBytes = numBytes;
+    mTotalTransferTimeUs += delayUs;
+    mTotalTransferBytes += numBytes;
+
+    mBandwidthHistory.push_back(entry);
+    if (++mNumBandwidthHistoryItems > 100) {
+        BandwidthEntry *entry = &*mBandwidthHistory.begin();
+        mTotalTransferTimeUs -= entry->mDelayUs;
+        mTotalTransferBytes -= entry->mNumBytes;
+        mBandwidthHistory.erase(mBandwidthHistory.begin());
+        --mNumBandwidthHistoryItems;
+    }
+}
+
+DecryptHandle* NuHTTPDataSource::DrmInitialization() {
+    if (mDrmManagerClient == NULL) {
+        mDrmManagerClient = new DrmManagerClient();
+    }
+
+    if (mDrmManagerClient == NULL) {
+        return NULL;
+    }
+
+    if (mDecryptHandle == NULL) {
+        /* Note that if a redirect occurred, mUri is the redirected URI
+         * rather than the original one.
+         */
+        mDecryptHandle = mDrmManagerClient->openDecryptSession(mUri);
+    }
+
+    if (mDecryptHandle == NULL) {
+        delete mDrmManagerClient;
+        mDrmManagerClient = NULL;
+    }
+
+    return mDecryptHandle;
+}
+
+void NuHTTPDataSource::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {
+    *handle = mDecryptHandle;
+
+    *client = mDrmManagerClient;
+}
+
+String8 NuHTTPDataSource::getUri() {
+    return mUri;
+}
+
 }  // namespace android
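
The bandwidth-estimation code added above keeps a sliding window of at most 100 transfer measurements and reports total bits over total microseconds, using the 8E6 factor to convert bytes-per-microsecond into bits-per-second. Here is a self-contained sketch of the same computation, assuming the same 100-entry window and two-sample minimum; the class name and the main() driver are illustrative only.

    #include <cstdint>
    #include <cstdio>
    #include <deque>

    // Standalone mirror of addBandwidthMeasurement_l()/estimateBandwidth();
    // names here are illustrative, not from the patch.
    class BandwidthEstimator {
    public:
        void addSample(size_t numBytes, int64_t delayUs) {
            mHistory.push_back(Entry{numBytes, delayUs});
            mTotalBytes += numBytes;
            mTotalTimeUs += delayUs;
            if (mHistory.size() > 100) {  // keep at most 100 entries
                mTotalBytes -= mHistory.front().bytes;
                mTotalTimeUs -= mHistory.front().delayUs;
                mHistory.pop_front();
            }
        }

        // False until two samples exist, matching the
        // mNumBandwidthHistoryItems < 2 check above.
        bool estimateBps(int32_t *bps) const {
            if (mHistory.size() < 2 || mTotalTimeUs == 0) {
                return false;
            }
            *bps = (int32_t)((double)mTotalBytes * 8E6 / mTotalTimeUs);
            return true;
        }

    private:
        struct Entry { size_t bytes; int64_t delayUs; };
        std::deque<Entry> mHistory;
        size_t mTotalBytes = 0;
        int64_t mTotalTimeUs = 0;
    };

    int main() {
        BandwidthEstimator est;
        est.addSample(64 * 1024, 20000);  // 64 KiB in 20 ms
        est.addSample(64 * 1024, 30000);  // 64 KiB in 30 ms
        int32_t bps;
        if (est.estimateBps(&bps)) {
            printf("estimated bandwidth: %d bps\n", bps);  // ~21 Mbps
        }
        return 0;
    }
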
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index b5d00bf..3e26a95 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -38,10 +38,11 @@
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
 #include <binder/ProcessState.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/IMediaPlayerService.h>
+#include <media/stagefright/HardwareAPI.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaExtractor.h>
 #include <media/stagefright/MetaData.h>
@@ -53,6 +54,7 @@
 #include <OMX_Component.h>
 
 #include "include/ThreadedSource.h"
+#include "include/avc_utils.h"
 
 namespace android {
 
@@ -152,37 +154,37 @@
 
 static const CodecInfo kDecoderInfo[] = {
     { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
+//    { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.Nvidia.mp3.decoder" },
 //    { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
     { MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" },
-//    { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" },
 //    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
+//    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
     { MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" },
-//    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" },
+//    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" },
     { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
     { MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" },
-//    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" },
+//    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" },
     { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
     { MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" },
-//    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" },
     { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" },
     { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" },
+    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" },
-//    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" },
+    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" },
-//    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" },
+    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" },
-//    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcdec" },
     { MEDIA_MIMETYPE_AUDIO_VORBIS, "VorbisDecoder" },
     { MEDIA_MIMETYPE_VIDEO_VPX, "VPXDecoder" },
 };
@@ -194,25 +196,24 @@
     { MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBEncoder" },
     { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" },
     { MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" },
-//    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" },
+    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.encoder" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Encoder" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Encoder" },
-//    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" },
+    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.encoder" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Encoder" },
     { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Encoder" },
-//    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" },
+    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.encoder" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Encoder" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" },
-//    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" },
 };
 
 #undef OPTIONAL
@@ -266,39 +267,6 @@
     return NULL;
 }
 
-enum {
-    kAVCProfileBaseline      = 0x42,
-    kAVCProfileMain          = 0x4d,
-    kAVCProfileExtended      = 0x58,
-    kAVCProfileHigh          = 0x64,
-    kAVCProfileHigh10        = 0x6e,
-    kAVCProfileHigh422       = 0x7a,
-    kAVCProfileHigh444       = 0xf4,
-    kAVCProfileCAVLC444Intra = 0x2c
-};
-
-static const char *AVCProfileToString(uint8_t profile) {
-    switch (profile) {
-        case kAVCProfileBaseline:
-            return "Baseline";
-        case kAVCProfileMain:
-            return "Main";
-        case kAVCProfileExtended:
-            return "Extended";
-        case kAVCProfileHigh:
-            return "High";
-        case kAVCProfileHigh10:
-            return "High 10";
-        case kAVCProfileHigh422:
-            return "High 422";
-        case kAVCProfileHigh444:
-            return "High 444";
-        case kAVCProfileCAVLC444Intra:
-            return "CAVLC 444 Intra";
-        default:   return "Unknown";
-    }
-}
-
 template<class T>
 static void InitOMXParams(T *params) {
     params->nSize = sizeof(T);
@@ -309,16 +277,15 @@
 }
 
 static bool IsSoftwareCodec(const char *componentName) {
-    if (!strncmp("OMX.PV.", componentName, 7)) {
-        return true;
+    if (!strncmp("OMX.", componentName, 4)) {
+        return false;
     }
 
-    return false;
+    return true;
 }
 
 // A sort order in which non-OMX components are first,
-// followed by software codecs, i.e. OMX.PV.*, followed
-// by all the others.
+// followed by software codecs, and followed by all the others.
 static int CompareSoftwareCodecsFirst(
         const String8 *elem1, const String8 *elem2) {
     bool isNotOMX1 = strncmp(elem1->string(), "OMX.", 4);
@@ -352,9 +319,13 @@
         const char *componentName, bool isEncoder) {
     uint32_t quirks = 0;
 
-    if (!strcmp(componentName, "OMX.PV.avcdec")) {
-        quirks |= kWantsNALFragments;
+    if (!strcmp(componentName, "OMX.Nvidia.amr.decoder") ||
+         !strcmp(componentName, "OMX.Nvidia.amrwb.decoder") ||
+         !strcmp(componentName, "OMX.Nvidia.aac.decoder") ||
+         !strcmp(componentName, "OMX.Nvidia.mp3.decoder")) {
+        quirks |= kDecoderLiesAboutNumberOfChannels;
     }
+
     if (!strcmp(componentName, "OMX.TI.MP3.decode")) {
         quirks |= kNeedsFlushBeforeDisable;
         quirks |= kDecoderLiesAboutNumberOfChannels;
@@ -414,13 +385,6 @@
         quirks |= kOutputBuffersAreUnreadable;
     }
 
-    if (!strncmp(componentName, "OMX.SEC.", 8) && isEncoder) {
-        // These input buffers contain meta data (for instance,
-        // information helps locate the actual YUV data, or
-        // the physical address of the YUV data).
-        quirks |= kStoreMetaDataInInputVideoBuffers;
-    }
-
     return quirks;
 }
 
@@ -456,7 +420,16 @@
             continue;
         }
 
-        matchingCodecs->push(String8(componentName));
+        // When requesting software-only codecs, only push software codecs.
+        // When requesting hardware-only codecs, only push hardware codecs.
+        // When neither software-only nor hardware-only codecs are
+        // requested, push all codecs.
+        if (((flags & kSoftwareCodecsOnly) &&   IsSoftwareCodec(componentName)) ||
+            ((flags & kHardwareCodecsOnly) &&  !IsSoftwareCodec(componentName)) ||
+            (!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) {
+
+            matchingCodecs->push(String8(componentName));
+        }
     }
 
     if (flags & kPreferSoftwareCodecs) {
@@ -470,7 +443,8 @@
         const sp<MetaData> &meta, bool createEncoder,
         const sp<MediaSource> &source,
         const char *matchComponentName,
-        uint32_t flags) {
+        uint32_t flags,
+        const sp<ANativeWindow> &nativeWindow) {
     const char *mime;
     bool success = meta->findCString(kKeyMIMEType, &mime);
     CHECK(success);
@@ -526,7 +500,7 @@
             sp<OMXCodec> codec = new OMXCodec(
                     omx, node, quirks,
                     createEncoder, mime, componentName,
-                    source);
+                    source, nativeWindow);
 
             observer->setCodec(codec);
 
@@ -544,13 +518,29 @@
 }
 
 status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) {
+    mIsMetaDataStoredInVideoBuffers = false;
+    if (flags & kStoreMetaDataInVideoBuffers) {
+        mIsMetaDataStoredInVideoBuffers = true;
+    }
+
+    mOnlySubmitOneBufferAtOneTime = false;
+    if (flags & kOnlySubmitOneInputBufferAtOneTime) {
+        mOnlySubmitOneBufferAtOneTime = true;
+    }
+
+    mEnableGrallocUsageProtected = false;
+    if (flags & kEnableGrallocUsageProtected) {
+        mEnableGrallocUsageProtected = true;
+    }
+    LOGV("configureCodec protected=%d", mEnableGrallocUsageProtected);
+
     if (!(flags & kIgnoreCodecSpecificData)) {
         uint32_t type;
         const void *data;
         size_t size;
         if (meta->findData(kKeyESDS, &type, &data, &size)) {
             ESDS esds((const char *)data, size);
-            CHECK_EQ(esds.InitCheck(), OK);
+            CHECK_EQ(esds.InitCheck(), (status_t)OK);
 
             const void *codec_specific_data;
             size_t codec_specific_data_size;
@@ -565,7 +555,7 @@
             const uint8_t *ptr = (const uint8_t *)data;
 
             CHECK(size >= 7);
-            CHECK_EQ(ptr[0], 1);  // configurationVersion == 1
+            CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
             uint8_t profile = ptr[1];
             uint8_t level = ptr[3];
 
@@ -619,7 +609,7 @@
                 size -= length;
             }
 
-            CODEC_LOGV(
+            CODEC_LOGI(
                     "AVC profile = %d (%s), level = %d",
                     (int)profile, AVCProfileToString(profile), level);
 
@@ -734,6 +724,16 @@
         mQuirks &= ~kOutputBuffersAreUnreadable;
     }
 
+    if (mNativeWindow != NULL
+        && !mIsEncoder
+        && !strncasecmp(mMIME, "video/", 6)
+        && !strncmp(mComponentName, "OMX.", 4)) {
+        status_t err = initNativeWindow();
+        if (err != OK) {
+            return err;
+        }
+    }
+
     return OK;
 }
 
@@ -744,7 +744,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     if ((portIndex == kPortIndexInput && (mQuirks & kInputBufferSizesAreBogus))
         || (def.nBufferSize < size)) {
@@ -753,11 +753,11 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     // Make sure the setting actually stuck.
     if (portIndex == kPortIndexInput
@@ -917,7 +917,7 @@
     int32_t width, height, frameRate, bitRate, stride, sliceHeight;
     bool success = meta->findInt32(kKeyWidth, &width);
     success = success && meta->findInt32(kKeyHeight, &height);
-    success = success && meta->findInt32(kKeySampleRate, &frameRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
     success = success && meta->findInt32(kKeyBitRate, &bitRate);
     success = success && meta->findInt32(kKeyStride, &stride);
     success = success && meta->findInt32(kKeySliceHeight, &sliceHeight);
@@ -937,7 +937,7 @@
     }
 
     OMX_COLOR_FORMATTYPE colorFormat;
-    CHECK_EQ(OK, findTargetColorFormat(meta, &colorFormat));
+    CHECK_EQ((status_t)OK, findTargetColorFormat(meta, &colorFormat));
 
     status_t err;
     OMX_PARAM_PORTDEFINITIONTYPE def;
@@ -946,19 +946,19 @@
     //////////////////////// Input port /////////////////////////
     CHECK_EQ(setVideoPortFormatType(
             kPortIndexInput, OMX_VIDEO_CodingUnused,
-            colorFormat), OK);
+            colorFormat), (status_t)OK);
 
     InitOMXParams(&def);
     def.nPortIndex = kPortIndexInput;
 
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     def.nBufferSize = getFrameSize(colorFormat,
             stride > 0? stride: -stride, sliceHeight);
 
-    CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
 
     video_def->nFrameWidth = width;
     video_def->nFrameHeight = height;
@@ -970,20 +970,20 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     //////////////////////// Output port /////////////////////////
     CHECK_EQ(setVideoPortFormatType(
             kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused),
-            OK);
+            (status_t)OK);
     InitOMXParams(&def);
     def.nPortIndex = kPortIndexOutput;
 
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
 
-    CHECK_EQ(err, OK);
-    CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+    CHECK_EQ(err, (status_t)OK);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
 
     video_def->nFrameWidth = width;
     video_def->nFrameHeight = height;
@@ -998,23 +998,23 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     /////////////////// Codec-specific ////////////////////////
     switch (compressionFormat) {
         case OMX_VIDEO_CodingMPEG4:
         {
-            CHECK_EQ(setupMPEG4EncoderParameters(meta), OK);
+            CHECK_EQ(setupMPEG4EncoderParameters(meta), (status_t)OK);
             break;
         }
 
         case OMX_VIDEO_CodingH263:
-            CHECK_EQ(setupH263EncoderParameters(meta), OK);
+            CHECK_EQ(setupH263EncoderParameters(meta), (status_t)OK);
             break;
 
         case OMX_VIDEO_CodingAVC:
         {
-            CHECK_EQ(setupAVCEncoderParameters(meta), OK);
+            CHECK_EQ(setupAVCEncoderParameters(meta), (status_t)OK);
             break;
         }
 
@@ -1073,7 +1073,7 @@
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamVideoBitrate,
             &bitrateType, sizeof(bitrateType));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     bitrateType.eControlRate = OMX_Video_ControlRateVariable;
     bitrateType.nTargetBitrate = bitRate;
@@ -1081,7 +1081,7 @@
     err = mOMX->setParameter(
             mNode, OMX_IndexParamVideoBitrate,
             &bitrateType, sizeof(bitrateType));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
     return OK;
 }
 
@@ -1137,7 +1137,7 @@
 status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) {
     int32_t iFramesInterval, frameRate, bitRate;
     bool success = meta->findInt32(kKeyBitRate, &bitRate);
-    success = success && meta->findInt32(kKeySampleRate, &frameRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
     success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
     CHECK(success);
     OMX_VIDEO_PARAM_H263TYPE h263type;
@@ -1146,7 +1146,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     h263type.nAllowedPictureTypes =
         OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
@@ -1173,10 +1173,10 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(setupBitRate(bitRate), OK);
-    CHECK_EQ(setupErrorCorrectionParameters(), OK);
+    CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
+    CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
 
     return OK;
 }
@@ -1184,7 +1184,7 @@
 status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) {
     int32_t iFramesInterval, frameRate, bitRate;
     bool success = meta->findInt32(kKeyBitRate, &bitRate);
-    success = success && meta->findInt32(kKeySampleRate, &frameRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
     success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
     CHECK(success);
     OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
@@ -1193,7 +1193,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     mpeg4type.nSliceHeaderSpacing = 0;
     mpeg4type.bSVH = OMX_FALSE;
@@ -1225,10 +1225,10 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(setupBitRate(bitRate), OK);
-    CHECK_EQ(setupErrorCorrectionParameters(), OK);
+    CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
+    CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
 
     return OK;
 }
@@ -1236,7 +1236,7 @@
 status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) {
     int32_t iFramesInterval, frameRate, bitRate;
     bool success = meta->findInt32(kKeyBitRate, &bitRate);
-    success = success && meta->findInt32(kKeySampleRate, &frameRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
     success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
     CHECK(success);
 
@@ -1246,7 +1246,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     h264type.nAllowedPictureTypes =
         OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
@@ -1292,11 +1292,15 @@
     h264type.bMBAFF = OMX_FALSE;
     h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
 
+    if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName)) {
+        h264type.eLevel = OMX_VIDEO_AVCLevelMax;
+    }
+
     err = mOMX->setParameter(
             mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(setupBitRate(bitRate), OK);
+    CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
 
     return OK;
 }
@@ -1334,8 +1338,8 @@
         status_t err = mOMX->getParameter(
                 mNode, OMX_IndexParamVideoPortFormat,
                 &format, sizeof(format));
-        CHECK_EQ(err, OK);
-        CHECK_EQ(format.eCompressionFormat, OMX_VIDEO_CodingUnused);
+        CHECK_EQ(err, (status_t)OK);
+        CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
 
         static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
 
@@ -1363,7 +1367,7 @@
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
 
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
 #if 1
     // XXX Need a (much) better heuristic to compute input buffer sizes.
@@ -1373,7 +1377,7 @@
     }
 #endif
 
-    CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
 
     video_def->nFrameWidth = width;
     video_def->nFrameHeight = height;
@@ -1395,8 +1399,8 @@
 
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
-    CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+    CHECK_EQ(err, (status_t)OK);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
 
 #if 0
     def.nBufferSize =
@@ -1417,7 +1421,8 @@
         bool isEncoder,
         const char *mime,
         const char *componentName,
-        const sp<MediaSource> &source)
+        const sp<MediaSource> &source,
+        const sp<ANativeWindow> &nativeWindow)
     : mOMX(omx),
       mOMXLivesLocally(omx->livesLocally(getpid())),
       mNode(node),
@@ -1435,9 +1440,10 @@
       mSeekTimeUs(-1),
       mSeekMode(ReadOptions::SEEK_CLOSEST_SYNC),
       mTargetTimeUs(-1),
-      mSkipTimeUs(-1),
+      mOutputPortSettingsChangedPending(false),
       mLeftOverBuffer(NULL),
-      mPaused(false) {
+      mPaused(false),
+      mNativeWindow(nativeWindow) {
     mPortStatus[kPortIndexInput] = ENABLED;
     mPortStatus[kPortIndexOutput] = ENABLED;
 
@@ -1515,10 +1521,10 @@
 OMXCodec::~OMXCodec() {
     mSource.clear();
 
-    CHECK(mState == LOADED || mState == ERROR);
+    CHECK(mState == LOADED || mState == ERROR || mState == LOADED_TO_IDLE);
 
     status_t err = mOMX->freeNode(mNode);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     mNode = NULL;
     setState(DEAD);
@@ -1535,21 +1541,23 @@
 status_t OMXCodec::init() {
     // mLock is held.
 
-    CHECK_EQ(mState, LOADED);
+    CHECK_EQ((int)mState, (int)LOADED);
 
     status_t err;
     if (!(mQuirks & kRequiresLoadedToIdleAfterAllocation)) {
         err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
         setState(LOADED_TO_IDLE);
     }
 
     err = allocateBuffers();
-    CHECK_EQ(err, OK);
+    if (err != (status_t)OK) {
+        return err;
+    }
 
     if (mQuirks & kRequiresLoadedToIdleAfterAllocation) {
         err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
 
         setState(LOADED_TO_IDLE);
     }
@@ -1581,6 +1589,10 @@
 }
 
 status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+        return allocateOutputBuffersFromNativeWindow();
+    }
+
     OMX_PARAM_PORTDEFINITIONTYPE def;
     InitOMXParams(&def);
     def.nPortIndex = portIndex;
@@ -1592,6 +1604,14 @@
         return err;
     }
 
+    if (mIsMetaDataStoredInVideoBuffers && portIndex == kPortIndexInput) {
+        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);
+        if (err != OK) {
+            LOGE("Storing meta data in video buffers is not supported");
+            return err;
+        }
+    }
+
     CODEC_LOGI("allocating %lu buffers of size %lu on %s port",
             def.nBufferCountActual, def.nBufferSize,
             portIndex == kPortIndexInput ? "input" : "output");
@@ -1646,7 +1666,7 @@
         }
 
         info.mBuffer = buffer;
-        info.mOwnedByComponent = false;
+        info.mStatus = OWNED_BY_US;
         info.mMem = mem;
         info.mMediaBuffer = NULL;
 
@@ -1673,6 +1693,224 @@
     return OK;
 }
 
+status_t OMXCodec::applyRotation() {
+    sp<MetaData> meta = mSource->getFormat();
+
+    int32_t rotationDegrees;
+    if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
+
+    uint32_t transform;
+    switch (rotationDegrees) {
+        case 0: transform = 0; break;
+        case 90: transform = HAL_TRANSFORM_ROT_90; break;
+        case 180: transform = HAL_TRANSFORM_ROT_180; break;
+        case 270: transform = HAL_TRANSFORM_ROT_270; break;
+        default: transform = 0; break;
+    }
+
+    status_t err = OK;
+
+    if (transform) {
+        err = native_window_set_buffers_transform(
+                mNativeWindow.get(), transform);
+    }
+
+    return err;
+}
+
+status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
+    // Get the number of buffers needed.
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    if (err != OK) {
+        return err;
+    }
+
+    err = native_window_set_buffers_geometry(
+            mNativeWindow.get(),
+            def.format.video.nFrameWidth,
+            def.format.video.nFrameHeight,
+            def.format.video.eColorFormat);
+
+    if (err != 0) {
+        LOGE("native_window_set_buffers_geometry failed: %s (%d)",
+                strerror(-err), -err);
+        return err;
+    }
+
+    err = applyRotation();
+    if (err != OK) {
+        return err;
+    }
+
+    // Set up the native window.
+    OMX_U32 usage = 0;
+    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
+    if (err != 0) {
+        LOGW("querying usage flags from OMX IL component failed: %d", err);
+        // XXX: Currently this error is logged, but not fatal.
+        usage = 0;
+    }
+    if (mEnableGrallocUsageProtected) {
+        usage |= GRALLOC_USAGE_PROTECTED;
+    }
+
+    LOGV("native_window_set_usage usage=0x%x", usage);
+    err = native_window_set_usage(
+            mNativeWindow.get(), usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+    if (err != 0) {
+        LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+        return err;
+    }
+
+    int minUndequeuedBufs = 0;
+    err = mNativeWindow->query(mNativeWindow.get(),
+            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs);
+    if (err != 0) {
+        LOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
+                strerror(-err), -err);
+        return err;
+    }
+
+    // XXX: Is this the right logic to use?  It's not clear to me what the OMX
+    // buffer counts refer to - how do they account for the renderer holding on
+    // to buffers?
+    if (def.nBufferCountActual < def.nBufferCountMin + minUndequeuedBufs) {
+        OMX_U32 newBufferCount = def.nBufferCountMin + minUndequeuedBufs;
+        def.nBufferCountActual = newBufferCount;
+        err = mOMX->setParameter(
+                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+        if (err != OK) {
+            CODEC_LOGE("setting nBufferCountActual to %lu failed: %d",
+                    newBufferCount, err);
+            return err;
+        }
+    }
+
+    err = native_window_set_buffer_count(
+            mNativeWindow.get(), def.nBufferCountActual);
+    if (err != 0) {
+        LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+                -err);
+        return err;
+    }
+
+    CODEC_LOGI("allocating %lu buffers from a native window of size %lu on "
+            "output port", def.nBufferCountActual, def.nBufferSize);
+
+    // Dequeue buffers and send them to OMX
+    for (OMX_U32 i = 0; i < def.nBufferCountActual; i++) {
+        android_native_buffer_t* buf;
+        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+        if (err != 0) {
+            LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+            break;
+        }
+
+        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+        BufferInfo info;
+        info.mData = NULL;
+        info.mSize = def.nBufferSize;
+        info.mStatus = OWNED_BY_US;
+        info.mMem = NULL;
+        info.mMediaBuffer = new MediaBuffer(graphicBuffer);
+        info.mMediaBuffer->setObserver(this);
+        mPortBuffers[kPortIndexOutput].push(info);
+
+        IOMX::buffer_id bufferId;
+        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+                &bufferId);
+        if (err != 0) {
+            CODEC_LOGE("registering GraphicBuffer with OMX IL component "
+                    "failed: %d", err);
+            break;
+        }
+
+        mPortBuffers[kPortIndexOutput].editItemAt(i).mBuffer = bufferId;
+
+        CODEC_LOGV("registered graphic buffer with ID %p (pointer = %p)",
+                bufferId, graphicBuffer.get());
+    }
+
+    OMX_U32 cancelStart;
+    OMX_U32 cancelEnd;
+    if (err != 0) {
+        // If an error occurred while dequeuing we need to cancel any buffers
+        // that were dequeued.
+        cancelStart = 0;
+        cancelEnd = mPortBuffers[kPortIndexOutput].size();
+    } else {
+        // Return the last two buffers to the native window.
+        cancelStart = def.nBufferCountActual - minUndequeuedBufs;
+        cancelEnd = def.nBufferCountActual;
+    }
+
+    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+        BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(i);
+        cancelBufferToNativeWindow(info);
+    }
+
+    return err;
+}
+
+status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
+    CODEC_LOGV("Calling cancelBuffer on buffer %p", info->mBuffer);
+    int err = mNativeWindow->cancelBuffer(
+        mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get());
+    if (err != 0) {
+      CODEC_LOGE("cancelBuffer failed w/ error 0x%08x", err);
+
+      setState(ERROR);
+      return err;
+    }
+    info->mStatus = OWNED_BY_NATIVE_WINDOW;
+    return OK;
+}
+
+OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
+    // Dequeue the next buffer from the native window.
+    android_native_buffer_t* buf;
+    int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+    if (err != 0) {
+      CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
+
+      setState(ERROR);
+      return 0;
+    }
+
+    // Determine which buffer we just dequeued.
+    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+    BufferInfo *bufInfo = 0;
+    for (size_t i = 0; i < buffers->size(); i++) {
+      sp<GraphicBuffer> graphicBuffer = buffers->itemAt(i).
+          mMediaBuffer->graphicBuffer();
+      if (graphicBuffer->handle == buf->handle) {
+        bufInfo = &buffers->editItemAt(i);
+        break;
+      }
+    }
+
+    if (bufInfo == 0) {
+        CODEC_LOGE("dequeued unrecognized buffer: %p", buf);
+
+        setState(ERROR);
+        return 0;
+    }
+
+    // The native window no longer owns the buffer.
+    CHECK_EQ((int)bufInfo->mStatus, (int)OWNED_BY_NATIVE_WINDOW);
+    bufInfo->mStatus = OWNED_BY_US;
+
+    return bufInfo;
+}
+
 void OMXCodec::on_message(const omx_message &msg) {
     switch (msg.type) {
         case omx_message::EVENT:
@@ -1697,32 +1935,35 @@
             }
 
             CHECK(i < buffers->size());
-            if (!(*buffers)[i].mOwnedByComponent) {
+            if ((*buffers)[i].mStatus != OWNED_BY_COMPONENT) {
                 LOGW("We already own input buffer %p, yet received "
                      "an EMPTY_BUFFER_DONE.", buffer);
             }
 
-            {
-                BufferInfo *info = &buffers->editItemAt(i);
-                info->mOwnedByComponent = false;
-                if (info->mMediaBuffer != NULL) {
-                    // It is time to release the media buffers storing meta data
-                    info->mMediaBuffer->release();
-                    info->mMediaBuffer = NULL;
+            BufferInfo* info = &buffers->editItemAt(i);
+            info->mStatus = OWNED_BY_US;
+
+            // The buffer cannot be released until EMPTY_BUFFER_DONE is called.
+            if (info->mMediaBuffer != NULL) {
+                if (mIsEncoder &&
+                    (mQuirks & kAvoidMemcopyInputRecordingFrames)) {
+                    // If zero-copy mode is enabled this will send the
+                    // input buffer back to the upstream source.
+                    restorePatchedDataPointer(info);
                 }
+
+                info->mMediaBuffer->release();
+                info->mMediaBuffer = NULL;
             }
 
             if (mPortStatus[kPortIndexInput] == DISABLING) {
                 CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
 
-                status_t err =
-                    mOMX->freeBuffer(mNode, kPortIndexInput, buffer);
-                CHECK_EQ(err, OK);
-
-                buffers->removeAt(i);
+                status_t err = freeBuffer(kPortIndexInput, i);
+                CHECK_EQ(err, (status_t)OK);
             } else if (mState != ERROR
                     && mPortStatus[kPortIndexInput] != SHUTTING_DOWN) {
-                CHECK_EQ(mPortStatus[kPortIndexInput], ENABLED);
+                CHECK_EQ((int)mPortStatus[kPortIndexInput], (int)ENABLED);
                 drainInputBuffer(&buffers->editItemAt(i));
             }
             break;
@@ -1749,21 +1990,19 @@
             CHECK(i < buffers->size());
             BufferInfo *info = &buffers->editItemAt(i);
 
-            if (!info->mOwnedByComponent) {
+            if (info->mStatus != OWNED_BY_COMPONENT) {
                 LOGW("We already own output buffer %p, yet received "
                      "a FILL_BUFFER_DONE.", buffer);
             }
 
-            info->mOwnedByComponent = false;
+            info->mStatus = OWNED_BY_US;
 
             if (mPortStatus[kPortIndexOutput] == DISABLING) {
                 CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
 
-                status_t err =
-                    mOMX->freeBuffer(mNode, kPortIndexOutput, buffer);
-                CHECK_EQ(err, OK);
+                status_t err = freeBuffer(kPortIndexOutput, i);
+                CHECK_EQ(err, (status_t)OK);
 
-                buffers->removeAt(i);
 #if 0
             } else if (mPortStatus[kPortIndexOutput] == ENABLED
                        && (flags & OMX_BUFFERFLAG_EOS)) {
@@ -1772,7 +2011,7 @@
                 mBufferFilled.signal();
 #endif
             } else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
-                CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
+                CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
 
                 if (info->mMediaBuffer == NULL) {
                     CHECK(mOMXLivesLocally);
@@ -1791,8 +2030,10 @@
                 }
 
                 MediaBuffer *buffer = info->mMediaBuffer;
+                bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
 
-                if (msg.u.extended_buffer_data.range_offset
+                if (!isGraphicBuffer
+                    && msg.u.extended_buffer_data.range_offset
                         + msg.u.extended_buffer_data.range_length
                             > buffer->size()) {
                     CODEC_LOGE(
@@ -1816,7 +2057,7 @@
                     buffer->meta_data()->setInt32(kKeyIsCodecConfig, true);
                 }
 
-                if (mQuirks & kOutputBuffersAreUnreadable) {
+                if (isGraphicBuffer || mQuirks & kOutputBuffersAreUnreadable) {
                     buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
                 }
 
@@ -1855,6 +2096,9 @@
 
                 mFilledBuffers.push_back(i);
                 mBufferFilled.signal();
+                if (mIsEncoder) {
+                    sched_yield();
+                }
             }
 
             break;
@@ -1868,48 +2112,6 @@
     }
 }
 
-void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
-    switch (event) {
-        case OMX_EventCmdComplete:
-        {
-            onCmdComplete((OMX_COMMANDTYPE)data1, data2);
-            break;
-        }
-
-        case OMX_EventError:
-        {
-            CODEC_LOGE("ERROR(0x%08lx, %ld)", data1, data2);
-
-            setState(ERROR);
-            break;
-        }
-
-        case OMX_EventPortSettingsChanged:
-        {
-            onPortSettingsChanged(data1);
-            break;
-        }
-
-#if 0
-        case OMX_EventBufferFlag:
-        {
-            CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1);
-
-            if (data1 == kPortIndexOutput) {
-                mNoMoreOutputData = true;
-            }
-            break;
-        }
-#endif
-
-        default:
-        {
-            CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2);
-            break;
-        }
-    }
-}
-
 // Has the format changed in any way that the client would have to be aware of?
 static bool formatHasNotablyChanged(
         const sp<MetaData> &from, const sp<MetaData> &to) {
@@ -1954,6 +2156,21 @@
         if (height_from != height_to) {
             return true;
         }
+
+        int32_t left_from, top_from, right_from, bottom_from;
+        CHECK(from->findRect(
+                    kKeyCropRect,
+                    &left_from, &top_from, &right_from, &bottom_from));
+
+        int32_t left_to, top_to, right_to, bottom_to;
+        CHECK(to->findRect(
+                    kKeyCropRect,
+                    &left_to, &top_to, &right_to, &bottom_to));
+
+        if (left_to != left_from || top_to != top_from
+                || right_to != right_from || bottom_to != bottom_from) {
+            return true;
+        }
     } else if (!strcasecmp(mime_from, MEDIA_MIMETYPE_AUDIO_RAW)) {
         int32_t numChannels_from, numChannels_to;
         CHECK(from->findInt32(kKeyChannelCount, &numChannels_from));
@@ -1975,6 +2192,79 @@
     return false;
 }
 
+void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            onCmdComplete((OMX_COMMANDTYPE)data1, data2);
+            break;
+        }
+
+        case OMX_EventError:
+        {
+            CODEC_LOGE("ERROR(0x%08lx, %ld)", data1, data2);
+
+            setState(ERROR);
+            break;
+        }
+
+        case OMX_EventPortSettingsChanged:
+        {
+            CODEC_LOGV("OMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)",
+                       data1, data2);
+
+            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+                onPortSettingsChanged(data1);
+            } else if (data1 == kPortIndexOutput
+                    && data2 == OMX_IndexConfigCommonOutputCrop) {
+
+                sp<MetaData> oldOutputFormat = mOutputFormat;
+                initOutputFormat(mSource->getFormat());
+
+                if (formatHasNotablyChanged(oldOutputFormat, mOutputFormat)) {
+                    mOutputPortSettingsHaveChanged = true;
+
+                    if (mNativeWindow != NULL) {
+                        int32_t left, top, right, bottom;
+                        CHECK(mOutputFormat->findRect(
+                                    kKeyCropRect,
+                                    &left, &top, &right, &bottom));
+
+                        android_native_rect_t crop;
+                        crop.left = left;
+                        crop.top = top;
+                        crop.right = right;
+                        crop.bottom = bottom;
+
+                        // We'll ignore any errors here; if the surface is
+                        // already invalid, we'll know soon enough.
+                        native_window_set_crop(mNativeWindow.get(), &crop);
+                    }
+                }
+            }
+            break;
+        }
+
+#if 0
+        case OMX_EventBufferFlag:
+        {
+            CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1);
+
+            if (data1 == kPortIndexOutput) {
+                mNoMoreOutputData = true;
+            }
+            break;
+        }
+#endif
+
+        default:
+        {
+            CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2);
+            break;
+        }
+    }
+}
+
 void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {
     switch (cmd) {
         case OMX_CommandStateSet:
@@ -1989,13 +2279,13 @@
             CODEC_LOGV("PORT_DISABLED(%ld)", portIndex);
 
             CHECK(mState == EXECUTING || mState == RECONFIGURING);
-            CHECK_EQ(mPortStatus[portIndex], DISABLING);
-            CHECK_EQ(mPortBuffers[portIndex].size(), 0);
+            CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLING);
+            CHECK_EQ(mPortBuffers[portIndex].size(), 0u);
 
             mPortStatus[portIndex] = DISABLED;
 
             if (mState == RECONFIGURING) {
-                CHECK_EQ(portIndex, kPortIndexOutput);
+                CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
 
                 sp<MetaData> oldOutputFormat = mOutputFormat;
                 initOutputFormat(mSource->getFormat());
@@ -2009,7 +2299,11 @@
                 enablePortAsync(portIndex);
 
                 status_t err = allocateBuffersOnPort(portIndex);
-                CHECK_EQ(err, OK);
+
+                if (err != OK) {
+                    CODEC_LOGE("allocateBuffersOnPort failed (err = %d)", err);
+                    setState(ERROR);
+                }
             }
             break;
         }
@@ -2020,12 +2314,12 @@
             CODEC_LOGV("PORT_ENABLED(%ld)", portIndex);
 
             CHECK(mState == EXECUTING || mState == RECONFIGURING);
-            CHECK_EQ(mPortStatus[portIndex], ENABLING);
+            CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLING);
 
             mPortStatus[portIndex] = ENABLED;
 
             if (mState == RECONFIGURING) {
-                CHECK_EQ(portIndex, kPortIndexOutput);
+                CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
 
                 setState(EXECUTING);
 
@@ -2040,14 +2334,14 @@
 
             CODEC_LOGV("FLUSH_DONE(%ld)", portIndex);
 
-            CHECK_EQ(mPortStatus[portIndex], SHUTTING_DOWN);
+            CHECK_EQ((int)mPortStatus[portIndex], (int)SHUTTING_DOWN);
             mPortStatus[portIndex] = ENABLED;
 
             CHECK_EQ(countBuffersWeOwn(mPortBuffers[portIndex]),
                      mPortBuffers[portIndex].size());
 
             if (mState == RECONFIGURING) {
-                CHECK_EQ(portIndex, kPortIndexOutput);
+                CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
 
                 disablePortAsync(portIndex);
             } else if (mState == EXECUTING_TO_IDLE) {
@@ -2061,7 +2355,7 @@
 
                     status_t err =
                         mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
-                    CHECK_EQ(err, OK);
+                    CHECK_EQ(err, (status_t)OK);
                 }
             } else {
                 // We're flushing both ports in preparation for seeking.
@@ -2077,6 +2371,14 @@
                     drainInputBuffers();
                     fillOutputBuffers();
                 }
+
+                if (mOutputPortSettingsChangedPending) {
+                    CODEC_LOGV(
+                            "Honoring deferred output port settings change.");
+
+                    mOutputPortSettingsChangedPending = false;
+                    onPortSettingsChanged(kPortIndexOutput);
+                }
             }
 
             break;
@@ -2101,11 +2403,11 @@
                 status_t err = mOMX->sendCommand(
                         mNode, OMX_CommandStateSet, OMX_StateExecuting);
 
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 setState(IDLE_TO_EXECUTING);
             } else {
-                CHECK_EQ(mState, EXECUTING_TO_IDLE);
+                CHECK_EQ((int)mState, (int)EXECUTING_TO_IDLE);
 
                 CHECK_EQ(
                     countBuffersWeOwn(mPortBuffers[kPortIndexInput]),
@@ -2118,13 +2420,13 @@
                 status_t err = mOMX->sendCommand(
                         mNode, OMX_CommandStateSet, OMX_StateLoaded);
 
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 err = freeBuffersOnPort(kPortIndexInput);
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 err = freeBuffersOnPort(kPortIndexOutput);
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 mPortStatus[kPortIndexInput] = ENABLED;
                 mPortStatus[kPortIndexOutput] = ENABLED;
@@ -2136,10 +2438,12 @@
 
         case OMX_StateExecuting:
         {
-            CHECK_EQ(mState, IDLE_TO_EXECUTING);
+            CHECK_EQ((int)mState, (int)IDLE_TO_EXECUTING);
 
             CODEC_LOGV("Now Executing.");
 
+            mOutputPortSettingsChangedPending = false;
+
             setState(EXECUTING);
 
             // Buffers will be submitted to the component in the first
@@ -2152,7 +2456,7 @@
 
         case OMX_StateLoaded:
         {
-            CHECK_EQ(mState, IDLE_TO_LOADED);
+            CHECK_EQ((int)mState, (int)IDLE_TO_LOADED);
 
             CODEC_LOGV("Now Loaded.");
 
@@ -2178,7 +2482,7 @@
 size_t OMXCodec::countBuffersWeOwn(const Vector<BufferInfo> &buffers) {
     size_t n = 0;
     for (size_t i = 0; i < buffers.size(); ++i) {
-        if (!buffers[i].mOwnedByComponent) {
+        if (buffers[i].mStatus != OWNED_BY_COMPONENT) {
             ++n;
         }
     }
@@ -2195,32 +2499,21 @@
     for (size_t i = buffers->size(); i-- > 0;) {
         BufferInfo *info = &buffers->editItemAt(i);
 
-        if (onlyThoseWeOwn && info->mOwnedByComponent) {
+        if (onlyThoseWeOwn && info->mStatus == OWNED_BY_COMPONENT) {
             continue;
         }
 
-        CHECK_EQ(info->mOwnedByComponent, false);
+        CHECK(info->mStatus == OWNED_BY_US
+                || info->mStatus == OWNED_BY_NATIVE_WINDOW);
 
         CODEC_LOGV("freeing buffer %p on port %ld", info->mBuffer, portIndex);
 
-        status_t err =
-            mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
+        status_t err = freeBuffer(portIndex, i);
 
         if (err != OK) {
             stickyErr = err;
         }
 
-        if (info->mMediaBuffer != NULL) {
-            info->mMediaBuffer->setObserver(NULL);
-
-            // Make sure nobody but us owns this buffer at this point.
-            CHECK_EQ(info->mMediaBuffer->refcount(), 0);
-
-            info->mMediaBuffer->release();
-            info->mMediaBuffer = NULL;
-        }
-
-        buffers->removeAt(i);
     }
 
     CHECK(onlyThoseWeOwn || buffers->isEmpty());
@@ -2228,11 +2521,50 @@
     return stickyErr;
 }
 
+status_t OMXCodec::freeBuffer(OMX_U32 portIndex, size_t bufIndex) {
+    Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+    BufferInfo *info = &buffers->editItemAt(bufIndex);
+
+    status_t err = mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
+
+    if (err == OK && info->mMediaBuffer != NULL) {
+        CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
+        info->mMediaBuffer->setObserver(NULL);
+
+        // Make sure nobody but us owns this buffer at this point.
+        CHECK_EQ(info->mMediaBuffer->refcount(), 0);
+
+        // Cancel the buffer if it belongs to an ANativeWindow.
+        sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+        if (info->mStatus == OWNED_BY_US && graphicBuffer != 0) {
+            err = cancelBufferToNativeWindow(info);
+        }
+
+        info->mMediaBuffer->release();
+        info->mMediaBuffer = NULL;
+    }
+
+    if (err == OK) {
+        buffers->removeAt(bufIndex);
+    }
+
+    return err;
+}
+
 void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) {
     CODEC_LOGV("PORT_SETTINGS_CHANGED(%ld)", portIndex);
 
-    CHECK_EQ(mState, EXECUTING);
-    CHECK_EQ(portIndex, kPortIndexOutput);
+    CHECK_EQ((int)mState, (int)EXECUTING);
+    CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
+    CHECK(!mOutputPortSettingsChangedPending);
+
+    if (mPortStatus[kPortIndexOutput] != ENABLED) {
+        CODEC_LOGV("Deferring output port settings change.");
+        mOutputPortSettingsChangedPending = true;
+        return;
+    }
+
     setState(RECONFIGURING);
 
     if (mQuirks & kNeedsFlushBeforeDisable) {
@@ -2252,7 +2584,7 @@
          portIndex, countBuffersWeOwn(mPortBuffers[portIndex]),
          mPortBuffers[portIndex].size());
 
-    CHECK_EQ(mPortStatus[portIndex], ENABLED);
+    CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
     mPortStatus[portIndex] = SHUTTING_DOWN;
 
     if ((mQuirks & kRequiresFlushCompleteEmulation)
@@ -2266,7 +2598,7 @@
 
     status_t err =
         mOMX->sendCommand(mNode, OMX_CommandFlush, portIndex);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     return true;
 }
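
Most of the mechanical changes in this file add an explicit (status_t) or (int) cast to one side of CHECK_EQ. Presumably the assertion macro deduces a single common type for both operands (for comparison and for formatting the failure message), so mixing a status_t with an untyped OK enumerator or an enum state value either fails to deduce or trips sign/size warnings; the cast pins both sides to the same type. A tiny illustration with a hand-rolled check, not the real ADebug macro:

```cpp
#include <cstdint>
#include <cstdlib>
#include <iostream>

typedef int32_t status_t;
enum { OK = 0 };  // untyped enumerator, like the framework's OK

// Simplified stand-in for CHECK_EQ: both operands must deduce to one T.
template <typename T>
void check_eq(const T &a, const T &b, const char *expr) {
    if (!(a == b)) {
        std::cerr << "CHECK_EQ failed: " << expr << "\n";
        std::abort();
    }
}

int main() {
    status_t err = 0;
    // check_eq(err, OK, "err == OK");         // deduction conflict: status_t vs enum
    check_eq(err, (status_t)OK, "err == OK");  // the cast pattern used in this patch
}
```
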
@@ -2274,12 +2606,13 @@
 void OMXCodec::disablePortAsync(OMX_U32 portIndex) {
     CHECK(mState == EXECUTING || mState == RECONFIGURING);
 
-    CHECK_EQ(mPortStatus[portIndex], ENABLED);
+    CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
     mPortStatus[portIndex] = DISABLING;
 
+    CODEC_LOGV("sending OMX_CommandPortDisable(%ld)", portIndex);
     status_t err =
         mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     freeBuffersOnPort(portIndex, true);
 }
@@ -2287,16 +2620,17 @@
 void OMXCodec::enablePortAsync(OMX_U32 portIndex) {
     CHECK(mState == EXECUTING || mState == RECONFIGURING);
 
-    CHECK_EQ(mPortStatus[portIndex], DISABLED);
+    CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLED);
     mPortStatus[portIndex] = ENABLING;
 
+    CODEC_LOGV("sending OMX_CommandPortEnable(%ld)", portIndex);
     status_t err =
         mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 }
 
 void OMXCodec::fillOutputBuffers() {
-    CHECK_EQ(mState, EXECUTING);
+    CHECK_EQ((int)mState, (int)EXECUTING);
 
     // This is a workaround for some decoders not properly reporting
     // end-of-output-stream. If we own all input buffers and also own
@@ -2315,7 +2649,10 @@
 
     Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
     for (size_t i = 0; i < buffers->size(); ++i) {
-        fillOutputBuffer(&buffers->editItemAt(i));
+        BufferInfo *info = &buffers->editItemAt(i);
+        if (info->mStatus == OWNED_BY_US) {
+            fillOutputBuffer(&buffers->editItemAt(i));
+        }
     }
 }
 
@@ -2324,15 +2661,27 @@
 
     Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
     for (size_t i = 0; i < buffers->size(); ++i) {
-        drainInputBuffer(&buffers->editItemAt(i));
+        BufferInfo *info = &buffers->editItemAt(i);
+
+        if (info->mStatus != OWNED_BY_US) {
+            continue;
+        }
+
+        if (!drainInputBuffer(info)) {
+            break;
+        }
+
+        if (mOnlySubmitOneBufferAtOneTime) {
+            break;
+        }
     }
 }
 
-void OMXCodec::drainInputBuffer(BufferInfo *info) {
-    CHECK_EQ(info->mOwnedByComponent, false);
+bool OMXCodec::drainInputBuffer(BufferInfo *info) {
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
 
     if (mSignalledEOS) {
-        return;
+        return false;
     }
 
     if (mCodecSpecificDataIndex < mCodecSpecificData.size()) {
@@ -2366,16 +2715,16 @@
                 mNode, info->mBuffer, 0, size,
                 OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG,
                 0);
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
 
-        info->mOwnedByComponent = true;
+        info->mStatus = OWNED_BY_COMPONENT;
 
         ++mCodecSpecificDataIndex;
-        return;
+        return true;
     }
 
     if (mPaused) {
-        return;
+        return false;
     }
 
     status_t err;
@@ -2385,17 +2734,16 @@
 
     size_t offset = 0;
     int32_t n = 0;
+
     for (;;) {
         MediaBuffer *srcBuffer;
-        MediaSource::ReadOptions options;
-        if (mSkipTimeUs >= 0) {
-            options.setSkipFrame(mSkipTimeUs);
-        }
         if (mSeekTimeUs >= 0) {
             if (mLeftOverBuffer) {
                 mLeftOverBuffer->release();
                 mLeftOverBuffer = NULL;
             }
+
+            MediaSource::ReadOptions options;
             options.setSeekTo(mSeekTimeUs, mSeekMode);
 
             mSeekTimeUs = -1;
@@ -2409,6 +2757,7 @@
                 if (srcBuffer->meta_data()->findInt64(
                             kKeyTargetTime, &targetTimeUs)
                         && targetTimeUs >= 0) {
+                    CODEC_LOGV("targetTimeUs = %lld us", targetTimeUs);
                     mTargetTimeUs = targetTimeUs;
                 } else {
                     mTargetTimeUs = -1;
@@ -2420,13 +2769,14 @@
 
             err = OK;
         } else {
-            err = mSource->read(&srcBuffer, &options);
+            err = mSource->read(&srcBuffer);
         }
 
         if (err != OK) {
             signalEOS = true;
             mFinalStatus = err;
             mSignalledEOS = true;
+            mBufferFilled.signal();
             break;
         }
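
The two hunks above rework how drainInputBuffer() pulls from the source: the per-read skip-frame option is gone, ReadOptions are built only when a seek is actually pending, and an end-of-stream result now signals mBufferFilled so a thread blocked in read() wakes up immediately. A simplified, self-contained sketch of that shape; FakeSource, ReadOptions and the member names are stand-ins rather than the real stagefright types:

```cpp
#include <condition_variable>
#include <mutex>
#include <queue>

struct ReadOptions {
    bool seekPending = false;
    long long seekTimeUs = -1;
};

struct FakeSource {
    int remaining = 3;
    // Returns 0 on success, negative on end-of-stream.
    int read(int *outBuffer, const ReadOptions *options) {
        (void)options;  // a real source would honor the seek here
        if (remaining == 0) return -1;
        *outBuffer = --remaining;
        return 0;
    }
};

struct Codec {
    std::mutex lock;
    std::condition_variable bufferFilled;
    std::queue<int> filledBuffers;
    bool signalledEOS = false;
    long long seekTimeUs = -1;

    void drainOnce(FakeSource &source) {
        std::lock_guard<std::mutex> guard(lock);
        int buffer = 0;
        int err;
        if (seekTimeUs >= 0) {
            ReadOptions options;          // only built when a seek is pending
            options.seekPending = true;
            options.seekTimeUs = seekTimeUs;
            seekTimeUs = -1;
            err = source.read(&buffer, &options);
        } else {
            err = source.read(&buffer, nullptr);
        }
        if (err != 0) {
            signalledEOS = true;
            bufferFilled.notify_all();    // wake a reader blocked on output
            return;
        }
        filledBuffers.push(buffer);
        bufferFilled.notify_all();
    }
};
```
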
 
@@ -2443,28 +2793,35 @@
                 srcBuffer = NULL;
 
                 setState(ERROR);
-                return;
+                return false;
             }
 
             mLeftOverBuffer = srcBuffer;
             break;
         }
 
-        // Do not release the media buffer if it stores meta data
-        // instead of YUV data. The release is delayed until
-        // EMPTY_BUFFER_DONE callback is received.
         bool releaseBuffer = true;
         if (mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames)) {
             CHECK(mOMXLivesLocally && offset == 0);
-            OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *) info->mBuffer;
-            header->pBuffer = (OMX_U8 *) srcBuffer->data() + srcBuffer->range_offset();
+
+            OMX_BUFFERHEADERTYPE *header =
+                (OMX_BUFFERHEADERTYPE *)info->mBuffer;
+
+            CHECK(header->pBuffer == info->mData);
+
+            header->pBuffer =
+                (OMX_U8 *)srcBuffer->data() + srcBuffer->range_offset();
+
+            releaseBuffer = false;
+            info->mMediaBuffer = srcBuffer;
         } else {
-            if (mQuirks & kStoreMetaDataInInputVideoBuffers) {
+            if (mIsMetaDataStoredInVideoBuffers) {
                 releaseBuffer = false;
                 info->mMediaBuffer = srcBuffer;
             }
             memcpy((uint8_t *)info->mData + offset,
-                    (const uint8_t *)srcBuffer->data() + srcBuffer->range_offset(),
+                    (const uint8_t *)srcBuffer->data()
+                        + srcBuffer->range_offset(),
                     srcBuffer->range_length());
         }
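
The kAvoidMemcopyInputRecordingFrames path above now records which MediaBuffer was attached and asserts the header is unpatched before pointing pBuffer at the recording frame; restorePatchedDataPointer(), added at the end of this file, undoes the patch before the buffer is reused. A minimal sketch of the patch/restore pair with stand-in types (not the real OMX_BUFFERHEADERTYPE):

```cpp
#include <cassert>
#include <cstdint>

struct BufferHeader {          // stand-in for OMX_BUFFERHEADERTYPE
    uint8_t *pBuffer;
};

struct BufferInfo {
    BufferHeader *mBuffer;
    uint8_t *mData;            // the allocation the header normally points at
};

// Zero-copy input: point the header at the frame's own storage instead of
// memcpy'ing the frame into the codec buffer.
void patchForZeroCopy(BufferInfo *info, uint8_t *frameData) {
    assert(info->mBuffer->pBuffer == info->mData);  // must still be unpatched
    info->mBuffer->pBuffer = frameData;
}

// Must run before the buffer is submitted again with its own storage.
void restorePatchedDataPointer(BufferInfo *info) {
    info->mBuffer->pBuffer = info->mData;
}
```
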
 
@@ -2520,10 +2877,10 @@
 
     if (err != OK) {
         setState(ERROR);
-        return;
+        return false;
     }
 
-    info->mOwnedByComponent = true;
+    info->mStatus = OWNED_BY_COMPONENT;
 
     // This component does not ever signal the EOS flag on output buffers,
     // Thanks for nothing.
@@ -2531,10 +2888,12 @@
         mNoMoreOutputData = true;
         mBufferFilled.signal();
     }
+
+    return true;
 }
 
 void OMXCodec::fillOutputBuffer(BufferInfo *info) {
-    CHECK_EQ(info->mOwnedByComponent, false);
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
 
     if (mNoMoreOutputData) {
         CODEC_LOGV("There is no more output data available, not "
@@ -2542,7 +2901,24 @@
         return;
     }
 
-    CODEC_LOGV("Calling fill_buffer on buffer %p", info->mBuffer);
+    if (info->mMediaBuffer != NULL) {
+        sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+        if (graphicBuffer != 0) {
+            // When using a native buffer we need to lock the buffer before
+            // giving it to OMX.
+            CODEC_LOGV("Calling lockBuffer on %p", info->mBuffer);
+            int err = mNativeWindow->lockBuffer(mNativeWindow.get(),
+                    graphicBuffer.get());
+            if (err != 0) {
+                CODEC_LOGE("lockBuffer failed w/ error 0x%08x", err);
+
+                setState(ERROR);
+                return;
+            }
+        }
+    }
+
+    CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer);
     status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
 
     if (err != OK) {
@@ -2552,19 +2928,20 @@
         return;
     }
 
-    info->mOwnedByComponent = true;
+    info->mStatus = OWNED_BY_COMPONENT;
 }
 
-void OMXCodec::drainInputBuffer(IOMX::buffer_id buffer) {
+bool OMXCodec::drainInputBuffer(IOMX::buffer_id buffer) {
     Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
     for (size_t i = 0; i < buffers->size(); ++i) {
         if ((*buffers)[i].mBuffer == buffer) {
-            drainInputBuffer(&buffers->editItemAt(i));
-            return;
+            return drainInputBuffer(&buffers->editItemAt(i));
         }
     }
 
     CHECK(!"should not be here.");
+
+    return false;
 }
 
 void OMXCodec::fillOutputBuffer(IOMX::buffer_id buffer) {
@@ -2597,10 +2974,10 @@
     def.nPortIndex = portIndex;
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
     def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
     CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
-            &def, sizeof(def)), OK);
+            &def, sizeof(def)), (status_t)OK);
 
     // pcm param
     OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
@@ -2610,7 +2987,7 @@
     err = mOMX->getParameter(
             mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
 
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     pcmParams.nChannels = numChannels;
     pcmParams.eNumData = OMX_NumericalDataSigned;
@@ -2631,7 +3008,7 @@
     err = mOMX->setParameter(
             mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
 
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 }
 
 static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(bool isAMRWB, int32_t bps) {
@@ -2688,13 +3065,13 @@
     status_t err =
         mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
 
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
 
     def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitRate);
     err = mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     ////////////////////////
 
@@ -2723,33 +3100,33 @@
         status_t err = OMX_ErrorNone;
         while (OMX_ErrorNone == err) {
             CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioPortFormat,
-                    &format, sizeof(format)), OK);
+                    &format, sizeof(format)), (status_t)OK);
             if (format.eEncoding == OMX_AUDIO_CodingAAC) {
                 break;
             }
             format.nIndex++;
         }
-        CHECK_EQ(OK, err);
+        CHECK_EQ((status_t)OK, err);
         CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioPortFormat,
-                &format, sizeof(format)), OK);
+                &format, sizeof(format)), (status_t)OK);
 
         // port definition
         OMX_PARAM_PORTDEFINITIONTYPE def;
         InitOMXParams(&def);
         def.nPortIndex = kPortIndexOutput;
         CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamPortDefinition,
-                &def, sizeof(def)), OK);
+                &def, sizeof(def)), (status_t)OK);
         def.format.audio.bFlagErrorConcealment = OMX_TRUE;
         def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
         CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
-                &def, sizeof(def)), OK);
+                &def, sizeof(def)), (status_t)OK);
 
         // profile
         OMX_AUDIO_PARAM_AACPROFILETYPE profile;
         InitOMXParams(&profile);
         profile.nPortIndex = kPortIndexOutput;
         CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioAac,
-                &profile, sizeof(profile)), OK);
+                &profile, sizeof(profile)), (status_t)OK);
         profile.nChannels = numChannels;
         profile.eChannelMode = (numChannels == 1?
                 OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo);
@@ -2762,7 +3139,7 @@
         profile.eAACProfile = OMX_AUDIO_AACObjectLC;
         profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
         CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioAac,
-                &profile, sizeof(profile)), OK);
+                &profile, sizeof(profile)), (status_t)OK);
 
     } else {
         OMX_AUDIO_PARAM_AACPROFILETYPE profile;
@@ -2771,7 +3148,7 @@
 
         status_t err = mOMX->getParameter(
                 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
 
         profile.nChannels = numChannels;
         profile.nSampleRate = sampleRate;
@@ -2779,7 +3156,7 @@
 
         err = mOMX->setParameter(
                 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
     }
 }
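
The PCM, AMR and AAC hunks above all follow the same shape: zero-initialize the OpenMAX struct via InitOMXParams, getParameter, tweak only the fields being configured, setParameter, and assert OK at each step. A self-contained sketch of that round-trip, assuming a fake component and a stand-in struct (the real code goes through IOMX and the OMX_AUDIO_PARAM_* types):

```cpp
#include <cstdint>
#include <cstring>

typedef int32_t status_t;
static const status_t OK = 0;

struct AacProfile {                       // stand-in, not OMX_AUDIO_PARAM_AACPROFILETYPE
    uint32_t nSize;
    uint32_t nVersion;
    uint32_t nPortIndex;
    uint32_t nChannels;
    uint32_t nSampleRate;
};

template <typename T>
void InitOMXParams(T *params) {           // same idea as the helper in OMXCodec
    std::memset(params, 0, sizeof(T));
    params->nSize = sizeof(T);
    params->nVersion = 0x00000101;        // placeholder version word
}

struct FakeOMX {
    AacProfile stored = {};
    status_t getParameter(AacProfile *p) const {
        p->nChannels = stored.nChannels;  // fill in current values only
        p->nSampleRate = stored.nSampleRate;
        return OK;
    }
    status_t setParameter(const AacProfile &p) { stored = p; return OK; }
};

status_t configureAac(FakeOMX &omx, int32_t numChannels, int32_t sampleRate) {
    AacProfile profile;
    InitOMXParams(&profile);
    profile.nPortIndex = 1;               // output port index, by convention here

    status_t err = omx.getParameter(&profile);
    if (err != OK) return err;

    profile.nChannels = numChannels;      // only touch the fields we own
    profile.nSampleRate = sampleRate;

    return omx.setParameter(profile);
}
```
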
 
@@ -2791,10 +3168,10 @@
     OMX_INDEXTYPE index;
     status_t err = mOMX->get_extension_index(
             mNode, "OMX.TI.JPEG.decode.Config.OutputColorFormat", &index);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     err = mOMX->set_config(mNode, index, &format, sizeof(format));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 #endif
 
     OMX_PARAM_PORTDEFINITIONTYPE def;
@@ -2803,13 +3180,13 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(def.eDomain, OMX_PortDomainImage);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
 
     OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
 
-    CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused);
+    CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingUnused);
     imageDef->eColorFormat = format;
     imageDef->nFrameWidth = width;
     imageDef->nFrameHeight = height;
@@ -2852,7 +3229,7 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 }
 
 void OMXCodec::setJPEGInputFormat(
@@ -2863,12 +3240,12 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(def.eDomain, OMX_PortDomainImage);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
     OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
 
-    CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingJPEG);
+    CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingJPEG);
     imageDef->nFrameWidth = width;
     imageDef->nFrameHeight = height;
 
@@ -2877,7 +3254,7 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 }
 
 void OMXCodec::addCodecSpecificData(const void *data, size_t size) {
@@ -2978,7 +3355,7 @@
 
                 status_t err =
                     mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
             }
 
             while (mState != LOADED && mState != ERROR) {
@@ -3002,7 +3379,7 @@
 
     mSource->stop();
 
-    CODEC_LOGV("stopped");
+    CODEC_LOGI("stopped in state %d", mState);
 
     return OK;
 }
@@ -3029,12 +3406,6 @@
     if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
         seeking = true;
     }
-    int64_t skipTimeUs;
-    if (options && options->getSkipFrame(&skipTimeUs)) {
-        mSkipTimeUs = skipTimeUs;
-    } else {
-        mSkipTimeUs = -1;
-    }
 
     if (mInitialBufferSubmit) {
         mInitialBufferSubmit = false;
@@ -3060,6 +3431,14 @@
     }
 
     if (seeking) {
+        while (mState == RECONFIGURING) {
+            mBufferFilled.wait(mLock);
+        }
+
+        if (mState != EXECUTING) {
+            return UNKNOWN_ERROR;
+        }
+
         CODEC_LOGV("seeking to %lld us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6);
 
         mSignalledEOS = false;
@@ -3070,7 +3449,7 @@
 
         mFilledBuffers.clear();
 
-        CHECK_EQ(mState, EXECUTING);
+        CHECK_EQ((int)mState, (int)EXECUTING);
 
         bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput);
         bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput);
@@ -3089,7 +3468,15 @@
     }
 
     while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) {
-        mBufferFilled.wait(mLock);
+        if (mIsEncoder) {
+            if (NO_ERROR != mBufferFilled.waitRelative(mLock, 3000000000LL)) {
+                LOGW("Timed out waiting for buffers from video encoder: %d/%d",
+                    countBuffersWeOwn(mPortBuffers[kPortIndexInput]),
+                    countBuffersWeOwn(mPortBuffers[kPortIndexOutput]));
+            }
+        } else {
+            mBufferFilled.wait(mLock);
+        }
     }
 
     if (mState == ERROR) {
@@ -3110,6 +3497,9 @@
     mFilledBuffers.erase(mFilledBuffers.begin());
 
     BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
+    info->mStatus = OWNED_BY_CLIENT;
+
     info->mMediaBuffer->add_ref();
     *buffer = info->mMediaBuffer;
 
@@ -3124,8 +3514,37 @@
         BufferInfo *info = &buffers->editItemAt(i);
 
         if (info->mMediaBuffer == buffer) {
-            CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
-            fillOutputBuffer(info);
+            CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
+            CHECK_EQ((int)info->mStatus, (int)OWNED_BY_CLIENT);
+
+            info->mStatus = OWNED_BY_US;
+
+            if (buffer->graphicBuffer() == 0) {
+                fillOutputBuffer(info);
+            } else {
+                sp<MetaData> metaData = info->mMediaBuffer->meta_data();
+                int32_t rendered = 0;
+                if (!metaData->findInt32(kKeyRendered, &rendered)) {
+                    rendered = 0;
+                }
+                if (!rendered) {
+                    status_t err = cancelBufferToNativeWindow(info);
+                    if (err < 0) {
+                        return;
+                    }
+                }
+
+                info->mStatus = OWNED_BY_NATIVE_WINDOW;
+
+                // Dequeue the next buffer from the native window.
+                BufferInfo *nextBufInfo = dequeueBufferFromNativeWindow();
+                if (nextBufInfo == 0) {
+                    return;
+                }
+
+                // Give the buffer to the OMX node to fill.
+                fillOutputBuffer(nextBufInfo);
+            }
             return;
         }
     }
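
The signalBufferReturned() hunk above distinguishes buffers backed by a GraphicBuffer: if the renderer never queued the buffer to the window (no kKeyRendered flag), it is cancelled back explicitly; either way the buffer becomes OWNED_BY_NATIVE_WINDOW and a freshly dequeued buffer is handed to the component instead. A sketch of that decision logic; the window interactions are reduced to placeholder hooks named after the patch's own helpers, and all types are stand-ins:

```cpp
enum BufferStatus { OWNED_BY_US, OWNED_BY_CLIENT, OWNED_BY_NATIVE_WINDOW };

struct BufferInfo {
    BufferStatus mStatus = OWNED_BY_US;
    bool backedByGraphicBuffer = false;
    bool rendered = false;   // did the renderer already queue it to the window?
};

struct Codec {
    bool cancelBufferToNativeWindow(BufferInfo *) { return true; }   // hook
    BufferInfo *dequeueBufferFromNativeWindow() { return &spare; }   // hook
    void fillOutputBuffer(BufferInfo *info) { info->mStatus = OWNED_BY_US; }

    void signalBufferReturned(BufferInfo *info) {
        info->mStatus = OWNED_BY_US;

        if (!info->backedByGraphicBuffer) {
            fillOutputBuffer(info);          // plain buffer: refill directly
            return;
        }

        if (!info->rendered) {
            // Never queued to the window, so give it back explicitly.
            if (!cancelBufferToNativeWindow(info)) return;
        }
        info->mStatus = OWNED_BY_NATIVE_WINDOW;

        // The window now owns that buffer; pull a different one to refill.
        BufferInfo *next = dequeueBufferFromNativeWindow();
        if (next) fillOutputBuffer(next);
    }

    BufferInfo spare;
};
```
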
@@ -3351,7 +3770,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     printf("%s Port = {\n", portIndex == kPortIndexInput ? "Input" : "Output");
 
@@ -3417,7 +3836,7 @@
 
                 err = mOMX->getParameter(
                         mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 printf("  nSamplingRate = %ld\n", params.nSamplingRate);
                 printf("  nChannels = %ld\n", params.nChannels);
@@ -3436,7 +3855,7 @@
 
                 err = mOMX->getParameter(
                         mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 printf("  nChannels = %ld\n", amr.nChannels);
                 printf("  eAMRBandMode = %s\n",
@@ -3458,6 +3877,18 @@
     printf("}\n");
 }
 
+status_t OMXCodec::initNativeWindow() {
+    // Enable use of a GraphicBuffer as the output for this node.  This must
+    // happen before getting the IndexParamPortDefinition parameter because it
+    // will affect the pixel format that the node reports.
+    status_t err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+    if (err != 0) {
+        return err;
+    }
+
+    return OK;
+}
+
 void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
     mOutputFormat = new MetaData;
     mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
@@ -3474,13 +3905,14 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     switch (def.eDomain) {
         case OMX_PortDomainImage:
         {
             OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
-            CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused);
+            CHECK_EQ((int)imageDef->eCompressionFormat,
+                     (int)OMX_IMAGE_CodingUnused);
 
             mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
             mOutputFormat->setInt32(kKeyColorFormat, imageDef->eColorFormat);
@@ -3500,11 +3932,11 @@
 
                 err = mOMX->getParameter(
                         mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
-                CHECK_EQ(params.eNumData, OMX_NumericalDataSigned);
-                CHECK_EQ(params.nBitPerSample, 16);
-                CHECK_EQ(params.ePCMMode, OMX_AUDIO_PCMModeLinear);
+                CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
+                CHECK_EQ(params.nBitPerSample, 16u);
+                CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
 
                 int32_t numChannels, sampleRate;
                 inputFormat->findInt32(kKeyChannelCount, &numChannels);
@@ -3538,9 +3970,9 @@
 
                 err = mOMX->getParameter(
                         mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
-                CHECK_EQ(amr.nChannels, 1);
+                CHECK_EQ(amr.nChannels, 1u);
                 mOutputFormat->setInt32(kKeyChannelCount, 1);
 
                 if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeNB0
@@ -3592,18 +4024,42 @@
                 CHECK(!"Unknown compression format.");
             }
 
-            if (!strcmp(mComponentName, "OMX.PV.avcdec")) {
-                // This component appears to be lying to me.
-                mOutputFormat->setInt32(
-                        kKeyWidth, (video_def->nFrameWidth + 15) & -16);
-                mOutputFormat->setInt32(
-                        kKeyHeight, (video_def->nFrameHeight + 15) & -16);
-            } else {
-                mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
-                mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
+            mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
+            mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
+            mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
+
+            if (!mIsEncoder) {
+                OMX_CONFIG_RECTTYPE rect;
+                InitOMXParams(&rect);
+                rect.nPortIndex = kPortIndexOutput;
+                status_t err =
+                        mOMX->getConfig(
+                            mNode, OMX_IndexConfigCommonOutputCrop,
+                            &rect, sizeof(rect));
+
+                if (err == OK) {
+                    CHECK_GE(rect.nLeft, 0);
+                    CHECK_GE(rect.nTop, 0);
+                    CHECK_GE(rect.nWidth, 0u);
+                    CHECK_GE(rect.nHeight, 0u);
+                    CHECK_LE(rect.nLeft + rect.nWidth - 1, video_def->nFrameWidth);
+                    CHECK_LE(rect.nTop + rect.nHeight - 1, video_def->nFrameHeight);
+
+                    mOutputFormat->setRect(
+                            kKeyCropRect,
+                            rect.nLeft,
+                            rect.nTop,
+                            rect.nLeft + rect.nWidth - 1,
+                            rect.nTop + rect.nHeight - 1);
+                } else {
+                    mOutputFormat->setRect(
+                            kKeyCropRect,
+                            0, 0,
+                            video_def->nFrameWidth - 1,
+                            video_def->nFrameHeight - 1);
+                }
             }
 
-            mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
             break;
         }
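
The hunk above queries OMX_IndexConfigCommonOutputCrop and stores kKeyCropRect with inclusive right/bottom coordinates (left + width - 1, top + height - 1), falling back to the full frame when the component has no crop config. A small sketch of that convention, so consumers add 1 back when recovering the visible size (as the metadata-retriever change later in this patch does):

```cpp
#include <cstdint>

struct CropRect {
    int32_t left, top, right, bottom;   // right/bottom are inclusive
};

inline CropRect fullFrameCrop(int32_t frameWidth, int32_t frameHeight) {
    return CropRect{0, 0, frameWidth - 1, frameHeight - 1};
}

inline int32_t displayWidth(const CropRect &c)  { return c.right - c.left + 1; }
inline int32_t displayHeight(const CropRect &c) { return c.bottom - c.top + 1; }
```
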
 
@@ -3693,8 +4149,30 @@
             caps->mProfileLevels.push(profileLevel);
         }
 
-        CHECK_EQ(omx->freeNode(node), OK);
+        // Color format query
+        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+        InitOMXParams(&portFormat);
+        portFormat.nPortIndex = queryDecoders ? 1 : 0;
+        for (portFormat.nIndex = 0;; ++portFormat.nIndex)  {
+            err = omx->getParameter(
+                    node, OMX_IndexParamVideoPortFormat,
+                    &portFormat, sizeof(portFormat));
+            if (err != OK) {
+                break;
+            }
+            caps->mColorFormats.push(portFormat.eColorFormat);
+        }
+
+        CHECK_EQ(omx->freeNode(node), (status_t)OK);
     }
 }
 
+void OMXCodec::restorePatchedDataPointer(BufferInfo *info) {
+    CHECK(mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames));
+    CHECK(mOMXLivesLocally);
+
+    OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)info->mBuffer;
+    header->pBuffer = (OMX_U8 *)info->mData;
+}
+
 }  // namespace android
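
The QueryCodecs change above enumerates supported color formats by bumping nIndex on OMX_IndexParamVideoPortFormat until the component returns an error, which is the usual OpenMAX idiom for open-ended lists. A stand-alone sketch of that probe-until-error loop against a fake component (the format values are placeholders):

```cpp
#include <cstdint>
#include <vector>

typedef int32_t status_t;
static const status_t OK = 0;
static const status_t ERROR_NO_MORE = -1;

struct FakeComponent {
    std::vector<uint32_t> colorFormats{0x13, 0x15, 0x7FA30C00};

    status_t getPortFormat(uint32_t index, uint32_t *colorFormat) const {
        if (index >= colorFormats.size()) return ERROR_NO_MORE;
        *colorFormat = colorFormats[index];
        return OK;
    }
};

// Keep incrementing the index until the component rejects it.
std::vector<uint32_t> queryColorFormats(const FakeComponent &omx) {
    std::vector<uint32_t> result;
    uint32_t fmt = 0;
    for (uint32_t index = 0; omx.getPortFormat(index, &fmt) == OK; ++index) {
        result.push_back(fmt);
    }
    return result;
}
```
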
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 0368fb7..6538a05 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -73,7 +73,8 @@
     // Returns an approximate bitrate in bits per second.
     uint64_t approxBitrate();
 
-    status_t seekToOffset(off_t offset);
+    status_t seekToTime(int64_t timeUs);
+    status_t seekToOffset(off64_t offset);
     status_t readNextPacket(MediaBuffer **buffer);
 
     status_t init();
@@ -90,8 +91,13 @@
         uint8_t mLace[255];
     };
 
+    struct TOCEntry {
+        off64_t mPageOffset;
+        int64_t mTimeUs;
+    };
+
     sp<DataSource> mSource;
-    off_t mOffset;
+    off64_t mOffset;
     Page mCurrentPage;
     uint64_t mPrevGranulePosition;
     size_t mCurrentPageSize;
@@ -99,7 +105,7 @@
     uint64_t mCurrentPageSamples;
     size_t mNextLaceIndex;
 
-    off_t mFirstDataOffset;
+    off64_t mFirstDataOffset;
 
     vorbis_info mVi;
     vorbis_comment mVc;
@@ -107,21 +113,27 @@
     sp<MetaData> mMeta;
     sp<MetaData> mFileMeta;
 
-    ssize_t readPage(off_t offset, Page *page);
-    status_t findNextPage(off_t startOffset, off_t *pageOffset);
+    Vector<TOCEntry> mTableOfContents;
+
+    ssize_t readPage(off64_t offset, Page *page);
+    status_t findNextPage(off64_t startOffset, off64_t *pageOffset);
 
     status_t verifyHeader(
             MediaBuffer *buffer, uint8_t type);
 
     void parseFileMetaData();
-    void extractAlbumArt(const void *data, size_t size);
 
-    uint64_t findPrevGranulePosition(off_t pageOffset);
+    status_t findPrevGranulePosition(off64_t pageOffset, uint64_t *granulePos);
+
+    void buildTableOfContents();
 
     MyVorbisExtractor(const MyVorbisExtractor &);
     MyVorbisExtractor &operator=(const MyVorbisExtractor &);
 };
 
+static void extractAlbumArt(
+        const sp<MetaData> &fileMeta, const void *data, size_t size);
+
 ////////////////////////////////////////////////////////////////////////////////
 
 OggSource::OggSource(const sp<OggExtractor> &extractor)
@@ -162,10 +174,7 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
-        off_t pos = seekTimeUs * mExtractor->mImpl->approxBitrate() / 8000000ll;
-        LOGV("seeking to offset %ld", pos);
-
-        if (mExtractor->mImpl->seekToOffset(pos) != OK) {
+        if (mExtractor->mImpl->seekToTime(seekTimeUs) != OK) {
             return ERROR_END_OF_STREAM;
         }
     }
@@ -220,7 +229,7 @@
 }
 
 status_t MyVorbisExtractor::findNextPage(
-        off_t startOffset, off_t *pageOffset) {
+        off64_t startOffset, off64_t *pageOffset) {
     *pageOffset = startOffset;
 
     for (;;) {
@@ -235,7 +244,7 @@
 
         if (!memcmp(signature, "OggS", 4)) {
             if (*pageOffset > startOffset) {
-                LOGV("skipped %ld bytes of junk to reach next frame",
+                LOGV("skipped %lld bytes of junk to reach next frame",
                      *pageOffset - startOffset);
             }
 
@@ -250,9 +259,12 @@
 // it (if any) and return its granule position.
 // To do this we back up from the "current" page's offset until we find any
 // page preceding it and then scan forward to just before the current page.
-uint64_t MyVorbisExtractor::findPrevGranulePosition(off_t pageOffset) {
-    off_t prevPageOffset = 0;
-    off_t prevGuess = pageOffset;
+status_t MyVorbisExtractor::findPrevGranulePosition(
+        off64_t pageOffset, uint64_t *granulePos) {
+    *granulePos = 0;
+
+    off64_t prevPageOffset = 0;
+    off64_t prevGuess = pageOffset;
     for (;;) {
         if (prevGuess >= 5000) {
             prevGuess -= 5000;
@@ -260,9 +272,12 @@
             prevGuess = 0;
         }
 
-        LOGV("backing up %ld bytes", pageOffset - prevGuess);
+        LOGV("backing up %lld bytes", pageOffset - prevGuess);
 
-        CHECK_EQ(findNextPage(prevGuess, &prevPageOffset), (status_t)OK);
+        status_t err = findNextPage(prevGuess, &prevPageOffset);
+        if (err != OK) {
+            return err;
+        }
 
         if (prevPageOffset < pageOffset || prevGuess == 0) {
             break;
@@ -271,35 +286,72 @@
 
     if (prevPageOffset == pageOffset) {
         // We did not find a page preceding this one.
-        return 0;
+        return UNKNOWN_ERROR;
     }
 
-    LOGV("prevPageOffset at %ld, pageOffset at %ld", prevPageOffset, pageOffset);
+    LOGV("prevPageOffset at %lld, pageOffset at %lld",
+         prevPageOffset, pageOffset);
 
     for (;;) {
         Page prevPage;
         ssize_t n = readPage(prevPageOffset, &prevPage);
 
         if (n <= 0) {
-            return 0;
+            return (status_t)n;
         }
 
         prevPageOffset += n;
 
         if (prevPageOffset == pageOffset) {
-            return prevPage.mGranulePosition;
+            *granulePos = prevPage.mGranulePosition;
+            return OK;
         }
     }
 }
 
-status_t MyVorbisExtractor::seekToOffset(off_t offset) {
+status_t MyVorbisExtractor::seekToTime(int64_t timeUs) {
+    if (mTableOfContents.isEmpty()) {
+        // Perform approximate seeking based on avg. bitrate.
+
+        off64_t pos = timeUs * approxBitrate() / 8000000ll;
+
+        LOGV("seeking to offset %lld", pos);
+        return seekToOffset(pos);
+    }
+
+    size_t left = 0;
+    size_t right = mTableOfContents.size();
+    while (left < right) {
+        size_t center = left / 2 + right / 2 + (left & right & 1);
+
+        const TOCEntry &entry = mTableOfContents.itemAt(center);
+
+        if (timeUs < entry.mTimeUs) {
+            right = center;
+        } else if (timeUs > entry.mTimeUs) {
+            left = center + 1;
+        } else {
+            left = right = center;
+            break;
+        }
+    }
+
+    const TOCEntry &entry = mTableOfContents.itemAt(left);
+
+    LOGV("seeking to entry %d / %d at offset %lld",
+         left, mTableOfContents.size(), entry.mPageOffset);
+
+    return seekToOffset(entry.mPageOffset);
+}
+
+status_t MyVorbisExtractor::seekToOffset(off64_t offset) {
     if (mFirstDataOffset >= 0 && offset < mFirstDataOffset) {
         // Once we know where the actual audio data starts (past the headers)
         // don't ever seek to anywhere before that.
         offset = mFirstDataOffset;
     }
 
-    off_t pageOffset;
+    off64_t pageOffset;
     status_t err = findNextPage(offset, &pageOffset);
 
     if (err != OK) {
@@ -309,7 +361,7 @@
     // We found the page we wanted to seek to, but we'll also need
     // the page preceding it to determine how many valid samples are on
     // this page.
-    mPrevGranulePosition = findPrevGranulePosition(pageOffset);
+    findPrevGranulePosition(pageOffset, &mPrevGranulePosition);
 
     mOffset = pageOffset;
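
The seekToTime() added above binary-searches the new table of contents for the page covering the requested time, and falls back to the old average-bitrate estimate when no TOC was built (for example, for cached/streamed sources). A self-contained sketch of that lookup; TOCEntry is a stand-in and the fallback offset is passed in rather than computed from the bitrate:

```cpp
#include <cstdint>
#include <vector>

struct TOCEntry {
    int64_t pageOffset;
    int64_t timeUs;
};

// Find the last entry whose timestamp does not exceed the request, then
// seek to its page offset.
int64_t offsetForTime(const std::vector<TOCEntry> &toc, int64_t timeUs,
                      int64_t fallbackOffset) {
    if (toc.empty()) {
        return fallbackOffset;   // approximate seek via avg. bitrate instead
    }
    size_t left = 0;
    size_t right = toc.size();
    while (left < right) {
        size_t center = left + (right - left) / 2;
        if (timeUs < toc[center].timeUs) {
            right = center;
        } else if (timeUs > toc[center].timeUs) {
            left = center + 1;
        } else {
            return toc[center].pageOffset;
        }
    }
    // 'left' is the first entry past timeUs; step back to the page before it.
    if (left > 0) --left;
    return toc[left].pageOffset;
}
```
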
 
@@ -324,11 +376,12 @@
     return OK;
 }
 
-ssize_t MyVorbisExtractor::readPage(off_t offset, Page *page) {
+ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) {
     uint8_t header[27];
     if (mSource->readAt(offset, header, sizeof(header))
             < (ssize_t)sizeof(header)) {
-        LOGV("failed to read %d bytes at offset 0x%08lx", sizeof(header), offset);
+        LOGV("failed to read %d bytes at offset 0x%016llx",
+             sizeof(header), offset);
 
         return ERROR_IO;
     }
@@ -410,7 +463,7 @@
         }
 
         if (mNextLaceIndex < mCurrentPage.mNumSegments) {
-            off_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
+            off64_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
             for (size_t j = 0; j < mNextLaceIndex; ++j) {
                 dataOffset += mCurrentPage.mLace[j];
             }
@@ -445,7 +498,8 @@
                     packetSize);
 
             if (n < (ssize_t)packetSize) {
-                LOGV("failed to read %d bytes at 0x%08lx", packetSize, dataOffset);
+                LOGV("failed to read %d bytes at 0x%016llx",
+                     packetSize, dataOffset);
                 return ERROR_IO;
             }
 
@@ -561,9 +615,66 @@
 
     mFirstDataOffset = mOffset + mCurrentPageSize;
 
+    off64_t size;
+    uint64_t lastGranulePosition;
+    if (!(mSource->flags() & DataSource::kIsCachingDataSource)
+            && mSource->getSize(&size) == OK
+            && findPrevGranulePosition(size, &lastGranulePosition) == OK) {
+        // Let's assume it's cheap to seek to the end.
+        // The granule position of the final page in the stream will
+        // give us the exact duration of the content, something that
+        // we can only approximate using avg. bitrate if seeking to
+        // the end is too expensive or impossible (live streaming).
+
+        int64_t durationUs = lastGranulePosition * 1000000ll / mVi.rate;
+
+        mMeta->setInt64(kKeyDuration, durationUs);
+
+        buildTableOfContents();
+    }
+
     return OK;
 }
 
+void MyVorbisExtractor::buildTableOfContents() {
+    off64_t offset = mFirstDataOffset;
+    Page page;
+    ssize_t pageSize;
+    while ((pageSize = readPage(offset, &page)) > 0) {
+        mTableOfContents.push();
+
+        TOCEntry &entry =
+            mTableOfContents.editItemAt(mTableOfContents.size() - 1);
+
+        entry.mPageOffset = offset;
+        entry.mTimeUs = page.mGranulePosition * 1000000ll / mVi.rate;
+
+        offset += (size_t)pageSize;
+    }
+
+    // Limit the maximum amount of RAM we spend on the table of contents,
+    // if necessary thin out the table evenly to trim it down to maximum
+    // size.
+
+    static const size_t kMaxTOCSize = 8192;
+    static const size_t kMaxNumTOCEntries = kMaxTOCSize / sizeof(TOCEntry);
+
+    size_t numerator = mTableOfContents.size();
+
+    if (numerator > kMaxNumTOCEntries) {
+        size_t denom = numerator - kMaxNumTOCEntries;
+
+        size_t accum = 0;
+        for (ssize_t i = mTableOfContents.size() - 1; i >= 0; --i) {
+            accum += denom;
+            if (accum >= numerator) {
+                mTableOfContents.removeAt(i);
+                accum -= numerator;
+            }
+        }
+    }
+}
+
 status_t MyVorbisExtractor::verifyHeader(
         MediaBuffer *buffer, uint8_t type) {
     const uint8_t *data =
@@ -609,7 +720,7 @@
             LOGV("nominal-bitrate = %ld", mVi.bitrate_nominal);
             LOGV("window-bitrate = %ld", mVi.bitrate_window);
 
-            off_t size;
+            off64_t size;
             if (mSource->getSize(&size) == OK) {
                 uint64_t bps = approxBitrate();
 
@@ -654,6 +765,17 @@
     mFileMeta = new MetaData;
     mFileMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_OGG);
 
+    for (int i = 0; i < mVc.comments; ++i) {
+        const char *comment = mVc.user_comments[i];
+        size_t commentLength = mVc.comment_lengths[i];
+        parseVorbisComment(mFileMeta, comment, commentLength);
+        //LOGI("comment #%d: '%s'", i + 1, mVc.user_comments[i]);
+    }
+}
+
+void parseVorbisComment(
+        const sp<MetaData> &fileMeta, const char *comment, size_t commentLength)
+{
     struct {
         const char *const mTag;
         uint32_t mKey;
@@ -675,33 +797,25 @@
         { "ANDROID_LOOP", kKeyAutoLoop },
     };
 
-    for (int i = 0; i < mVc.comments; ++i) {
-        const char *comment = mVc.user_comments[i];
-
         for (size_t j = 0; j < sizeof(kMap) / sizeof(kMap[0]); ++j) {
             size_t tagLen = strlen(kMap[j].mTag);
             if (!strncasecmp(kMap[j].mTag, comment, tagLen)
                     && comment[tagLen] == '=') {
                 if (kMap[j].mKey == kKeyAlbumArt) {
                     extractAlbumArt(
+                            fileMeta,
                             &comment[tagLen + 1],
-                            mVc.comment_lengths[i] - tagLen - 1);
+                            commentLength - tagLen - 1);
                 } else if (kMap[j].mKey == kKeyAutoLoop) {
                     if (!strcasecmp(&comment[tagLen + 1], "true")) {
-                        mFileMeta->setInt32(kKeyAutoLoop, true);
+                        fileMeta->setInt32(kKeyAutoLoop, true);
                     }
                 } else {
-                    mFileMeta->setCString(kMap[j].mKey, &comment[tagLen + 1]);
+                    fileMeta->setCString(kMap[j].mKey, &comment[tagLen + 1]);
                 }
             }
         }
-    }
 
-#if 0
-    for (int i = 0; i < mVc.comments; ++i) {
-        LOGI("comment #%d: '%s'", i + 1, mVc.user_comments[i]);
-    }
-#endif
 }
 
 // The returned buffer should be free()d.
@@ -769,7 +883,8 @@
     return (uint8_t *)buffer;
 }
 
-void MyVorbisExtractor::extractAlbumArt(const void *data, size_t size) {
+static void extractAlbumArt(
+        const sp<MetaData> &fileMeta, const void *data, size_t size) {
     LOGV("extractAlbumArt from '%s'", (const char *)data);
 
     size_t flacSize;
@@ -833,10 +948,10 @@
     LOGV("got image data, %d trailing bytes",
          flacSize - 32 - typeLen - descLen - dataLen);
 
-    mFileMeta->setData(
+    fileMeta->setData(
             kKeyAlbumArt, 0, &flac[8 + typeLen + 4 + descLen + 20], dataLen);
 
-    mFileMeta->setCString(kKeyAlbumArtMIME, type);
+    fileMeta->setCString(kKeyAlbumArtMIME, type);
 
 exit:
     free(flac);
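
buildTableOfContents() earlier in this file caps the TOC's memory footprint by removing entries with a Bresenham-style error accumulator, so the dropped entries are spread evenly across the table rather than truncated from one end. A stand-alone version of that thinning step over a std::vector:

```cpp
#include <cstddef>
#include <vector>

// Evenly drop entries until at most maxEntries remain.
template <typename T>
void thinEvenly(std::vector<T> &table, size_t maxEntries) {
    size_t numerator = table.size();
    if (numerator <= maxEntries) {
        return;
    }
    size_t denom = numerator - maxEntries;  // how many entries must go

    size_t accum = 0;
    for (size_t i = table.size(); i-- > 0;) {
        accum += denom;
        if (accum >= numerator) {
            table.erase(table.begin() + static_cast<std::ptrdiff_t>(i));
            accum -= numerator;
        }
    }
}
```
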
diff --git a/media/libstagefright/SampleIterator.cpp b/media/libstagefright/SampleIterator.cpp
index 7155c61..c7b00b1 100644
--- a/media/libstagefright/SampleIterator.cpp
+++ b/media/libstagefright/SampleIterator.cpp
@@ -179,7 +179,7 @@
     return OK;
 }
 
-status_t SampleIterator::getChunkOffset(uint32_t chunk, off_t *offset) {
+status_t SampleIterator::getChunkOffset(uint32_t chunk, off64_t *offset) {
     *offset = 0;
 
     if (chunk >= mTable->mNumChunkOffsets) {
@@ -307,6 +307,8 @@
 
     *time = mTTSSampleTime + mTTSDuration * (sampleIndex - mTTSSampleIndex);
 
+    *time += mTable->getCompositionTimeOffset(sampleIndex);
+
     return OK;
 }
 
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 27faf4f..423df70 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -53,6 +53,8 @@
       mNumSampleSizes(0),
       mTimeToSampleCount(0),
       mTimeToSample(NULL),
+      mCompositionTimeDeltaEntries(NULL),
+      mNumCompositionTimeDeltaEntries(0),
       mSyncSampleOffset(-1),
       mNumSyncSamples(0),
       mSyncSamples(NULL),
@@ -68,6 +70,9 @@
     delete[] mSyncSamples;
     mSyncSamples = NULL;
 
+    delete[] mCompositionTimeDeltaEntries;
+    mCompositionTimeDeltaEntries = NULL;
+
     delete[] mTimeToSample;
     mTimeToSample = NULL;
 
@@ -76,7 +81,7 @@
 }
 
 status_t SampleTable::setChunkOffsetParams(
-        uint32_t type, off_t data_offset, size_t data_size) {
+        uint32_t type, off64_t data_offset, size_t data_size) {
     if (mChunkOffsetOffset >= 0) {
         return ERROR_MALFORMED;
     }
@@ -117,7 +122,7 @@
 }
 
 status_t SampleTable::setSampleToChunkParams(
-        off_t data_offset, size_t data_size) {
+        off64_t data_offset, size_t data_size) {
     if (mSampleToChunkOffset >= 0) {
         return ERROR_MALFORMED;
     }
@@ -168,7 +173,7 @@
 }
 
 status_t SampleTable::setSampleSizeParams(
-        uint32_t type, off_t data_offset, size_t data_size) {
+        uint32_t type, off64_t data_offset, size_t data_size) {
     if (mSampleSizeOffset >= 0) {
         return ERROR_MALFORMED;
     }
@@ -228,7 +233,7 @@
 }
 
 status_t SampleTable::setTimeToSampleParams(
-        off_t data_offset, size_t data_size) {
+        off64_t data_offset, size_t data_size) {
     if (mTimeToSample != NULL || data_size < 8) {
         return ERROR_MALFORMED;
     }
@@ -260,7 +265,52 @@
     return OK;
 }
 
-status_t SampleTable::setSyncSampleParams(off_t data_offset, size_t data_size) {
+status_t SampleTable::setCompositionTimeToSampleParams(
+        off64_t data_offset, size_t data_size) {
+    LOGI("There are reordered frames present.");
+
+    if (mCompositionTimeDeltaEntries != NULL || data_size < 8) {
+        return ERROR_MALFORMED;
+    }
+
+    uint8_t header[8];
+    if (mDataSource->readAt(
+                data_offset, header, sizeof(header))
+            < (ssize_t)sizeof(header)) {
+        return ERROR_IO;
+    }
+
+    if (U32_AT(header) != 0) {
+        // Expected version = 0, flags = 0.
+        return ERROR_MALFORMED;
+    }
+
+    size_t numEntries = U32_AT(&header[4]);
+
+    if (data_size != (numEntries + 1) * 8) {
+        return ERROR_MALFORMED;
+    }
+
+    mNumCompositionTimeDeltaEntries = numEntries;
+    mCompositionTimeDeltaEntries = new uint32_t[2 * numEntries];
+
+    if (mDataSource->readAt(
+                data_offset + 8, mCompositionTimeDeltaEntries, numEntries * 8)
+            < (ssize_t)numEntries * 8) {
+        delete[] mCompositionTimeDeltaEntries;
+        mCompositionTimeDeltaEntries = NULL;
+
+        return ERROR_IO;
+    }
+
+    for (size_t i = 0; i < 2 * numEntries; ++i) {
+        mCompositionTimeDeltaEntries[i] = ntohl(mCompositionTimeDeltaEntries[i]);
+    }
+
+    return OK;
+}
+
+status_t SampleTable::setSyncSampleParams(off64_t data_offset, size_t data_size) {
     if (mSyncSampleOffset >= 0 || data_size < 8) {
         return ERROR_MALFORMED;
     }
@@ -281,7 +331,7 @@
     mNumSyncSamples = U32_AT(&header[4]);
 
     if (mNumSyncSamples < 2) {
-        LOGW("Table of sync samples is empty or has only a single entry!");
+        LOGV("Table of sync samples is empty or has only a single entry!");
     }
 
     mSyncSamples = new uint32_t[mNumSyncSamples];
@@ -333,6 +383,8 @@
 
 status_t SampleTable::findSampleAtTime(
         uint32_t req_time, uint32_t *sample_index, uint32_t flags) {
+    // XXX this currently uses decoding time, instead of composition time.
+
     *sample_index = 0;
 
     Mutex::Autolock autoLock(mLock);
@@ -419,8 +471,10 @@
 
         ++left;
     }
+    if (left > 0) {
+        --left;
+    }
 
-    --left;
     uint32_t x;
     if (mDataSource->readAt(
                 mSyncSampleOffset + 8 + left * 4, &x, 4) != 4) {
@@ -557,7 +611,7 @@
 
 status_t SampleTable::getMetaDataForSample(
         uint32_t sampleIndex,
-        off_t *offset,
+        off64_t *offset,
         size_t *size,
         uint32_t *decodingTime,
         bool *isSyncSample) {
@@ -605,5 +659,26 @@
     return OK;
 }
 
+uint32_t SampleTable::getCompositionTimeOffset(uint32_t sampleIndex) const {
+    if (mCompositionTimeDeltaEntries == NULL) {
+        return 0;
+    }
+
+    uint32_t curSample = 0;
+    for (size_t i = 0; i < mNumCompositionTimeDeltaEntries; ++i) {
+        uint32_t sampleCount = mCompositionTimeDeltaEntries[2 * i];
+
+        if (sampleIndex < curSample + sampleCount) {
+            uint32_t sampleDelta = mCompositionTimeDeltaEntries[2 * i + 1];
+
+            return sampleDelta;
+        }
+
+        curSample += sampleCount;
+    }
+
+    return 0;
+}
+
 }  // namespace android
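
The SampleTable changes above parse the 'ctts' (composition time to sample) box into run-length (sampleCount, sampleDelta) pairs, and SampleIterator now adds the per-sample delta to the decoding time. A self-contained sketch of the lookup that getCompositionTimeOffset() performs:

```cpp
#include <cstdint>
#include <vector>

struct CttsEntry {
    uint32_t sampleCount;
    uint32_t sampleDelta;
};

// The offset for a sample index is the delta of the run that contains it.
uint32_t compositionTimeOffset(const std::vector<CttsEntry> &entries,
                               uint32_t sampleIndex) {
    uint32_t curSample = 0;
    for (size_t i = 0; i < entries.size(); ++i) {
        if (sampleIndex < curSample + entries[i].sampleCount) {
            return entries[i].sampleDelta;
        }
        curSample += entries[i].sampleCount;
    }
    return 0;  // past the table: no reordering offset
}
```
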
 
diff --git a/media/libstagefright/ShoutcastSource.cpp b/media/libstagefright/ShoutcastSource.cpp
index 23b7681..783f2d0 100644
--- a/media/libstagefright/ShoutcastSource.cpp
+++ b/media/libstagefright/ShoutcastSource.cpp
@@ -14,7 +14,6 @@
  * limitations under the License.
  */
 
-#include "include/stagefright_string.h"
 #include "include/HTTPStream.h"
 
 #include <stdlib.h>
@@ -34,7 +33,7 @@
       mBytesUntilMetaData(0),
       mGroup(NULL),
       mStarted(false) {
-    string metaint;
+    AString metaint;
     if (mHttp->find_header_value("icy-metaint", &metaint)) {
         char *end;
         const char *start = metaint.c_str();
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index 7a600d7..84f65ff 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -28,9 +28,7 @@
 
 namespace android {
 
-StagefrightMediaScanner::StagefrightMediaScanner()
-    : mRetriever(new MediaMetadataRetriever) {
-}
+StagefrightMediaScanner::StagefrightMediaScanner() {}
 
 StagefrightMediaScanner::~StagefrightMediaScanner() {}
 
@@ -39,7 +37,7 @@
         ".mp3", ".mp4", ".m4a", ".3gp", ".3gpp", ".3g2", ".3gpp2",
         ".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac",
         ".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota",
-        ".mkv", ".mka", ".webm", ".ts"
+        ".mkv", ".mka", ".webm", ".ts", ".fl", ".flac"
     };
     static const size_t kNumValidExtensions =
         sizeof(kValidExtensions) / sizeof(kValidExtensions[0]);
@@ -131,37 +129,41 @@
         if (status != OK) {
             return status;
         }
-    } else if (mRetriever->setDataSource(path) == OK) {
-        const char *value;
-        if ((value = mRetriever->extractMetadata(
-                        METADATA_KEY_MIMETYPE)) != NULL) {
-            client.setMimeType(value);
-        }
+    } else {
+        sp<MediaMetadataRetriever> mRetriever(new MediaMetadataRetriever);
 
-        struct KeyMap {
-            const char *tag;
-            int key;
-        };
-        static const KeyMap kKeyMap[] = {
-            { "tracknumber", METADATA_KEY_CD_TRACK_NUMBER },
-            { "discnumber", METADATA_KEY_DISC_NUMBER },
-            { "album", METADATA_KEY_ALBUM },
-            { "artist", METADATA_KEY_ARTIST },
-            { "albumartist", METADATA_KEY_ALBUMARTIST },
-            { "composer", METADATA_KEY_COMPOSER },
-            { "genre", METADATA_KEY_GENRE },
-            { "title", METADATA_KEY_TITLE },
-            { "year", METADATA_KEY_YEAR },
-            { "duration", METADATA_KEY_DURATION },
-            { "writer", METADATA_KEY_WRITER },
-            { "compilation", METADATA_KEY_COMPILATION },
-        };
-        static const size_t kNumEntries = sizeof(kKeyMap) / sizeof(kKeyMap[0]);
-
-        for (size_t i = 0; i < kNumEntries; ++i) {
+         if (mRetriever->setDataSource(path) == OK) {
             const char *value;
-            if ((value = mRetriever->extractMetadata(kKeyMap[i].key)) != NULL) {
-                client.addStringTag(kKeyMap[i].tag, value);
+            if ((value = mRetriever->extractMetadata(
+                            METADATA_KEY_MIMETYPE)) != NULL) {
+                client.setMimeType(value);
+            }
+
+            struct KeyMap {
+                const char *tag;
+                int key;
+            };
+            static const KeyMap kKeyMap[] = {
+                { "tracknumber", METADATA_KEY_CD_TRACK_NUMBER },
+                { "discnumber", METADATA_KEY_DISC_NUMBER },
+                { "album", METADATA_KEY_ALBUM },
+                { "artist", METADATA_KEY_ARTIST },
+                { "albumartist", METADATA_KEY_ALBUMARTIST },
+                { "composer", METADATA_KEY_COMPOSER },
+                { "genre", METADATA_KEY_GENRE },
+                { "title", METADATA_KEY_TITLE },
+                { "year", METADATA_KEY_YEAR },
+                { "duration", METADATA_KEY_DURATION },
+                { "writer", METADATA_KEY_WRITER },
+                { "compilation", METADATA_KEY_COMPILATION },
+            };
+            static const size_t kNumEntries = sizeof(kKeyMap) / sizeof(kKeyMap[0]);
+
+            for (size_t i = 0; i < kNumEntries; ++i) {
+                const char *value;
+                if ((value = mRetriever->extractMetadata(kKeyMap[i].key)) != NULL) {
+                    client.addStringTag(kKeyMap[i].tag, value);
+                }
             }
         }
     }
@@ -174,12 +176,13 @@
 char *StagefrightMediaScanner::extractAlbumArt(int fd) {
     LOGV("extractAlbumArt %d", fd);
 
-    off_t size = lseek(fd, 0, SEEK_END);
+    off64_t size = lseek64(fd, 0, SEEK_END);
     if (size < 0) {
         return NULL;
     }
-    lseek(fd, 0, SEEK_SET);
+    lseek64(fd, 0, SEEK_SET);
 
+    sp<MediaMetadataRetriever> mRetriever(new MediaMetadataRetriever);
     if (mRetriever->setDataSource(fd, 0, size) == OK) {
         sp<IMemory> mem = mRetriever->extractAlbumArt();
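
The scanner now creates a MediaMetadataRetriever per scanned file (and per extractAlbumArt call) instead of holding one as a member, and copies a fixed tag-to-key table into the client. A compact sketch of that table walk; FakeRetriever, the key values and the Client template are stand-ins, not the real MediaMetadataRetriever or METADATA_KEY_* constants:

```cpp
#include <cstddef>
#include <map>
#include <string>

struct FakeRetriever {
    std::map<int, std::string> metadata;
    const char *extractMetadata(int key) const {
        std::map<int, std::string>::const_iterator it = metadata.find(key);
        return it == metadata.end() ? nullptr : it->second.c_str();
    }
};

struct KeyMap {
    const char *tag;
    int key;
};

template <typename Client>
void copyTags(const FakeRetriever &retriever, Client &client) {
    static const KeyMap kKeyMap[] = {
        { "album",  1 },   // key values are placeholders
        { "artist", 2 },
        { "title",  3 },
    };
    for (size_t i = 0; i < sizeof(kKeyMap) / sizeof(kKeyMap[0]); ++i) {
        const char *value = retriever.extractMetadata(kKeyMap[i].key);
        if (value != nullptr) {
            client.addStringTag(kKeyMap[i].tag, value);
        }
    }
}
```
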
 
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index e8f4839..ea3b801 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -144,7 +144,10 @@
             static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
 
     int64_t thumbNailTime;
-    if (frameTimeUs < 0 && trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) {
+    if (frameTimeUs < 0) {
+        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) {
+            thumbNailTime = 0;
+        }
         options.setSeekTo(thumbNailTime, mode);
     } else {
         thumbNailTime = -1;
@@ -205,17 +208,26 @@
     CHECK(meta->findInt32(kKeyWidth, &width));
     CHECK(meta->findInt32(kKeyHeight, &height));
 
+    int32_t crop_left, crop_top, crop_right, crop_bottom;
+    if (!meta->findRect(
+                kKeyCropRect,
+                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
+        crop_left = crop_top = 0;
+        crop_right = width - 1;
+        crop_bottom = height - 1;
+    }
+
     int32_t rotationAngle;
     if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
         rotationAngle = 0;  // By default, no rotation
     }
 
     VideoFrame *frame = new VideoFrame;
-    frame->mWidth = width;
-    frame->mHeight = height;
-    frame->mDisplayWidth = width;
-    frame->mDisplayHeight = height;
-    frame->mSize = width * height * 2;
+    frame->mWidth = crop_right - crop_left + 1;
+    frame->mHeight = crop_bottom - crop_top + 1;
+    frame->mDisplayWidth = frame->mWidth;
+    frame->mDisplayHeight = frame->mHeight;
+    frame->mSize = frame->mWidth * frame->mHeight * 2;
     frame->mData = new uint8_t[frame->mSize];
     frame->mRotationAngle = rotationAngle;
 
@@ -226,17 +238,27 @@
             (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
     CHECK(converter.isValid());
 
-    converter.convert(
-            width, height,
+    err = converter.convert(
             (const uint8_t *)buffer->data() + buffer->range_offset(),
-            0,
-            frame->mData, width * 2);
+            width, height,
+            crop_left, crop_top, crop_right, crop_bottom,
+            frame->mData,
+            frame->mWidth,
+            frame->mHeight,
+            0, 0, frame->mWidth - 1, frame->mHeight - 1);
 
     buffer->release();
     buffer = NULL;
 
     decoder->stop();
 
+    if (err != OK) {
+        LOGE("Colorconverter failed to convert frame.");
+
+        delete frame;
+        frame = NULL;
+    }
+
     return frame;
 }
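
The retriever hunks above size the VideoFrame from the (inclusive) crop rectangle, pass both source geometry and crop bounds to the converter, and throw the frame away if the conversion fails. A minimal sketch of that flow; convertCrop() is a placeholder for ColorConverter::convert(), not its real signature:

```cpp
#include <cstdint>
#include <vector>

struct Frame {
    int32_t width, height;
    std::vector<uint8_t> rgb565;   // 2 bytes per pixel
};

// Placeholder for the color conversion; a real implementation would
// translate the cropped YUV region into RGB565 here.
static bool convertCrop(const uint8_t * /*src*/, int32_t /*srcW*/, int32_t /*srcH*/,
                        int32_t cropL, int32_t cropT, int32_t cropR, int32_t cropB,
                        Frame *dst) {
    dst->width = cropR - cropL + 1;     // crop bounds are inclusive
    dst->height = cropB - cropT + 1;
    if (dst->width <= 0 || dst->height <= 0) {
        return false;
    }
    dst->rgb565.assign(size_t(dst->width) * size_t(dst->height) * 2, 0);
    return true;
}

Frame *extractFrame(const uint8_t *src, int32_t w, int32_t h,
                    int32_t cropL, int32_t cropT, int32_t cropR, int32_t cropB) {
    Frame *frame = new Frame();
    if (!convertCrop(src, w, h, cropL, cropT, cropR, cropB, frame)) {
        delete frame;                   // same cleanup the patch adds on error
        return nullptr;
    }
    return frame;
}
```
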
 
@@ -250,6 +272,12 @@
         return NULL;
     }
 
+    int32_t drm = 0;
+    if (mExtractor->getMetaData()->findInt32(kKeyIsDRM, &drm) && drm != 0) {
+        LOGE("frame grab not allowed.");
+        return NULL;
+    }
+
     size_t n = mExtractor->countTracks();
     size_t i;
     for (i = 0; i < n; ++i) {
@@ -419,5 +447,4 @@
     }
 }
 
-
 }  // namespace android
diff --git a/media/libstagefright/ThrottledSource.cpp b/media/libstagefright/ThrottledSource.cpp
index 4711f7c..88e07b0 100644
--- a/media/libstagefright/ThrottledSource.cpp
+++ b/media/libstagefright/ThrottledSource.cpp
@@ -41,7 +41,7 @@
     return mSource->initCheck();
 }
 
-ssize_t ThrottledSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) {
     Mutex::Autolock autoLock(mLock);
 
     ssize_t n = mSource->readAt(offset, data, size);
@@ -72,7 +72,7 @@
     return n;
 }
 
-status_t ThrottledSource::getSize(off_t *size) {
+status_t ThrottledSource::getSize(off64_t *size) {
     return mSource->getSize(size);
 }
 
diff --git a/media/libstagefright/VBRISeeker.cpp b/media/libstagefright/VBRISeeker.cpp
new file mode 100644
index 0000000..48bddc2
--- /dev/null
+++ b/media/libstagefright/VBRISeeker.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VBRISeeker"
+#include <utils/Log.h>
+
+#include "include/VBRISeeker.h"
+
+#include "include/MP3Extractor.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static uint32_t U24_AT(const uint8_t *ptr) {
+    return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
+}
+
+// static
+sp<VBRISeeker> VBRISeeker::CreateFromSource(
+        const sp<DataSource> &source, off64_t post_id3_pos) {
+    off64_t pos = post_id3_pos;
+
+    uint8_t header[4];
+    ssize_t n = source->readAt(pos, header, sizeof(header));
+    if (n < (ssize_t)sizeof(header)) {
+        return NULL;
+    }
+
+    uint32_t tmp = U32_AT(&header[0]);
+    size_t frameSize;
+    int sampleRate;
+    if (!MP3Extractor::get_mp3_frame_size(tmp, &frameSize, &sampleRate)) {
+        return NULL;
+    }
+
+    // VBRI header follows 32 bytes after the header _ends_.
+    pos += sizeof(header) + 32;
+
+    uint8_t vbriHeader[26];
+    n = source->readAt(pos, vbriHeader, sizeof(vbriHeader));
+    if (n < (ssize_t)sizeof(vbriHeader)) {
+        return NULL;
+    }
+
+    if (memcmp(vbriHeader, "VBRI", 4)) {
+        return NULL;
+    }
+
+    size_t numFrames = U32_AT(&vbriHeader[14]);
+
+    int64_t durationUs =
+        numFrames * 1000000ll * (sampleRate >= 32000 ? 1152 : 576) / sampleRate;
+
+    LOGV("duration = %.2f secs", durationUs / 1E6);
+
+    size_t numEntries = U16_AT(&vbriHeader[18]);
+    size_t entrySize = U16_AT(&vbriHeader[22]);
+    size_t scale = U16_AT(&vbriHeader[20]);
+
+    LOGV("%d entries, scale=%d, size_per_entry=%d",
+         numEntries,
+         scale,
+         entrySize);
+
+    size_t totalEntrySize = numEntries * entrySize;
+    uint8_t *buffer = new uint8_t[totalEntrySize];
+
+    n = source->readAt(pos + sizeof(vbriHeader), buffer, totalEntrySize);
+    if (n < (ssize_t)totalEntrySize) {
+        delete[] buffer;
+        buffer = NULL;
+
+        return NULL;
+    }
+
+    sp<VBRISeeker> seeker = new VBRISeeker;
+    seeker->mBasePos = post_id3_pos;
+    seeker->mDurationUs = durationUs;
+
+    off64_t offset = post_id3_pos;
+    for (size_t i = 0; i < numEntries; ++i) {
+        uint32_t numBytes;
+        switch (entrySize) {
+            case 1: numBytes = buffer[i]; break;
+            case 2: numBytes = U16_AT(buffer + 2 * i); break;
+            case 3: numBytes = U24_AT(buffer + 3 * i); break;
+            default:
+            {
+                CHECK_EQ(entrySize, 4u);
+                numBytes = U32_AT(buffer + 4 * i); break;
+            }
+        }
+
+        numBytes *= scale;
+
+        seeker->mSegments.push(numBytes);
+
+        LOGV("entry #%d: %d offset 0x%08lx", i, numBytes, offset);
+        offset += numBytes;
+    }
+
+    delete[] buffer;
+    buffer = NULL;
+
+    LOGI("Found VBRI header.");
+
+    return seeker;
+}
+
+VBRISeeker::VBRISeeker()
+    : mDurationUs(-1) {
+}
+
+bool VBRISeeker::getDuration(int64_t *durationUs) {
+    if (mDurationUs < 0) {
+        return false;
+    }
+
+    *durationUs = mDurationUs;
+
+    return true;
+}
+
+bool VBRISeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {
+    if (mDurationUs < 0) {
+        return false;
+    }
+
+    int64_t segmentDurationUs = mDurationUs / mSegments.size();
+
+    int64_t nowUs = 0;
+    *pos = mBasePos;
+    size_t segmentIndex = 0;
+    while (segmentIndex < mSegments.size() && nowUs < *timeUs) {
+        nowUs += segmentDurationUs;
+        *pos += mSegments.itemAt(segmentIndex++);
+    }
+
+    LOGV("getOffsetForTime %lld us => 0x%08lx", *timeUs, *pos);
+
+    *timeUs = nowUs;
+
+    return true;
+}
+
+}  // namespace android
+
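For reference, a minimal standalone sketch of the segment walk that CreateFromSource() and getOffsetForTime() above implement: each TOC entry, already multiplied by the VBRI scale factor, is treated as one segment of equal duration, and bytes are accumulated until the requested time is reached. The helper name and the std::vector container are illustrative, not part of the patch.

#include <stdint.h>
#include <vector>

struct VbriSeekResult { int64_t timeUs; int64_t pos; };

// segments[i] is the byte size of segment i; every segment is assumed to
// cover durationUs / segments.size() microseconds of audio.
static VbriSeekResult seekVbri(
        int64_t basePos, int64_t durationUs,
        const std::vector<uint32_t> &segments, int64_t targetUs) {
    if (segments.empty() || durationUs <= 0) {
        return { 0, basePos };
    }
    const int64_t segmentDurationUs = durationUs / (int64_t)segments.size();
    int64_t nowUs = 0;
    int64_t pos = basePos;
    for (size_t i = 0; i < segments.size() && nowUs < targetUs; ++i) {
        nowUs += segmentDurationUs;  // advance one segment in time ...
        pos += segments[i];          // ... and by its byte count
    }
    return { nowUs, pos };           // the seek snaps to a segment boundary
}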
diff --git a/media/libstagefright/VideoSourceDownSampler.cpp b/media/libstagefright/VideoSourceDownSampler.cpp
new file mode 100644
index 0000000..ea7b09a
--- /dev/null
+++ b/media/libstagefright/VideoSourceDownSampler.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoSourceDownSampler"
+
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include "OMX_Video.h"
+
+namespace android {
+
+VideoSourceDownSampler::VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+        int32_t width, int32_t height) {
+    LOGV("Construct VideoSourceDownSampler");
+    CHECK(width > 0);
+    CHECK(height > 0);
+
+    mRealVideoSource = videoSource;
+    mWidth = width;
+    mHeight = height;
+
+    mMeta = new MetaData(*(mRealVideoSource->getFormat()));
+    CHECK(mMeta->findInt32(kKeyWidth, &mRealSourceWidth));
+    CHECK(mMeta->findInt32(kKeyHeight, &mRealSourceHeight));
+
+    if ((mWidth != mRealSourceWidth) || (mHeight != mRealSourceHeight)) {
+        // Change meta data for width and height.
+        CHECK(mWidth <= mRealSourceWidth);
+        CHECK(mHeight <= mRealSourceHeight);
+
+        mNeedDownSampling = true;
+        computeDownSamplingParameters();
+        mMeta->setInt32(kKeyWidth, mWidth);
+        mMeta->setInt32(kKeyHeight, mHeight);
+    } else {
+        mNeedDownSampling = false;
+    }
+}
+
+VideoSourceDownSampler::~VideoSourceDownSampler() {
+}
+
+void VideoSourceDownSampler::computeDownSamplingParameters() {
+    mDownSampleSkipX = mRealSourceWidth / mWidth;
+    mDownSampleSkipY = mRealSourceHeight / mHeight;
+
+    mDownSampleOffsetX = mRealSourceWidth - mDownSampleSkipX * mWidth;
+    mDownSampleOffsetY = mRealSourceHeight - mDownSampleSkipY * mHeight;
+}
+
+void VideoSourceDownSampler::downSampleYUVImage(
+        const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const {
+    // find the YUV format
+    int32_t srcFormat;
+    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+    YUVImage::YUVFormat yuvFormat;
+    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+        yuvFormat = YUVImage::YUV420SemiPlanar;
+    } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) {
+        yuvFormat = YUVImage::YUV420Planar;
+    }
+
+    // allocate mediaBuffer for down sampled image and setup a canvas.
+    *buffer = new MediaBuffer(YUVImage::bufferSize(yuvFormat, mWidth, mHeight));
+    YUVImage yuvDownSampledImage(yuvFormat,
+            mWidth, mHeight,
+            (uint8_t *)(*buffer)->data());
+    YUVCanvas yuvCanvasDownSample(yuvDownSampledImage);
+
+    YUVImage yuvImageSource(yuvFormat,
+            mRealSourceWidth, mRealSourceHeight,
+            (uint8_t *)sourceBuffer.data());
+    yuvCanvasDownSample.downsample(mDownSampleOffsetX, mDownSampleOffsetY,
+            mDownSampleSkipX, mDownSampleSkipY,
+            yuvImageSource);
+}
+
+status_t VideoSourceDownSampler::start(MetaData *params) {
+    LOGV("start");
+    return mRealVideoSource->start();
+}
+
+status_t VideoSourceDownSampler::stop() {
+    LOGV("stop");
+    return mRealVideoSource->stop();
+}
+
+sp<MetaData> VideoSourceDownSampler::getFormat() {
+    LOGV("getFormat");
+    return mMeta;
+}
+
+status_t VideoSourceDownSampler::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    LOGV("read");
+    MediaBuffer *realBuffer;
+    status_t err = mRealVideoSource->read(&realBuffer, options);
+
+    if (mNeedDownSampling) {
+        downSampleYUVImage(*realBuffer, buffer);
+
+        int64_t frameTime;
+        realBuffer->meta_data()->findInt64(kKeyTime, &frameTime);
+        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
+
+        // We just want this buffer to be deleted when the encoder releases it.
+        // So don't add a reference to it and set the observer to NULL.
+        (*buffer)->setObserver(NULL);
+
+        // The original buffer is no longer required. Release it.
+        realBuffer->release();
+    } else {
+        *buffer = realBuffer;
+    }
+
+    return err;
+}
+
+status_t VideoSourceDownSampler::pause() {
+    LOGV("pause");
+    return mRealVideoSource->pause();
+}
+
+}  // namespace android
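A small worked example of the arithmetic in computeDownSamplingParameters(), using assumed frame sizes purely for illustration: the integer skip factors select every Nth pixel, and the leftover pixels become the sampling offsets passed to the downsampling canvas.

#include <stdint.h>
#include <stdio.h>

int main() {
    int32_t realWidth = 640, realHeight = 480;  // source frames (assumed)
    int32_t outWidth  = 176, outHeight  = 144;  // requested output (assumed)

    int32_t skipX = realWidth / outWidth;              // 3: every 3rd column
    int32_t skipY = realHeight / outHeight;            // 3: every 3rd row
    int32_t offsetX = realWidth - skipX * outWidth;    // 112 columns left over
    int32_t offsetY = realHeight - skipY * outHeight;  // 48 rows left over

    printf("skip=(%d,%d) offset=(%d,%d)\n", skipX, skipY, offsetX, offsetY);
    return 0;
}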
diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp
index aff06bc..9332120 100644
--- a/media/libstagefright/WAVExtractor.cpp
+++ b/media/libstagefright/WAVExtractor.cpp
@@ -51,7 +51,7 @@
             const sp<MetaData> &meta,
             uint16_t waveFormat,
             int32_t bitsPerSample,
-            off_t offset, size_t size);
+            off64_t offset, size_t size);
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
@@ -72,11 +72,11 @@
     int32_t mSampleRate;
     int32_t mNumChannels;
     int32_t mBitsPerSample;
-    off_t mOffset;
+    off64_t mOffset;
     size_t mSize;
     bool mStarted;
     MediaBufferGroup *mGroup;
-    off_t mCurrentPos;
+    off64_t mCurrentPos;
 
     WAVSource(const WAVSource &);
     WAVSource &operator=(const WAVSource &);
@@ -139,7 +139,7 @@
 
     size_t totalSize = U32_LE_AT(&header[4]);
 
-    off_t offset = 12;
+    off64_t offset = 12;
     size_t remainingSize = totalSize;
     while (remainingSize >= 8) {
         uint8_t chunkHeader[8];
@@ -251,7 +251,7 @@
         const sp<MetaData> &meta,
         uint16_t waveFormat,
         int32_t bitsPerSample,
-        off_t offset, size_t size)
+        off64_t offset, size_t size)
     : mDataSource(dataSource),
       mMeta(meta),
       mWaveFormat(waveFormat),
@@ -318,7 +318,7 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) {
-        int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * 2;
+        int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3);
         if (pos > mSize) {
             pos = mSize;
         }
@@ -335,7 +335,7 @@
         mBitsPerSample == 8 ? kMaxFrameSize / 2 : kMaxFrameSize;
 
     size_t maxBytesAvailable =
-        (mCurrentPos - mOffset >= (off_t)mSize)
+        (mCurrentPos - mOffset >= (off64_t)mSize)
             ? 0 : mSize - (mCurrentPos - mOffset);
 
     if (maxBytesToRead > maxBytesAvailable) {
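The seek fix above scales by the actual sample size instead of hard-coding 16-bit samples; a standalone sketch of that byte-offset computation (helper name is illustrative, not part of the patch):

#include <stdint.h>

// Returns the byte offset into the WAV data chunk for a given seek time.
static int64_t pcmSeekOffset(
        int64_t seekTimeUs, int32_t sampleRate, int32_t numChannels,
        int32_t bitsPerSample, int64_t dataChunkSize) {
    int64_t frames = (seekTimeUs * sampleRate) / 1000000;       // whole frames
    int64_t pos = frames * numChannels * (bitsPerSample >> 3);  // bytes
    if (pos > dataChunkSize) {
        pos = dataChunkSize;  // clamp to the end of the data chunk
    }
    return pos;  // the caller adds the chunk's base file offset
}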
diff --git a/media/libstagefright/WVMExtractor.cpp b/media/libstagefright/WVMExtractor.cpp
new file mode 100644
index 0000000..7c72852
--- /dev/null
+++ b/media/libstagefright/WVMExtractor.cpp
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "WVMExtractor"
+#include <utils/Log.h>
+
+#include "include/WVMExtractor.h"
+
+#include <arpa/inet.h>
+#include <utils/String8.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <dlfcn.h>
+
+#include <utils/Errors.h>
+
+/* The extractor lifetime is short - just long enough to get
+ * the media sources constructed - so the shared lib needs to remain open
+ * beyond the lifetime of the extractor.  So keep the handle as a global
+ * rather than a member of the extractor
+ */
+void *gVendorLibHandle = NULL;
+
+namespace android {
+
+static Mutex gWVMutex;
+
+WVMExtractor::WVMExtractor(const sp<DataSource> &source)
+    : mDataSource(source) {
+    {
+        Mutex::Autolock autoLock(gWVMutex);
+        if (gVendorLibHandle == NULL) {
+            gVendorLibHandle = dlopen("libwvm.so", RTLD_NOW);
+        }
+
+        if (gVendorLibHandle == NULL) {
+            LOGE("Failed to open libwvm.so");
+            return;
+        }
+    }
+
+    typedef MediaExtractor *(*GetInstanceFunc)(sp<DataSource>);
+    GetInstanceFunc getInstanceFunc =
+        (GetInstanceFunc) dlsym(gVendorLibHandle,
+                "_ZN7android11GetInstanceENS_2spINS_10DataSourceEEE");
+
+    if (getInstanceFunc) {
+        LOGD("Calling GetInstanceFunc");
+        mImpl = (*getInstanceFunc)(source);
+        CHECK(mImpl != NULL);
+    } else {
+        LOGE("Failed to locate GetInstance in libwvm.so");
+    }
+}
+
+WVMExtractor::~WVMExtractor() {
+}
+
+size_t WVMExtractor::countTracks() {
+    return (mImpl != NULL) ? mImpl->countTracks() : 0;
+}
+
+sp<MediaSource> WVMExtractor::getTrack(size_t index) {
+    if (mImpl == NULL) {
+        return NULL;
+    }
+    return mImpl->getTrack(index);
+}
+
+sp<MetaData> WVMExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+    if (mImpl == NULL) {
+        return NULL;
+    }
+    return mImpl->getTrackMetaData(index, flags);
+}
+
+sp<MetaData> WVMExtractor::getMetaData() {
+    if (mImpl == NULL) {
+        return NULL;
+    }
+    return mImpl->getMetaData();
+}
+
+}  // namespace android
+
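A generic sketch of the dlopen/dlsym factory pattern the extractor relies on; apart from libwvm.so and the mangled GetInstance symbol shown in the patch, every name below is a placeholder, and the mutex the patch takes around the load is omitted for brevity.

#include <dlfcn.h>
#include <stdio.h>

typedef void *(*FactoryFunc)(void *cookie);

// Resolves and calls a factory exported by a vendor library. The handle is
// intentionally kept open for the life of the process, because objects the
// factory creates may outlive the caller that triggered the load.
static void *loadVendorFactory(
        const char *libName, const char *mangledSymbol, void *cookie) {
    static void *handle = NULL;
    if (handle == NULL) {
        handle = dlopen(libName, RTLD_NOW);
    }
    if (handle == NULL) {
        fprintf(stderr, "dlopen(%s) failed: %s\n", libName, dlerror());
        return NULL;
    }
    FactoryFunc factory = (FactoryFunc)dlsym(handle, mangledSymbol);
    if (factory == NULL) {
        fprintf(stderr, "dlsym(%s) failed: %s\n", mangledSymbol, dlerror());
        return NULL;
    }
    return factory(cookie);
}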
diff --git a/media/libstagefright/XINGSeeker.cpp b/media/libstagefright/XINGSeeker.cpp
new file mode 100644
index 0000000..616836c
--- /dev/null
+++ b/media/libstagefright/XINGSeeker.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "include/XINGSeeker.h"
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static bool parse_xing_header(
+        const sp<DataSource> &source, off64_t first_frame_pos,
+        int32_t *frame_number = NULL, int32_t *byte_number = NULL,
+        char *table_of_contents = NULL, int32_t *quality_indicator = NULL,
+        int64_t *duration = NULL);
+
+// static
+sp<XINGSeeker> XINGSeeker::CreateFromSource(
+        const sp<DataSource> &source, off64_t first_frame_pos) {
+    sp<XINGSeeker> seeker = new XINGSeeker;
+
+    seeker->mFirstFramePos = first_frame_pos;
+
+    if (!parse_xing_header(
+                source, first_frame_pos,
+                NULL, &seeker->mSizeBytes, seeker->mTableOfContents,
+                NULL, &seeker->mDurationUs)) {
+        return NULL;
+    }
+
+    LOGI("Found XING header.");
+
+    return seeker;
+}
+
+XINGSeeker::XINGSeeker()
+    : mDurationUs(-1),
+      mSizeBytes(0) {
+}
+
+bool XINGSeeker::getDuration(int64_t *durationUs) {
+    if (mDurationUs < 0) {
+        return false;
+    }
+
+    *durationUs = mDurationUs;
+
+    return true;
+}
+
+bool XINGSeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {
+    if (mSizeBytes == 0 || mTableOfContents[0] <= 0 || mDurationUs < 0) {
+        return false;
+    }
+
+    float percent = (float)(*timeUs) * 100 / mDurationUs;
+    float fx;
+    if( percent <= 0.0f ) {
+        fx = 0.0f;
+    } else if( percent >= 100.0f ) {
+        fx = 256.0f;
+    } else {
+        int a = (int)percent;
+        float fa, fb;
+        if ( a == 0 ) {
+            fa = 0.0f;
+        } else {
+            fa = (float)mTableOfContents[a-1];
+        }
+        if ( a < 99 ) {
+            fb = (float)mTableOfContents[a];
+        } else {
+            fb = 256.0f;
+        }
+        fx = fa + (fb-fa)*(percent-a);
+    }
+
+    *pos = (int)((1.0f/256.0f)*fx*mSizeBytes) + mFirstFramePos;
+
+    return true;
+}
+
+static bool parse_xing_header(
+        const sp<DataSource> &source, off64_t first_frame_pos,
+        int32_t *frame_number, int32_t *byte_number,
+        char *table_of_contents, int32_t *quality_indicator,
+        int64_t *duration) {
+    if (frame_number) {
+        *frame_number = 0;
+    }
+    if (byte_number) {
+        *byte_number = 0;
+    }
+    if (table_of_contents) {
+        table_of_contents[0] = 0;
+    }
+    if (quality_indicator) {
+        *quality_indicator = 0;
+    }
+    if (duration) {
+        *duration = 0;
+    }
+
+    uint8_t buffer[4];
+    off64_t offset = first_frame_pos;
+    if (source->readAt(offset, &buffer, 4) < 4) { // get header
+        return false;
+    }
+    offset += 4;
+
+    uint8_t id, layer, sr_index, mode;
+    layer = (buffer[1] >> 1) & 3;
+    id = (buffer[1] >> 3) & 3;
+    sr_index = (buffer[2] >> 2) & 3;
+    mode = (buffer[3] >> 6) & 3;
+    if (layer == 0) {
+        return false;
+    }
+    if (id == 1) {
+        return false;
+    }
+    if (sr_index == 3) {
+        return false;
+    }
+    // determine offset of XING header
+    if(id&1) { // mpeg1
+        if (mode != 3) offset += 32;
+        else offset += 17;
+    } else { // mpeg2
+        if (mode != 3) offset += 17;
+        else offset += 9;
+    }
+
+    if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID
+        return false;
+    }
+    offset += 4;
+    // Check XING ID
+    if ((buffer[0] != 'X') || (buffer[1] != 'i')
+                || (buffer[2] != 'n') || (buffer[3] != 'g')) {
+        if ((buffer[0] != 'I') || (buffer[1] != 'n')
+                    || (buffer[2] != 'f') || (buffer[3] != 'o')) {
+            return false;
+        }
+    }
+
+    if (source->readAt(offset, &buffer, 4) < 4) { // flags
+        return false;
+    }
+    offset += 4;
+    uint32_t flags = U32_AT(buffer);
+
+    if (flags & 0x0001) {  // Frames field is present
+        if (source->readAt(offset, buffer, 4) < 4) {
+            return false;
+        }
+        if (frame_number) {
+            *frame_number = U32_AT(buffer);
+        }
+        int32_t frame = U32_AT(buffer);
+        // Samples per frame: first index = MPEG version (0: MPEG-2/2.5, 1: MPEG-1), second index = layer
+        const int samplesPerFrames[2][3] =
+        {
+            { 384, 1152, 576  }, // MPEG 2, 2.5: layer1, layer2, layer3
+            { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3
+        };
+        // Sampling rates in Hz: first index = MPEG version ID field, second index = sampling rate index
+        const int samplingRates[4][3] =
+        {
+            { 11025, 12000, 8000,  },    // MPEG 2.5
+            { 0,     0,     0,     },    // reserved
+            { 22050, 24000, 16000, },    // MPEG 2
+            { 44100, 48000, 32000, }     // MPEG 1
+        };
+        if (duration) {
+            *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL
+                / samplingRates[id][sr_index];
+        }
+        offset += 4;
+    }
+    if (flags & 0x0002) {  // Bytes field is present
+        if (byte_number) {
+            if (source->readAt(offset, buffer, 4) < 4) {
+                return false;
+            }
+            *byte_number = U32_AT(buffer);
+        }
+        offset += 4;
+    }
+    if (flags & 0x0004) {  // TOC field is present
+        if (table_of_contents) {
+            if (source->readAt(offset + 1, table_of_contents, 99) < 99) {
+                return false;
+            }
+        }
+        offset += 100;
+    }
+    if (flags & 0x0008) {  // Quality indicator field is present
+        if (quality_indicator) {
+            if (source->readAt(offset, buffer, 4) < 4) {
+                return false;
+            }
+            *quality_indicator = U32_AT(buffer);
+        }
+    }
+    return true;
+}
+
+}  // namespace android
+
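For reference, the 99-entry TOC interpolation from getOffsetForTime() as a standalone helper; it assumes a positive duration and non-zero file size, uses an unsigned table for clarity (the patch stores it as char), and the name is illustrative.

#include <stdint.h>

// toc[] maps playback percentage (1..99) to a 0..255 fraction of the file;
// the fractional part of the percentage interpolates between adjacent entries.
static int64_t xingOffsetForTime(
        int64_t timeUs, int64_t durationUs, int64_t sizeBytes,
        int64_t firstFramePos, const unsigned char toc[99]) {
    float percent = (float)timeUs * 100 / durationUs;
    float fx;
    if (percent <= 0.0f) {
        fx = 0.0f;
    } else if (percent >= 100.0f) {
        fx = 256.0f;
    } else {
        int a = (int)percent;
        float fa = (a == 0) ? 0.0f : (float)toc[a - 1];
        float fb = (a < 99) ? (float)toc[a] : 256.0f;
        fx = fa + (fb - fa) * (percent - a);  // linear interpolation
    }
    return (int64_t)((fx / 256.0f) * sizeBytes) + firstFramePos;
}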
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index 478e40c..95cf2d3 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -14,6 +14,10 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
+#define LOG_TAG "avc_utils"
+#include <utils/Log.h>
+
 #include "include/avc_utils.h"
 
 #include <media/stagefright/foundation/ABitReader.h>
@@ -218,6 +222,28 @@
     return NULL;
 }
 
+const char *AVCProfileToString(uint8_t profile) {
+    switch (profile) {
+        case kAVCProfileBaseline:
+            return "Baseline";
+        case kAVCProfileMain:
+            return "Main";
+        case kAVCProfileExtended:
+            return "Extended";
+        case kAVCProfileHigh:
+            return "High";
+        case kAVCProfileHigh10:
+            return "High 10";
+        case kAVCProfileHigh422:
+            return "High 422";
+        case kAVCProfileHigh444:
+            return "High 444";
+        case kAVCProfileCAVLC444Intra:
+            return "CAVLC 444 Intra";
+        default:   return "Unknown";
+    }
+}
+
 sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
     const uint8_t *data = accessUnit->data();
     size_t size = accessUnit->size();
@@ -244,6 +270,10 @@
 
     *out++ = 0x01;  // configurationVersion
     memcpy(out, seqParamSet->data() + 1, 3);  // profile/level...
+
+    uint8_t profile = out[0];
+    uint8_t level = out[2];
+
     out += 3;
     *out++ = (0x3f << 2) | 1;  // lengthSize == 2 bytes
     *out++ = 0xe0 | 1;
@@ -271,7 +301,8 @@
     meta->setInt32(kKeyWidth, width);
     meta->setInt32(kKeyHeight, height);
 
-    LOGI("found AVC codec config (%d x %d)", width, height);
+    LOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
+         width, height, AVCProfileToString(profile), level / 10, level % 10);
 
     return meta;
 }
@@ -298,5 +329,52 @@
     return foundIDR;
 }
 
+sp<MetaData> MakeAACCodecSpecificData(
+        unsigned profile, unsigned sampling_freq_index,
+        unsigned channel_configuration) {
+    sp<MetaData> meta = new MetaData;
+    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    CHECK_LE(sampling_freq_index, 11u);
+    static const int32_t kSamplingFreq[] = {
+        96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
+        16000, 12000, 11025, 8000
+    };
+    meta->setInt32(kKeySampleRate, kSamplingFreq[sampling_freq_index]);
+    meta->setInt32(kKeyChannelCount, channel_configuration);
+
+    static const uint8_t kStaticESDS[] = {
+        0x03, 22,
+        0x00, 0x00,     // ES_ID
+        0x00,           // streamDependenceFlag, URL_Flag, OCRstreamFlag
+
+        0x04, 17,
+        0x40,                       // Audio ISO/IEC 14496-3
+        0x00, 0x00, 0x00, 0x00,
+        0x00, 0x00, 0x00, 0x00,
+        0x00, 0x00, 0x00, 0x00,
+
+        0x05, 2,
+        // AudioSpecificInfo follows
+
+        // oooo offf fccc c000
+        // o - audioObjectType
+        // f - samplingFreqIndex
+        // c - channelConfig
+    };
+    sp<ABuffer> csd = new ABuffer(sizeof(kStaticESDS) + 2);
+    memcpy(csd->data(), kStaticESDS, sizeof(kStaticESDS));
+
+    csd->data()[sizeof(kStaticESDS)] =
+        ((profile + 1) << 3) | (sampling_freq_index >> 1);
+
+    csd->data()[sizeof(kStaticESDS) + 1] =
+        ((sampling_freq_index << 7) & 0x80) | (channel_configuration << 3);
+
+    meta->setData(kKeyESDS, 0, csd->data(), csd->size());
+
+    return meta;
+}
+
 }  // namespace android
 
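A worked example of the two AudioSpecificConfig bytes appended to kStaticESDS above, for an assumed AAC-LC stream at 44.1 kHz, stereo (profile field 1, sampling_freq_index 4, channel_configuration 2):

#include <stdint.h>
#include <stdio.h>

int main() {
    unsigned profile = 1;            // object type becomes profile + 1 = 2 (AAC LC)
    unsigned samplingFreqIndex = 4;  // 44100 Hz
    unsigned channelConfig = 2;      // stereo

    uint8_t b0 = ((profile + 1) << 3) | (samplingFreqIndex >> 1);
    uint8_t b1 = ((samplingFreqIndex << 7) & 0x80) | (channelConfig << 3);

    // Prints "0x12 0x10"; bit fields: ooooo=00010 ffff=0100 cccc=0010 pad=000
    printf("AudioSpecificConfig: 0x%02x 0x%02x\n", b0, b1);
    return 0;
}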
diff --git a/media/libstagefright/codecs/aacdec/AACDecoder.cpp b/media/libstagefright/codecs/aacdec/AACDecoder.cpp
index f58c16d..208431c 100644
--- a/media/libstagefright/codecs/aacdec/AACDecoder.cpp
+++ b/media/libstagefright/codecs/aacdec/AACDecoder.cpp
@@ -21,8 +21,8 @@
 
 #include "pvmp4audiodecoder_api.h"
 
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 
@@ -84,7 +84,7 @@
     sp<MetaData> meta = mSource->getFormat();
     if (meta->findData(kKeyESDS, &type, &data, &size)) {
         ESDS esds((const char *)data, size);
-        CHECK_EQ(esds.InitCheck(), OK);
+        CHECK_EQ(esds.InitCheck(), (status_t)OK);
 
         const void *codec_specific_data;
         size_t codec_specific_data_size;
@@ -197,7 +197,7 @@
     }
 
     MediaBuffer *buffer;
-    CHECK_EQ(mBufferGroup->acquire_buffer(&buffer), OK);
+    CHECK_EQ(mBufferGroup->acquire_buffer(&buffer), (status_t)OK);
 
     mConfig->pInputBuffer =
         (UChar *)mInputBuffer->data() + mInputBuffer->range_offset();
@@ -308,7 +308,7 @@
             mAnchorTimeUs
                 + (mNumSamplesOutput * 1000000) / mConfig->samplingRate);
 
-    mNumSamplesOutput += mConfig->frameLength;
+    mNumSamplesOutput += mConfig->frameLength * mUpsamplingFactor;
 
     *out = buffer;
 
diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk
index d5d8f3e..69e331f 100644
--- a/media/libstagefright/codecs/aacdec/Android.mk
+++ b/media/libstagefright/codecs/aacdec/Android.mk
@@ -149,6 +149,8 @@
 
 LOCAL_C_INCLUDES := frameworks/base/media/libstagefright/include
 
+LOCAL_ARM_MODE := arm
+
 LOCAL_MODULE := libstagefright_aacdec
 
 include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/codecs/aacenc/AACEncoder.cpp b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
index df9f107..e4ff128 100644
--- a/media/libstagefright/codecs/aacenc/AACEncoder.cpp
+++ b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
@@ -36,6 +36,7 @@
       mStarted(false),
       mBufferGroup(NULL),
       mInputBuffer(NULL),
+      mInputFrame(NULL),
       mEncoderHandle(NULL),
       mApiHandle(NULL),
       mMemOperator(NULL) {
@@ -45,6 +46,7 @@
     CHECK(mApiHandle == NULL && mEncoderHandle == NULL);
     CHECK(mMeta->findInt32(kKeySampleRate, &mSampleRate));
     CHECK(mMeta->findInt32(kKeyChannelCount, &mChannels));
+    CHECK(mChannels <= 2 && mChannels >= 1);
     CHECK(mMeta->findInt32(kKeyBitRate, &mBitRate));
 
     mApiHandle = new VO_AUDIO_CODECAPI;
@@ -145,7 +147,15 @@
     mNumInputSamples = 0;
     mAnchorTimeUs = 0;
     mFrameCount = 0;
-    mSource->start(params);
+
+    mInputFrame = new int16_t[mChannels * kNumSamplesPerFrame];
+    CHECK(mInputFrame != NULL);
+
+    status_t err = mSource->start(params);
+    if (err != OK) {
+         LOGE("AudioSource is not available");
+        return err;
+    }
 
     mStarted = true;
 
@@ -153,11 +163,6 @@
 }
 
 status_t AACEncoder::stop() {
-    if (!mStarted) {
-        LOGW("Call stop() when encoder has not started");
-        return OK;
-    }
-
     if (mInputBuffer) {
         mInputBuffer->release();
         mInputBuffer = NULL;
@@ -166,8 +171,17 @@
     delete mBufferGroup;
     mBufferGroup = NULL;
 
-    mSource->stop();
+    if (mInputFrame) {
+        delete[] mInputFrame;
+        mInputFrame = NULL;
+    }
 
+    if (!mStarted) {
+        LOGW("Call stop() when encoder has not started");
+        return ERROR_END_OF_STREAM;
+    }
+
+    mSource->stop();
     if (mEncoderHandle) {
         CHECK_EQ(VO_ERR_NONE, mApiHandle->Uninit(mEncoderHandle));
         mEncoderHandle = NULL;
@@ -175,6 +189,9 @@
     delete mApiHandle;
     mApiHandle = NULL;
 
+    delete mMemOperator;
+    mMemOperator = NULL;
+
     mStarted = false;
 
     return OK;
@@ -222,7 +239,8 @@
         buffer->meta_data()->setInt32(kKeyIsCodecConfig, false);
     }
 
-    while (mNumInputSamples < kNumSamplesPerFrame) {
+    const int32_t nSamples = mChannels * kNumSamplesPerFrame;
+    while (mNumInputSamples < nSamples) {
         if (mInputBuffer == NULL) {
             if (mSource->read(&mInputBuffer, options) != OK) {
                 if (mNumInputSamples == 0) {
@@ -231,7 +249,7 @@
                 }
                 memset(&mInputFrame[mNumInputSamples],
                        0,
-                       sizeof(int16_t) * (kNumSamplesPerFrame - mNumInputSamples));
+                       sizeof(int16_t) * (nSamples - mNumInputSamples));
                 mNumInputSamples = 0;
                 break;
             }
@@ -250,8 +268,7 @@
         } else {
             readFromSource = false;
         }
-        size_t copy =
-            (kNumSamplesPerFrame - mNumInputSamples) * sizeof(int16_t);
+        size_t copy = (nSamples - mNumInputSamples) * sizeof(int16_t);
 
         if (copy > mInputBuffer->range_length()) {
             copy = mInputBuffer->range_length();
@@ -271,8 +288,8 @@
             mInputBuffer = NULL;
         }
         mNumInputSamples += copy / sizeof(int16_t);
-        if (mNumInputSamples >= kNumSamplesPerFrame) {
-            mNumInputSamples %= kNumSamplesPerFrame;
+        if (mNumInputSamples >= nSamples) {
+            mNumInputSamples %= nSamples;
             break;
         }
     }
@@ -280,7 +297,7 @@
     VO_CODECBUFFER inputData;
     memset(&inputData, 0, sizeof(inputData));
     inputData.Buffer = (unsigned char*) mInputFrame;
-    inputData.Length = kNumSamplesPerFrame * sizeof(int16_t);
+    inputData.Length = nSamples * sizeof(int16_t);
     CHECK(VO_ERR_NONE == mApiHandle->SetInputData(mEncoderHandle,&inputData));
 
     VO_CODECBUFFER outputData;
@@ -289,15 +306,21 @@
     memset(&outputInfo, 0, sizeof(outputInfo));
 
     VO_U32 ret = VO_ERR_NONE;
-    outputData.Buffer = outPtr;
-    outputData.Length = buffer->size();
-    ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo);
-    CHECK(ret == VO_ERR_NONE || ret == VO_ERR_INPUT_BUFFER_SMALL);
-    CHECK(outputData.Length != 0);
-    buffer->set_range(0, outputData.Length);
+    size_t nOutputBytes = 0;
+    do {
+        outputData.Buffer = outPtr;
+        outputData.Length = buffer->size() - nOutputBytes;
+        ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo);
+        if (ret == VO_ERR_NONE) {
+            outPtr += outputData.Length;
+            nOutputBytes += outputData.Length;
+        }
+    } while (ret != VO_ERR_INPUT_BUFFER_SMALL);
+    buffer->set_range(0, nOutputBytes);
 
     int64_t mediaTimeUs =
         ((mFrameCount - 1) * 1000000LL * kNumSamplesPerFrame) / mSampleRate;
+
     buffer->meta_data()->setInt64(kKeyTime, mAnchorTimeUs + mediaTimeUs);
     if (readFromSource && wallClockTimeUs != -1) {
         buffer->meta_data()->setInt64(kKeyDriftTime, mediaTimeUs - wallClockTimeUs);
diff --git a/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp b/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp
index 868c514..5bbba35 100644
--- a/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp
+++ b/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp
@@ -73,6 +73,7 @@
     CHECK(mSource->getFormat()->findInt32(kKeyHeight, &height));
     mFormat->setInt32(kKeyWidth, width);
     mFormat->setInt32(kKeyHeight, height);
+    mFormat->setRect(kKeyCropRect, 0, 0, width - 1, height - 1);
     mFormat->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
     mFormat->setCString(kKeyDecoderComponent, "AVCDecoder");
 
@@ -418,16 +419,32 @@
                     crop_top = crop_left = 0;
                 }
 
-                int32_t aligned_width = (crop_right - crop_left + 1 + 15) & ~15;
-                int32_t aligned_height = (crop_bottom - crop_top + 1 + 15) & ~15;
+                int32_t prevCropLeft, prevCropTop;
+                int32_t prevCropRight, prevCropBottom;
+                if (!mFormat->findRect(
+                            kKeyCropRect,
+                            &prevCropLeft, &prevCropTop,
+                            &prevCropRight, &prevCropBottom)) {
+                    prevCropLeft = prevCropTop = 0;
+                    prevCropRight = width - 1;
+                    prevCropBottom = height - 1;
+                }
 
                 int32_t oldWidth, oldHeight;
                 CHECK(mFormat->findInt32(kKeyWidth, &oldWidth));
                 CHECK(mFormat->findInt32(kKeyHeight, &oldHeight));
 
-                if (oldWidth != aligned_width || oldHeight != aligned_height) {
-                    mFormat->setInt32(kKeyWidth, aligned_width);
-                    mFormat->setInt32(kKeyHeight, aligned_height);
+                if (oldWidth != width || oldHeight != height
+                        || prevCropLeft != crop_left
+                        || prevCropTop != crop_top
+                        || prevCropRight != crop_right
+                        || prevCropBottom != crop_bottom) {
+                    mFormat->setRect(
+                            kKeyCropRect,
+                            crop_left, crop_top, crop_right, crop_bottom);
+
+                    mFormat->setInt32(kKeyWidth, width);
+                    mFormat->setInt32(kKeyHeight, height);
 
                     err = INFO_FORMAT_CHANGED;
                 } else {
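The decoder above now publishes the coded width/height plus kKeyCropRect instead of 16-pixel-aligned dimensions; a trivial sketch of how a consumer derives the visible size, assuming the inclusive "right - left + 1" convention used throughout the patch:

#include <stdint.h>

struct VisibleSize { int32_t width; int32_t height; };

static VisibleSize visibleFromCrop(
        int32_t cropLeft, int32_t cropTop,
        int32_t cropRight, int32_t cropBottom) {
    // Both bounds are inclusive, so a full 320x240 frame has
    // cropRight = 319 and cropBottom = 239.
    return { cropRight - cropLeft + 1, cropBottom - cropTop + 1 };
}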
diff --git a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
index 52a391f..e3292e6 100644
--- a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
@@ -33,6 +33,80 @@
 
 namespace android {
 
+static status_t ConvertOmxAvcProfileToAvcSpecProfile(
+        int32_t omxProfile, AVCProfile* pvProfile) {
+    LOGV("ConvertOmxAvcProfileToAvcSpecProfile: %d", omxProfile);
+    switch (omxProfile) {
+        case OMX_VIDEO_AVCProfileBaseline:
+            *pvProfile = AVC_BASELINE;
+            return OK;
+        default:
+            LOGE("Unsupported omx profile: %d", omxProfile);
+    }
+    return BAD_VALUE;
+}
+
+static status_t ConvertOmxAvcLevelToAvcSpecLevel(
+        int32_t omxLevel, AVCLevel *pvLevel) {
+    LOGV("ConvertOmxAvcLevelToAvcSpecLevel: %d", omxLevel);
+    AVCLevel level = AVC_LEVEL5_1;
+    switch (omxLevel) {
+        case OMX_VIDEO_AVCLevel1:
+            level = AVC_LEVEL1_B;
+            break;
+        case OMX_VIDEO_AVCLevel1b:
+            level = AVC_LEVEL1;
+            break;
+        case OMX_VIDEO_AVCLevel11:
+            level = AVC_LEVEL1_1;
+            break;
+        case OMX_VIDEO_AVCLevel12:
+            level = AVC_LEVEL1_2;
+            break;
+        case OMX_VIDEO_AVCLevel13:
+            level = AVC_LEVEL1_3;
+            break;
+        case OMX_VIDEO_AVCLevel2:
+            level = AVC_LEVEL2;
+            break;
+        case OMX_VIDEO_AVCLevel21:
+            level = AVC_LEVEL2_1;
+            break;
+        case OMX_VIDEO_AVCLevel22:
+            level = AVC_LEVEL2_2;
+            break;
+        case OMX_VIDEO_AVCLevel3:
+            level = AVC_LEVEL3;
+            break;
+        case OMX_VIDEO_AVCLevel31:
+            level = AVC_LEVEL3_1;
+            break;
+        case OMX_VIDEO_AVCLevel32:
+            level = AVC_LEVEL3_2;
+            break;
+        case OMX_VIDEO_AVCLevel4:
+            level = AVC_LEVEL4;
+            break;
+        case OMX_VIDEO_AVCLevel41:
+            level = AVC_LEVEL4_1;
+            break;
+        case OMX_VIDEO_AVCLevel42:
+            level = AVC_LEVEL4_2;
+            break;
+        case OMX_VIDEO_AVCLevel5:
+            level = AVC_LEVEL5;
+            break;
+        case OMX_VIDEO_AVCLevel51:
+            level = AVC_LEVEL5_1;
+            break;
+        default:
+            LOGE("Unknown omx level: %d", omxLevel);
+            return BAD_VALUE;
+    }
+    *pvLevel = level;
+    return OK;
+}
+
 inline static void ConvertYUV420SemiPlanarToYUV420Planar(
         uint8_t *inyuv, uint8_t* outyuv,
         int32_t width, int32_t height) {
@@ -104,7 +178,7 @@
       mInputFrameData(NULL),
       mGroup(NULL) {
 
-    LOGV("Construct software AVCEncoder");
+    LOGI("Construct software AVCEncoder");
 
     mHandle = new tagAVCHandle;
     memset(mHandle, 0, sizeof(tagAVCHandle));
@@ -133,7 +207,7 @@
     LOGV("initCheck");
     CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
     CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
-    CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate));
+    CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate));
     CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));
 
     // XXX: Add more color format support
@@ -231,10 +305,16 @@
     mEncParams->level = AVC_LEVEL3_2;
     int32_t profile, level;
     if (meta->findInt32(kKeyVideoProfile, &profile)) {
-        mEncParams->profile = (AVCProfile) profile;
+        if (OK != ConvertOmxAvcProfileToAvcSpecProfile(
+                        profile, &mEncParams->profile)) {
+            return BAD_VALUE;
+        }
     }
     if (meta->findInt32(kKeyVideoLevel, &level)) {
-        mEncParams->level = (AVCLevel) level;
+        if (OK != ConvertOmxAvcLevelToAvcSpecLevel(
+                        level, &mEncParams->level)) {
+            return BAD_VALUE;
+        }
     }
 
 
@@ -242,7 +322,7 @@
     mFormat->setInt32(kKeyWidth, mVideoWidth);
     mFormat->setInt32(kKeyHeight, mVideoHeight);
     mFormat->setInt32(kKeyBitRate, mVideoBitRate);
-    mFormat->setInt32(kKeySampleRate, mVideoFrameRate);
+    mFormat->setInt32(kKeyFrameRate, mVideoFrameRate);
     mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
     mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
     mFormat->setCString(kKeyDecoderComponent, "AVCEncoder");
diff --git a/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp b/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp
index dcf129e..2bdb3ef 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp
@@ -132,7 +132,10 @@
     }
 
     MP4DecodingMode actualMode = PVGetDecBitstreamMode(mHandle);
-    CHECK_EQ((int)mode, (int)actualMode);
+    if (mode != actualMode) {
+        PVCleanUpVideoDecoder(mHandle);
+        return UNKNOWN_ERROR;
+    }
 
     PVSetPostProcType((VideoDecControls *) mHandle, 0);
 
@@ -240,6 +243,8 @@
         CHECK_LE(disp_width, buf_width);
         CHECK_LE(disp_height, buf_height);
 
+        mFormat->setRect(kKeyCropRect, 0, 0, disp_width - 1, disp_height - 1);
+
         return INFO_FORMAT_CHANGED;
     }
 
diff --git a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
index a011137..15ed219 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
@@ -32,6 +32,109 @@
 
 namespace android {
 
+static status_t ConvertOmxProfileLevel(
+        MP4EncodingMode mode,
+        int32_t omxProfile,
+        int32_t omxLevel,
+        ProfileLevelType* pvProfileLevel) {
+    LOGV("ConvertOmxProfileLevel: %d/%d/%d", mode, omxProfile, omxLevel);
+    ProfileLevelType profileLevel;
+    if (mode == H263_MODE) {
+        switch (omxProfile) {
+            case OMX_VIDEO_H263ProfileBaseline:
+                if (omxLevel > OMX_VIDEO_H263Level45) {
+                    LOGE("Unsupported level (%d) for H263", omxLevel);
+                    return BAD_VALUE;
+                } else {
+                    LOGW("PV does not support level configuration for H263");
+                    profileLevel = CORE_PROFILE_LEVEL2;
+                    break;
+                }
+                break;
+            default:
+                LOGE("Unsupported profile (%d) for H263", omxProfile);
+                return BAD_VALUE;
+        }
+    } else {  // MPEG4
+        switch (omxProfile) {
+            case OMX_VIDEO_MPEG4ProfileSimple:
+                switch (omxLevel) {
+                    case OMX_VIDEO_MPEG4Level0b:
+                        profileLevel = SIMPLE_PROFILE_LEVEL0;
+                        break;
+                    case OMX_VIDEO_MPEG4Level1:
+                        profileLevel = SIMPLE_PROFILE_LEVEL1;
+                        break;
+                    case OMX_VIDEO_MPEG4Level2:
+                        profileLevel = SIMPLE_PROFILE_LEVEL2;
+                        break;
+                    case OMX_VIDEO_MPEG4Level3:
+                        profileLevel = SIMPLE_PROFILE_LEVEL3;
+                        break;
+                    default:
+                        LOGE("Unsupported level (%d) for MPEG4 simple profile",
+                            omxLevel);
+                        return BAD_VALUE;
+                }
+                break;
+            case OMX_VIDEO_MPEG4ProfileSimpleScalable:
+                switch (omxLevel) {
+                    case OMX_VIDEO_MPEG4Level0b:
+                        profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL0;
+                        break;
+                    case OMX_VIDEO_MPEG4Level1:
+                        profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL1;
+                        break;
+                    case OMX_VIDEO_MPEG4Level2:
+                        profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL2;
+                        break;
+                    default:
+                        LOGE("Unsupported level (%d) for MPEG4 simple "
+                             "scalable profile", omxLevel);
+                        return BAD_VALUE;
+                }
+                break;
+            case OMX_VIDEO_MPEG4ProfileCore:
+                switch (omxLevel) {
+                    case OMX_VIDEO_MPEG4Level1:
+                        profileLevel = CORE_PROFILE_LEVEL1;
+                        break;
+                    case OMX_VIDEO_MPEG4Level2:
+                        profileLevel = CORE_PROFILE_LEVEL2;
+                        break;
+                    default:
+                        LOGE("Unsupported level (%d) for MPEG4 core "
+                             "profile", omxLevel);
+                        return BAD_VALUE;
+                }
+                break;
+            case OMX_VIDEO_MPEG4ProfileCoreScalable:
+                switch (omxLevel) {
+                    case OMX_VIDEO_MPEG4Level1:
+                        profileLevel = CORE_SCALABLE_PROFILE_LEVEL1;
+                        break;
+                    case OMX_VIDEO_MPEG4Level2:
+                        profileLevel = CORE_SCALABLE_PROFILE_LEVEL2;
+                        break;
+                    case OMX_VIDEO_MPEG4Level3:
+                        profileLevel = CORE_SCALABLE_PROFILE_LEVEL3;
+                        break;
+                    default:
+                        LOGE("Unsupported level (%d) for MPEG4 core "
+                             "scalable profile", omxLevel);
+                        return BAD_VALUE;
+                }
+                break;
+            default:
+                LOGE("Unsupported MPEG4 profile (%d)", omxProfile);
+                return BAD_VALUE;
+        }
+    }
+
+    *pvProfileLevel = profileLevel;
+    return OK;
+}
+
 inline static void ConvertYUV420SemiPlanarToYUV420Planar(
         uint8_t *inyuv, uint8_t* outyuv,
         int32_t width, int32_t height) {
@@ -75,7 +178,7 @@
       mInputFrameData(NULL),
       mGroup(NULL) {
 
-    LOGV("Construct software M4vH263Encoder");
+    LOGI("Construct software M4vH263Encoder");
 
     mHandle = new tagvideoEncControls;
     memset(mHandle, 0, sizeof(tagvideoEncControls));
@@ -97,7 +200,7 @@
     LOGV("initCheck");
     CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
     CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
-    CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate));
+    CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate));
     CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));
 
     // XXX: Add more color format support
@@ -150,9 +253,14 @@
     // If profile and level setting is not correct, failure
     // is reported when the encoder is initialized.
     mEncParams->profile_level = CORE_PROFILE_LEVEL2;
-    int32_t profileLevel;
-    if (meta->findInt32(kKeyVideoLevel, &profileLevel)) {
-        mEncParams->profile_level = (ProfileLevelType)profileLevel;
+    int32_t profile, level;
+    if (meta->findInt32(kKeyVideoProfile, &profile) &&
+        meta->findInt32(kKeyVideoLevel, &level)) {
+        if (OK != ConvertOmxProfileLevel(
+                        mEncParams->encMode, profile, level,
+                        &mEncParams->profile_level)) {
+            return BAD_VALUE;
+        }
     }
 
     mEncParams->packetSize = 32;
@@ -191,7 +299,7 @@
     mFormat->setInt32(kKeyWidth, mVideoWidth);
     mFormat->setInt32(kKeyHeight, mVideoHeight);
     mFormat->setInt32(kKeyBitRate, mVideoBitRate);
-    mFormat->setInt32(kKeySampleRate, mVideoFrameRate);
+    mFormat->setInt32(kKeyFrameRate, mVideoFrameRate);
     mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
 
     mFormat->setCString(kKeyMIMEType, mime);
diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk
index fb56a93..753500e 100644
--- a/media/libstagefright/codecs/mp3dec/Android.mk
+++ b/media/libstagefright/codecs/mp3dec/Android.mk
@@ -53,5 +53,7 @@
 
 LOCAL_MODULE := libstagefright_mp3dec
 
+LOCAL_ARM_MODE := arm
+
 include $(BUILD_STATIC_LIBRARY)
 
diff --git a/media/libstagefright/codecs/on2/dec/VPXDecoder.cpp b/media/libstagefright/codecs/on2/dec/VPXDecoder.cpp
index 9433178..489e5ad 100644
--- a/media/libstagefright/codecs/on2/dec/VPXDecoder.cpp
+++ b/media/libstagefright/codecs/on2/dec/VPXDecoder.cpp
@@ -205,7 +205,9 @@
     vpx_image_t *img = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
 
     if (img == NULL) {
-        LOGI("on2 decoder did not return a frame.");
+        // The VPX format supports "internal-only" frames that are
+        // referenced by future content but never actually displayed, so
+        // this is a perfectly valid scenario.
 
         *out = new MediaBuffer(0);
         return OK;
diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
index 0dcbd73..62ba40f 100644
--- a/media/libstagefright/colorconversion/Android.mk
+++ b/media/libstagefright/colorconversion/Android.mk
@@ -6,17 +6,9 @@
         SoftwareRenderer.cpp
 
 LOCAL_C_INCLUDES := \
-        $(TOP)/frameworks/base/include/media/stagefright/openmax
-
-LOCAL_SHARED_LIBRARIES :=       \
-        libbinder               \
-        libmedia                \
-        libutils                \
-        libui                   \
-        libcutils				\
-        libsurfaceflinger_client\
-        libcamera_client
+        $(TOP)/frameworks/base/include/media/stagefright/openmax \
+        $(TOP)/hardware/msm7k
 
 LOCAL_MODULE:= libstagefright_color_conversion
 
-include $(BUILD_SHARED_LIBRARY)
+include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 5b16997..3b92e5d 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -16,6 +16,7 @@
 
 #include <media/stagefright/ColorConverter.h>
 #include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaErrors.h>
 
 namespace android {
 
@@ -50,31 +51,68 @@
     }
 }
 
-void ColorConverter::convert(
+ColorConverter::BitmapParams::BitmapParams(
+        void *bits,
         size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(mDstFormat, OMX_COLOR_Format16bitRGB565);
+        size_t cropLeft, size_t cropTop,
+        size_t cropRight, size_t cropBottom)
+    : mBits(bits),
+      mWidth(width),
+      mHeight(height),
+      mCropLeft(cropLeft),
+      mCropTop(cropTop),
+      mCropRight(cropRight),
+      mCropBottom(cropBottom) {
+}
+
+size_t ColorConverter::BitmapParams::cropWidth() const {
+    return mCropRight - mCropLeft + 1;
+}
+
+size_t ColorConverter::BitmapParams::cropHeight() const {
+    return mCropBottom - mCropTop + 1;
+}
+
+status_t ColorConverter::convert(
+        const void *srcBits,
+        size_t srcWidth, size_t srcHeight,
+        size_t srcCropLeft, size_t srcCropTop,
+        size_t srcCropRight, size_t srcCropBottom,
+        void *dstBits,
+        size_t dstWidth, size_t dstHeight,
+        size_t dstCropLeft, size_t dstCropTop,
+        size_t dstCropRight, size_t dstCropBottom) {
+    if (mDstFormat != OMX_COLOR_Format16bitRGB565) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    BitmapParams src(
+            const_cast<void *>(srcBits),
+            srcWidth, srcHeight,
+            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom);
+
+    BitmapParams dst(
+            dstBits,
+            dstWidth, dstHeight,
+            dstCropLeft, dstCropTop, dstCropRight, dstCropBottom);
+
+    status_t err;
 
     switch (mSrcFormat) {
         case OMX_COLOR_FormatYUV420Planar:
-            convertYUV420Planar(
-                    width, height, srcBits, srcSkip, dstBits, dstSkip);
+            err = convertYUV420Planar(src, dst);
             break;
 
         case OMX_COLOR_FormatCbYCrY:
-            convertCbYCrY(
-                    width, height, srcBits, srcSkip, dstBits, dstSkip);
+            err = convertCbYCrY(src, dst);
             break;
 
         case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
-            convertQCOMYUV420SemiPlanar(
-                    width, height, srcBits, srcSkip, dstBits, dstSkip);
+            err = convertQCOMYUV420SemiPlanar(src, dst);
             break;
 
         case OMX_COLOR_FormatYUV420SemiPlanar:
-            convertYUV420SemiPlanar(
-                    width, height, srcBits, srcSkip, dstBits, dstSkip);
+            err = convertYUV420SemiPlanar(src, dst);
             break;
 
         default:
@@ -83,28 +121,34 @@
             break;
         }
     }
+
+    return err;
 }
 
-void ColorConverter::convertCbYCrY(
-        size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(srcSkip, 0);  // Doesn't really make sense for YUV formats.
-    CHECK(dstSkip >= width * 2);
-    CHECK((dstSkip & 3) == 0);
+status_t ColorConverter::convertCbYCrY(
+        const BitmapParams &src, const BitmapParams &dst) {
+    // XXX Untested
 
     uint8_t *kAdjustedClip = initClip();
 
-    uint32_t *dst_ptr = (uint32_t *)dstBits;
+    if (!((src.mCropLeft & 1) == 0
+        && src.cropWidth() == dst.cropWidth()
+        && src.cropHeight() == dst.cropHeight())) {
+        return ERROR_UNSUPPORTED;
+    }
 
-    const uint8_t *src = (const uint8_t *)srcBits;
+    uint32_t *dst_ptr = (uint32_t *)dst.mBits
+        + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
 
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; x += 2) {
-            signed y1 = (signed)src[2 * x + 1] - 16;
-            signed y2 = (signed)src[2 * x + 3] - 16;
-            signed u = (signed)src[2 * x] - 128;
-            signed v = (signed)src[2 * x + 2] - 128;
+    const uint8_t *src_ptr = (const uint8_t *)src.mBits
+        + (src.mCropTop * dst.mWidth + src.mCropLeft) * 2;
+
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
+            signed y1 = (signed)src_ptr[2 * x + 1] - 16;
+            signed y2 = (signed)src_ptr[2 * x + 3] - 16;
+            signed u = (signed)src_ptr[2 * x] - 128;
+            signed v = (signed)src_ptr[2 * x + 2] - 128;
 
             signed u_b = u * 517;
             signed u_g = -u * 100;
@@ -134,32 +178,39 @@
             dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
         }
 
-        src += width * 2;
-        dst_ptr += dstSkip / 4;
+        src_ptr += src.mWidth * 2;
+        dst_ptr += dst.mWidth / 2;
     }
+
+    return OK;
 }
 
-void ColorConverter::convertYUV420Planar(
-        size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(srcSkip, 0);  // Doesn't really make sense for YUV formats.
-    CHECK(dstSkip >= width * 2);
-    CHECK((dstSkip & 3) == 0);
+status_t ColorConverter::convertYUV420Planar(
+        const BitmapParams &src, const BitmapParams &dst) {
+    if (!((dst.mWidth & 1) == 0
+            && (src.mCropLeft & 1) == 0
+            && src.cropWidth() == dst.cropWidth()
+            && src.cropHeight() == dst.cropHeight())) {
+        return ERROR_UNSUPPORTED;
+    }
 
     uint8_t *kAdjustedClip = initClip();
 
-    uint32_t *dst_ptr = (uint32_t *)dstBits;
-    const uint8_t *src_y = (const uint8_t *)srcBits;
+    uint32_t *dst_ptr = (uint32_t *)dst.mBits
+        + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + width * height;
+        (const uint8_t *)src_y + src.mWidth * src.mHeight
+        + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2;
 
     const uint8_t *src_v =
-        (const uint8_t *)src_u + (width / 2) * (height / 2);
+        src_u + (src.mWidth / 2) * (src.mHeight / 2);
 
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; x += 2) {
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
             // B = 1.164 * (Y - 16) + 2.018 * (U - 128)
             // G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
             // R = 1.164 * (Y - 16) + 1.596 * (V - 128)
@@ -212,35 +263,42 @@
             dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
         }
 
-        src_y += width;
+        src_y += src.mWidth;
 
         if (y & 1) {
-            src_u += width / 2;
-            src_v += width / 2;
+            src_u += src.mWidth / 2;
+            src_v += src.mWidth / 2;
         }
 
-        dst_ptr += dstSkip / 4;
+        dst_ptr += dst.mWidth / 2;
     }
+
+    return OK;
 }
 
-void ColorConverter::convertQCOMYUV420SemiPlanar(
-        size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(srcSkip, 0);  // Doesn't really make sense for YUV formats.
-    CHECK(dstSkip >= width * 2);
-    CHECK((dstSkip & 3) == 0);
-
+status_t ColorConverter::convertQCOMYUV420SemiPlanar(
+        const BitmapParams &src, const BitmapParams &dst) {
     uint8_t *kAdjustedClip = initClip();
 
-    uint32_t *dst_ptr = (uint32_t *)dstBits;
-    const uint8_t *src_y = (const uint8_t *)srcBits;
+    if (!((dst.mWidth & 3) == 0
+            && (src.mCropLeft & 1) == 0
+            && src.cropWidth() == dst.cropWidth()
+            && src.cropHeight() == dst.cropHeight())) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    uint32_t *dst_ptr = (uint32_t *)dst.mBits
+        + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + width * height;
+        (const uint8_t *)src_y + src.mWidth * src.mHeight
+        + src.mCropTop * src.mWidth + src.mCropLeft;
 
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; x += 2) {
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
             signed y1 = (signed)src_y[x] - 16;
             signed y2 = (signed)src_y[x + 1] - 16;
 
@@ -275,34 +333,43 @@
             dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
         }
 
-        src_y += width;
+        src_y += src.mWidth;
 
         if (y & 1) {
-            src_u += width;
+            src_u += src.mWidth;
         }
 
-        dst_ptr += dstSkip / 4;
+        dst_ptr += dst.mWidth / 2;
     }
+
+    return OK;
 }
 
-void ColorConverter::convertYUV420SemiPlanar(
-        size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(srcSkip, 0);  // Doesn't really make sense for YUV formats.
-    CHECK(dstSkip >= width * 2);
-    CHECK((dstSkip & 3) == 0);
+status_t ColorConverter::convertYUV420SemiPlanar(
+        const BitmapParams &src, const BitmapParams &dst) {
+    // XXX Untested
 
     uint8_t *kAdjustedClip = initClip();
 
-    uint32_t *dst_ptr = (uint32_t *)dstBits;
-    const uint8_t *src_y = (const uint8_t *)srcBits;
+    if (!((dst.mWidth & 3) == 0
+            && (src.mCropLeft & 1) == 0
+            && src.cropWidth() == dst.cropWidth()
+            && src.cropHeight() == dst.cropHeight())) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    uint32_t *dst_ptr = (uint32_t *)dst.mBits
+        + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + width * height;
+        (const uint8_t *)src_y + src.mWidth * src.mHeight
+        + src.mCropTop * src.mWidth + src.mCropLeft;
 
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; x += 2) {
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
             signed y1 = (signed)src_y[x] - 16;
             signed y2 = (signed)src_y[x + 1] - 16;
 
@@ -337,14 +404,16 @@
             dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
         }
 
-        src_y += width;
+        src_y += src.mWidth;
 
         if (y & 1) {
-            src_u += width;
+            src_u += src.mWidth;
         }
 
-        dst_ptr += dstSkip / 4;
+        dst_ptr += dst.mWidth / 2;
     }
+
+    return OK;
 }
 
 uint8_t *ColorConverter::initClip() {
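
// Illustrative only (not part of the patch): a minimal standalone sketch of the
// crop-aware pointer arithmetic the converters above now perform. The starting
// offsets are computed against the full decoded width (mWidth), not the crop
// width, and the RGB565 destination packs two pixels per uint32_t, which is why
// the destination offset is halved and the converters require an even crop
// offset. The struct and function names here are hypothetical.
#include <cstddef>

struct PlaneParams {
    size_t width, height;        // full decoded dimensions
    size_t cropLeft, cropTop;    // top-left corner of the visible rectangle
};

// Index of the first visible luma sample in a tightly packed Y plane.
static size_t lumaStartIndex(const PlaneParams &p) {
    return p.cropTop * p.width + p.cropLeft;
}

// Index, in uint32_t (pixel-pair) units, of the first visible pixel in RGB565.
static size_t rgb565PairStartIndex(const PlaneParams &p) {
    return (p.cropTop * p.width + p.cropLeft) / 2;
}

int main() {
    PlaneParams src = { 176, 144, 8, 4 };
    // 4 full rows plus 8 samples into the plane; half that in pixel pairs.
    return (lumaStartIndex(src) == 4 * 176 + 8
            && rgb565PairStartIndex(src) == (4 * 176 + 8) / 2) ? 0 : 1;
}
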
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 93ec79d..31afc43 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -21,91 +21,130 @@
 
 #include <binder/MemoryHeapBase.h>
 #include <binder/MemoryHeapPmem.h>
-#include <media/stagefright/MediaDebug.h>
-#include <surfaceflinger/ISurface.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MetaData.h>
+#include <surfaceflinger/Surface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include <gui/ISurfaceTexture.h>
 
 namespace android {
 
 SoftwareRenderer::SoftwareRenderer(
-        OMX_COLOR_FORMATTYPE colorFormat,
-        const sp<ISurface> &surface,
-        size_t displayWidth, size_t displayHeight,
-        size_t decodedWidth, size_t decodedHeight,
-        int32_t rotationDegrees)
-    : mInitCheck(NO_INIT),
-      mColorFormat(colorFormat),
-      mConverter(colorFormat, OMX_COLOR_Format16bitRGB565),
-      mISurface(surface),
-      mDisplayWidth(displayWidth),
-      mDisplayHeight(displayHeight),
-      mDecodedWidth(decodedWidth),
-      mDecodedHeight(decodedHeight),
-      mFrameSize(mDecodedWidth * mDecodedHeight * 2),  // RGB565
-      mIndex(0) {
-    mMemoryHeap = new MemoryHeapBase("/dev/pmem_adsp", 2 * mFrameSize);
-    if (mMemoryHeap->heapID() < 0) {
-        LOGI("Creating physical memory heap failed, reverting to regular heap.");
-        mMemoryHeap = new MemoryHeapBase(2 * mFrameSize);
-    } else {
-        sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(mMemoryHeap);
-        pmemHeap->slap();
-        mMemoryHeap = pmemHeap;
+        const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta)
+    : mConverter(NULL),
+      mYUVMode(None),
+      mNativeWindow(nativeWindow) {
+    int32_t tmp;
+    CHECK(meta->findInt32(kKeyColorFormat, &tmp));
+    mColorFormat = (OMX_COLOR_FORMATTYPE)tmp;
+
+    CHECK(meta->findInt32(kKeyWidth, &mWidth));
+    CHECK(meta->findInt32(kKeyHeight, &mHeight));
+
+    if (!meta->findRect(
+                kKeyCropRect,
+                &mCropLeft, &mCropTop, &mCropRight, &mCropBottom)) {
+        mCropLeft = mCropTop = 0;
+        mCropRight = mWidth - 1;
+        mCropBottom = mHeight - 1;
     }
 
-    CHECK(mISurface.get() != NULL);
-    CHECK(mDecodedWidth > 0);
-    CHECK(mDecodedHeight > 0);
-    CHECK(mMemoryHeap->heapID() >= 0);
-    CHECK(mConverter.isValid());
+    int32_t rotationDegrees;
+    if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
 
-    uint32_t orientation;
+    int halFormat;
+    switch (mColorFormat) {
+        default:
+            halFormat = HAL_PIXEL_FORMAT_RGB_565;
+
+            mConverter = new ColorConverter(
+                    mColorFormat, OMX_COLOR_Format16bitRGB565);
+            CHECK(mConverter->isValid());
+            break;
+    }
+
+    CHECK(mNativeWindow != NULL);
+    CHECK(mWidth > 0);
+    CHECK(mHeight > 0);
+    CHECK(mConverter == NULL || mConverter->isValid());
+
+    CHECK_EQ(0,
+            native_window_set_usage(
+            mNativeWindow.get(),
+            GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
+            | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
+
+    CHECK_EQ(0, native_window_set_buffer_count(mNativeWindow.get(), 2));
+
+    // Width must be a multiple of 32???
+    CHECK_EQ(0, native_window_set_buffers_geometry(
+                mNativeWindow.get(),
+                mCropRight - mCropLeft + 1,
+                mCropBottom - mCropTop + 1,
+                halFormat));
+
+    uint32_t transform;
     switch (rotationDegrees) {
-        case 0: orientation = ISurface::BufferHeap::ROT_0; break;
-        case 90: orientation = ISurface::BufferHeap::ROT_90; break;
-        case 180: orientation = ISurface::BufferHeap::ROT_180; break;
-        case 270: orientation = ISurface::BufferHeap::ROT_270; break;
-        default: orientation = ISurface::BufferHeap::ROT_0; break;
+        case 0: transform = 0; break;
+        case 90: transform = HAL_TRANSFORM_ROT_90; break;
+        case 180: transform = HAL_TRANSFORM_ROT_180; break;
+        case 270: transform = HAL_TRANSFORM_ROT_270; break;
+        default: transform = 0; break;
     }
 
-    ISurface::BufferHeap bufferHeap(
-            mDisplayWidth, mDisplayHeight,
-            mDecodedWidth, mDecodedHeight,
-            PIXEL_FORMAT_RGB_565,
-            orientation, 0,
-            mMemoryHeap);
-
-    status_t err = mISurface->registerBuffers(bufferHeap);
-
-    if (err != OK) {
-        LOGW("ISurface failed to register buffers (0x%08x)", err);
+    if (transform) {
+        CHECK_EQ(0, native_window_set_buffers_transform(
+                    mNativeWindow.get(), transform));
     }
-
-    mInitCheck = err;
 }
 
 SoftwareRenderer::~SoftwareRenderer() {
-    mISurface->unregisterBuffers();
-}
-
-status_t SoftwareRenderer::initCheck() const {
-    return mInitCheck;
+    delete mConverter;
+    mConverter = NULL;
 }
 
 void SoftwareRenderer::render(
         const void *data, size_t size, void *platformPrivate) {
-    if (mInitCheck != OK) {
+    android_native_buffer_t *buf;
+    int err;
+    if ((err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf)) != 0) {
+        LOGW("Surface::dequeueBuffer returned error %d", err);
         return;
     }
 
-    size_t offset = mIndex * mFrameSize;
-    void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;
+    CHECK_EQ(0, mNativeWindow->lockBuffer(mNativeWindow.get(), buf));
 
-    mConverter.convert(
-            mDecodedWidth, mDecodedHeight,
-            data, 0, dst, 2 * mDecodedWidth);
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
 
-    mISurface->postBuffer(offset);
-    mIndex = 1 - mIndex;
+    Rect bounds(mWidth, mHeight);
+
+    void *dst;
+    CHECK_EQ(0, mapper.lock(
+                buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+
+    if (mConverter) {
+        mConverter->convert(
+                data,
+                mWidth, mHeight,
+                mCropLeft, mCropTop, mCropRight, mCropBottom,
+                dst,
+                buf->stride, buf->height,
+                0, 0,
+                mCropRight - mCropLeft,
+                mCropBottom - mCropTop);
+    } else {
+        TRESPASS();
+    }
+
+    CHECK_EQ(0, mapper.unlock(buf->handle));
+
+    if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf)) != 0) {
+        LOGW("Surface::queueBuffer returned error %d", err);
+    }
+    buf = NULL;
 }
 
 }  // namespace android
diff --git a/media/libstagefright/foundation/ABitReader.cpp b/media/libstagefright/foundation/ABitReader.cpp
index 24c8df8..f07dd4f 100644
--- a/media/libstagefright/foundation/ABitReader.cpp
+++ b/media/libstagefright/foundation/ABitReader.cpp
@@ -90,9 +90,7 @@
 }
 
 const uint8_t *ABitReader::data() const {
-    CHECK_EQ(mNumBitsLeft % 8, 0u);
-
-    return mData - mNumBitsLeft / 8;
+    return mData - (mNumBitsLeft + 7) / 8;
 }
 
 }  // namespace android
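
// Illustrative only (not part of the patch): a minimal standalone sketch of the
// rounded-up byte count that ABitReader::data() now uses above. Adding 7 before
// dividing by 8 rounds a partially consumed byte up, so the returned pointer
// rewinds far enough even when the reader is not byte-aligned (the old code
// asserted byte alignment instead). The helper name below is hypothetical.
#include <cassert>
#include <cstddef>

static size_t bytesCoveringBits(size_t numBitsLeft) {
    return (numBitsLeft + 7) / 8;   // 0 -> 0, 1..8 -> 1, 9..16 -> 2, ...
}

int main() {
    assert(bytesCoveringBits(0) == 0);
    assert(bytesCoveringBits(3) == 1);
    assert(bytesCoveringBits(8) == 1);
    assert(bytesCoveringBits(9) == 2);
    return 0;
}
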
diff --git a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
new file mode 100644
index 0000000..30286d8
--- /dev/null
+++ b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
@@ -0,0 +1,97 @@
+#include <media/stagefright/foundation/AHierarchicalStateMachine.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+AState::AState(const sp<AState> &parentState)
+    : mParentState(parentState) {
+}
+
+AState::~AState() {
+}
+
+sp<AState> AState::parentState() {
+    return mParentState;
+}
+
+void AState::stateEntered() {
+}
+
+void AState::stateExited() {
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+AHierarchicalStateMachine::AHierarchicalStateMachine() {
+}
+
+AHierarchicalStateMachine::~AHierarchicalStateMachine() {
+}
+
+void AHierarchicalStateMachine::onMessageReceived(const sp<AMessage> &msg) {
+    sp<AState> save = mState;
+
+    sp<AState> cur = mState;
+    while (cur != NULL && !cur->onMessageReceived(msg)) {
+        // If you claim not to have handled the message you shouldn't
+        // have called setState...
+        CHECK(save == mState);
+
+        cur = cur->parentState();
+    }
+
+    if (cur != NULL) {
+        return;
+    }
+
+    LOGW("Warning message %s unhandled in root state.",
+         msg->debugString().c_str());
+}
+
+void AHierarchicalStateMachine::changeState(const sp<AState> &state) {
+    if (state == mState) {
+        // Quick exit for the easy case.
+        return;
+    }
+
+    Vector<sp<AState> > A;
+    sp<AState> cur = mState;
+    for (;;) {
+        A.push(cur);
+        if (cur == NULL) {
+            break;
+        }
+        cur = cur->parentState();
+    }
+
+    Vector<sp<AState> > B;
+    cur = state;
+    for (;;) {
+        B.push(cur);
+        if (cur == NULL) {
+            break;
+        }
+        cur = cur->parentState();
+    }
+
+    // Remove the common tail.
+    while (A.size() > 0 && B.size() > 0 && A.top() == B.top()) {
+        A.pop();
+        B.pop();
+    }
+
+    mState = state;
+
+    for (size_t i = 0; i < A.size(); ++i) {
+        A.editItemAt(i)->stateExited();
+    }
+
+    for (size_t i = B.size(); i-- > 0;) {
+        B.editItemAt(i)->stateEntered();
+    }
+}
+
+}  // namespace android
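
// Illustrative only (not part of the patch): a minimal standalone sketch of the
// enter/exit computation AHierarchicalStateMachine::changeState() performs
// above, using plain pointers and std::vector instead of sp<> and Vector<>. It
// builds the ancestor chain of the current and the target state, drops their
// common tail, exits the remaining old states leaf-first and enters the
// remaining new states root-first. All names here are hypothetical.
#include <cstddef>
#include <cstdio>
#include <vector>

struct State {
    const char *name;
    State *parent;                   // NULL for the root state
    void entered() { std::printf("enter %s\n", name); }
    void exited()  { std::printf("exit  %s\n", name); }
};

static void changeState(State *&current, State *target) {
    if (current == target) {
        return;                      // quick exit for the easy case
    }

    // Chains from each state up to (and including) the NULL above the root.
    std::vector<State *> oldChain, newChain;
    for (State *s = current; ; s = s->parent) {
        oldChain.push_back(s);
        if (s == NULL) break;
    }
    for (State *s = target; ; s = s->parent) {
        newChain.push_back(s);
        if (s == NULL) break;
    }

    // Remove the shared ancestors (the common tail of both chains).
    while (!oldChain.empty() && !newChain.empty()
            && oldChain.back() == newChain.back()) {
        oldChain.pop_back();
        newChain.pop_back();
    }

    current = target;

    for (size_t i = 0; i < oldChain.size(); ++i) {
        oldChain[i]->exited();       // leaf-first
    }
    for (size_t i = newChain.size(); i-- > 0;) {
        newChain[i]->entered();      // root-first
    }
}

int main() {
    State root = { "root", NULL };
    State a    = { "a",    &root };
    State a1   = { "a1",   &a };
    State b    = { "b",    &root };

    State *cur = &a1;
    changeState(cur, &b);            // exits a1 then a, enters b; root is shared
    return 0;
}
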
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 26c6d42..b592c3f 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -23,6 +23,8 @@
 #include "ALooperRoster.h"
 #include "AString.h"
 
+#include <binder/Parcel.h>
+
 namespace android {
 
 AMessage::AMessage(uint32_t what, ALooper::handler_id target)
@@ -169,6 +171,18 @@
     item->u.refValue = obj.get();
 }
 
+void AMessage::setRect(
+        const char *name,
+        int32_t left, int32_t top, int32_t right, int32_t bottom) {
+    Item *item = allocateItem(name);
+    item->mType = kTypeRect;
+
+    item->u.rectValue.mLeft = left;
+    item->u.rectValue.mTop = top;
+    item->u.rectValue.mRight = right;
+    item->u.rectValue.mBottom = bottom;
+}
+
 bool AMessage::findString(const char *name, AString *value) const {
     const Item *item = findItem(name, kTypeString);
     if (item) {
@@ -196,6 +210,22 @@
     return false;
 }
 
+bool AMessage::findRect(
+        const char *name,
+        int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const {
+    const Item *item = findItem(name, kTypeRect);
+    if (item == NULL) {
+        return false;
+    }
+
+    *left = item->u.rectValue.mLeft;
+    *top = item->u.rectValue.mTop;
+    *right = item->u.rectValue.mRight;
+    *bottom = item->u.rectValue.mBottom;
+
+    return true;
+}
+
 void AMessage::post(int64_t delayUs) {
     extern ALooperRoster gLooperRoster;
 
@@ -222,13 +252,22 @@
             }
 
             case kTypeObject:
-            case kTypeMessage:
             {
                 to->u.refValue = from->u.refValue;
                 to->u.refValue->incStrong(msg.get());
                 break;
             }
 
+            case kTypeMessage:
+            {
+                sp<AMessage> copy =
+                    static_cast<AMessage *>(from->u.refValue)->dup();
+
+                to->u.refValue = copy.get();
+                to->u.refValue->incStrong(msg.get());
+                break;
+            }
+
             default:
             {
                 to->u = from->u;
@@ -341,4 +380,136 @@
     return s;
 }
 
+// static
+sp<AMessage> AMessage::FromParcel(const Parcel &parcel) {
+    int32_t what = parcel.readInt32();
+    sp<AMessage> msg = new AMessage(what);
+
+    msg->mNumItems = static_cast<size_t>(parcel.readInt32());
+
+    for (size_t i = 0; i < msg->mNumItems; ++i) {
+        Item *item = &msg->mItems[i];
+
+        item->mName = AAtomizer::Atomize(parcel.readCString());
+        item->mType = static_cast<Type>(parcel.readInt32());
+
+        switch (item->mType) {
+            case kTypeInt32:
+            {
+                item->u.int32Value = parcel.readInt32();
+                break;
+            }
+
+            case kTypeInt64:
+            {
+                item->u.int64Value = parcel.readInt64();
+                break;
+            }
+
+            case kTypeSize:
+            {
+                item->u.sizeValue = static_cast<size_t>(parcel.readInt32());
+                break;
+            }
+
+            case kTypeFloat:
+            {
+                item->u.floatValue = parcel.readFloat();
+                break;
+            }
+
+            case kTypeDouble:
+            {
+                item->u.doubleValue = parcel.readDouble();
+                break;
+            }
+
+            case kTypeString:
+            {
+                item->u.stringValue = new AString(parcel.readCString());
+                break;
+            }
+
+            case kTypeMessage:
+            {
+                sp<AMessage> subMsg = AMessage::FromParcel(parcel);
+                subMsg->incStrong(msg.get());
+
+                item->u.refValue = subMsg.get();
+                break;
+            }
+
+            default:
+            {
+                LOGE("This type of object cannot cross process boundaries.");
+                TRESPASS();
+            }
+        }
+    }
+
+    return msg;
+}
+
+void AMessage::writeToParcel(Parcel *parcel) const {
+    parcel->writeInt32(static_cast<int32_t>(mWhat));
+    parcel->writeInt32(static_cast<int32_t>(mNumItems));
+
+    for (size_t i = 0; i < mNumItems; ++i) {
+        const Item &item = mItems[i];
+
+        parcel->writeCString(item.mName);
+        parcel->writeInt32(static_cast<int32_t>(item.mType));
+
+        switch (item.mType) {
+            case kTypeInt32:
+            {
+                parcel->writeInt32(item.u.int32Value);
+                break;
+            }
+
+            case kTypeInt64:
+            {
+                parcel->writeInt64(item.u.int64Value);
+                break;
+            }
+
+            case kTypeSize:
+            {
+                parcel->writeInt32(static_cast<int32_t>(item.u.sizeValue));
+                break;
+            }
+
+            case kTypeFloat:
+            {
+                parcel->writeFloat(item.u.floatValue);
+                break;
+            }
+
+            case kTypeDouble:
+            {
+                parcel->writeDouble(item.u.doubleValue);
+                break;
+            }
+
+            case kTypeString:
+            {
+                parcel->writeCString(item.u.stringValue->c_str());
+                break;
+            }
+
+            case kTypeMessage:
+            {
+                static_cast<AMessage *>(item.u.refValue)->writeToParcel(parcel);
+                break;
+            }
+
+            default:
+            {
+                LOGE("This type of object cannot cross process boundaries.");
+                TRESPASS();
+            }
+        }
+    }
+}
+
 }  // namespace android
diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk
index ffa7db0..4e07f6f 100644
--- a/media/libstagefright/foundation/Android.mk
+++ b/media/libstagefright/foundation/Android.mk
@@ -1,16 +1,17 @@
 LOCAL_PATH:= $(call my-dir)
 include $(CLEAR_VARS)
 
-LOCAL_SRC_FILES:=               \
-    AAtomizer.cpp               \
-    ABitReader.cpp              \
-    ABuffer.cpp                 \
-    AHandler.cpp                \
-    ALooper.cpp                 \
-    ALooperRoster.cpp           \
-    AMessage.cpp                \
-    AString.cpp                 \
-    base64.cpp                  \
+LOCAL_SRC_FILES:=                 \
+    AAtomizer.cpp                 \
+    ABitReader.cpp                \
+    ABuffer.cpp                   \
+    AHandler.cpp                  \
+    AHierarchicalStateMachine.cpp \
+    ALooper.cpp                   \
+    ALooperRoster.cpp             \
+    AMessage.cpp                  \
+    AString.cpp                   \
+    base64.cpp                    \
     hexdump.cpp
 
 LOCAL_C_INCLUDES:= \
@@ -18,14 +19,7 @@
 
 LOCAL_SHARED_LIBRARIES := \
         libbinder         \
-        libmedia          \
         libutils          \
-        libcutils         \
-        libui             \
-        libsonivox        \
-        libvorbisidec     \
-        libsurfaceflinger_client \
-        libcamera_client
 
 LOCAL_CFLAGS += -Wno-multichar
 
diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk
index cc7dd4f..9225e41 100644
--- a/media/libstagefright/httplive/Android.mk
+++ b/media/libstagefright/httplive/Android.mk
@@ -2,14 +2,16 @@
 
 include $(CLEAR_VARS)
 
-LOCAL_SRC_FILES:=       \
-        LiveSource.cpp  \
-        M3UParser.cpp   \
+LOCAL_SRC_FILES:=               \
+        LiveDataSource.cpp      \
+        LiveSession.cpp         \
+        M3UParser.cpp           \
 
 LOCAL_C_INCLUDES:= \
 	$(JNI_H_INCLUDE) \
 	$(TOP)/frameworks/base/include/media/stagefright/openmax \
-        $(TOP)/frameworks/base/media/libstagefright
+        $(TOP)/frameworks/base/media/libstagefright \
+        $(TOP)/external/openssl/include
 
 LOCAL_MODULE:= libstagefright_httplive
 
diff --git a/media/libstagefright/httplive/LiveDataSource.cpp b/media/libstagefright/httplive/LiveDataSource.cpp
new file mode 100644
index 0000000..5f5c6d4
--- /dev/null
+++ b/media/libstagefright/httplive/LiveDataSource.cpp
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "LiveDataSource"
+#include <utils/Log.h>
+
+#include "LiveDataSource.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#define SAVE_BACKUP     0
+
+namespace android {
+
+LiveDataSource::LiveDataSource()
+    : mOffset(0),
+      mFinalResult(OK),
+      mBackupFile(NULL) {
+#if SAVE_BACKUP
+    mBackupFile = fopen("/data/misc/backup.ts", "wb");
+    CHECK(mBackupFile != NULL);
+#endif
+}
+
+LiveDataSource::~LiveDataSource() {
+    if (mBackupFile != NULL) {
+        fclose(mBackupFile);
+        mBackupFile = NULL;
+    }
+}
+
+status_t LiveDataSource::initCheck() const {
+    return OK;
+}
+
+size_t LiveDataSource::countQueuedBuffers() {
+    Mutex::Autolock autoLock(mLock);
+
+    return mBufferQueue.size();
+}
+
+ssize_t LiveDataSource::readAtNonBlocking(
+        off64_t offset, void *data, size_t size) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (offset != mOffset) {
+        LOGE("Attempt at reading non-sequentially from LiveDataSource.");
+        return -EPIPE;
+    }
+
+    size_t totalAvailable = 0;
+    for (List<sp<ABuffer> >::iterator it = mBufferQueue.begin();
+         it != mBufferQueue.end(); ++it) {
+        sp<ABuffer> buffer = *it;
+
+        totalAvailable += buffer->size();
+
+        if (totalAvailable >= size) {
+            break;
+        }
+    }
+
+    if (totalAvailable < size) {
+        return mFinalResult == OK ? -EWOULDBLOCK : mFinalResult;
+    }
+
+    return readAt_l(offset, data, size);
+}
+
+ssize_t LiveDataSource::readAt(off64_t offset, void *data, size_t size) {
+    Mutex::Autolock autoLock(mLock);
+    return readAt_l(offset, data, size);
+}
+
+ssize_t LiveDataSource::readAt_l(off64_t offset, void *data, size_t size) {
+    if (offset != mOffset) {
+        LOGE("Attempt at reading non-sequentially from LiveDataSource.");
+        return -EPIPE;
+    }
+
+    size_t sizeDone = 0;
+
+    while (sizeDone < size) {
+        while (mBufferQueue.empty() && mFinalResult == OK) {
+            mCondition.wait(mLock);
+        }
+
+        if (mBufferQueue.empty()) {
+            if (sizeDone > 0) {
+                mOffset += sizeDone;
+                return sizeDone;
+            }
+
+            return mFinalResult;
+        }
+
+        sp<ABuffer> buffer = *mBufferQueue.begin();
+
+        size_t copy = size - sizeDone;
+
+        if (copy > buffer->size()) {
+            copy = buffer->size();
+        }
+
+        memcpy((uint8_t *)data + sizeDone, buffer->data(), copy);
+
+        sizeDone += copy;
+
+        buffer->setRange(buffer->offset() + copy, buffer->size() - copy);
+
+        if (buffer->size() == 0) {
+            mBufferQueue.erase(mBufferQueue.begin());
+        }
+    }
+
+    mOffset += sizeDone;
+
+    return sizeDone;
+}
+
+void LiveDataSource::queueBuffer(const sp<ABuffer> &buffer) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mFinalResult != OK) {
+        return;
+    }
+
+#if SAVE_BACKUP
+    if (mBackupFile != NULL) {
+        CHECK_EQ(fwrite(buffer->data(), 1, buffer->size(), mBackupFile),
+                 buffer->size());
+    }
+#endif
+
+    mBufferQueue.push_back(buffer);
+    mCondition.broadcast();
+}
+
+void LiveDataSource::queueEOS(status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    Mutex::Autolock autoLock(mLock);
+
+    mFinalResult = finalResult;
+    mCondition.broadcast();
+}
+
+void LiveDataSource::reset() {
+    Mutex::Autolock autoLock(mLock);
+
+    // XXX FIXME: If we've done a partial read and are waiting for more buffers,
+    // we'll mix old and new data...
+
+    mFinalResult = OK;
+    mBufferQueue.clear();
+}
+
+}  // namespace android
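
// Illustrative only (not part of the patch): a minimal standalone sketch of the
// pattern LiveDataSource implements above -- a mutex/condition-guarded queue of
// byte buffers with a blocking sequential read and an end-of-stream flag --
// written against the standard library. The sequential-offset check, error
// codes and the debug backup file are omitted; all names are hypothetical.
#include <algorithm>
#include <condition_variable>
#include <cstdint>
#include <cstring>
#include <deque>
#include <mutex>
#include <vector>

class ByteQueue {
public:
    ByteQueue() : mEos(false) {}

    void queueBuffer(std::vector<uint8_t> buf) {
        std::lock_guard<std::mutex> lock(mLock);
        if (mEos) return;                        // ignore data arriving after EOS
        mQueue.push_back(std::move(buf));
        mCondition.notify_all();
    }

    void queueEOS() {
        std::lock_guard<std::mutex> lock(mLock);
        mEos = true;
        mCondition.notify_all();
    }

    // Blocks until 'size' bytes are copied or EOS is reached; returns bytes read.
    size_t read(uint8_t *data, size_t size) {
        std::unique_lock<std::mutex> lock(mLock);
        size_t done = 0;
        while (done < size) {
            mCondition.wait(lock, [this] { return !mQueue.empty() || mEos; });
            if (mQueue.empty()) {
                break;                           // EOS with nothing buffered
            }
            std::vector<uint8_t> &front = mQueue.front();
            size_t copy = std::min(size - done, front.size());
            std::memcpy(data + done, front.data(), copy);
            done += copy;
            front.erase(front.begin(), front.begin() + copy);
            if (front.empty()) {
                mQueue.pop_front();
            }
        }
        return done;
    }

private:
    std::mutex mLock;
    std::condition_variable mCondition;
    std::deque<std::vector<uint8_t> > mQueue;
    bool mEos;
};
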
diff --git a/media/libstagefright/httplive/LiveDataSource.h b/media/libstagefright/httplive/LiveDataSource.h
new file mode 100644
index 0000000..b7be637
--- /dev/null
+++ b/media/libstagefright/httplive/LiveDataSource.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIVE_DATA_SOURCE_H_
+
+#define LIVE_DATA_SOURCE_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/DataSource.h>
+#include <utils/threads.h>
+#include <utils/List.h>
+
+namespace android {
+
+struct ABuffer;
+
+struct LiveDataSource : public DataSource {
+    LiveDataSource();
+
+    virtual status_t initCheck() const;
+
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+    ssize_t readAtNonBlocking(off64_t offset, void *data, size_t size);
+
+    void queueBuffer(const sp<ABuffer> &buffer);
+    void queueEOS(status_t finalResult);
+    void reset();
+
+    size_t countQueuedBuffers();
+
+protected:
+    virtual ~LiveDataSource();
+
+private:
+    Mutex mLock;
+    Condition mCondition;
+
+    off64_t mOffset;
+    List<sp<ABuffer> > mBufferQueue;
+    status_t mFinalResult;
+
+    FILE *mBackupFile;
+
+    ssize_t readAt_l(off64_t offset, void *data, size_t size);
+
+    DISALLOW_EVIL_CONSTRUCTORS(LiveDataSource);
+};
+
+}  // namespace android
+
+#endif  // LIVE_DATA_SOURCE_H_
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
new file mode 100644
index 0000000..f0cd6a0
--- /dev/null
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -0,0 +1,742 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "LiveSession"
+#include <utils/Log.h>
+
+#include "include/LiveSession.h"
+
+#include "LiveDataSource.h"
+
+#include "include/M3UParser.h"
+#include "include/NuHTTPDataSource.h"
+
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include <ctype.h>
+#include <openssl/aes.h>
+
+namespace android {
+
+const int64_t LiveSession::kMaxPlaylistAgeUs = 15000000ll;
+
+LiveSession::LiveSession(uint32_t flags)
+    : mFlags(flags),
+      mDataSource(new LiveDataSource),
+      mHTTPDataSource(
+              new NuHTTPDataSource(
+                  (mFlags & kFlagIncognito)
+                    ? NuHTTPDataSource::kFlagIncognito
+                    : 0)),
+      mPrevBandwidthIndex(-1),
+      mLastPlaylistFetchTimeUs(-1),
+      mSeqNumber(-1),
+      mSeekTimeUs(-1),
+      mNumRetries(0),
+      mDurationUs(-1),
+      mSeekDone(false),
+      mDisconnectPending(false),
+      mMonitorQueueGeneration(0) {
+}
+
+LiveSession::~LiveSession() {
+}
+
+sp<DataSource> LiveSession::getDataSource() {
+    return mDataSource;
+}
+
+void LiveSession::connect(const char *url) {
+    sp<AMessage> msg = new AMessage(kWhatConnect, id());
+    msg->setString("url", url);
+    msg->post();
+}
+
+void LiveSession::disconnect() {
+    Mutex::Autolock autoLock(mLock);
+    mDisconnectPending = true;
+
+    mHTTPDataSource->disconnect();
+
+    (new AMessage(kWhatDisconnect, id()))->post();
+}
+
+void LiveSession::seekTo(int64_t timeUs) {
+    Mutex::Autolock autoLock(mLock);
+    mSeekDone = false;
+
+    sp<AMessage> msg = new AMessage(kWhatSeek, id());
+    msg->setInt64("timeUs", timeUs);
+    msg->post();
+
+    while (!mSeekDone) {
+        mCondition.wait(mLock);
+    }
+}
+
+void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatConnect:
+            onConnect(msg);
+            break;
+
+        case kWhatDisconnect:
+            onDisconnect();
+            break;
+
+        case kWhatMonitorQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mMonitorQueueGeneration) {
+                // Stale event
+                break;
+            }
+
+            onMonitorQueue();
+            break;
+        }
+
+        case kWhatSeek:
+            onSeek(msg);
+            break;
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+// static
+int LiveSession::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b) {
+    if (a->mBandwidth < b->mBandwidth) {
+        return -1;
+    } else if (a->mBandwidth == b->mBandwidth) {
+        return 0;
+    }
+
+    return 1;
+}
+
+void LiveSession::onConnect(const sp<AMessage> &msg) {
+    AString url;
+    CHECK(msg->findString("url", &url));
+
+    if (!(mFlags & kFlagIncognito)) {
+        LOGI("onConnect '%s'", url.c_str());
+    } else {
+        LOGI("onConnect <URL suppressed>");
+    }
+
+    mMasterURL = url;
+
+    sp<M3UParser> playlist = fetchPlaylist(url.c_str());
+
+    if (playlist == NULL) {
+        LOGE("unable to fetch master playlist '%s'.", url.c_str());
+
+        mDataSource->queueEOS(ERROR_IO);
+        return;
+    }
+
+    if (playlist->isVariantPlaylist()) {
+        for (size_t i = 0; i < playlist->size(); ++i) {
+            BandwidthItem item;
+
+            sp<AMessage> meta;
+            playlist->itemAt(i, &item.mURI, &meta);
+
+            unsigned long bandwidth;
+            CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth));
+
+            mBandwidthItems.push(item);
+        }
+
+        CHECK_GT(mBandwidthItems.size(), 0u);
+
+        mBandwidthItems.sort(SortByBandwidth);
+    }
+
+    postMonitorQueue();
+}
+
+void LiveSession::onDisconnect() {
+    LOGI("onDisconnect");
+
+    mDataSource->queueEOS(ERROR_END_OF_STREAM);
+
+    Mutex::Autolock autoLock(mLock);
+    mDisconnectPending = false;
+}
+
+status_t LiveSession::fetchFile(const char *url, sp<ABuffer> *out) {
+    *out = NULL;
+
+    sp<DataSource> source;
+
+    if (!strncasecmp(url, "file://", 7)) {
+        source = new FileSource(url + 7);
+    } else if (strncasecmp(url, "http://", 7)
+            && strncasecmp(url, "https://", 8)) {
+        return ERROR_UNSUPPORTED;
+    } else {
+        {
+            Mutex::Autolock autoLock(mLock);
+
+            if (mDisconnectPending) {
+                return ERROR_IO;
+            }
+        }
+
+        status_t err = mHTTPDataSource->connect(url);
+
+        if (err != OK) {
+            return err;
+        }
+
+        source = mHTTPDataSource;
+    }
+
+    off64_t size;
+    status_t err = source->getSize(&size);
+
+    if (err != OK) {
+        size = 65536;
+    }
+
+    sp<ABuffer> buffer = new ABuffer(size);
+    buffer->setRange(0, 0);
+
+    for (;;) {
+        size_t bufferRemaining = buffer->capacity() - buffer->size();
+
+        if (bufferRemaining == 0) {
+            bufferRemaining = 32768;
+
+            LOGV("increasing download buffer to %d bytes",
+                 buffer->size() + bufferRemaining);
+
+            sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining);
+            memcpy(copy->data(), buffer->data(), buffer->size());
+            copy->setRange(0, buffer->size());
+
+            buffer = copy;
+        }
+
+        ssize_t n = source->readAt(
+                buffer->size(), buffer->data() + buffer->size(),
+                bufferRemaining);
+
+        if (n < 0) {
+            return n;
+        }
+
+        if (n == 0) {
+            break;
+        }
+
+        buffer->setRange(0, buffer->size() + (size_t)n);
+    }
+
+    *out = buffer;
+
+    return OK;
+}
+
+sp<M3UParser> LiveSession::fetchPlaylist(const char *url) {
+    sp<ABuffer> buffer;
+    status_t err = fetchFile(url, &buffer);
+
+    if (err != OK) {
+        return NULL;
+    }
+
+    sp<M3UParser> playlist =
+        new M3UParser(url, buffer->data(), buffer->size());
+
+    if (playlist->initCheck() != OK) {
+        return NULL;
+    }
+
+    return playlist;
+}
+
+static double uniformRand() {
+    return (double)rand() / RAND_MAX;
+}
+
+size_t LiveSession::getBandwidthIndex() {
+    if (mBandwidthItems.size() == 0) {
+        return 0;
+    }
+
+#if 1
+    int32_t bandwidthBps;
+    if (mHTTPDataSource != NULL
+            && mHTTPDataSource->estimateBandwidth(&bandwidthBps)) {
+        LOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f);
+    } else {
+        LOGV("no bandwidth estimate.");
+        return 0;  // Pick the lowest bandwidth stream by default.
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("media.httplive.max-bw", value, NULL)) {
+        char *end;
+        long maxBw = strtoul(value, &end, 10);
+        if (end > value && *end == '\0') {
+            if (maxBw > 0 && bandwidthBps > maxBw) {
+                LOGV("bandwidth capped to %ld bps", maxBw);
+                bandwidthBps = maxBw;
+            }
+        }
+    }
+
+    // Consider only 80% of the available bandwidth usable.
+    bandwidthBps = (bandwidthBps * 8) / 10;
+
+    // Pick the highest bandwidth stream below or equal to estimated bandwidth.
+
+    size_t index = mBandwidthItems.size() - 1;
+    while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth
+                            > (size_t)bandwidthBps) {
+        --index;
+    }
+#elif 0
+    // Change bandwidth at random()
+    size_t index = uniformRand() * mBandwidthItems.size();
+#elif 0
+    // There's a 50% chance to stay on the current bandwidth and
+    // a 50% chance to switch to the next higher bandwidth (wrapping around
+    // to lowest)
+    const size_t kMinIndex = 0;
+
+    size_t index;
+    if (mPrevBandwidthIndex < 0) {
+        index = kMinIndex;
+    } else if (uniformRand() < 0.5) {
+        index = (size_t)mPrevBandwidthIndex;
+    } else {
+        index = mPrevBandwidthIndex + 1;
+        if (index == mBandwidthItems.size()) {
+            index = kMinIndex;
+        }
+    }
+#elif 0
+    // Pick the highest bandwidth stream below or equal to 1.2 Mbit/sec
+
+    size_t index = mBandwidthItems.size() - 1;
+    while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth > 1200000) {
+        --index;
+    }
+#else
+    size_t index = mBandwidthItems.size() - 1;  // Highest bandwidth stream
+#endif
+
+    return index;
+}
+
+void LiveSession::onDownloadNext() {
+    size_t bandwidthIndex = getBandwidthIndex();
+
+rinse_repeat:
+    int64_t nowUs = ALooper::GetNowUs();
+
+    if (mLastPlaylistFetchTimeUs < 0
+            || (ssize_t)bandwidthIndex != mPrevBandwidthIndex
+            || (!mPlaylist->isComplete()
+                && mLastPlaylistFetchTimeUs + kMaxPlaylistAgeUs <= nowUs)) {
+        AString url;
+        if (mBandwidthItems.size() > 0) {
+            url = mBandwidthItems.editItemAt(bandwidthIndex).mURI;
+        } else {
+            url = mMasterURL;
+        }
+
+        bool firstTime = (mPlaylist == NULL);
+
+        mPlaylist = fetchPlaylist(url.c_str());
+        if (mPlaylist == NULL) {
+            LOGE("failed to load playlist at url '%s'", url.c_str());
+            mDataSource->queueEOS(ERROR_IO);
+            return;
+        }
+
+        if (firstTime) {
+            Mutex::Autolock autoLock(mLock);
+
+            int32_t targetDuration;
+            if (!mPlaylist->isComplete()
+                    || !mPlaylist->meta()->findInt32(
+                    "target-duration", &targetDuration)) {
+                mDurationUs = -1;
+            } else {
+                mDurationUs = 1000000ll * targetDuration * mPlaylist->size();
+            }
+        }
+
+        mLastPlaylistFetchTimeUs = ALooper::GetNowUs();
+    }
+
+    int32_t firstSeqNumberInPlaylist;
+    if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+                "media-sequence", &firstSeqNumberInPlaylist)) {
+        firstSeqNumberInPlaylist = 0;
+    }
+
+    bool explicitDiscontinuity = false;
+    bool bandwidthChanged = false;
+
+    if (mSeekTimeUs >= 0) {
+        int32_t targetDuration;
+        if (mPlaylist->isComplete() &&
+                mPlaylist->meta()->findInt32(
+                    "target-duration", &targetDuration)) {
+            int64_t seekTimeSecs = (mSeekTimeUs + 500000ll) / 1000000ll;
+            int64_t index = seekTimeSecs / targetDuration;
+
+            if (index >= 0 && index < mPlaylist->size()) {
+                int32_t newSeqNumber = firstSeqNumberInPlaylist + index;
+
+                if (newSeqNumber != mSeqNumber) {
+                    LOGI("seeking to seq no %d", newSeqNumber);
+
+                    mSeqNumber = newSeqNumber;
+
+                    mDataSource->reset();
+
+                    // Resetting the data source will have had the
+                    // side effect of discarding any previously queued
+                    // bandwidth change discontinuity.
+                    // Therefore we'll need to treat these explicit
+                    // discontinuities as involving a bandwidth change,
+                    // even if they don't directly involve one.
+                    explicitDiscontinuity = true;
+                    bandwidthChanged = true;
+                }
+            }
+        }
+
+        mSeekTimeUs = -1;
+
+        Mutex::Autolock autoLock(mLock);
+        mSeekDone = true;
+        mCondition.broadcast();
+    }
+
+    if (mSeqNumber < 0) {
+        if (mPlaylist->isComplete()) {
+            mSeqNumber = firstSeqNumberInPlaylist;
+        } else {
+            mSeqNumber = firstSeqNumberInPlaylist + mPlaylist->size() / 2;
+        }
+    }
+
+    int32_t lastSeqNumberInPlaylist =
+        firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
+
+    if (mSeqNumber < firstSeqNumberInPlaylist
+            || mSeqNumber > lastSeqNumberInPlaylist) {
+        if (mPrevBandwidthIndex != (ssize_t)bandwidthIndex) {
+            // Go back to the previous bandwidth.
+
+            LOGI("new bandwidth does not have the sequence number "
+                 "we're looking for, switching back to previous bandwidth");
+
+            mLastPlaylistFetchTimeUs = -1;
+            bandwidthIndex = mPrevBandwidthIndex;
+            goto rinse_repeat;
+        }
+
+        if (!mPlaylist->isComplete()
+                && mSeqNumber > lastSeqNumberInPlaylist
+                && mNumRetries < kMaxNumRetries) {
+            ++mNumRetries;
+
+            mLastPlaylistFetchTimeUs = -1;
+            postMonitorQueue(3000000ll);
+            return;
+        }
+
+        LOGE("Cannot find sequence number %d in playlist "
+             "(contains %d - %d)",
+             mSeqNumber, firstSeqNumberInPlaylist,
+             firstSeqNumberInPlaylist + mPlaylist->size() - 1);
+
+        mDataSource->queueEOS(ERROR_END_OF_STREAM);
+        return;
+    }
+
+    mNumRetries = 0;
+
+    AString uri;
+    sp<AMessage> itemMeta;
+    CHECK(mPlaylist->itemAt(
+                mSeqNumber - firstSeqNumberInPlaylist,
+                &uri,
+                &itemMeta));
+
+    int32_t val;
+    if (itemMeta->findInt32("discontinuity", &val) && val != 0) {
+        explicitDiscontinuity = true;
+    }
+
+    sp<ABuffer> buffer;
+    status_t err = fetchFile(uri.c_str(), &buffer);
+    if (err != OK) {
+        LOGE("failed to fetch .ts segment at url '%s'", uri.c_str());
+        mDataSource->queueEOS(err);
+        return;
+    }
+
+    CHECK(buffer != NULL);
+
+    err = decryptBuffer(mSeqNumber - firstSeqNumberInPlaylist, buffer);
+
+    if (err != OK) {
+        LOGE("decryptBuffer failed w/ error %d", err);
+
+        mDataSource->queueEOS(err);
+        return;
+    }
+
+    if (buffer->size() == 0 || buffer->data()[0] != 0x47) {
+        // Not a transport stream???
+
+        LOGE("This doesn't look like a transport stream...");
+
+        mBandwidthItems.removeAt(bandwidthIndex);
+
+        if (mBandwidthItems.isEmpty()) {
+            mDataSource->queueEOS(ERROR_UNSUPPORTED);
+            return;
+        }
+
+        LOGI("Retrying with a different bandwidth stream.");
+
+        mLastPlaylistFetchTimeUs = -1;
+        bandwidthIndex = getBandwidthIndex();
+        mPrevBandwidthIndex = bandwidthIndex;
+        mSeqNumber = -1;
+
+        goto rinse_repeat;
+    }
+
+    if ((size_t)mPrevBandwidthIndex != bandwidthIndex) {
+        bandwidthChanged = true;
+    }
+
+    if (mPrevBandwidthIndex < 0) {
+        // Don't signal a bandwidth change at the very beginning of
+        // playback.
+        bandwidthChanged = false;
+    }
+
+    if (explicitDiscontinuity || bandwidthChanged) {
+        // Signal discontinuity.
+
+        LOGI("queueing discontinuity (explicit=%d, bandwidthChanged=%d)",
+             explicitDiscontinuity, bandwidthChanged);
+
+        sp<ABuffer> tmp = new ABuffer(188);
+        memset(tmp->data(), 0, tmp->size());
+        tmp->data()[1] = bandwidthChanged;
+
+        mDataSource->queueBuffer(tmp);
+    }
+
+    mDataSource->queueBuffer(buffer);
+
+    mPrevBandwidthIndex = bandwidthIndex;
+    ++mSeqNumber;
+
+    postMonitorQueue();
+}
+
+void LiveSession::onMonitorQueue() {
+    if (mSeekTimeUs >= 0
+            || mDataSource->countQueuedBuffers() < kMaxNumQueuedFragments) {
+        onDownloadNext();
+    } else {
+        postMonitorQueue(1000000ll);
+    }
+}
+
+status_t LiveSession::decryptBuffer(
+        size_t playlistIndex, const sp<ABuffer> &buffer) {
+    sp<AMessage> itemMeta;
+    bool found = false;
+    AString method;
+
+    for (ssize_t i = playlistIndex; i >= 0; --i) {
+        AString uri;
+        CHECK(mPlaylist->itemAt(i, &uri, &itemMeta));
+
+        if (itemMeta->findString("cipher-method", &method)) {
+            found = true;
+            break;
+        }
+    }
+
+    if (!found) {
+        method = "NONE";
+    }
+
+    if (method == "NONE") {
+        return OK;
+    } else if (!(method == "AES-128")) {
+        LOGE("Unsupported cipher method '%s'", method.c_str());
+        return ERROR_UNSUPPORTED;
+    }
+
+    AString keyURI;
+    if (!itemMeta->findString("cipher-uri", &keyURI)) {
+        LOGE("Missing key uri");
+        return ERROR_MALFORMED;
+    }
+
+    ssize_t index = mAESKeyForURI.indexOfKey(keyURI);
+
+    sp<ABuffer> key;
+    if (index >= 0) {
+        key = mAESKeyForURI.valueAt(index);
+    } else {
+        key = new ABuffer(16);
+
+        sp<NuHTTPDataSource> keySource = new NuHTTPDataSource;
+        status_t err = keySource->connect(keyURI.c_str());
+
+        if (err == OK) {
+            size_t offset = 0;
+            while (offset < 16) {
+                ssize_t n = keySource->readAt(
+                        offset, key->data() + offset, 16 - offset);
+                if (n <= 0) {
+                    err = ERROR_IO;
+                    break;
+                }
+
+                offset += n;
+            }
+        }
+
+        if (err != OK) {
+            LOGE("failed to fetch cipher key from '%s'.", keyURI.c_str());
+            return ERROR_IO;
+        }
+
+        mAESKeyForURI.add(keyURI, key);
+    }
+
+    AES_KEY aes_key;
+    if (AES_set_decrypt_key(key->data(), 128, &aes_key) != 0) {
+        LOGE("failed to set AES decryption key.");
+        return UNKNOWN_ERROR;
+    }
+
+    unsigned char aes_ivec[16];
+
+    AString iv;
+    if (itemMeta->findString("cipher-iv", &iv)) {
+        if ((!iv.startsWith("0x") && !iv.startsWith("0X"))
+                || iv.size() != 16 * 2 + 2) {
+            LOGE("malformed cipher IV '%s'.", iv.c_str());
+            return ERROR_MALFORMED;
+        }
+
+        memset(aes_ivec, 0, sizeof(aes_ivec));
+        for (size_t i = 0; i < 16; ++i) {
+            char c1 = tolower(iv.c_str()[2 + 2 * i]);
+            char c2 = tolower(iv.c_str()[3 + 2 * i]);
+            if (!isxdigit(c1) || !isxdigit(c2)) {
+                LOGE("malformed cipher IV '%s'.", iv.c_str());
+                return ERROR_MALFORMED;
+            }
+            uint8_t nibble1 = isdigit(c1) ? c1 - '0' : c1 - 'a' + 10;
+            uint8_t nibble2 = isdigit(c2) ? c2 - '0' : c2 - 'a' + 10;
+
+            aes_ivec[i] = nibble1 << 4 | nibble2;
+        }
+    } else {
+        memset(aes_ivec, 0, sizeof(aes_ivec));
+        aes_ivec[15] = mSeqNumber & 0xff;
+        aes_ivec[14] = (mSeqNumber >> 8) & 0xff;
+        aes_ivec[13] = (mSeqNumber >> 16) & 0xff;
+        aes_ivec[12] = (mSeqNumber >> 24) & 0xff;
+    }
+
+    AES_cbc_encrypt(
+            buffer->data(), buffer->data(), buffer->size(),
+            &aes_key, aes_ivec, AES_DECRYPT);
+
+    // hexdump(buffer->data(), buffer->size());
+
+    size_t n = buffer->size();
+    CHECK_GT(n, 0u);
+
+    size_t pad = buffer->data()[n - 1];
+
+    CHECK_GT(pad, 0u);
+    CHECK_LE(pad, 16u);
+    CHECK_GE((size_t)n, pad);
+    for (size_t i = 0; i < pad; ++i) {
+        CHECK_EQ((unsigned)buffer->data()[n - 1 - i], pad);
+    }
+
+    n -= pad;
+
+    buffer->setRange(buffer->offset(), n);
+
+    return OK;
+}
+
+void LiveSession::postMonitorQueue(int64_t delayUs) {
+    sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id());
+    msg->setInt32("generation", ++mMonitorQueueGeneration);
+    msg->post(delayUs);
+}
+
+void LiveSession::onSeek(const sp<AMessage> &msg) {
+    int64_t timeUs;
+    CHECK(msg->findInt64("timeUs", &timeUs));
+
+    mSeekTimeUs = timeUs;
+    postMonitorQueue();
+}
+
+status_t LiveSession::getDuration(int64_t *durationUs) {
+    Mutex::Autolock autoLock(mLock);
+    *durationUs = mDurationUs;
+
+    return OK;
+}
+
+bool LiveSession::isSeekable() {
+    int64_t durationUs;
+    return getDuration(&durationUs) == OK && durationUs >= 0;
+}
+
+}  // namespace android
+
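
// Illustrative only (not part of the patch): a minimal standalone sketch of two
// details from LiveSession::decryptBuffer() above -- deriving the default
// AES-128 IV from the media sequence number (stored big-endian in the low-order
// bytes, as the code above does) and stripping the trailing padding after
// decryption. No actual decryption happens here; all names are hypothetical.
#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

static void makeDefaultIV(uint8_t iv[16], int32_t seqNumber) {
    std::memset(iv, 0, 16);
    iv[15] = seqNumber & 0xff;
    iv[14] = (seqNumber >> 8) & 0xff;
    iv[13] = (seqNumber >> 16) & 0xff;
    iv[12] = (seqNumber >> 24) & 0xff;
}

// Returns false if the padding is malformed, otherwise shrinks the buffer.
static bool stripPadding(std::vector<uint8_t> &plaintext) {
    if (plaintext.empty()) {
        return false;
    }
    size_t pad = plaintext.back();
    if (pad == 0 || pad > 16 || pad > plaintext.size()) {
        return false;
    }
    for (size_t i = 0; i < pad; ++i) {
        if (plaintext[plaintext.size() - 1 - i] != pad) {
            return false;
        }
    }
    plaintext.resize(plaintext.size() - pad);
    return true;
}

int main() {
    uint8_t iv[16];
    makeDefaultIV(iv, 0x01020304);
    assert(iv[12] == 0x01 && iv[15] == 0x04);

    std::vector<uint8_t> block(16, 0x41);
    block.insert(block.end(), 4, 0x04);          // four bytes of padding value 4
    bool ok = stripPadding(block);
    assert(ok && block.size() == 16);
    (void)ok;
    return 0;
}
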
diff --git a/media/libstagefright/httplive/LiveSource.cpp b/media/libstagefright/httplive/LiveSource.cpp
deleted file mode 100644
index 29c7b04..0000000
--- a/media/libstagefright/httplive/LiveSource.cpp
+++ /dev/null
@@ -1,428 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "LiveSource"
-#include <utils/Log.h>
-
-#include "include/LiveSource.h"
-#include "include/M3UParser.h"
-#include "include/NuHTTPDataSource.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/FileSource.h>
-#include <media/stagefright/MediaDebug.h>
-
-namespace android {
-
-LiveSource::LiveSource(const char *url)
-    : mMasterURL(url),
-      mInitCheck(NO_INIT),
-      mDurationUs(-1),
-      mPlaylistIndex(0),
-      mLastFetchTimeUs(-1),
-      mSource(new NuHTTPDataSource),
-      mSourceSize(0),
-      mOffsetBias(0),
-      mSignalDiscontinuity(false),
-      mPrevBandwidthIndex(-1) {
-    if (switchToNext()) {
-        mInitCheck = OK;
-
-        determineSeekability();
-    }
-}
-
-LiveSource::~LiveSource() {
-}
-
-status_t LiveSource::initCheck() const {
-    return mInitCheck;
-}
-
-// static
-int LiveSource::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b) {
-    if (a->mBandwidth < b->mBandwidth) {
-        return -1;
-    } else if (a->mBandwidth == b->mBandwidth) {
-        return 0;
-    }
-
-    return 1;
-}
-
-static double uniformRand() {
-    return (double)rand() / RAND_MAX;
-}
-
-bool LiveSource::loadPlaylist(bool fetchMaster) {
-    mSignalDiscontinuity = false;
-
-    mPlaylist.clear();
-    mPlaylistIndex = 0;
-
-    if (fetchMaster) {
-        mPrevBandwidthIndex = -1;
-
-        sp<ABuffer> buffer;
-        status_t err = fetchM3U(mMasterURL.c_str(), &buffer);
-
-        if (err != OK) {
-            return false;
-        }
-
-        mPlaylist = new M3UParser(
-                mMasterURL.c_str(), buffer->data(), buffer->size());
-
-        if (mPlaylist->initCheck() != OK) {
-            return false;
-        }
-
-        if (mPlaylist->isVariantPlaylist()) {
-            for (size_t i = 0; i < mPlaylist->size(); ++i) {
-                BandwidthItem item;
-
-                sp<AMessage> meta;
-                mPlaylist->itemAt(i, &item.mURI, &meta);
-
-                unsigned long bandwidth;
-                CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth));
-
-                mBandwidthItems.push(item);
-            }
-            mPlaylist.clear();
-
-            // fall through
-            if (mBandwidthItems.size() == 0) {
-                return false;
-            }
-
-            mBandwidthItems.sort(SortByBandwidth);
-
-            for (size_t i = 0; i < mBandwidthItems.size(); ++i) {
-                const BandwidthItem &item = mBandwidthItems.itemAt(i);
-                LOGV("item #%d: %s", i, item.mURI.c_str());
-            }
-        }
-    }
-
-    if (mBandwidthItems.size() > 0) {
-#if 0
-        // Change bandwidth at random()
-        size_t index = uniformRand() * mBandwidthItems.size();
-#elif 0
-        // There's a 50% chance to stay on the current bandwidth and
-        // a 50% chance to switch to the next higher bandwidth (wrapping around
-        // to lowest)
-        size_t index;
-        if (uniformRand() < 0.5) {
-            index = mPrevBandwidthIndex < 0 ? 0 : (size_t)mPrevBandwidthIndex;
-        } else {
-            if (mPrevBandwidthIndex < 0) {
-                index = 0;
-            } else {
-                index = mPrevBandwidthIndex + 1;
-                if (index == mBandwidthItems.size()) {
-                    index = 0;
-                }
-            }
-        }
-#else
-        // Stay on the lowest bandwidth available.
-        size_t index = mBandwidthItems.size() - 1;  // Highest bandwidth stream
-#endif
-
-        mURL = mBandwidthItems.editItemAt(index).mURI;
-
-        if (mPrevBandwidthIndex >= 0 && (size_t)mPrevBandwidthIndex != index) {
-            // If we switched streams because of bandwidth changes,
-            // we'll signal this discontinuity by inserting a
-            // special transport stream packet into the stream.
-            mSignalDiscontinuity = true;
-        }
-
-        mPrevBandwidthIndex = index;
-    } else {
-        mURL = mMasterURL;
-    }
-
-    if (mPlaylist == NULL) {
-        sp<ABuffer> buffer;
-        status_t err = fetchM3U(mURL.c_str(), &buffer);
-
-        if (err != OK) {
-            return false;
-        }
-
-        mPlaylist = new M3UParser(mURL.c_str(), buffer->data(), buffer->size());
-
-        if (mPlaylist->initCheck() != OK) {
-            return false;
-        }
-
-        if (mPlaylist->isVariantPlaylist()) {
-            return false;
-        }
-    }
-
-    if (!mPlaylist->meta()->findInt32(
-                "media-sequence", &mFirstItemSequenceNumber)) {
-        mFirstItemSequenceNumber = 0;
-    }
-
-    return true;
-}
-
-static int64_t getNowUs() {
-    struct timeval tv;
-    gettimeofday(&tv, NULL);
-
-    return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
-}
-
-bool LiveSource::switchToNext() {
-    mSignalDiscontinuity = false;
-
-    mOffsetBias += mSourceSize;
-    mSourceSize = 0;
-
-    if (mLastFetchTimeUs < 0 || getNowUs() >= mLastFetchTimeUs + 15000000ll
-        || mPlaylistIndex == mPlaylist->size()) {
-        int32_t nextSequenceNumber =
-            mPlaylistIndex + mFirstItemSequenceNumber;
-
-        if (!loadPlaylist(mLastFetchTimeUs < 0)) {
-            LOGE("failed to reload playlist");
-            return false;
-        }
-
-        if (mLastFetchTimeUs < 0) {
-            mPlaylistIndex = 0;
-        } else {
-            if (nextSequenceNumber < mFirstItemSequenceNumber
-                    || nextSequenceNumber
-                            >= mFirstItemSequenceNumber + (int32_t)mPlaylist->size()) {
-                LOGE("Cannot find sequence number %d in new playlist",
-                     nextSequenceNumber);
-
-                return false;
-            }
-
-            mPlaylistIndex = nextSequenceNumber - mFirstItemSequenceNumber;
-        }
-
-        mLastFetchTimeUs = getNowUs();
-    }
-
-    AString uri;
-    sp<AMessage> itemMeta;
-    CHECK(mPlaylist->itemAt(mPlaylistIndex, &uri, &itemMeta));
-    LOGV("switching to %s", uri.c_str());
-
-    if (mSource->connect(uri.c_str()) != OK
-            || mSource->getSize(&mSourceSize) != OK) {
-        return false;
-    }
-
-    int32_t val;
-    if (itemMeta->findInt32("discontinuity", &val) && val != 0) {
-        mSignalDiscontinuity = true;
-    }
-
-    mPlaylistIndex++;
-    return true;
-}
-
-static const ssize_t kHeaderSize = 188;
-
-ssize_t LiveSource::readAt(off_t offset, void *data, size_t size) {
-    CHECK(offset >= mOffsetBias);
-    offset -= mOffsetBias;
-
-    off_t delta = mSignalDiscontinuity ? kHeaderSize : 0;
-
-    if (offset >= mSourceSize + delta) {
-        CHECK_EQ(offset, mSourceSize + delta);
-
-        offset -= mSourceSize + delta;
-        if (!switchToNext()) {
-            return ERROR_END_OF_STREAM;
-        }
-
-        if (mSignalDiscontinuity) {
-            LOGV("switchToNext changed streams");
-        } else {
-            LOGV("switchToNext stayed within the same stream");
-        }
-
-        mOffsetBias += delta;
-
-        delta = mSignalDiscontinuity ? kHeaderSize : 0;
-    }
-
-    if (offset < delta) {
-        size_t avail = delta - offset;
-        memset(data, 0, avail);
-        return avail;
-    }
-
-    size_t numRead = 0;
-    while (numRead < size) {
-        ssize_t n = mSource->readAt(
-                offset + numRead - delta,
-                (uint8_t *)data + numRead, size - numRead);
-
-        if (n <= 0) {
-            break;
-        }
-
-        numRead += n;
-    }
-
-    return numRead;
-}
-
-status_t LiveSource::fetchM3U(const char *url, sp<ABuffer> *out) {
-    *out = NULL;
-
-    sp<DataSource> source;
-
-    if (!strncasecmp(url, "file://", 7)) {
-        source = new FileSource(url + 7);
-    } else {
-        CHECK(!strncasecmp(url, "http://", 7));
-
-        status_t err = mSource->connect(url);
-
-        if (err != OK) {
-            return err;
-        }
-
-        source = mSource;
-    }
-
-    off_t size;
-    status_t err = source->getSize(&size);
-
-    if (err != OK) {
-        size = 65536;
-    }
-
-    sp<ABuffer> buffer = new ABuffer(size);
-    buffer->setRange(0, 0);
-
-    for (;;) {
-        size_t bufferRemaining = buffer->capacity() - buffer->size();
-
-        if (bufferRemaining == 0) {
-            bufferRemaining = 32768;
-
-            LOGV("increasing download buffer to %d bytes",
-                 buffer->size() + bufferRemaining);
-
-            sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining);
-            memcpy(copy->data(), buffer->data(), buffer->size());
-            copy->setRange(0, buffer->size());
-
-            buffer = copy;
-        }
-
-        ssize_t n = source->readAt(
-                buffer->size(), buffer->data() + buffer->size(),
-                bufferRemaining);
-
-        if (n < 0) {
-            return err;
-        }
-
-        if (n == 0) {
-            break;
-        }
-
-        buffer->setRange(0, buffer->size() + (size_t)n);
-    }
-
-    *out = buffer;
-
-    return OK;
-}
-
-bool LiveSource::seekTo(int64_t seekTimeUs) {
-    LOGV("seek to %lld us", seekTimeUs);
-
-    if (!mPlaylist->isComplete()) {
-        return false;
-    }
-
-    int32_t targetDuration;
-    if (!mPlaylist->meta()->findInt32("target-duration", &targetDuration)) {
-        return false;
-    }
-
-    int64_t seekTimeSecs = (seekTimeUs + 500000ll) / 1000000ll;
-
-    int64_t index = seekTimeSecs / targetDuration;
-
-    if (index < 0 || index >= mPlaylist->size()) {
-        return false;
-    }
-
-    size_t newPlaylistIndex = mFirstItemSequenceNumber + index;
-
-    if (newPlaylistIndex == mPlaylistIndex) {
-        return false;
-    }
-
-    mPlaylistIndex = newPlaylistIndex;
-
-    switchToNext();
-    mOffsetBias = 0;
-
-    LOGV("seeking to index %lld", index);
-
-    return true;
-}
-
-bool LiveSource::getDuration(int64_t *durationUs) const {
-    if (mDurationUs >= 0) {
-        *durationUs = mDurationUs;
-        return true;
-    }
-
-    *durationUs = 0;
-    return false;
-}
-
-bool LiveSource::isSeekable() const {
-    return mDurationUs >= 0;
-}
-
-void LiveSource::determineSeekability() {
-    mDurationUs = -1;
-
-    if (!mPlaylist->isComplete()) {
-        return;
-    }
-
-    int32_t targetDuration;
-    if (!mPlaylist->meta()->findInt32("target-duration", &targetDuration)) {
-        return;
-    }
-
-    mDurationUs = targetDuration * 1000000ll * mPlaylist->size();
-}
-
-}  // namespace android
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 90f3d6d..95f6741 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -14,6 +14,10 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
+#define LOG_TAG "M3UParser"
+#include <utils/Log.h>
+
 #include "include/M3UParser.h"
 
 #include <media/stagefright/foundation/AMessage.h>
@@ -80,14 +84,18 @@
     out->clear();
 
     if (strncasecmp("http://", baseURL, 7)
+            && strncasecmp("https://", baseURL, 8)
             && strncasecmp("file://", baseURL, 7)) {
         // Base URL must be absolute
         return false;
     }
 
-    if (!strncasecmp("http://", url, 7)) {
+    if (!strncasecmp("http://", url, 7) || !strncasecmp("https://", url, 8)) {
         // "url" is already an absolute URL, ignore base URL.
         out->setTo(url);
+
+        LOGV("base:'%s', url:'%s' => '%s'", baseURL, url, out->c_str());
+
         return true;
     }
 
@@ -108,6 +116,8 @@
         out->append(url);
     }
 
+    LOGV("base:'%s', url:'%s' => '%s'", baseURL, url, out->c_str());
+
     return true;
 }
 
@@ -158,6 +168,11 @@
                     return ERROR_MALFORMED;
                 }
                 err = parseMetaData(line, &mMeta, "media-sequence");
+            } else if (line.startsWith("#EXT-X-KEY")) {
+                if (mIsVariantPlaylist) {
+                    return ERROR_MALFORMED;
+                }
+                err = parseCipherInfo(line, &itemMeta, mBaseURI);
             } else if (line.startsWith("#EXT-X-ENDLIST")) {
                 mIsComplete = true;
             } else if (line.startsWith("#EXTINF")) {
@@ -292,6 +307,75 @@
 }
 
 // static
+status_t M3UParser::parseCipherInfo(
+        const AString &line, sp<AMessage> *meta, const AString &baseURI) {
+    ssize_t colonPos = line.find(":");
+
+    if (colonPos < 0) {
+        return ERROR_MALFORMED;
+    }
+
+    size_t offset = colonPos + 1;
+
+    while (offset < line.size()) {
+        ssize_t end = line.find(",", offset);
+        if (end < 0) {
+            end = line.size();
+        }
+
+        AString attr(line, offset, end - offset);
+        attr.trim();
+
+        offset = end + 1;
+
+        ssize_t equalPos = attr.find("=");
+        if (equalPos < 0) {
+            continue;
+        }
+
+        AString key(attr, 0, equalPos);
+        key.trim();
+
+        AString val(attr, equalPos + 1, attr.size() - equalPos - 1);
+        val.trim();
+
+        LOGV("key=%s value=%s", key.c_str(), val.c_str());
+
+        key.tolower();
+
+        if (key == "method" || key == "uri" || key == "iv") {
+            if (meta->get() == NULL) {
+                *meta = new AMessage;
+            }
+
+            if (key == "uri") {
+                if (val.size() >= 2
+                        && val.c_str()[0] == '"'
+                        && val.c_str()[val.size() - 1] == '"') {
+                    // Remove surrounding quotes.
+                    AString tmp(val, 1, val.size() - 2);
+                    val = tmp;
+                }
+
+                AString absURI;
+                if (MakeURL(baseURI.c_str(), val.c_str(), &absURI)) {
+                    val = absURI;
+                } else {
+                    LOGE("failed to make absolute url for '%s'.",
+                         val.c_str());
+                }
+            }
+
+            key.insert(AString("cipher-"), 0);
+
+            (*meta)->setString(key.c_str(), val.c_str(), val.size());
+        }
+    }
+
+    return OK;
+}
+
+// static
 status_t M3UParser::ParseInt32(const char *s, int32_t *x) {
     char *end;
     long lval = strtol(s, &end, 10);
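
For reference, the parseCipherInfo() hunk above walks the #EXT-X-KEY attribute list: split on commas, split each attribute on '=', lowercase the key, strip surrounding quotes from the URI, resolve it against the playlist's base URL, and store the result under a "cipher-"-prefixed key. The following is a minimal standalone sketch of that attribute walk, using std::string and std::map in place of AString/AMessage and omitting the trimming and MakeURL resolution the real code performs:

    #include <cctype>
    #include <iostream>
    #include <map>
    #include <string>

    // Simplified sketch of the #EXT-X-KEY attribute parsing shown above.
    static std::map<std::string, std::string> ParseKeyAttributes(
            const std::string &line) {
        std::map<std::string, std::string> meta;
        size_t offset = line.find(':');
        if (offset == std::string::npos) return meta;
        ++offset;

        while (offset < line.size()) {
            size_t end = line.find(',', offset);
            if (end == std::string::npos) end = line.size();

            std::string attr = line.substr(offset, end - offset);
            offset = end + 1;

            size_t eq = attr.find('=');
            if (eq == std::string::npos) continue;

            std::string key = attr.substr(0, eq);
            std::string val = attr.substr(eq + 1);
            for (char &c : key) {
                c = static_cast<char>(std::tolower(static_cast<unsigned char>(c)));
            }

            // Strip surrounding quotes (the real code does this for URI values).
            if (val.size() >= 2 && val.front() == '"' && val.back() == '"') {
                val = val.substr(1, val.size() - 2);
            }

            if (key == "method" || key == "uri" || key == "iv") {
                meta["cipher-" + key] = val;   // e.g. cipher-method=AES-128
            }
        }
        return meta;
    }

    int main() {
        const std::string line =
            "#EXT-X-KEY:METHOD=AES-128,"
            "URI=\"https://example.com/key\",IV=0x0123456789abcdef0123456789abcdef";
        for (const auto &kv : ParseKeyAttributes(line)) {
            std::cout << kv.first << " = " << kv.second << "\n";
        }
        return 0;
    }
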
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index da340f7..45e018d 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -149,7 +149,25 @@
     }
 
     if (header.version_major == 4) {
-        if (!removeUnsynchronizationV2_4()) {
+        void *copy = malloc(size);
+        memcpy(copy, mData, size);
+
+        bool success = removeUnsynchronizationV2_4(false /* iTunesHack */);
+        if (!success) {
+            memcpy(mData, copy, size);
+            mSize = size;
+
+            success = removeUnsynchronizationV2_4(true /* iTunesHack */);
+
+            if (success) {
+                LOGV("Had to apply the iTunes hack to parse this ID3 tag");
+            }
+        }
+
+        free(copy);
+        copy = NULL;
+
+        if (!success) {
             free(mData);
             mData = NULL;
 
@@ -261,7 +279,7 @@
     }
 }
 
-bool ID3::removeUnsynchronizationV2_4() {
+bool ID3::removeUnsynchronizationV2_4(bool iTunesHack) {
     size_t oldSize = mSize;
 
     size_t offset = 0;
@@ -271,7 +289,9 @@
         }
 
         size_t dataSize;
-        if (!ParseSyncsafeInteger(&mData[offset + 4], &dataSize)) {
+        if (iTunesHack) {
+            dataSize = U32_AT(&mData[offset + 4]);
+        } else if (!ParseSyncsafeInteger(&mData[offset + 4], &dataSize)) {
             return false;
         }
 
@@ -308,7 +328,7 @@
             flags &= ~2;
         }
 
-        if (flags != prevFlags) {
+        if (flags != prevFlags || iTunesHack) {
             WriteSyncsafeInteger(&mData[offset + 4], dataSize);
             mData[offset + 8] = flags >> 8;
             mData[offset + 9] = flags & 0xff;
@@ -769,8 +789,8 @@
 bool ID3::parseV1(const sp<DataSource> &source) {
     const size_t V1_TAG_SIZE = 128;
 
-    off_t size;
-    if (source->getSize(&size) != OK || size < (off_t)V1_TAG_SIZE) {
+    off64_t size;
+    if (source->getSize(&size) != OK || size < (off64_t)V1_TAG_SIZE) {
         return false;
     }
 
diff --git a/media/libstagefright/include/AACEncoder.h b/media/libstagefright/include/AACEncoder.h
index ecc533f..3d5fc60 100644
--- a/media/libstagefright/include/AACEncoder.h
+++ b/media/libstagefright/include/AACEncoder.h
@@ -60,7 +60,7 @@
             kNumSamplesPerFrame = 1024,
         };
 
-        int16_t           mInputFrame[kNumSamplesPerFrame];
+        int16_t           *mInputFrame;
 
         uint8_t           mAudioSpecificConfigData[2]; // audio specific data
         void             *mEncoderHandle;
diff --git a/media/libstagefright/include/AACExtractor.h b/media/libstagefright/include/AACExtractor.h
new file mode 100644
index 0000000..8e5657b
--- /dev/null
+++ b/media/libstagefright/include/AACExtractor.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AAC_EXTRACTOR_H_
+
+#define AAC_EXTRACTOR_H_
+
+#include <media/stagefright/MediaExtractor.h>
+
+#include <utils/Vector.h>
+
+namespace android {
+
+struct AMessage;
+class String8;
+
+class AACExtractor : public MediaExtractor {
+public:
+    AACExtractor(const sp<DataSource> &source);
+
+    virtual size_t countTracks();
+    virtual sp<MediaSource> getTrack(size_t index);
+    virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+
+    virtual sp<MetaData> getMetaData();
+
+protected:
+    virtual ~AACExtractor();
+
+private:
+    sp<DataSource> mDataSource;
+    sp<MetaData> mMeta;
+    status_t mInitCheck;
+
+    Vector<uint64_t> mOffsetVector;
+    int64_t mFrameDurationUs;
+
+    AACExtractor(const AACExtractor &);
+    AACExtractor &operator=(const AACExtractor &);
+};
+
+bool SniffAAC(
+        const sp<DataSource> &source, String8 *mimeType, float *confidence,
+        sp<AMessage> *);
+
+}  // namespace android
+
+#endif  // AAC_EXTRACTOR_H_
diff --git a/media/libstagefright/include/AMRExtractor.h b/media/libstagefright/include/AMRExtractor.h
index 1cdf36d..4a1c827 100644
--- a/media/libstagefright/include/AMRExtractor.h
+++ b/media/libstagefright/include/AMRExtractor.h
@@ -18,12 +18,14 @@
 
 #define AMR_EXTRACTOR_H_
 
+#include <utils/Errors.h>
 #include <media/stagefright/MediaExtractor.h>
 
 namespace android {
 
 struct AMessage;
 class String8;
+#define OFFSET_TABLE_LEN    300
 
 class AMRExtractor : public MediaExtractor {
 public:
@@ -42,9 +44,11 @@
     sp<DataSource> mDataSource;
     sp<MetaData> mMeta;
     status_t mInitCheck;
-    size_t mFrameSize;
     bool mIsWide;
 
+    off64_t mOffsetTable[OFFSET_TABLE_LEN]; //5 min
+    size_t mOffsetTableLength;
+
     AMRExtractor(const AMRExtractor &);
     AMRExtractor &operator=(const AMRExtractor &);
 };
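
The new mOffsetTable above replaces the old single mFrameSize field: AMR frames are a fixed 20 ms but not a fixed byte size, so the extractor records frame byte offsets as it scans the file. With OFFSET_TABLE_LEN = 300 and the "//5 min" comment, one recorded offset per second of audio is implied (that cadence is an assumption here; the exact stride lives in AMRExtractor.cpp). Seeking then becomes a table lookup plus a short forward scan, roughly like this sketch:

    #include <cstdint>
    #include <vector>

    // Hedged sketch of seeking with a sparse offset table, assuming one
    // recorded byte offset per second of audio (AMR frames are 20 ms each).
    struct SparseSeekTable {
        std::vector<int64_t> offsets;   // byte offset at each whole second

        // Returns the offset of the entry at or before timeUs plus the time
        // that entry represents; the caller scans frame headers from there.
        bool lookup(int64_t timeUs, int64_t *posBytes, int64_t *posTimeUs) const {
            if (offsets.empty() || timeUs < 0) return false;
            size_t index = static_cast<size_t>(timeUs / 1000000);
            if (index >= offsets.size()) index = offsets.size() - 1;
            *posBytes = offsets[index];
            *posTimeUs = static_cast<int64_t>(index) * 1000000;
            return true;
        }
    };
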
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index e352928..4e6f75c 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -36,11 +36,10 @@
 struct MediaExtractor;
 struct MediaSource;
 struct NuCachedSource2;
+struct ISurfaceTexture;
 
 struct ALooper;
 struct ARTSPController;
-struct ARTPSession;
-struct UDPPusher;
 
 class DrmManagerClinet;
 class DecryptHandle;
@@ -48,7 +47,6 @@
 struct AwesomeRenderer : public RefBase {
     AwesomeRenderer() {}
 
-    virtual status_t initCheck() const = 0;
     virtual void render(MediaBuffer *buffer) = 0;
 
 private:
@@ -68,6 +66,8 @@
 
     status_t setDataSource(int fd, int64_t offset, int64_t length);
 
+    status_t setDataSource(const sp<IStreamSource> &source);
+
     void reset();
 
     status_t prepare();
@@ -80,7 +80,8 @@
 
     bool isPlaying() const;
 
-    void setISurface(const sp<ISurface> &isurface);
+    void setSurface(const sp<Surface> &surface);
+    void setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
     void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
     status_t setLooping(bool shouldLoop);
 
@@ -89,11 +90,6 @@
 
     status_t seekTo(int64_t timeUs);
 
-    status_t getVideoDimensions(int32_t *width, int32_t *height) const;
-
-    status_t suspend();
-    status_t resume();
-
     // This is a mask of MediaExtractor::Flags.
     uint32_t flags() const;
 
@@ -102,6 +98,7 @@
 
 private:
     friend struct AwesomeEvent;
+    friend struct PreviewPlayer;
 
     enum {
         PLAYING             = 1,
@@ -115,6 +112,20 @@
         AUDIO_AT_EOS        = 256,
         VIDEO_AT_EOS        = 512,
         AUTO_LOOPING        = 1024,
+
+        // We are basically done preparing but are currently buffering
+        // sufficient data to begin playback and finish the preparation phase
+        // for good.
+        PREPARING_CONNECTED = 2048,
+
+        // We're triggering a single video event to display the first frame
+        // after the seekpoint.
+        SEEK_PREVIEW        = 4096,
+
+        AUDIO_RUNNING       = 8192,
+        AUDIOPLAYER_STARTED = 16384,
+
+        INCOGNITO           = 32768,
     };
 
     mutable Mutex mLock;
@@ -125,7 +136,8 @@
     bool mQueueStarted;
     wp<MediaPlayerBase> mListener;
 
-    sp<ISurface> mISurface;
+    sp<Surface> mSurface;
+    sp<ANativeWindow> mNativeWindow;
     sp<MediaPlayerBase::AudioSink> mAudioSink;
 
     SystemTimeSource mSystemTimeSource;
@@ -146,14 +158,23 @@
     AudioPlayer *mAudioPlayer;
     int64_t mDurationUs;
 
+    int32_t mDisplayWidth;
+    int32_t mDisplayHeight;
+
     uint32_t mFlags;
     uint32_t mExtractorFlags;
+    uint32_t mSinceLastDropped;
 
-    int32_t mVideoWidth, mVideoHeight;
     int64_t mTimeSourceDeltaUs;
     int64_t mVideoTimeUs;
 
-    bool mSeeking;
+    enum SeekType {
+        NO_SEEK,
+        SEEK,
+        SEEK_VIDEO_ONLY
+    };
+    SeekType mSeeking;
+
     bool mSeekNotificationSent;
     int64_t mSeekTimeUs;
 
@@ -170,6 +191,8 @@
     bool mBufferingEventPending;
     sp<TimedEventQueue::Event> mCheckAudioStatusEvent;
     bool mAudioStatusEventPending;
+    sp<TimedEventQueue::Event> mVideoLagEvent;
+    bool mVideoLagEventPending;
 
     sp<TimedEventQueue::Event> mAsyncPrepareEvent;
     Condition mPreparedCondition;
@@ -181,9 +204,9 @@
     void postBufferingEvent_l();
     void postStreamDoneEvent_l(status_t status);
     void postCheckAudioStatusEvent_l();
+    void postVideoLagEvent_l();
     status_t play_l();
 
-    MediaBuffer *mLastVideoBuffer;
     MediaBuffer *mVideoBuffer;
 
     sp<NuHTTPDataSource> mConnectingDataSource;
@@ -191,34 +214,7 @@
 
     sp<ALooper> mLooper;
     sp<ARTSPController> mRTSPController;
-    sp<ARTPSession> mRTPSession;
-    sp<UDPPusher> mRTPPusher, mRTCPPusher;
-
-    struct SuspensionState {
-        String8 mUri;
-        KeyedVector<String8, String8> mUriHeaders;
-        sp<DataSource> mFileSource;
-
-        uint32_t mFlags;
-        int64_t mPositionUs;
-
-        void *mLastVideoFrame;
-        size_t mLastVideoFrameSize;
-        int32_t mColorFormat;
-        int32_t mVideoWidth, mVideoHeight;
-        int32_t mDecodedWidth, mDecodedHeight;
-
-        SuspensionState()
-            : mLastVideoFrame(NULL) {
-        }
-
-        ~SuspensionState() {
-            if (mLastVideoFrame) {
-                free(mLastVideoFrame);
-                mLastVideoFrame = NULL;
-            }
-        }
-    } *mSuspensionState;
+    sp<ARTSPController> mConnectingRTSPController;
 
     DrmManagerClient *mDrmManagerClient;
     DecryptHandle *mDecryptHandle;
@@ -230,10 +226,10 @@
     status_t setDataSource_l(const sp<DataSource> &dataSource);
     status_t setDataSource_l(const sp<MediaExtractor> &extractor);
     void reset_l();
-    void partial_reset_l();
     status_t seekTo_l(int64_t timeUs);
     status_t pause_l(bool at_eos = false);
-    status_t initRenderer_l();
+    void initRenderer_l();
+    void notifyVideoSize_l();
     void seekAudioIfNecessary_l();
 
     void cancelPlayerEvents(bool keepBufferingGoing = false);
@@ -254,6 +250,7 @@
     void onPrepareAsyncEvent();
     void abortPrepare(status_t err);
     void finishAsyncPrepare_l();
+    void onVideoLagUpdate();
 
     bool getCachedDuration_l(int64_t *durationUs, bool *eos);
 
@@ -267,6 +264,9 @@
     bool getBitrate(int64_t *bitrate);
 
     void finishSeekIfNecessary(int64_t videoTimeUs);
+    void ensureCacheIsFetching_l();
+
+    status_t startAudioPlayer_l();
 
     AwesomePlayer(const AwesomePlayer &);
     AwesomePlayer &operator=(const AwesomePlayer &);
diff --git a/media/libstagefright/include/DRMExtractor.h b/media/libstagefright/include/DRMExtractor.h
index cafc812..9881cc1 100644
--- a/media/libstagefright/include/DRMExtractor.h
+++ b/media/libstagefright/include/DRMExtractor.h
@@ -46,6 +46,7 @@
 
     sp<MediaExtractor> mOriginalExtractor;
     DecryptHandle* mDecryptHandle;
+    DrmManagerClient* mDrmManagerClient;
 
     DRMExtractor(const DRMExtractor &);
     DRMExtractor &operator=(const DRMExtractor &);
diff --git a/media/libstagefright/include/FLACExtractor.h b/media/libstagefright/include/FLACExtractor.h
new file mode 100644
index 0000000..ded91c2
--- /dev/null
+++ b/media/libstagefright/include/FLACExtractor.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLAC_EXTRACTOR_H_
+#define FLAC_EXTRACTOR_H_
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <utils/String8.h>
+
+namespace android {
+
+class FLACParser;
+
+class FLACExtractor : public MediaExtractor {
+
+public:
+    // Extractor assumes ownership of source
+    FLACExtractor(const sp<DataSource> &source);
+
+    virtual size_t countTracks();
+    virtual sp<MediaSource> getTrack(size_t index);
+    virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+
+    virtual sp<MetaData> getMetaData();
+
+protected:
+    virtual ~FLACExtractor();
+
+private:
+    sp<DataSource> mDataSource;
+    sp<FLACParser> mParser;
+    status_t mInitCheck;
+    sp<MetaData> mFileMetadata;
+
+    // There is only one track
+    sp<MetaData> mTrackMetadata;
+
+    status_t init();
+
+    FLACExtractor(const FLACExtractor &);
+    FLACExtractor &operator=(const FLACExtractor &);
+
+};
+
+bool SniffFLAC(const sp<DataSource> &source, String8 *mimeType,
+        float *confidence, sp<AMessage> *);
+
+}  // namespace android
+
+#endif  // FLAC_EXTRACTOR_H_
diff --git a/media/libstagefright/include/HTTPStream.h b/media/libstagefright/include/HTTPStream.h
index 793798f..09e6a5f 100644
--- a/media/libstagefright/include/HTTPStream.h
+++ b/media/libstagefright/include/HTTPStream.h
@@ -18,10 +18,9 @@
 
 #define HTTP_STREAM_H_
 
-#include "stagefright_string.h"
-
 #include <sys/types.h>
 
+#include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/MediaErrors.h>
 #include <utils/KeyedVector.h>
 #include <utils/threads.h>
@@ -33,7 +32,7 @@
     HTTPStream();
     ~HTTPStream();
 
-    status_t connect(const char *server, int port = 80);
+    status_t connect(const char *server, int port = -1, bool https = false);
     status_t disconnect();
 
     status_t send(const char *data, size_t size);
@@ -50,7 +49,7 @@
     static const char *kStatusKey;
 
     bool find_header_value(
-            const string &key, string *value) const;
+            const AString &key, AString *value) const;
 
     // Pass a negative value to disable the timeout.
     void setReceiveTimeout(int seconds);
@@ -70,7 +69,10 @@
     Mutex mLock;
     int mSocket;
 
-    KeyedVector<string, string> mHeaders;
+    KeyedVector<AString, AString> mHeaders;
+
+    void *mSSLContext;
+    void *mSSL;
 
     HTTPStream(const HTTPStream &);
     HTTPStream &operator=(const HTTPStream &);
diff --git a/media/libstagefright/include/ID3.h b/media/libstagefright/include/ID3.h
index 7ddbb41..98c82a4 100644
--- a/media/libstagefright/include/ID3.h
+++ b/media/libstagefright/include/ID3.h
@@ -80,7 +80,7 @@
     bool parseV1(const sp<DataSource> &source);
     bool parseV2(const sp<DataSource> &source);
     void removeUnsynchronization();
-    bool removeUnsynchronizationV2_4();
+    bool removeUnsynchronizationV2_4(bool iTunesHack);
 
     static bool ParseSyncsafeInteger(const uint8_t encoded[4], size_t *x);
 
diff --git a/media/libstagefright/include/LiveSession.h b/media/libstagefright/include/LiveSession.h
new file mode 100644
index 0000000..3fe5d4e
--- /dev/null
+++ b/media/libstagefright/include/LiveSession.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIVE_SESSION_H_
+
+#define LIVE_SESSION_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct DataSource;
+struct LiveDataSource;
+struct M3UParser;
+struct NuHTTPDataSource;
+
+struct LiveSession : public AHandler {
+    enum Flags {
+        // Don't log any URLs.
+        kFlagIncognito = 1,
+    };
+    LiveSession(uint32_t flags = 0);
+
+    sp<DataSource> getDataSource();
+
+    void connect(const char *url);
+    void disconnect();
+
+    // Blocks until seek is complete.
+    void seekTo(int64_t timeUs);
+
+    status_t getDuration(int64_t *durationUs);
+    bool isSeekable();
+
+protected:
+    virtual ~LiveSession();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kMaxNumQueuedFragments = 3,
+        kMaxNumRetries         = 5,
+    };
+
+    static const int64_t kMaxPlaylistAgeUs;
+
+    enum {
+        kWhatConnect        = 'conn',
+        kWhatDisconnect     = 'disc',
+        kWhatMonitorQueue   = 'moni',
+        kWhatSeek           = 'seek',
+    };
+
+    struct BandwidthItem {
+        AString mURI;
+        unsigned long mBandwidth;
+    };
+
+    uint32_t mFlags;
+
+    sp<LiveDataSource> mDataSource;
+
+    sp<NuHTTPDataSource> mHTTPDataSource;
+
+    AString mMasterURL;
+    Vector<BandwidthItem> mBandwidthItems;
+
+    KeyedVector<AString, sp<ABuffer> > mAESKeyForURI;
+
+    ssize_t mPrevBandwidthIndex;
+    int64_t mLastPlaylistFetchTimeUs;
+    sp<M3UParser> mPlaylist;
+    int32_t mSeqNumber;
+    int64_t mSeekTimeUs;
+    int32_t mNumRetries;
+
+    Mutex mLock;
+    Condition mCondition;
+    int64_t mDurationUs;
+    bool mSeekDone;
+    bool mDisconnectPending;
+
+    int32_t mMonitorQueueGeneration;
+
+    void onConnect(const sp<AMessage> &msg);
+    void onDisconnect();
+    void onDownloadNext();
+    void onMonitorQueue();
+    void onSeek(const sp<AMessage> &msg);
+
+    status_t fetchFile(const char *url, sp<ABuffer> *out);
+    sp<M3UParser> fetchPlaylist(const char *url);
+    size_t getBandwidthIndex();
+
+    status_t decryptBuffer(
+            size_t playlistIndex, const sp<ABuffer> &buffer);
+
+    void postMonitorQueue(int64_t delayUs = 0);
+
+    static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *);
+
+    DISALLOW_EVIL_CONSTRUCTORS(LiveSession);
+};
+
+}  // namespace android
+
+#endif  // LIVE_SESSION_H_
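
LiveSession keeps the variant streams as BandwidthItems (sorted via SortByBandwidth) and getBandwidthIndex() chooses among them using the throughput reported by NuHTTPDataSource::estimateBandwidth(). The selection policy itself is not visible in this header; a plausible sketch is "highest variant whose declared bandwidth still fits the estimate":

    #include <cstdint>
    #include <vector>

    struct BandwidthItem {
        unsigned long mBandwidth;   // BANDWIDTH attribute from #EXT-X-STREAM-INF
    };

    // Hedged sketch of a bandwidth-index pick: items are assumed sorted
    // ascending by mBandwidth; fall back to the lowest variant if nothing fits.
    static size_t PickBandwidthIndex(
            const std::vector<BandwidthItem> &items, int32_t estimatedBps) {
        size_t index = 0;
        for (size_t i = 0; i < items.size(); ++i) {
            if (estimatedBps > 0
                    && items[i].mBandwidth
                            <= static_cast<unsigned long>(estimatedBps)) {
                index = i;
            }
        }
        return index;
    }
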
diff --git a/media/libstagefright/include/LiveSource.h b/media/libstagefright/include/LiveSource.h
deleted file mode 100644
index 55dd45e..0000000
--- a/media/libstagefright/include/LiveSource.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LIVE_SOURCE_H_
-
-#define LIVE_SOURCE_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/foundation/AString.h>
-#include <media/stagefright/DataSource.h>
-#include <utils/Vector.h>
-
-namespace android {
-
-struct ABuffer;
-struct NuHTTPDataSource;
-struct M3UParser;
-
-struct LiveSource : public DataSource {
-    LiveSource(const char *url);
-
-    virtual status_t initCheck() const;
-
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
-
-    virtual uint32_t flags() {
-        return kWantsPrefetching;
-    }
-
-    bool getDuration(int64_t *durationUs) const;
-
-    bool isSeekable() const;
-    bool seekTo(int64_t seekTimeUs);
-
-protected:
-    virtual ~LiveSource();
-
-private:
-    struct BandwidthItem {
-        AString mURI;
-        unsigned long mBandwidth;
-    };
-    Vector<BandwidthItem> mBandwidthItems;
-
-    AString mMasterURL;
-    AString mURL;
-    status_t mInitCheck;
-    int64_t mDurationUs;
-
-    sp<M3UParser> mPlaylist;
-    int32_t mFirstItemSequenceNumber;
-    size_t mPlaylistIndex;
-    int64_t mLastFetchTimeUs;
-
-    sp<NuHTTPDataSource> mSource;
-    off_t mSourceSize;
-    off_t mOffsetBias;
-
-    bool mSignalDiscontinuity;
-    ssize_t mPrevBandwidthIndex;
-
-    status_t fetchM3U(const char *url, sp<ABuffer> *buffer);
-
-    static int SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b);
-
-    bool switchToNext();
-    bool loadPlaylist(bool fetchMaster);
-    void determineSeekability();
-
-    DISALLOW_EVIL_CONSTRUCTORS(LiveSource);
-};
-
-}  // namespace android
-
-#endif  // LIVE_SOURCE_H_
diff --git a/media/libstagefright/include/M3UParser.h b/media/libstagefright/include/M3UParser.h
index bd9eebe..63895b4 100644
--- a/media/libstagefright/include/M3UParser.h
+++ b/media/libstagefright/include/M3UParser.h
@@ -66,6 +66,9 @@
     static status_t parseStreamInf(
             const AString &line, sp<AMessage> *meta);
 
+    static status_t parseCipherInfo(
+            const AString &line, sp<AMessage> *meta, const AString &baseURI);
+
     static status_t ParseInt32(const char *s, int32_t *x);
 
     DISALLOW_EVIL_CONSTRUCTORS(M3UParser);
diff --git a/media/libstagefright/include/MP3Extractor.h b/media/libstagefright/include/MP3Extractor.h
index 30136e7..ef71b8f 100644
--- a/media/libstagefright/include/MP3Extractor.h
+++ b/media/libstagefright/include/MP3Extractor.h
@@ -18,12 +18,14 @@
 
 #define MP3_EXTRACTOR_H_
 
+#include <utils/Errors.h>
 #include <media/stagefright/MediaExtractor.h>
 
 namespace android {
 
 struct AMessage;
 class DataSource;
+struct MP3Seeker;
 class String8;
 
 class MP3Extractor : public MediaExtractor {
@@ -37,15 +39,19 @@
 
     virtual sp<MetaData> getMetaData();
 
+    static bool get_mp3_frame_size(
+            uint32_t header, size_t *frame_size,
+            int *out_sampling_rate = NULL, int *out_channels = NULL,
+            int *out_bitrate = NULL);
+
 private:
     status_t mInitCheck;
 
     sp<DataSource> mDataSource;
-    off_t mFirstFramePos;
+    off64_t mFirstFramePos;
     sp<MetaData> mMeta;
     uint32_t mFixedHeader;
-    int32_t mByteNumber; // total number of bytes in this MP3
-    char mTableOfContents[99]; // TOC entries in XING header
+    sp<MP3Seeker> mSeeker;
 
     MP3Extractor(const MP3Extractor &);
     MP3Extractor &operator=(const MP3Extractor &);
diff --git a/media/libstagefright/include/MP3Seeker.h b/media/libstagefright/include/MP3Seeker.h
new file mode 100644
index 0000000..599542e
--- /dev/null
+++ b/media/libstagefright/include/MP3Seeker.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MP3_SEEKER_H_
+
+#define MP3_SEEKER_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct MP3Seeker : public RefBase {
+    MP3Seeker() {}
+
+    virtual bool getDuration(int64_t *durationUs) = 0;
+
+    // Given a request seek time in "*timeUs", find the byte offset closest
+    // to that position and return it in "*pos". Update "*timeUs" to reflect
+    // the actual time that seekpoint represents.
+    virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos) = 0;
+
+protected:
+    virtual ~MP3Seeker() {}
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(MP3Seeker);
+};
+
+}  // namespace android
+
+#endif  // MP3_SEEKER_H_
+
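
XINGSeeker and VBRISeeker (added further down) are the two implementations of this interface, and MP3Extractor only ever talks to the abstract MP3Seeker: ask for the byte position closest to the requested time, then resynchronize on the next frame header from there. To make the contract concrete, here is a standalone toy implementation for a constant-bitrate stream, written without the Android types and purely for illustration:

    #include <cstdint>

    // Toy constant-bitrate seeker mirroring the MP3Seeker contract above.
    class CBRSeeker {
    public:
        CBRSeeker(int64_t firstFramePos, int64_t fileSizeBytes, int32_t bitrateBps)
            : mFirstFramePos(firstFramePos),
              mPayloadBytes(fileSizeBytes - firstFramePos),
              mBitrateBps(bitrateBps) {}

        bool getDuration(int64_t *durationUs) const {
            if (mBitrateBps <= 0) return false;
            *durationUs = mPayloadBytes * 8000000ll / mBitrateBps;
            return true;
        }

        // Like MP3Seeker::getOffsetForTime: map *timeUs to a byte offset and
        // snap *timeUs back to the time that offset actually represents.
        bool getOffsetForTime(int64_t *timeUs, int64_t *pos) const {
            if (mBitrateBps <= 0) return false;
            int64_t offset = *timeUs * mBitrateBps / 8000000ll;
            if (offset > mPayloadBytes) offset = mPayloadBytes;
            *pos = mFirstFramePos + offset;
            *timeUs = offset * 8000000ll / mBitrateBps;
            return true;
        }

    private:
        int64_t mFirstFramePos;
        int64_t mPayloadBytes;
        int32_t mBitrateBps;
    };
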
diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h
index d83b538..efe7496 100644
--- a/media/libstagefright/include/MPEG2TSExtractor.h
+++ b/media/libstagefright/include/MPEG2TSExtractor.h
@@ -15,7 +15,7 @@
 struct DataSource;
 struct MPEG2TSSource;
 struct String8;
-struct LiveSource;
+struct LiveSession;
 
 struct MPEG2TSExtractor : public MediaExtractor {
     MPEG2TSExtractor(const sp<DataSource> &source);
@@ -28,7 +28,7 @@
 
     virtual uint32_t flags() const;
 
-    void setLiveSource(const sp<LiveSource> &liveSource);
+    void setLiveSession(const sp<LiveSession> &liveSession);
     void seekTo(int64_t seekTimeUs);
 
 private:
@@ -37,13 +37,13 @@
     mutable Mutex mLock;
 
     sp<DataSource> mDataSource;
-    sp<LiveSource> mLiveSource;
+    sp<LiveSession> mLiveSession;
 
     sp<ATSParser> mParser;
 
     Vector<sp<AnotherPacketSource> > mSourceImpls;
 
-    off_t mOffset;
+    off64_t mOffset;
 
     void init();
     status_t feedMore();
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index bc2e4dc..04e8a6a 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -67,8 +67,8 @@
     Vector<uint32_t> mPath;
 
     status_t readMetaData();
-    status_t parseChunk(off_t *offset, int depth);
-    status_t parseMetaData(off_t offset, size_t size);
+    status_t parseChunk(off64_t *offset, int depth);
+    status_t parseMetaData(off64_t offset, size_t size);
 
     status_t updateAudioTrackInfoFromESDS_MPEG4Audio(
             const void *esds_data, size_t esds_size);
@@ -86,9 +86,9 @@
     SINF *mFirstSINF;
 
     bool mIsDrm;
-    status_t parseDrmSINF(off_t *offset, off_t data_offset);
+    status_t parseDrmSINF(off64_t *offset, off64_t data_offset);
 
-    status_t parseTrackHeader(off_t data_offset, off_t data_size);
+    status_t parseTrackHeader(off64_t data_offset, off64_t data_size);
 
     MPEG4Extractor(const MPEG4Extractor &);
     MPEG4Extractor &operator=(const MPEG4Extractor &);
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index 1fb2088..022804c 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -32,18 +32,20 @@
 
     virtual status_t initCheck() const;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
 
-    virtual status_t getSize(off_t *size);
+    virtual status_t getSize(off64_t *size);
     virtual uint32_t flags();
 
+    virtual DecryptHandle* DrmInitialization();
+    virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client);
+    virtual String8 getUri();
     ////////////////////////////////////////////////////////////////////////////
 
     size_t cachedSize();
-    size_t approxDataRemaining(bool *eos);
+    size_t approxDataRemaining(status_t *finalStatus);
 
-    void suspend();
-    void clearCacheAndResume();
+    void resumeFetchingIfNecessary();
 
 protected:
     virtual ~NuCachedSource2();
@@ -53,8 +55,8 @@
 
     enum {
         kPageSize            = 65536,
-        kHighWaterThreshold  = 5 * 1024 * 1024,
-        kLowWaterThreshold   = 512 * 1024,
+        kHighWaterThreshold  = 20 * 1024 * 1024,
+        kLowWaterThreshold   = 4 * 1024 * 1024,
 
         // Read data after a 15 sec timeout whether we're actively
         // fetching or not.
@@ -64,7 +66,6 @@
     enum {
         kWhatFetchMore  = 'fetc',
         kWhatRead       = 'read',
-        kWhatSuspend    = 'susp',
     };
 
     sp<DataSource> mSource;
@@ -76,25 +77,23 @@
     Condition mCondition;
 
     PageCache *mCache;
-    off_t mCacheOffset;
+    off64_t mCacheOffset;
     status_t mFinalStatus;
-    off_t mLastAccessPos;
+    off64_t mLastAccessPos;
     sp<AMessage> mAsyncResult;
     bool mFetching;
     int64_t mLastFetchTimeUs;
-    bool mSuspended;
 
     void onMessageReceived(const sp<AMessage> &msg);
     void onFetch();
     void onRead(const sp<AMessage> &msg);
-    void onSuspend();
 
     void fetchInternal();
-    ssize_t readInternal(off_t offset, void *data, size_t size);
-    status_t seekInternal_l(off_t offset);
+    ssize_t readInternal(off64_t offset, void *data, size_t size);
+    status_t seekInternal_l(off64_t offset);
 
-    size_t approxDataRemaining_l(bool *eos);
-    void restartPrefetcherIfNecessary_l();
+    size_t approxDataRemaining_l(status_t *finalStatus);
+    void restartPrefetcherIfNecessary_l(bool ignoreLowWaterThreshold = false);
 
     DISALLOW_EVIL_CONSTRUCTORS(NuCachedSource2);
 };
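
The cache watermarks above grow from 5 MB / 512 KB to 20 MB / 4 MB. Under the watermark scheme this header implies, the background fetch stops once the data buffered ahead of the last read position exceeds the high mark and restartPrefetcherIfNecessary_l() resumes it once the buffer drains below the low mark; the exact conditions live in NuCachedSource2.cpp, so the following only sketches the hysteresis:

    #include <cstddef>

    // Sketch of a high/low watermark prefetch gate using the new thresholds.
    struct PrefetchGate {
        static constexpr size_t kHighWaterThreshold = 20 * 1024 * 1024;
        static constexpr size_t kLowWaterThreshold  = 4 * 1024 * 1024;

        bool fetching = true;

        // cachedAhead: bytes buffered beyond the reader's current position.
        void update(size_t cachedAhead) {
            if (fetching && cachedAhead >= kHighWaterThreshold) {
                fetching = false;   // enough runway, stop hitting the network
            } else if (!fetching && cachedAhead < kLowWaterThreshold) {
                fetching = true;    // running low again, resume prefetching
            }
        }
    };
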
diff --git a/media/libstagefright/include/NuHTTPDataSource.h b/media/libstagefright/include/NuHTTPDataSource.h
index ad22807..2569568 100644
--- a/media/libstagefright/include/NuHTTPDataSource.h
+++ b/media/libstagefright/include/NuHTTPDataSource.h
@@ -19,6 +19,7 @@
 #define NU_HTTP_DATA_SOURCE_H_
 
 #include <media/stagefright/DataSource.h>
+#include <utils/List.h>
 #include <utils/String8.h>
 #include <utils/threads.h>
 
@@ -27,21 +28,33 @@
 namespace android {
 
 struct NuHTTPDataSource : public DataSource {
-    NuHTTPDataSource();
+    enum Flags {
+        // Don't log any URLs.
+        kFlagIncognito = 1
+    };
+    NuHTTPDataSource(uint32_t flags = 0);
 
     status_t connect(
             const char *uri,
             const KeyedVector<String8, String8> *headers = NULL,
-            off_t offset = 0);
+            off64_t offset = 0);
 
     void disconnect();
 
     virtual status_t initCheck() const;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
-    virtual status_t getSize(off_t *size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+    virtual status_t getSize(off64_t *size);
     virtual uint32_t flags();
 
+    // Returns true if bandwidth could successfully be estimated,
+    // false otherwise.
+    bool estimateBandwidth(int32_t *bandwidth_bps);
+
+    virtual DecryptHandle* DrmInitialization();
+    virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client);
+    virtual String8 getUri();
+
 protected:
     virtual ~NuHTTPDataSource();
 
@@ -52,18 +65,27 @@
         CONNECTED
     };
 
+    struct BandwidthEntry {
+        int64_t mDelayUs;
+        size_t mNumBytes;
+    };
+
     Mutex mLock;
 
+    uint32_t mFlags;
+
     State mState;
 
     String8 mHost;
     unsigned mPort;
     String8 mPath;
+    bool mHTTPS;
     String8 mHeaders;
+    String8 mUri;
 
     HTTPStream mHTTP;
-    off_t mOffset;
-    off_t mContentLength;
+    off64_t mOffset;
+    off64_t mContentLength;
     bool mContentLengthValid;
     bool mHasChunkedTransferEncoding;
 
@@ -71,18 +93,28 @@
     // chunk header (or -1 if no more chunks).
     ssize_t mChunkDataBytesLeft;
 
+    List<BandwidthEntry> mBandwidthHistory;
+    size_t mNumBandwidthHistoryItems;
+    int64_t mTotalTransferTimeUs;
+    size_t mTotalTransferBytes;
+
+    DecryptHandle *mDecryptHandle;
+    DrmManagerClient *mDrmManagerClient;
+
     status_t connect(
-            const char *uri, const String8 &headers, off_t offset);
+            const char *uri, const String8 &headers, off64_t offset);
 
     status_t connect(
             const char *host, unsigned port, const char *path,
+            bool https,
             const String8 &headers,
-            off_t offset);
+            off64_t offset);
 
     // Read up to "size" bytes of data, respect transfer encoding.
     ssize_t internalRead(void *data, size_t size);
 
     void applyTimeoutResponse();
+    void addBandwidthMeasurement_l(size_t numBytes, int64_t delayUs);
 
     static void MakeFullHeaders(
             const KeyedVector<String8, String8> *overrides,
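
The new BandwidthEntry history feeds estimateBandwidth(); presumably each completed read is recorded via addBandwidthMeasurement_l() and the estimate is total bits over total transfer time. The windowing and entry limit (mNumBandwidthHistoryItems) live in the .cpp, so this is a sketch under that assumption:

    #include <cstddef>
    #include <cstdint>
    #include <list>

    struct BandwidthEntry {
        int64_t mDelayUs;   // how long the transfer took
        size_t mNumBytes;   // how many bytes it moved
    };

    // Hedged sketch of estimateBandwidth(): bits transferred over elapsed
    // time across the recorded history; false when there is nothing usable.
    static bool EstimateBandwidth(
            const std::list<BandwidthEntry> &history, int32_t *bandwidthBps) {
        int64_t totalTimeUs = 0;
        int64_t totalBytes = 0;
        for (const BandwidthEntry &entry : history) {
            totalTimeUs += entry.mDelayUs;
            totalBytes += static_cast<int64_t>(entry.mNumBytes);
        }
        if (totalTimeUs <= 0) return false;
        *bandwidthBps = static_cast<int32_t>(totalBytes * 8000000ll / totalTimeUs);
        return true;
    }
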
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 72ab5aa..ec3e5fa 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -59,10 +59,23 @@
             node_id node, OMX_INDEXTYPE index,
             const void *params, size_t size);
 
+    virtual status_t enableGraphicBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
+    virtual status_t getGraphicBufferUsage(
+            node_id node, OMX_U32 port_index, OMX_U32* usage);
+
+    virtual status_t storeMetaDataInBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
     virtual status_t useBuffer(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
             buffer_id *buffer);
 
+    virtual status_t useGraphicBuffer(
+            node_id node, OMX_U32 port_index,
+            const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+
     virtual status_t allocateBuffer(
             node_id node, OMX_U32 port_index, size_t size,
             buffer_id *buffer, void **buffer_data);
@@ -87,14 +100,6 @@
             const char *parameter_name,
             OMX_INDEXTYPE *index);
 
-    virtual sp<IOMXRenderer> createRenderer(
-            const sp<ISurface> &surface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees);
-
     virtual void binderDied(const wp<IBinder> &the_late_who);
 
     OMX_ERRORTYPE OnEvent(
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index b5b31ac..ca2578f 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -49,10 +49,20 @@
     status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size);
     status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size);
 
+    status_t enableGraphicBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+
+    status_t getGraphicBufferUsage(OMX_U32 portIndex, OMX_U32* usage);
+
+    status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+
     status_t useBuffer(
             OMX_U32 portIndex, const sp<IMemory> &params,
             OMX::buffer_id *buffer);
 
+    status_t useGraphicBuffer(
+            OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+            OMX::buffer_id *buffer);
+
     status_t allocateBuffer(
             OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
             void **buffer_data);
@@ -125,4 +135,3 @@
 }  // namespace android
 
 #endif  // OMX_NODE_INSTANCE_H_
-
diff --git a/media/libstagefright/include/OggExtractor.h b/media/libstagefright/include/OggExtractor.h
index 1eda025..e97c8cd 100644
--- a/media/libstagefright/include/OggExtractor.h
+++ b/media/libstagefright/include/OggExtractor.h
@@ -18,6 +18,7 @@
 
 #define OGG_EXTRACTOR_H_
 
+#include <utils/Errors.h>
 #include <media/stagefright/MediaExtractor.h>
 
 namespace android {
@@ -57,6 +58,9 @@
         const sp<DataSource> &source, String8 *mimeType, float *confidence,
         sp<AMessage> *);
 
+void parseVorbisComment(
+        const sp<MetaData> &fileMeta, const char *comment, size_t commentLength);
+
 }  // namespace android
 
 #endif  // OGG_EXTRACTOR_H_
diff --git a/media/libstagefright/include/SampleIterator.h b/media/libstagefright/include/SampleIterator.h
index a5eaed9..b5a043c 100644
--- a/media/libstagefright/include/SampleIterator.h
+++ b/media/libstagefright/include/SampleIterator.h
@@ -27,7 +27,7 @@
 
     uint32_t getChunkIndex() const { return mCurrentChunkIndex; }
     uint32_t getDescIndex() const { return mChunkDesc; }
-    off_t getSampleOffset() const { return mCurrentSampleOffset; }
+    off64_t getSampleOffset() const { return mCurrentSampleOffset; }
     size_t getSampleSize() const { return mCurrentSampleSize; }
     uint32_t getSampleTime() const { return mCurrentSampleTime; }
 
@@ -48,7 +48,7 @@
     uint32_t mChunkDesc;
 
     uint32_t mCurrentChunkIndex;
-    off_t mCurrentChunkOffset;
+    off64_t mCurrentChunkOffset;
     Vector<size_t> mCurrentChunkSampleSizes;
 
     uint32_t mTimeToSampleIndex;
@@ -58,13 +58,13 @@
     uint32_t mTTSDuration;
 
     uint32_t mCurrentSampleIndex;
-    off_t mCurrentSampleOffset;
+    off64_t mCurrentSampleOffset;
     size_t mCurrentSampleSize;
     uint32_t mCurrentSampleTime;
 
     void reset();
     status_t findChunkRange(uint32_t sampleIndex);
-    status_t getChunkOffset(uint32_t chunk, off_t *offset);
+    status_t getChunkOffset(uint32_t chunk, off64_t *offset);
     status_t findSampleTime(uint32_t sampleIndex, uint32_t *time);
 
     SampleIterator(const SampleIterator &);
diff --git a/media/libstagefright/include/SampleTable.h b/media/libstagefright/include/SampleTable.h
index f830690..2f95de9 100644
--- a/media/libstagefright/include/SampleTable.h
+++ b/media/libstagefright/include/SampleTable.h
@@ -36,17 +36,20 @@
 
     // type can be 'stco' or 'co64'.
     status_t setChunkOffsetParams(
-            uint32_t type, off_t data_offset, size_t data_size);
+            uint32_t type, off64_t data_offset, size_t data_size);
 
-    status_t setSampleToChunkParams(off_t data_offset, size_t data_size);
+    status_t setSampleToChunkParams(off64_t data_offset, size_t data_size);
 
     // type can be 'stsz' or 'stz2'.
     status_t setSampleSizeParams(
-            uint32_t type, off_t data_offset, size_t data_size);
+            uint32_t type, off64_t data_offset, size_t data_size);
 
-    status_t setTimeToSampleParams(off_t data_offset, size_t data_size);
+    status_t setTimeToSampleParams(off64_t data_offset, size_t data_size);
 
-    status_t setSyncSampleParams(off_t data_offset, size_t data_size);
+    status_t setCompositionTimeToSampleParams(
+            off64_t data_offset, size_t data_size);
+
+    status_t setSyncSampleParams(off64_t data_offset, size_t data_size);
 
     ////////////////////////////////////////////////////////////////////////////
 
@@ -58,7 +61,7 @@
 
     status_t getMetaDataForSample(
             uint32_t sampleIndex,
-            off_t *offset,
+            off64_t *offset,
             size_t *size,
             uint32_t *decodingTime,
             bool *isSyncSample = NULL);
@@ -89,14 +92,14 @@
     sp<DataSource> mDataSource;
     Mutex mLock;
 
-    off_t mChunkOffsetOffset;
+    off64_t mChunkOffsetOffset;
     uint32_t mChunkOffsetType;
     uint32_t mNumChunkOffsets;
 
-    off_t mSampleToChunkOffset;
+    off64_t mSampleToChunkOffset;
     uint32_t mNumSampleToChunkOffsets;
 
-    off_t mSampleSizeOffset;
+    off64_t mSampleSizeOffset;
     uint32_t mSampleSizeFieldSize;
     uint32_t mDefaultSampleSize;
     uint32_t mNumSampleSizes;
@@ -104,7 +107,10 @@
     uint32_t mTimeToSampleCount;
     uint32_t *mTimeToSample;
 
-    off_t mSyncSampleOffset;
+    uint32_t *mCompositionTimeDeltaEntries;
+    size_t mNumCompositionTimeDeltaEntries;
+
+    off64_t mSyncSampleOffset;
     uint32_t mNumSyncSamples;
     uint32_t *mSyncSamples;
     size_t mLastSyncSampleIndex;
@@ -122,6 +128,8 @@
 
     status_t getSampleSize_l(uint32_t sample_index, size_t *sample_size);
 
+    uint32_t getCompositionTimeOffset(uint32_t sampleIndex) const;
+
     SampleTable(const SampleTable &);
     SampleTable &operator=(const SampleTable &);
 };
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 89d7cc4..78037b9 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -19,40 +19,34 @@
 #define SOFTWARE_RENDERER_H_
 
 #include <media/stagefright/ColorConverter.h>
-#include <media/stagefright/VideoRenderer.h>
 #include <utils/RefBase.h>
+#include <ui/android_native_buffer.h>
 
 namespace android {
 
-class ISurface;
-class MemoryHeapBase;
+struct MetaData;
 
-class SoftwareRenderer : public VideoRenderer {
+class SoftwareRenderer {
 public:
     SoftwareRenderer(
-            OMX_COLOR_FORMATTYPE colorFormat,
-            const sp<ISurface> &surface,
-            size_t displayWidth, size_t displayHeight,
-            size_t decodedWidth, size_t decodedHeight,
-            int32_t rotationDegrees = 0);
+            const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta);
 
-    virtual ~SoftwareRenderer();
+    ~SoftwareRenderer();
 
-    status_t initCheck() const;
-
-    virtual void render(
+    void render(
             const void *data, size_t size, void *platformPrivate);
 
 private:
-    status_t mInitCheck;
+    enum YUVMode {
+        None,
+    };
+
     OMX_COLOR_FORMATTYPE mColorFormat;
-    ColorConverter mConverter;
-    sp<ISurface> mISurface;
-    size_t mDisplayWidth, mDisplayHeight;
-    size_t mDecodedWidth, mDecodedHeight;
-    size_t mFrameSize;
-    sp<MemoryHeapBase> mMemoryHeap;
-    int mIndex;
+    ColorConverter *mConverter;
+    YUVMode mYUVMode;
+    sp<ANativeWindow> mNativeWindow;
+    int32_t mWidth, mHeight;
+    int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
 
     SoftwareRenderer(const SoftwareRenderer &);
     SoftwareRenderer &operator=(const SoftwareRenderer &);
diff --git a/media/libstagefright/include/ThrottledSource.h b/media/libstagefright/include/ThrottledSource.h
index 88164b3..8928a4a 100644
--- a/media/libstagefright/include/ThrottledSource.h
+++ b/media/libstagefright/include/ThrottledSource.h
@@ -30,9 +30,9 @@
 
     virtual status_t initCheck() const;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
 
-    virtual status_t getSize(off_t *size);
+    virtual status_t getSize(off64_t *size);
     virtual uint32_t flags();
 
 private:
diff --git a/media/libstagefright/include/VBRISeeker.h b/media/libstagefright/include/VBRISeeker.h
new file mode 100644
index 0000000..1a2bf9f
--- /dev/null
+++ b/media/libstagefright/include/VBRISeeker.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VBRI_SEEKER_H_
+
+#define VBRI_SEEKER_H_
+
+#include "include/MP3Seeker.h"
+
+#include <utils/Vector.h>
+
+namespace android {
+
+struct DataSource;
+
+struct VBRISeeker : public MP3Seeker {
+    static sp<VBRISeeker> CreateFromSource(
+            const sp<DataSource> &source, off64_t post_id3_pos);
+
+    virtual bool getDuration(int64_t *durationUs);
+    virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos);
+
+private:
+    off64_t mBasePos;
+    int64_t mDurationUs;
+    Vector<uint32_t> mSegments;
+
+    VBRISeeker();
+
+    DISALLOW_EVIL_CONSTRUCTORS(VBRISeeker);
+};
+
+}  // namespace android
+
+#endif  // VBRI_SEEKER_H_
+
+
diff --git a/media/libstagefright/include/WAVExtractor.h b/media/libstagefright/include/WAVExtractor.h
index df6d3e7..ce1f33a 100644
--- a/media/libstagefright/include/WAVExtractor.h
+++ b/media/libstagefright/include/WAVExtractor.h
@@ -18,6 +18,7 @@
 
 #define WAV_EXTRACTOR_H_
 
+#include <utils/Errors.h>
 #include <media/stagefright/MediaExtractor.h>
 
 namespace android {
@@ -48,7 +49,7 @@
     uint16_t mNumChannels;
     uint32_t mSampleRate;
     uint16_t mBitsPerSample;
-    off_t mDataOffset;
+    off64_t mDataOffset;
     size_t mDataSize;
     sp<MetaData> mTrackMeta;
 
diff --git a/media/libstagefright/include/WVMExtractor.h b/media/libstagefright/include/WVMExtractor.h
new file mode 100644
index 0000000..0da45a8
--- /dev/null
+++ b/media/libstagefright/include/WVMExtractor.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WVM_EXTRACTOR_H_
+
+#define WVM_EXTRACTOR_H_
+
+#include <media/stagefright/MediaExtractor.h>
+
+namespace android {
+
+class DataSource;
+
+class WVMExtractor : public MediaExtractor {
+public:
+    WVMExtractor(const sp<DataSource> &source);
+
+    virtual size_t countTracks();
+    virtual sp<MediaSource> getTrack(size_t index);
+    virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+    virtual sp<MetaData> getMetaData();
+
+protected:
+    virtual ~WVMExtractor();
+
+private:
+    sp<DataSource> mDataSource;
+    sp<MediaExtractor> mImpl;
+
+    WVMExtractor(const WVMExtractor &);
+    WVMExtractor &operator=(const WVMExtractor &);
+
+};
+
+}  // namespace android
+
+#endif  // WVM_EXTRACTOR_H_
+
diff --git a/media/libstagefright/include/XINGSeeker.h b/media/libstagefright/include/XINGSeeker.h
new file mode 100644
index 0000000..d5a484e
--- /dev/null
+++ b/media/libstagefright/include/XINGSeeker.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef XING_SEEKER_H_
+
+#define XING_SEEKER_H_
+
+#include "include/MP3Seeker.h"
+
+namespace android {
+
+struct DataSource;
+
+struct XINGSeeker : public MP3Seeker {
+    static sp<XINGSeeker> CreateFromSource(
+            const sp<DataSource> &source, off64_t first_frame_pos);
+
+    virtual bool getDuration(int64_t *durationUs);
+    virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos);
+
+private:
+    int64_t mFirstFramePos;
+    int64_t mDurationUs;
+    int32_t mSizeBytes;
+
+    // TOC entries in XING header. Skip the first one since it's always 0.
+    char mTableOfContents[99];
+
+    XINGSeeker();
+
+    DISALLOW_EVIL_CONSTRUCTORS(XINGSeeker);
+};
+
+}  // namespace android
+
+#endif  // XING_SEEKER_H_
+
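
The 99-entry mTableOfContents comes from the Xing/Info header: entry i gives, as a value in 0..255, the fraction of the file consumed after roughly (i+1) percent of the duration (the patch skips the first of the nominal 100 entries because it is always 0). Seeking interpolates in that table and scales by the stream's byte size. A standalone sketch of the mapping; the interpolation and rounding details here are illustrative, not lifted from the patch:

    #include <cstdint>

    // Xing TOC seeking: the TOC maps percent-of-duration to percent-of-size
    // scaled to 0..255. "toc" holds entries for 1%..99% as in the patch.
    static int64_t XingOffsetForTime(
            int64_t timeUs, int64_t durationUs, int64_t sizeBytes,
            const unsigned char toc[99]) {
        if (durationUs <= 0) return 0;

        double percent = 100.0 * static_cast<double>(timeUs) / durationUs;
        if (percent < 0.0) percent = 0.0;
        if (percent > 99.0) percent = 99.0;

        int idx = static_cast<int>(percent);                 // 0..99
        double fa = (idx == 0) ? 0.0 : toc[idx - 1];
        double fb = (idx >= 99) ? 256.0 : toc[idx];
        double fx = fa + (fb - fa) * (percent - idx);        // interpolate

        return static_cast<int64_t>(fx / 256.0 * sizeBytes);
    }
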
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index 62cfc36..afff824 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -19,11 +19,23 @@
 #define AVC_UTILS_H_
 
 #include <media/stagefright/foundation/ABuffer.h>
+#include <utils/Errors.h>
 
 namespace android {
 
 struct ABitReader;
 
+enum {
+    kAVCProfileBaseline      = 0x42,
+    kAVCProfileMain          = 0x4d,
+    kAVCProfileExtended      = 0x58,
+    kAVCProfileHigh          = 0x64,
+    kAVCProfileHigh10        = 0x6e,
+    kAVCProfileHigh422       = 0x7a,
+    kAVCProfileHigh444       = 0xf4,
+    kAVCProfileCAVLC444Intra = 0x2c
+};
+
 void FindAVCDimensions(
         const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height);
 
@@ -39,6 +51,12 @@
 
 bool IsIDR(const sp<ABuffer> &accessUnit);
 
+const char *AVCProfileToString(uint8_t profile);
+
+sp<MetaData> MakeAACCodecSpecificData(
+        unsigned profile, unsigned sampling_freq_index,
+        unsigned channel_configuration);
+
 }  // namespace android
 
 #endif  // AVC_UTILS_H_
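
MakeAACCodecSpecificData() presumably packs the standard two-byte MPEG-4 AudioSpecificConfig from its three arguments: five bits of audio object type (profile), four bits of sampling-frequency index and four bits of channel configuration, with the remaining bits zero; that matches the two-byte mAudioSpecificConfigData field in AACEncoder.h above. A self-contained sketch of the packing:

    #include <cstdint>
    #include <cstdio>

    // Two-byte MPEG-4 AudioSpecificConfig:
    //   5 bits object type | 4 bits sampling-frequency index |
    //   4 bits channel configuration | 3 bits zero
    static void PackAudioSpecificConfig(
            unsigned profile, unsigned samplingFreqIndex,
            unsigned channelConfiguration, uint8_t out[2]) {
        out[0] = static_cast<uint8_t>((profile << 3) | (samplingFreqIndex >> 1));
        out[1] = static_cast<uint8_t>(
                ((samplingFreqIndex & 1) << 7) | (channelConfiguration << 3));
    }

    int main() {
        uint8_t csd[2];
        // AAC-LC (object type 2), 44.1 kHz (index 4), stereo (2 channels).
        PackAudioSpecificConfig(2, 4, 2, csd);
        std::printf("%02x %02x\n", csd[0], csd[1]);   // prints: 12 10
        return 0;
    }
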
diff --git a/media/libstagefright/include/stagefright_string.h b/media/libstagefright/include/stagefright_string.h
deleted file mode 100644
index 5dc7116..0000000
--- a/media/libstagefright/include/stagefright_string.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef STRING_H_
-
-#define STRING_H_
-
-#include <utils/String8.h>
-
-namespace android {
-
-class string {
-public:
-    typedef size_t size_type;
-    static size_type npos;
-
-    string();
-    string(const char *s);
-    string(const char *s, size_t length);
-    string(const string &from, size_type start, size_type length = npos);
-
-    const char *c_str() const;
-    size_type size() const;
-
-    void clear();
-    void erase(size_type from, size_type length);
-
-    size_type find(char c) const;
-
-    bool operator<(const string &other) const;
-    bool operator==(const string &other) const;
-
-    string &operator+=(char c);
-
-private:
-    String8 mString;
-};
-
-}  // namespace android
-
-#endif  // STRING_H_
diff --git a/media/libstagefright/matroska/Android.mk b/media/libstagefright/matroska/Android.mk
index 33c9d97..1f1c68b 100644
--- a/media/libstagefright/matroska/Android.mk
+++ b/media/libstagefright/matroska/Android.mk
@@ -2,11 +2,11 @@
 include $(CLEAR_VARS)
 
 LOCAL_SRC_FILES:=                 \
-        MatroskaExtractor.cpp     \
-        mkvparser.cpp             \
+        MatroskaExtractor.cpp
 
 LOCAL_C_INCLUDES:= \
-	$(JNI_H_INCLUDE) \
+        $(JNI_H_INCLUDE) \
+        $(TOP)/external/libvpx/mkvparser \
         $(TOP)/frameworks/base/include/media/stagefright/openmax \
 
 LOCAL_CFLAGS += -Wno-multichar
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 1661130..733de92b 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -58,7 +58,7 @@
     }
 
     virtual int Length(long long* total, long long* available) {
-        off_t size;
+        off64_t size;
         if (mSource->getSize(&size) != OK) {
             return -1;
         }
@@ -707,6 +707,12 @@
     for (size_t index = 0; index < tracks->GetTracksCount(); ++index) {
         const mkvparser::Track *track = tracks->GetTrackByIndex(index);
 
+        if (track == NULL) {
+            // Apparently this is currently valid (if unexpected) behaviour
+            // of the mkv parser lib.
+            continue;
+        }
+
         const char *const codecID = track->GetCodecId();
         LOGV("codec id = %s", codecID);
         LOGV("codec name = %s", track->GetCodecNameAsUTF8());
diff --git a/media/libstagefright/matroska/mkvparser.cpp b/media/libstagefright/matroska/mkvparser.cpp
deleted file mode 100644
index 7448d96..0000000
--- a/media/libstagefright/matroska/mkvparser.cpp
+++ /dev/null
@@ -1,4514 +0,0 @@
-// Copyright (c) 2010 The WebM project authors. All Rights Reserved.
-//
-// Use of this source code is governed by a BSD-style license
-// that can be found in the LICENSE file in the root of the source
-// tree. An additional intellectual property rights grant can be found
-// in the file PATENTS.  All contributing project authors may
-// be found in the AUTHORS file in the root of the source tree.
-
-#include "mkvparser.hpp"
-#include <cassert>
-#include <cstring>
-#include <new>
-//#include <windows.h>
-//#include "odbgstream.hpp"
-//using std::endl;
-
-mkvparser::IMkvReader::~IMkvReader()
-{
-}
-
-
-void mkvparser::GetVersion(int& major, int& minor, int& build, int& revision)
-{
-    major = 1;
-    minor = 0;
-    build = 0;
-    revision = 4;
-}
-
-
-long long mkvparser::ReadUInt(IMkvReader* pReader, long long pos, long& len)
-{
-    assert(pReader);
-    assert(pos >= 0);
-
-    long long total, available;
-
-    long hr = pReader->Length(&total, &available);
-    assert(hr >= 0);
-    assert(pos < available);
-    assert((available - pos) >= 1);  //assume here max u-int len is 8
-
-    unsigned char b;
-
-    hr = pReader->Read(pos, 1, &b);
-    if (hr < 0)
-        return hr;
-
-    assert(hr == 0L);
-
-    if (b & 0x80)       //1000 0000
-    {
-        len = 1;
-        b &= 0x7F;      //0111 1111
-    }
-    else if (b & 0x40)  //0100 0000
-    {
-        len = 2;
-        b &= 0x3F;      //0011 1111
-    }
-    else if (b & 0x20)  //0010 0000
-    {
-        len = 3;
-        b &= 0x1F;      //0001 1111
-    }
-    else if (b & 0x10)  //0001 0000
-    {
-        len = 4;
-        b &= 0x0F;      //0000 1111
-    }
-    else if (b & 0x08)  //0000 1000
-    {
-        len = 5;
-        b &= 0x07;      //0000 0111
-    }
-    else if (b & 0x04)  //0000 0100
-    {
-        len = 6;
-        b &= 0x03;      //0000 0011
-    }
-    else if (b & 0x02)  //0000 0010
-    {
-        len = 7;
-        b &= 0x01;      //0000 0001
-    }
-    else
-    {
-        assert(b & 0x01);  //0000 0001
-        len = 8;
-        b = 0;             //0000 0000
-    }
-
-    assert((available - pos) >= len);
-
-    long long result = b;
-    ++pos;
-    for (long i = 1; i < len; ++i)
-    {
-        hr = pReader->Read(pos, 1, &b);
-
-        if (hr < 0)
-            return hr;
-
-        assert(hr == 0L);
-
-        result <<= 8;
-        result |= b;
-
-        ++pos;
-    }
-
-    return result;
-}

-

-

-long long mkvparser::GetUIntLength(

-    IMkvReader* pReader,

-    long long pos,

-    long& len)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-

-    if (pos >= available)

-        return pos;  //too few bytes available

-

-    unsigned char b;

-

-    hr = pReader->Read(pos, 1, &b);

-

-    if (hr < 0)

-        return hr;

-

-    assert(hr == 0L);

-

-    if (b == 0)  //we can't handle u-int values larger than 8 bytes

-        return E_FILE_FORMAT_INVALID;

-

-    unsigned char m = 0x80;

-    len = 1;

-

-    while (!(b & m))

-    {

-        m >>= 1;

-        ++len;

-    }

-

-    return 0;  //success

-}

-

-

-long long mkvparser::SyncReadUInt(

-    IMkvReader* pReader,

-    long long pos,

-    long long stop,

-    long& len)

-{

-    assert(pReader);

-

-    if (pos >= stop)

-        return E_FILE_FORMAT_INVALID;

-

-    unsigned char b;

-

-    long hr = pReader->Read(pos, 1, &b);

-

-    if (hr < 0)

-        return hr;

-

-    if (hr != 0L)

-        return E_BUFFER_NOT_FULL;

-

-    if (b == 0)  //we can't handle u-int values larger than 8 bytes

-        return E_FILE_FORMAT_INVALID;

-

-    unsigned char m = 0x80;

-    len = 1;

-

-    while (!(b & m))

-    {

-        m >>= 1;

-        ++len;

-    }

-

-    if ((pos + len) > stop)

-        return E_FILE_FORMAT_INVALID;

-

-    long long result = b & (~m);

-    ++pos;

-

-    for (int i = 1; i < len; ++i)

-    {

-        hr = pReader->Read(pos, 1, &b);

-

-        if (hr < 0)

-            return hr;

-

-        if (hr != 0L)

-            return E_BUFFER_NOT_FULL;

-

-        result <<= 8;

-        result |= b;

-

-        ++pos;

-    }

-

-    return result;

-}

-

-

-long long mkvparser::UnserializeUInt(

-    IMkvReader* pReader,

-    long long pos,

-    long long size)

-{

-    assert(pReader);

-    assert(pos >= 0);

-    assert(size > 0);

-    assert(size <= 8);

-

-    long long result = 0;

-

-    for (long long i = 0; i < size; ++i)

-    {

-        unsigned char b;

-

-        const long hr = pReader->Read(pos, 1, &b);

-

-        if (hr < 0)

-            return hr;

-        result <<= 8;

-        result |= b;

-

-        ++pos;

-    }

-

-    return result;

-}

-

-

-float mkvparser::Unserialize4Float(

-    IMkvReader* pReader,

-    long long pos)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-    assert((pos + 4) <= available);

-

-    float result;

-

-    unsigned char* const p = (unsigned char*)&result;

-    unsigned char* q = p + 4;

-

-    for (;;)

-    {

-        hr = pReader->Read(pos, 1, --q);

-        assert(hr == 0L);

-

-        if (q == p)

-            break;

-

-        ++pos;

-    }

-

-    return result;

-}

-

-

-double mkvparser::Unserialize8Double(

-    IMkvReader* pReader,

-    long long pos)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    double result;

-

-    unsigned char* const p = (unsigned char*)&result;

-    unsigned char* q = p + 8;

-

-    for (;;)

-    {

-        const long hr = pReader->Read(pos, 1, --q);

-        assert(hr == 0L);

-

-        if (q == p)

-            break;

-

-        ++pos;

-    }

-

-    return result;

-}

-

-signed char mkvparser::Unserialize1SInt(

-    IMkvReader* pReader,

-    long long pos)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr == 0);

-    assert(available <= total);

-    assert(pos < available);

-

-    signed char result;

-

-    hr = pReader->Read(pos, 1, (unsigned char*)&result);

-    assert(hr == 0);

-

-    return result;

-}

-

-short mkvparser::Unserialize2SInt(

-    IMkvReader* pReader,

-    long long pos)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-    assert((pos + 2) <= available);

-

-    short result;

-

-    unsigned char* const p = (unsigned char*)&result;

-    unsigned char* q = p + 2;

-

-    for (;;)

-    {

-        hr = pReader->Read(pos, 1, --q);

-        assert(hr == 0L);

-

-        if (q == p)

-            break;

-

-        ++pos;

-    }

-

-    return result;

-}

-

-

-bool mkvparser::Match(

-    IMkvReader* pReader,

-    long long& pos,

-    unsigned long id_,

-    long long& val)

-

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-

-    long len;

-

-    const long long id = ReadUInt(pReader, pos, len);

-    assert(id >= 0);

-    assert(len > 0);

-    assert(len <= 8);

-    assert((pos + len) <= available);

-

-    if ((unsigned long)id != id_)

-        return false;

-

-    pos += len;  //consume id

-

-    const long long size = ReadUInt(pReader, pos, len);

-    assert(size >= 0);

-    assert(size <= 8);

-    assert(len > 0);

-    assert(len <= 8);

-    assert((pos + len) <= available);

-

-    pos += len;  //consume length of size of payload

-

-    val = UnserializeUInt(pReader, pos, size);

-    assert(val >= 0);

-

-    pos += size;  //consume size of payload

-

-    return true;

-}

-

-bool mkvparser::Match(

-    IMkvReader* pReader,

-    long long& pos,

-    unsigned long id_,

-    char*& val)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-

-    long len;

-

-    const long long id = ReadUInt(pReader, pos, len);

-    assert(id >= 0);

-    assert(len > 0);

-    assert(len <= 8);

-    assert((pos + len) <= available);

-

-    if ((unsigned long)id != id_)

-        return false;

-

-    pos += len;  //consume id

-

-    const long long size_ = ReadUInt(pReader, pos, len);

-    assert(size_ >= 0);

-    assert(len > 0);

-    assert(len <= 8);

-    assert((pos + len) <= available);

-

-    pos += len;  //consume length of size of payload

-    assert((pos + size_) <= available);

-

-    const size_t size = static_cast<size_t>(size_);

-    val = new char[size+1];

-

-    for (size_t i = 0; i < size; ++i)

-    {

-        char c;

-

-        hr = pReader->Read(pos + i, 1, (unsigned char*)&c);

-        assert(hr == 0L);

-

-        val[i] = c;

-

-        if (c == '\0')

-            break;

-

-    }

-

-    val[size] = '\0';

-    pos += size_;  //consume size of payload

-

-    return true;

-}

-

-bool mkvparser::Match(

-    IMkvReader* pReader,

-    long long& pos,

-    unsigned long id_,

-    unsigned char*& buf,

-    size_t& buflen)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-

-    long len;

-    const long long id = ReadUInt(pReader, pos, len);

-    assert(id >= 0);

-    assert(len > 0);

-    assert(len <= 8);

-    assert((pos + len) <= available);

-

-    if ((unsigned long)id != id_)

-        return false;

-

-    pos += len;  //consume id

-

-    const long long size_ = ReadUInt(pReader, pos, len);

-    assert(size_ >= 0);

-    assert(len > 0);

-    assert(len <= 8);

-    assert((pos + len) <= available);

-

-    pos += len;  //consume length of size of payload

-    assert((pos + size_) <= available);

-

-    const long buflen_ = static_cast<long>(size_);

-

-    buf = new (std::nothrow) unsigned char[buflen_];

-    assert(buf);  //TODO

-

-    hr = pReader->Read(pos, buflen_, buf);

-    assert(hr == 0L);

-

-    buflen = buflen_;

-

-    pos += size_;  //consume size of payload

-    return true;

-}

-

-

-bool mkvparser::Match(

-    IMkvReader* pReader,

-    long long& pos,

-    unsigned long id_,

-    double& val)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-    long idlen;

-    const long long id = ReadUInt(pReader, pos, idlen);

-    assert(id >= 0);  //TODO

-

-    if ((unsigned long)id != id_)

-        return false;

-

-    long sizelen;

-    const long long size = ReadUInt(pReader, pos + idlen, sizelen);

-

-    switch (size)

-    {

-        case 4:

-        case 8:

-            break;

-        default:

-            return false;

-    }

-

-    pos += idlen + sizelen;  //consume id and size fields

-    assert((pos + size) <= available);

-

-    if (size == 4)

-        val = Unserialize4Float(pReader, pos);

-    else

-    {

-        assert(size == 8);

-        val = Unserialize8Double(pReader, pos);

-    }

-

-    pos += size;  //consume size of payload

-

-    return true;

-}

-

-

-bool mkvparser::Match(

-    IMkvReader* pReader,

-    long long& pos,

-    unsigned long id_,

-    short& val)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-

-    long len;

-    const long long id = ReadUInt(pReader, pos, len);

-    assert(id >= 0);

-    assert((pos + len) <= available);

-

-    if ((unsigned long)id != id_)

-        return false;

-

-    pos += len;  //consume id

-

-    const long long size = ReadUInt(pReader, pos, len);

-    assert(size <= 2);

-    assert((pos + len) <= available);

-

-    pos += len;  //consume length of size of payload

-    assert((pos + size) <= available);

-

-    //TODO:

-    // Generalize this to work for any size signed int

-    if (size == 1)

-        val = Unserialize1SInt(pReader, pos);

-    else

-        val = Unserialize2SInt(pReader, pos);

-

-    pos += size;  //consume size of payload

-

-    return true;

-}

-

-

-namespace mkvparser

-{

-

-EBMLHeader::EBMLHeader():

-    m_docType(NULL)

-{

-}

-

-EBMLHeader::~EBMLHeader()

-{

-    delete[] m_docType;

-}

-

-long long EBMLHeader::Parse(

-    IMkvReader* pReader,

-    long long& pos)

-{

-    assert(pReader);

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-

-    if (hr < 0)

-        return hr;

-

-    pos = 0;

-    long long end = (1024 < available)? 1024: available;

-

-    for (;;)

-    {

-        unsigned char b = 0;

-

-        while (pos < end)

-        {

-            hr = pReader->Read(pos, 1, &b);

-

-            if (hr < 0)

-                return hr;

-

-            if (b == 0x1A)

-                break;

-

-            ++pos;

-        }

-

-        if (b != 0x1A)

-        {

-            if ((pos >= 1024) ||

-                (available >= total) ||

-                ((total - available) < 5))

-                  return -1;

-

-            return available + 5;  //5 = 4-byte ID + 1st byte of size

-        }

-

-        if ((total - pos) < 5)

-            return E_FILE_FORMAT_INVALID;

-

-        if ((available - pos) < 5)

-            return pos + 5;  //try again later

-

-        long len;

-

-        const long long result = ReadUInt(pReader, pos, len);

-

-        if (result < 0)  //error

-            return result;

-

-        if (result == 0x0A45DFA3)  //ReadId masks-off length indicator bits

-        {

-            assert(len == 4);

-            pos += len;

-            break;

-        }

-

-        ++pos;  //throw away just the 0x1A byte, and try again

-    }

-

-    long len;

-    long long result = GetUIntLength(pReader, pos, len);

-

-    if (result < 0)  //error

-        return result;

-

-    if (result > 0)  //need more data

-        return result;

-

-    assert(len > 0);

-    assert(len <= 8);

-

-    if ((total -  pos) < len)

-        return E_FILE_FORMAT_INVALID;

-    if ((available - pos) < len)

-        return pos + len;  //try again later

-

-    result = ReadUInt(pReader, pos, len);

-

-    if (result < 0)  //error

-        return result;

-

-    pos += len;  //consume u-int

-

-    if ((total - pos) < result)

-        return E_FILE_FORMAT_INVALID;

-

-    if ((available - pos) < result)

-        return pos + result;

-

-    end = pos + result;

-

-    m_version = 1;

-    m_readVersion = 1;

-    m_maxIdLength = 4;

-    m_maxSizeLength = 8;

-    m_docTypeVersion = 1;

-    m_docTypeReadVersion = 1;

-

-    while (pos < end)

-    {

-        if (Match(pReader, pos, 0x0286, m_version))

-            ;

-        else if (Match(pReader, pos, 0x02F7, m_readVersion))

-            ;

-        else if (Match(pReader, pos, 0x02F2, m_maxIdLength))

-            ;

-        else if (Match(pReader, pos, 0x02F3, m_maxSizeLength))

-            ;

-        else if (Match(pReader, pos, 0x0282, m_docType))

-            ;

-        else if (Match(pReader, pos, 0x0287, m_docTypeVersion))

-            ;

-        else if (Match(pReader, pos, 0x0285, m_docTypeReadVersion))

-            ;

-        else

-        {

-            result = ReadUInt(pReader, pos, len);

-            assert(result > 0);

-            assert(len > 0);

-            assert(len <= 8);

-

-            pos += len;

-            assert(pos < end);

-

-            result = ReadUInt(pReader, pos, len);

-            assert(result >= 0);

-            assert(len > 0);

-            assert(len <= 8);

-

-            pos += len + result;

-            assert(pos <= end);

-        }

-    }

-

-    assert(pos == end);

-

-    return 0;

-}

-

-

-Segment::Segment(

-    IMkvReader* pReader,

-    long long start,

-    long long size) :

-    m_pReader(pReader),

-    m_start(start),

-    m_size(size),

-    m_pos(start),

-    m_pInfo(NULL),

-    m_pTracks(NULL),

-    m_pCues(NULL),

-    m_clusters(NULL),

-    m_clusterCount(0),

-    m_clusterPreloadCount(0),

-    m_clusterSize(0)

-{

-}

-

-

-Segment::~Segment()

-{

-    const long count = m_clusterCount + m_clusterPreloadCount;

-

-    Cluster** i = m_clusters;

-    Cluster** j = m_clusters + count;

-

-    while (i != j)

-    {

-        Cluster* const p = *i++;

-        assert(p);

-

-        delete p;

-    }

-

-    delete[] m_clusters;

-

-    delete m_pTracks;

-    delete m_pInfo;

-    delete m_pCues;

-}

-

-

-long long Segment::CreateInstance(

-    IMkvReader* pReader,

-    long long pos,

-    Segment*& pSegment)

-{

-    assert(pReader);

-    assert(pos >= 0);

-

-    pSegment = NULL;

-

-    long long total, available;

-

-    long hr = pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-

-    //I would assume that in practice this loop would execute

-    //exactly once, but we allow for other elements (e.g. Void)

-    //to immediately follow the EBML header.  This is fine for

-    //the source filter case (since the entire file is available),

-    //but in the splitter case over a network we should probably

-    //just give up early.  We could for example decide only to

-    //execute this loop a maximum of, say, 10 times.

-

-    while (pos < total)

-    {

-        //Read ID

-

-        long len;

-        long long result = GetUIntLength(pReader, pos, len);

-

-        if (result)  //error, or too few available bytes

-            return result;

-

-        if ((pos + len) > total)

-            return E_FILE_FORMAT_INVALID;

-

-        if ((pos + len) > available)

-            return pos + len;

-

-        //TODO: if we liberalize the behavior of ReadUInt, we can

-        //probably eliminate having to use GetUIntLength here.

-        const long long id = ReadUInt(pReader, pos, len);

-

-        if (id < 0)  //error

-            return id;

-

-        pos += len;  //consume ID

-

-        //Read Size

-

-        result = GetUIntLength(pReader, pos, len);

-

-        if (result)  //error, or too few available bytes

-            return result;

-

-        if ((pos + len) > total)

-            return E_FILE_FORMAT_INVALID;

-

-        if ((pos + len) > available)

-            return pos + len;

-

-        //TODO: if we liberalize the behavior of ReadUInt, we can

-        //probably eliminate having to use GetUIntLength here.

-        const long long size = ReadUInt(pReader, pos, len);

-

-        if (size < 0)

-            return size;

-

-        pos += len;  //consume length of size of element

-

-        //Pos now points to start of payload

-

-        if ((pos + size) > total)

-            return E_FILE_FORMAT_INVALID;

-

-        if (id == 0x08538067)  //Segment ID

-        {

-            pSegment = new  Segment(pReader, pos, size);

-            assert(pSegment);  //TODO

-

-            return 0;    //success

-        }

-

-        pos += size;  //consume payload

-    }

-

-    assert(pos == total);

-

-    pSegment = new Segment(pReader, pos, 0);

-    assert(pSegment);  //TODO

-

-    return 0;  //success (sort of)

-}

-

-

-long long Segment::ParseHeaders()

-{

-    //Outermost (level 0) segment object has been constructed,

-    //and pos designates start of payload.  We need to find the

-    //inner (level 1) elements.

-    long long total, available;

-

-    long hr = m_pReader->Length(&total, &available);

-    assert(hr >= 0);

-    assert(available <= total);

-

-    const long long stop = m_start + m_size;

-    assert(stop <= total);

-    assert(m_pos <= stop);

-

-    bool bQuit = false;

-

-    while ((m_pos < stop) && !bQuit)

-    {

-        long long pos = m_pos;

-

-        long len;

-        long long result = GetUIntLength(m_pReader, pos, len);

-

-        if (result)  //error, or too few available bytes

-            return result;

-

-        if ((pos + len) > stop)

-            return E_FILE_FORMAT_INVALID;

-

-        if ((pos + len) > available)

-            return pos + len;

-

-        const long long idpos = pos;

-        const long long id = ReadUInt(m_pReader, idpos, len);

-

-        if (id < 0)  //error

-            return id;

-

-        pos += len;  //consume ID

-

-        //Read Size

-        result = GetUIntLength(m_pReader, pos, len);

-

-        if (result)  //error, or too few available bytes

-            return result;

-

-        if ((pos + len) > stop)

-            return E_FILE_FORMAT_INVALID;

-

-        if ((pos + len) > available)

-            return pos + len;

-

-        const long long size = ReadUInt(m_pReader, pos, len);

-

-        if (size < 0)

-            return size;

-

-        pos += len;  //consume length of size of element

-

-        //Pos now points to start of payload

-

-        if ((pos + size) > stop)

-            return E_FILE_FORMAT_INVALID;

-

-        //We read EBML elements either in total or nothing at all.

-

-        if ((pos + size) > available)

-            return pos + size;

-

-        if (id == 0x0549A966)  //Segment Info ID

-        {

-            assert(m_pInfo == NULL);

-

-            m_pInfo = new SegmentInfo(this, pos, size);

-            assert(m_pInfo);  //TODO

-        }

-        else if (id == 0x0654AE6B)  //Tracks ID

-        {

-            assert(m_pTracks == NULL);

-

-            m_pTracks = new Tracks(this, pos, size);

-            assert(m_pTracks);  //TODO

-        }

-        else if (id == 0x0C53BB6B)  //Cues ID

-        {

-            if (m_pCues == NULL)

-            {

-                m_pCues = new Cues(this, pos, size);

-                assert(m_pCues);  //TODO

-            }

-        }

-        else if (id == 0x014D9B74)  //SeekHead ID

-        {

-            ParseSeekHead(pos, size);

-        }

-        else if (id == 0x0F43B675)  //Cluster ID

-        {

-            bQuit = true;

-        }

-

-        if (!bQuit)

-            m_pos = pos + size;  //consume payload

-    }

-

-    assert(m_pos <= stop);

-

-    if (m_pInfo == NULL)  //TODO: liberalize this behavior

-        return E_FILE_FORMAT_INVALID;

-

-    if (m_pTracks == NULL)

-        return E_FILE_FORMAT_INVALID;

-

-    return 0;  //success

-}

-

-

-#if 0

-long Segment::ParseCluster(Cluster*& pCluster, long long& pos_) const

-{

-    pCluster = NULL;

-    pos_ = -1;

-

-    const long long stop = m_start + m_size;

-    assert(m_pos <= stop);

-

-    long long pos = m_pos;

-    long long off = -1;

-

-    while (pos < stop)

-    {

-        long len;

-        const long long idpos = pos;

-

-        const long long id = SyncReadUInt(m_pReader, pos, stop, len);

-

-        if (id < 0)  //error

-            return static_cast<long>(id);

-

-        if (id == 0)

-            return E_FILE_FORMAT_INVALID;

-

-        pos += len;  //consume id

-        assert(pos < stop);

-

-        const long long size = SyncReadUInt(m_pReader, pos, stop, len);

-

-        if (size < 0)  //error

-            return static_cast<long>(size);

-

-        pos += len;  //consume size

-        assert(pos <= stop);

-

-        if (size == 0)  //weird

-            continue;

-

-        //pos now points to start of payload

-

-        pos += size;  //consume payload

-        assert(pos <= stop);

-

-        if (id == 0x0F43B675)  //Cluster ID

-        {

-            off = idpos - m_start;  // >= 0 means we found a cluster

-            break;

-        }

-    }

-

-    assert(pos <= stop);

-

-    //Indicate to caller how much of file has been consumed. This is

-    //used later in AddCluster to adjust the current parse position

-    //(the value cached in the segment object itself) to the

-    //file position value just past the cluster we parsed.

-

-    if (off < 0)  //we did not found any more clusters

-    {

-        pos_ = stop;  //pos_ >= 0 here means EOF (cluster is NULL)

-        return 0;     //TODO: confirm this return value

-    }

-

-    //We found a cluster.  Now read something, to ensure that it is

-    //fully loaded in the network cache.

-

-    if (pos >= stop)  //we parsed the entire segment

-    {

-        //We did find a cluster, but it was very last element in the segment.

-        //Our preference is that the loop above runs 1 1/2 times:

-        //the first pass finds the cluster, and the second pass

-        //finds the element the follows the cluster.  In this case, however,

-        //we reached the end of the file without finding another element,

-        //so we didn't actually read anything yet associated with "end of the

-        //cluster".  And we must perform an actual read, in order

-        //to guarantee that all of the data that belongs to this

-        //cluster has been loaded into the network cache.  So instead

-        //of reading the next element that follows the cluster, we

-        //read the last byte of the cluster (which is also the last

-        //byte in the file).

-

-        //Read the last byte of the file. (Reading 0 bytes at pos

-        //might work too -- it would depend on how the reader is

-        //implemented.  Here we take the more conservative approach,

-        //since this makes fewer assumptions about the network

-        //reader abstraction.)

-

-        unsigned char b;

-

-        const int result = m_pReader->Read(pos - 1, 1, &b);

-        assert(result == 0);

-

-        pos_ = stop;

-    }

-    else

-    {

-        long len;

-        const long long idpos = pos;

-

-        const long long id = SyncReadUInt(m_pReader, pos, stop, len);

-

-        if (id < 0)  //error

-            return static_cast<long>(id);

-

-        if (id == 0)

-            return E_BUFFER_NOT_FULL;

-

-        pos += len;  //consume id

-        assert(pos < stop);

-

-        const long long size = SyncReadUInt(m_pReader, pos, stop, len);

-

-        if (size < 0)  //error

-            return static_cast<long>(size);

-

-        pos_ = idpos;

-    }

-

-    //We found a cluster, and it has been completely loaded into the

-    //network cache.  (We can guarantee this because we actually read

-    //the EBML tag that follows the cluster, or, if we reached EOF,

-    //because we actually read the last byte of the cluster).

-

-    Segment* const this_ = const_cast<Segment*>(this);

-

-    pCluster = Cluster::Parse(this_, m_clusterCount, off);

-    assert(pCluster);

-    assert(pCluster->m_index == m_clusterCount);

-

-    return 0;

-}

-

-

-bool Segment::AddCluster(Cluster* pCluster, long long pos)

-{

-    assert(pos >= m_start);

-

-    const long long stop = m_start + m_size;

-    assert(pos <= stop);

-

-    if (pCluster)

-    {

-        AppendCluster(pCluster);

-        assert(m_clusters);

-        assert(m_clusterSize > pCluster->m_index);

-        assert(m_clusters[pCluster->m_index] == pCluster);

-    }

-

-    m_pos = pos;  //m_pos >= stop is now we know we have all clusters

-

-    return (pos >= stop);

-}

-#endif

-

-

-long Segment::LoadCluster()

-{

-    const long long stop = m_start + m_size;

-

-    while (m_pos < stop)

-    {

-        long long pos = m_pos;

-

-        long len;

-

-        long long result = GetUIntLength(m_pReader, pos, len);

-

-        if (result < 0)  //error

-            return static_cast<long>(result);

-

-        if ((pos + len) > stop)

-            return E_FILE_FORMAT_INVALID;

-

-        const long long idpos = pos;

-        const long long id = ReadUInt(m_pReader, idpos, len);

-

-        if (id < 0)  //error

-            return static_cast<long>(id);

-

-        pos += len;  //consume ID

-

-        //Read Size

-        result = GetUIntLength(m_pReader, pos, len);

-

-        if (result < 0)  //error

-            return static_cast<long>(result);

-

-        if ((pos + len) > stop)

-            return E_FILE_FORMAT_INVALID;

-

-        const long long size = ReadUInt(m_pReader, pos, len);

-

-        if (size < 0)  //error

-            return static_cast<long>(size);

-

-        pos += len;  //consume length of size of element

-

-        if (size == 0)  //weird

-        {

-            m_pos = pos;

-            continue;

-        }

-

-        //Pos now points to start of payload

-

-        if ((pos + size) > stop)

-            return E_FILE_FORMAT_INVALID;

-

-        if (id == 0x0C53BB6B)  //Cues ID

-        {

-            if (m_pCues == NULL)

-            {

-                m_pCues = new Cues(this, pos, size);

-                assert(m_pCues);  //TODO

-            }

-

-            m_pos = pos + size;  //consume payload

-            continue;

-        }

-

-        if (id != 0x0F43B675)  //Cluster ID

-        {

-            m_pos = pos + size;  //consume payload

-            continue;

-        }

-

-        const long idx = m_clusterCount;

-        const long long idoff = idpos - m_start;

-

-        if (m_clusterPreloadCount > 0)

-        {

-            assert(idx < m_clusterSize);

-

-            Cluster* const pCluster = m_clusters[idx];

-            assert(pCluster);

-            assert(pCluster->m_index < 0);

-

-            const long long off_ = pCluster->m_pos;

-            assert(off_);

-

-            const long long off = off_ * ((off_ >= 0) ? 1 : -1);

-            assert(idoff <= off);

-

-            if (idoff == off)  //cluster has been preloaded already

-            {

-                pCluster->m_index = idx;

-                ++m_clusterCount;

-                --m_clusterPreloadCount;

-

-                m_pos = pos + size;  //consume payload

-                break;

-            }

-        }

-

-        Cluster* const pCluster = Cluster::Parse(this, idx, idoff);

-        assert(pCluster);

-        assert(pCluster->m_index == idx);

-

-        AppendCluster(pCluster);

-        assert(m_clusters);

-        assert(idx < m_clusterSize);

-        assert(m_clusters[idx] == pCluster);

-

-        m_pos = pos + size;  //consume payload

-        break;

-    }

-

-    assert(m_pos <= stop);

-    return 0;

-}

-

-

-void Segment::AppendCluster(Cluster* pCluster)

-{

-    assert(pCluster);

-    assert(pCluster->m_index >= 0);

-

-    const long count = m_clusterCount + m_clusterPreloadCount;

-

-    long& size = m_clusterSize;

-    assert(size >= count);

-

-    const long idx = pCluster->m_index;

-    assert(idx == m_clusterCount);

-

-    if (count >= size)

-    {

-        long n;

-

-        if (size > 0)

-            n = 2 * size;

-        else if (m_pInfo == 0)

-            n = 2048;

-        else

-        {

-            const long long ns = m_pInfo->GetDuration();

-

-            if (ns <= 0)

-                n = 2048;

-            else

-            {

-                const long long sec = (ns + 999999999LL) / 1000000000LL;

-                n = static_cast<long>(sec);

-            }

-        }

-

-        Cluster** const qq = new Cluster*[n];

-        Cluster** q = qq;

-

-        Cluster** p = m_clusters;

-        Cluster** const pp = p + count;

-

-        while (p != pp)

-            *q++ = *p++;

-

-        delete[] m_clusters;

-

-        m_clusters = qq;

-        size = n;

-    }

-

-    if (m_clusterPreloadCount > 0)

-    {

-        assert(m_clusters);

-

-        Cluster** const p = m_clusters + m_clusterCount;

-        assert(*p);

-        assert((*p)->m_index < 0);

-

-        Cluster** q = p + m_clusterPreloadCount;

-        assert(q < (m_clusters + size));

-

-        for (;;)

-        {

-            Cluster** const qq = q - 1;

-            assert((*qq)->m_index < 0);

-

-            *q = *qq;

-            q = qq;

-

-            if (q == p)

-                break;

-        }

-    }

-

-    m_clusters[idx] = pCluster;

-    ++m_clusterCount;

-}

-

-

-void Segment::PreloadCluster(Cluster* pCluster, ptrdiff_t idx)

-{

-    assert(pCluster);

-    assert(pCluster->m_index < 0);

-    assert(idx >= m_clusterCount);

-

-    const long count = m_clusterCount + m_clusterPreloadCount;

-

-    long& size = m_clusterSize;

-    assert(size >= count);

-

-    if (count >= size)

-    {

-        long n;

-

-        if (size > 0)

-            n = 2 * size;

-        else if (m_pInfo == 0)

-            n = 2048;

-        else

-        {

-            const long long ns = m_pInfo->GetDuration();

-

-            if (ns <= 0)

-                n = 2048;

-            else

-            {

-                const long long sec = (ns + 999999999LL) / 1000000000LL;

-                n = static_cast<long>(sec);

-            }

-        }

-

-        Cluster** const qq = new Cluster*[n];

-        Cluster** q = qq;

-

-        Cluster** p = m_clusters;

-        Cluster** const pp = p + count;

-

-        while (p != pp)

-            *q++ = *p++;

-

-        delete[] m_clusters;

-

-        m_clusters = qq;

-        size = n;

-    }

-

-    assert(m_clusters);

-

-    Cluster** const p = m_clusters + idx;

-

-    Cluster** q = m_clusters + count;

-    assert(q >= p);

-    assert(q < (m_clusters + size));

-

-    while (q > p)

-    {

-        Cluster** const qq = q - 1;

-        assert((*qq)->m_index < 0);

-

-        *q = *qq;

-        q = qq;

-    }

-

-    m_clusters[idx] = pCluster;

-    ++m_clusterPreloadCount;

-}

-

-

-long Segment::Load()

-{

-    assert(m_clusters == NULL);

-    assert(m_clusterSize == 0);

-    assert(m_clusterCount == 0);

-

-    //Outermost (level 0) segment object has been constructed,

-    //and pos designates start of payload.  We need to find the

-    //inner (level 1) elements.

-    const long long stop = m_start + m_size;

-

-#ifdef _DEBUG  //TODO: this is really Microsoft-specific

-    {

-        long long total, available;

-

-        long hr = m_pReader->Length(&total, &available);

-        assert(hr >= 0);

-        assert(available >= total);

-        assert(stop <= total);

-    }

-#endif

-

-    while (m_pos < stop)

-    {

-        long long pos = m_pos;

-

-        long len;

-

-        long long result = GetUIntLength(m_pReader, pos, len);

-

-        if (result < 0)  //error

-            return static_cast<long>(result);

-

-        if ((pos + len) > stop)

-            return E_FILE_FORMAT_INVALID;

-

-        const long long idpos = pos;

-        const long long id = ReadUInt(m_pReader, idpos, len);

-

-        if (id < 0)  //error

-            return static_cast<long>(id);

-

-        pos += len;  //consume ID

-

-        //Read Size

-        result = GetUIntLength(m_pReader, pos, len);

-

-        if (result < 0)  //error

-            return static_cast<long>(result);

-

-        if ((pos + len) > stop)

-            return E_FILE_FORMAT_INVALID;

-

-        const long long size = ReadUInt(m_pReader, pos, len);

-

-        if (size < 0)  //error

-            return static_cast<long>(size);

-

-        pos += len;  //consume length of size of element

-

-        //Pos now points to start of payload

-

-        if ((pos + size) > stop)

-            return E_FILE_FORMAT_INVALID;

-

-        if (id == 0x0F43B675)  //Cluster ID

-        {

-            const long idx = m_clusterCount;

-            const long long off = idpos - m_start;

-

-            Cluster* const pCluster = Cluster::Parse(this, idx, off);

-            assert(pCluster);

-            assert(pCluster->m_index == idx);

-

-            AppendCluster(pCluster);

-            assert(m_clusters);

-            assert(m_clusterSize > idx);

-            assert(m_clusters[idx] == pCluster);

-        }

-        else if (id == 0x0C53BB6B)  //Cues ID

-        {

-            assert(m_pCues == NULL);

-

-            m_pCues = new Cues(this, pos, size);

-            assert(m_pCues);  //TODO

-        }

-        else if (id == 0x0549A966)  //SegmentInfo ID

-        {

-            assert(m_pInfo == NULL);

-

-            m_pInfo = new  SegmentInfo(this, pos, size);

-            assert(m_pInfo);

-        }

-        else if (id == 0x0654AE6B)  //Tracks ID

-        {

-            assert(m_pTracks == NULL);

-

-            m_pTracks = new Tracks(this, pos, size);

-            assert(m_pTracks);  //TODO

-        }

-

-        m_pos = pos + size;  //consume payload

-    }

-

-    assert(m_pos >= stop);

-

-    if (m_pInfo == NULL)

-        return E_FILE_FORMAT_INVALID;  //TODO: ignore this case?

-

-    if (m_pTracks == NULL)

-        return E_FILE_FORMAT_INVALID;

-

-    if (m_clusters == NULL)  //TODO: ignore this case?

-        return E_FILE_FORMAT_INVALID;

-

-    //TODO: decide whether we require Cues element

-    //if (m_pCues == NULL)

-    //   return E_FILE_FORMAT_INVALID;

-

-    return 0;

-}

-

-

-void Segment::ParseSeekHead(long long start, long long size_)

-{

-    long long pos = start;

-    const long long stop = start + size_;

-

-    while (pos < stop)

-    {

-        long len;

-

-        const long long id = ReadUInt(m_pReader, pos, len);

-        assert(id >= 0);  //TODO

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume ID

-

-        const long long size = ReadUInt(m_pReader, pos, len);

-        assert(size >= 0);

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume Size field

-        assert((pos + size) <= stop);

-

-        if (id == 0x0DBB)  //SeekEntry ID

-            ParseSeekEntry(pos, size);

-

-        pos += size;  //consume payload

-        assert(pos <= stop);

-    }

-

-    assert(pos == stop);

-}

-

-

-void Segment::ParseCues(long long off)

-{

-    if (m_pCues)

-        return;

-

-    //odbgstream os;

-    //os << "Segment::ParseCues (begin)" << endl;

-

-    long long pos = m_start + off;

-    const long long stop = m_start + m_size;

-

-    long len;

-

-    long long result = GetUIntLength(m_pReader, pos, len);

-    assert(result == 0);

-    assert((pos + len) <= stop);

-

-    const long long idpos = pos;

-

-    const long long id = ReadUInt(m_pReader, idpos, len);

-    assert(id == 0x0C53BB6B);  //Cues ID

-

-    pos += len;  //consume ID

-    assert(pos < stop);

-

-    //Read Size

-

-    result = GetUIntLength(m_pReader, pos, len);

-    assert(result == 0);

-    assert((pos + len) <= stop);

-

-    const long long size = ReadUInt(m_pReader, pos, len);

-    assert(size >= 0);

-

-    pos += len;  //consume length of size of element

-    assert((pos + size) <= stop);

-

-    //Pos now points to start of payload

-

-    m_pCues = new Cues(this, pos, size);

-    assert(m_pCues);  //TODO

-

-    //os << "Segment::ParseCues (end)" << endl;

-}

-

-

-void Segment::ParseSeekEntry(

-   long long start,

-   long long size_)

-{

-    long long pos = start;

-

-    const long long stop = start + size_;

-

-    long len;

-

-    const long long seekIdId = ReadUInt(m_pReader, pos, len);

-    //seekIdId;

-    assert(seekIdId == 0x13AB);  //SeekID ID

-    assert((pos + len) <= stop);

-

-    pos += len;  //consume id

-

-    const long long seekIdSize = ReadUInt(m_pReader, pos, len);

-    assert(seekIdSize >= 0);

-    assert((pos + len) <= stop);

-

-    pos += len;  //consume size

-

-    const long long seekId = ReadUInt(m_pReader, pos, len);  //payload

-    assert(seekId >= 0);

-    assert(len == seekIdSize);

-    assert((pos + len) <= stop);

-

-    pos += seekIdSize;  //consume payload

-

-    const long long seekPosId = ReadUInt(m_pReader, pos, len);

-    //seekPosId;

-    assert(seekPosId == 0x13AC);  //SeekPos ID

-    assert((pos + len) <= stop);

-

-    pos += len;  //consume id

-

-    const long long seekPosSize = ReadUInt(m_pReader, pos, len);

-    assert(seekPosSize >= 0);

-    assert((pos + len) <= stop);

-

-    pos += len;  //consume size

-    assert((pos + seekPosSize) <= stop);

-

-    const long long seekOff = UnserializeUInt(m_pReader, pos, seekPosSize);

-    assert(seekOff >= 0);

-    assert(seekOff < m_size);

-

-    pos += seekPosSize;  //consume payload

-    assert(pos == stop);

-

-    const long long seekPos = m_start + seekOff;

-    assert(seekPos < (m_start + m_size));

-

-    if (seekId == 0x0C53BB6B)  //Cues ID

-        ParseCues(seekOff);

-}

-

-

-Cues::Cues(Segment* pSegment, long long start_, long long size_) :

-    m_pSegment(pSegment),

-    m_start(start_),

-    m_size(size_),

-    m_cue_points(NULL),

-    m_count(0),

-    m_preload_count(0),

-    m_pos(start_)

-{

-}

-

-

-Cues::~Cues()

-{

-    const size_t n = m_count + m_preload_count;

-

-    CuePoint** p = m_cue_points;

-    CuePoint** const q = p + n;

-

-    while (p != q)

-    {

-        CuePoint* const pCP = *p++;

-        assert(pCP);

-

-        delete pCP;

-    }

-

-    delete[] m_cue_points;

-}

-

-

-void Cues::Init() const

-{

-    if (m_cue_points)

-        return;

-

-    assert(m_count == 0);

-    assert(m_preload_count == 0);

-

-    IMkvReader* const pReader = m_pSegment->m_pReader;

-

-    const long long stop = m_start + m_size;

-    long long pos = m_start;

-

-    size_t cue_points_size = 0;

-

-    while (pos < stop)

-    {

-        const long long idpos = pos;

-

-        long len;

-

-        const long long id = ReadUInt(pReader, pos, len);

-        assert(id >= 0);  //TODO

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume ID

-

-        const long long size = ReadUInt(pReader, pos, len);

-        assert(size >= 0);

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume Size field

-        assert((pos + size) <= stop);

-

-        if (id == 0x3B)  //CuePoint ID

-            PreloadCuePoint(cue_points_size, idpos);

-

-        pos += size;  //consume payload

-        assert(pos <= stop);

-    }

-}

-

-

-void Cues::PreloadCuePoint(

-    size_t& cue_points_size,

-    long long pos) const

-{

-    assert(m_count == 0);

-

-    if (m_preload_count >= cue_points_size)

-    {

-        size_t n;

-

-        if (cue_points_size > 0)

-            n = static_cast<size_t>(2 * cue_points_size);

-        else

-        {

-            const SegmentInfo* const pInfo = m_pSegment->GetInfo();

-

-            if (pInfo == NULL)

-                n = 2048;

-            else

-            {

-                const long long ns = pInfo->GetDuration();

-

-                if (ns <= 0)

-                    n = 2048;

-                else

-                {

-                    const long long sec = (ns + 999999999LL) / 1000000000LL;

-                    n = static_cast<size_t>(sec);

-                }

-            }

-        }

-

-        CuePoint** const qq = new CuePoint*[n];

-        CuePoint** q = qq;  //beginning of target

-

-        CuePoint** p = m_cue_points;                //beginning of source

-        CuePoint** const pp = p + m_preload_count;  //end of source

-

-        while (p != pp)

-            *q++ = *p++;

-

-        delete[] m_cue_points;

-

-        m_cue_points = qq;

-        cue_points_size = n;

-    }

-

-    CuePoint* const pCP = new CuePoint(m_preload_count, pos);

-    m_cue_points[m_preload_count++] = pCP;

-}

-

-

-bool Cues::LoadCuePoint() const

-{

-    //odbgstream os;

-    //os << "Cues::LoadCuePoint" << endl;

-

-    const long long stop = m_start + m_size;

-

-    if (m_pos >= stop)

-        return false;  //nothing else to do

-

-    Init();

-

-    IMkvReader* const pReader = m_pSegment->m_pReader;

-

-    while (m_pos < stop)

-    {

-        const long long idpos = m_pos;

-

-        long len;

-

-        const long long id = ReadUInt(pReader, m_pos, len);

-        assert(id >= 0);  //TODO

-        assert((m_pos + len) <= stop);

-

-        m_pos += len;  //consume ID

-

-        const long long size = ReadUInt(pReader, m_pos, len);

-        assert(size >= 0);

-        assert((m_pos + len) <= stop);

-

-        m_pos += len;  //consume Size field

-        assert((m_pos + size) <= stop);

-

-        if (id != 0x3B)  //CuePoint ID

-        {

-            m_pos += size;  //consume payload

-            assert(m_pos <= stop);

-

-            continue;

-        }

-

-        assert(m_preload_count > 0);

-

-        CuePoint* const pCP = m_cue_points[m_count];

-        assert(pCP);

-        assert((pCP->GetTimeCode() >= 0) || (-pCP->GetTimeCode() == idpos));

-

-        pCP->Load(pReader);

-        ++m_count;

-        --m_preload_count;

-

-        m_pos += size;  //consume payload

-        assert(m_pos <= stop);

-

-        break;

-    }

-

-    return (m_pos < stop);

-}

-

-

-bool Cues::Find(

-    long long time_ns,

-    const Track* pTrack,

-    const CuePoint*& pCP,

-    const CuePoint::TrackPosition*& pTP) const

-{

-    assert(time_ns >= 0);

-    assert(pTrack);

-

-    LoadCuePoint();

-

-    assert(m_cue_points);

-    assert(m_count > 0);

-

-    CuePoint** const ii = m_cue_points;

-    CuePoint** i = ii;

-

-    CuePoint** const jj = ii + m_count + m_preload_count;

-    CuePoint** j = jj;

-

-    pCP = *i;

-    assert(pCP);

-

-    if (time_ns <= pCP->GetTime(m_pSegment))

-    {

-        pTP = pCP->Find(pTrack);

-        return (pTP != NULL);

-    }

-

-    IMkvReader* const pReader = m_pSegment->m_pReader;

-

-    while (i < j)

-    {

-        //INVARIANT:

-        //[ii, i) <= time_ns

-        //[i, j)  ?

-        //[j, jj) > time_ns

-

-        CuePoint** const k = i + (j - i) / 2;

-        assert(k < jj);

-

-        CuePoint* const pCP = *k;

-        assert(pCP);

-

-        pCP->Load(pReader);

-

-        const long long t = pCP->GetTime(m_pSegment);

-

-        if (t <= time_ns)

-            i = k + 1;

-        else

-            j = k;

-

-        assert(i <= j);

-    }

-

-    assert(i == j);

-    assert(i <= jj);

-    assert(i > ii);

-

-    pCP = *--i;

-    assert(pCP);

-    assert(pCP->GetTime(m_pSegment) <= time_ns);

-

-    //TODO: here and elsewhere, it's probably not correct to search

-    //for the cue point with this time, and then search for a matching

-    //track.  In principle, the matching track could be on some earlier

-    //cue point, and with our current algorithm, we'd miss it.  To make

-    //this bullet-proof, we'd need to create a secondary structure,

-    //with a list of cue points that apply to a track, and then search

-    //that track-based structure for a matching cue point.

-

-    pTP = pCP->Find(pTrack);

-    return (pTP != NULL);

-}

-

-

-#if 0

-bool Cues::FindNext(

-    long long time_ns,

-    const Track* pTrack,

-    const CuePoint*& pCP,

-    const CuePoint::TrackPosition*& pTP) const

-{

-    pCP = 0;

-    pTP = 0;

-

-    if (m_count == 0)

-        return false;

-

-    assert(m_cue_points);

-

-    const CuePoint* const* const ii = m_cue_points;

-    const CuePoint* const* i = ii;

-

-    const CuePoint* const* const jj = ii + m_count;

-    const CuePoint* const* j = jj;

-

-    while (i < j)

-    {

-        //INVARIANT:

-        //[ii, i) <= time_ns

-        //[i, j)  ?

-        //[j, jj) > time_ns

-

-        const CuePoint* const* const k = i + (j - i) / 2;

-        assert(k < jj);

-

-        pCP = *k;

-        assert(pCP);

-

-        const long long t = pCP->GetTime(m_pSegment);

-

-        if (t <= time_ns)

-            i = k + 1;

-        else

-            j = k;

-

-        assert(i <= j);

-    }

-

-    assert(i == j);

-    assert(i <= jj);

-

-    if (i >= jj)  //time_ns is greater than max cue point

-        return false;

-

-    pCP = *i;

-    assert(pCP);

-    assert(pCP->GetTime(m_pSegment) > time_ns);

-

-    pTP = pCP->Find(pTrack);

-    return (pTP != NULL);

-}

-#endif

-

-

-const CuePoint* Cues::GetFirst() const

-{

-    LoadCuePoint();  //init cues

-

-    const size_t count = m_count + m_preload_count;

-

-    if (count == 0)  //weird

-        return NULL;

-

-    CuePoint* const* const pp = m_cue_points;

-    assert(pp);

-

-    CuePoint* const pCP = pp[0];

-    assert(pCP);

-    assert(pCP->GetTimeCode() >= 0);

-

-    return pCP;

-}

-

-

-const CuePoint* Cues::GetLast() const

-{

-    LoadCuePoint();  //init cues

-

-    const size_t count = m_count + m_preload_count;

-

-    if (count == 0)  //weird

-        return NULL;

-

-    const size_t index = count - 1;

-

-    CuePoint* const* const pp = m_cue_points;

-    assert(pp);

-

-    CuePoint* const pCP = pp[index];

-    assert(pCP);

-

-    pCP->Load(m_pSegment->m_pReader);

-    assert(pCP->GetTimeCode() >= 0);

-

-    return pCP;

-}

-

-

-const CuePoint* Cues::GetNext(const CuePoint* pCurr) const

-{

-    if (pCurr == NULL)

-        return NULL;

-

-    assert(pCurr->GetTimeCode() >= 0);

-    assert(m_cue_points);

-    assert(m_count >= 1);

-

-    const size_t count = m_count + m_preload_count;

-

-    size_t index = pCurr->m_index;

-    assert(index < count);

-

-    CuePoint* const* const pp = m_cue_points;

-    assert(pp);

-    assert(pp[index] == pCurr);

-

-    ++index;

-

-    if (index >= count)

-        return NULL;

-

-    CuePoint* const pNext = pp[index];

-    assert(pNext);

-

-    pNext->Load(m_pSegment->m_pReader);

-

-    return pNext;

-}

-

-

-const BlockEntry* Cues::GetBlock(

-    const CuePoint* pCP,

-    const CuePoint::TrackPosition* pTP) const

-{

-    if (pCP == NULL)

-        return NULL;

-

-    if (pTP == NULL)

-        return NULL;

-

-    return m_pSegment->GetBlock(*pCP, *pTP);

-}

-

-

-const BlockEntry* Segment::GetBlock(

-    const CuePoint& cp,

-    const CuePoint::TrackPosition& tp)

-{

-    Cluster** const ii = m_clusters;

-    Cluster** i = ii;

-

-    const long count = m_clusterCount + m_clusterPreloadCount;

-

-    Cluster** const jj = ii + count;

-    Cluster** j = jj;

-

-    while (i < j)

-    {

-        //INVARIANT:

-        //[ii, i) < pTP->m_pos

-        //[i, j) ?

-        //[j, jj)  > pTP->m_pos

-

-        Cluster** const k = i + (j - i) / 2;

-        assert(k < jj);

-

-        Cluster* const pCluster = *k;

-        assert(pCluster);

-

-        const long long pos_ = pCluster->m_pos;

-        assert(pos_);

-

-        const long long pos = pos_ * ((pos_ < 0) ? -1 : 1);

-

-        if (pos < tp.m_pos)

-            i = k + 1;

-        else if (pos > tp.m_pos)

-            j = k;

-        else

-            return pCluster->GetEntry(cp, tp);

-    }

-

-    assert(i == j);

-

-    Cluster* const pCluster = Cluster::Parse(this, -1, tp.m_pos);

-    const ptrdiff_t idx = i - m_clusters;

-

-    PreloadCluster(pCluster, idx);

-    assert(m_clusters);

-    assert(m_clusterPreloadCount > 0);

-    assert(m_clusters[idx] == pCluster);

-

-    return pCluster->GetEntry(cp, tp);

-}

-

-

-

-CuePoint::CuePoint(size_t idx, long long pos) :

-    m_index(idx),

-    m_timecode(-1 * pos),

-    m_track_positions(NULL),

-    m_track_positions_count(0)

-{

-    assert(pos > 0);

-}

-

-

-CuePoint::~CuePoint()

-{

-    delete[] m_track_positions;

-}

-

-

-void CuePoint::Load(IMkvReader* pReader)

-{

-    //odbgstream os;

-    //os << "CuePoint::Load(begin): timecode=" << m_timecode << endl;

-

-    if (m_timecode >= 0)  //already loaded

-        return;

-

-    assert(m_track_positions == NULL);

-    assert(m_track_positions_count == 0);

-

-    long long pos_ = -m_timecode;

-

-    long long stop;

-

-    {

-        long len;

-

-        const long long id = ReadUInt(pReader, pos_, len);

-        assert(id == 0x3B);  //CuePoint ID

-        //assert((pos + len) <= stop);

-

-        pos_ += len;  //consume ID

-

-        const long long size = ReadUInt(pReader, pos_, len);

-        assert(size >= 0);

-        //assert((pos + len) <= stop);

-

-        pos_ += len;  //consume Size field

-        //assert((pos + size) <= stop);

-

-        //pos_ now points to start of payload

-

-        stop = pos_ + size;

-    }

-

-    long long pos = pos_;

-

-    //First count number of track positions

-

-    while (pos < stop)

-    {

-        long len;

-

-        const long long id = ReadUInt(pReader, pos, len);

-        assert(id >= 0);  //TODO

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume ID

-

-        const long long size = ReadUInt(pReader, pos, len);

-        assert(size >= 0);

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume Size field

-        assert((pos + size) <= stop);

-

-        if (id == 0x33)  //CueTime ID

-            m_timecode = UnserializeUInt(pReader, pos, size);

-

-        else if (id == 0x37) //CueTrackPosition(s) ID

-            ++m_track_positions_count;

-

-        pos += size;  //consume payload

-        assert(pos <= stop);

-    }

-

-    assert(m_timecode >= 0);

-    assert(m_track_positions_count > 0);

-

-    //os << "CuePoint::Load(cont'd): idpos=" << idpos

-    //   << " timecode=" << m_timecode

-    //   << endl;

-

-    m_track_positions = new TrackPosition[m_track_positions_count];

-

-    //Now parse track positions

-

-    TrackPosition* p = m_track_positions;

-    pos = pos_;

-

-    while (pos < stop)

-    {

-        long len;

-

-        const long long id = ReadUInt(pReader, pos, len);

-        assert(id >= 0);  //TODO

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume ID

-

-        const long long size = ReadUInt(pReader, pos, len);

-        assert(size >= 0);

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume Size field

-        assert((pos + size) <= stop);

-

-        if (id == 0x37) //CueTrackPosition(s) ID

-        {

-            TrackPosition& tp = *p++;

-            tp.Parse(pReader, pos, size);

-        }

-

-        pos += size;  //consume payload

-        assert(pos <= stop);

-    }

-

-    assert(size_t(p - m_track_positions) == m_track_positions_count);

-}

-

-

-

-void CuePoint::TrackPosition::Parse(

-    IMkvReader* pReader,

-    long long start_,

-    long long size_)

-{

-    const long long stop = start_ + size_;

-    long long pos = start_;

-

-    m_track = -1;

-    m_pos = -1;

-    m_block = 1;  //default

-

-    while (pos < stop)

-    {

-        long len;

-

-        const long long id = ReadUInt(pReader, pos, len);

-        assert(id >= 0);  //TODO

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume ID

-

-        const long long size = ReadUInt(pReader, pos, len);

-        assert(size >= 0);

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume Size field

-        assert((pos + size) <= stop);

-

-        if (id == 0x77)  //CueTrack ID

-            m_track = UnserializeUInt(pReader, pos, size);

-

-        else if (id == 0x71)  //CueClusterPos ID

-            m_pos = UnserializeUInt(pReader, pos, size);

-

-        else if (id == 0x1378)  //CueBlockNumber

-            m_block = UnserializeUInt(pReader, pos, size);

-

-        pos += size;  //consume payload

-        assert(pos <= stop);

-    }

-

-    assert(m_pos >= 0);

-    //assert(m_track > 0);

-    //assert(m_block > 0);

-}

-

-

-const CuePoint::TrackPosition* CuePoint::Find(const Track* pTrack) const

-{

-    assert(pTrack);

-

-    const long long n = pTrack->GetNumber();

-

-    const TrackPosition* i = m_track_positions;

-    const TrackPosition* const j = i + m_track_positions_count;

-

-    while (i != j)

-    {

-        const TrackPosition& p = *i++;

-

-        if (p.m_track == n)

-            return &p;

-    }

-

-    return NULL;  //no matching track number found

-}

-

-

-long long CuePoint::GetTimeCode() const

-{

-    return m_timecode;

-}

-

-long long CuePoint::GetTime(Segment* pSegment) const

-{

-    assert(pSegment);

-    assert(m_timecode >= 0);

-

-    const SegmentInfo* const pInfo = pSegment->GetInfo();

-    assert(pInfo);

-

-    const long long scale = pInfo->GetTimeCodeScale();

-    assert(scale >= 1);

-

-    const long long time = scale * m_timecode;

-

-    return time;

-}

-

-

-long long Segment::Unparsed() const

-{

-    const long long stop = m_start + m_size;

-

-    const long long result = stop - m_pos;

-    assert(result >= 0);

-

-    return result;

-}

-

-

-Cluster* Segment::GetFirst()

-{

-    if ((m_clusters == NULL) || (m_clusterCount <= 0))

-       return &m_eos;

-

-    Cluster* const pCluster = m_clusters[0];

-    assert(pCluster);

-

-    return pCluster;

-}

-

-

-Cluster* Segment::GetLast()

-{

-    if ((m_clusters == NULL) || (m_clusterCount <= 0))

-        return &m_eos;

-

-    const long idx = m_clusterCount - 1;

-

-    Cluster* const pCluster = m_clusters[idx];

-    assert(pCluster);

-

-    return pCluster;

-}

-

-

-unsigned long Segment::GetCount() const

-{

-    return m_clusterCount;

-}

-

-

-Cluster* Segment::GetNext(const Cluster* pCurr)

-{

-    assert(pCurr);

-    assert(pCurr != &m_eos);

-    assert(m_clusters);

-

-    long idx = pCurr->m_index;

-

-    if (idx >= 0)

-    {

-        assert(m_clusterCount > 0);

-        assert(idx < m_clusterCount);

-        assert(pCurr == m_clusters[idx]);

-

-        ++idx;

-

-        if (idx >= m_clusterCount)

-            return &m_eos;  //caller will LoadCluster as desired

-

-        Cluster* const pNext = m_clusters[idx];

-        assert(pNext);

-        assert(pNext->m_index >= 0);

-        assert(pNext->m_index == idx);

-

-        return pNext;

-    }

-

-    assert(m_clusterPreloadCount > 0);

-

-    const long long off_ = pCurr->m_pos;

-    const long long off = off_ * ((off_ < 0) ? -1 : 1);

-

-    long long pos = m_start + off;

-    const long long stop = m_start + m_size;  //end of segment

-

-    {

-        long len;

-

-        long long result = GetUIntLength(m_pReader, pos, len);

-        assert(result == 0);  //TODO

-        assert((pos + len) <= stop);  //TODO

-

-        const long long id = ReadUInt(m_pReader, pos, len);

-        assert(id == 0x0F43B675);  //Cluster ID   //TODO

-

-        pos += len;  //consume ID

-

-        //Read Size

-        result = GetUIntLength(m_pReader, pos, len);

-        assert(result == 0);  //TODO

-        assert((pos + len) <= stop);  //TODO

-

-        const long long size = ReadUInt(m_pReader, pos, len);

-        assert(size > 0);  //TODO

-        assert((pCurr->m_size <= 0) || (pCurr->m_size == size));

-

-        pos += len;  //consume length of size of element

-        assert((pos + size) <= stop);  //TODO

-

-        //Pos now points to start of payload

-

-        pos += size;  //consume payload

-    }

-

-    long long off_next = 0;

-

-    while (pos < stop)

-    {

-        long len;

-

-        long long result = GetUIntLength(m_pReader, pos, len);

-        assert(result == 0);  //TODO

-        assert((pos + len) <= stop);  //TODO

-

-        const long long idpos = pos;  //pos of next (potential) cluster

-

-        const long long id = ReadUInt(m_pReader, idpos, len);

-        assert(id > 0);  //TODO

-

-        pos += len;  //consume ID

-

-        //Read Size

-        result = GetUIntLength(m_pReader, pos, len);

-        assert(result == 0);  //TODO

-        assert((pos + len) <= stop);  //TODO

-

-        const long long size = ReadUInt(m_pReader, pos, len);

-        assert(size >= 0);  //TODO

-

-        pos += len;  //consume length of size of element

-        assert((pos + size) <= stop);  //TODO

-

-        //Pos now points to start of payload

-

-        if (size == 0)  //weird

-            continue;

-

-        if (id == 0x0F43B675)  //Cluster ID

-        {

-            off_next = idpos - m_start;

-            break;

-        }

-

-        pos += size;  //consume payload

-    }

-

-    if (off_next <= 0)

-        return 0;

-

-    Cluster** const ii = m_clusters + m_clusterCount;

-    Cluster** i = ii;

-

-    Cluster** const jj = ii + m_clusterPreloadCount;

-    Cluster** j = jj;

-

-    while (i < j)

-    {

-        //INVARIANT:

-        //[0, i) < pos_next

-        //[i, j) ?

-        //[j, jj)  > pos_next

-

-        Cluster** const k = i + (j - i) / 2;

-        assert(k < jj);

-

-        Cluster* const pNext = *k;

-        assert(pNext);

-        assert(pNext->m_index < 0);

-

-        const long long pos_ = pNext->m_pos;

-        assert(pos_);

-

-        pos = pos_ * ((pos_ < 0) ? -1 : 1);

-

-        if (pos < off_next)

-            i = k + 1;

-        else if (pos > off_next)

-            j = k;

-        else

-            return pNext;

-    }

-

-    assert(i == j);

-

-    Cluster* const pNext = Cluster::Parse(this, -1, off_next);

-    const ptrdiff_t idx_next = i - m_clusters;  //insertion position

-

-    PreloadCluster(pNext, idx_next);

-    assert(m_clusters);

-    assert(idx_next < m_clusterSize);

-    assert(m_clusters[idx_next] == pNext);

-

-    return pNext;

-}

-

-

-Cluster* Segment::FindCluster(long long time_ns)

-{

-    if ((m_clusters == NULL) || (m_clusterCount <= 0))

-        return &m_eos;

-

-    {

-        Cluster* const pCluster = m_clusters[0];

-        assert(pCluster);

-        assert(pCluster->m_index == 0);

-

-        if (time_ns <= pCluster->GetTime())

-            return pCluster;

-    }

-

-    //Binary search of cluster array

-

-    long i = 0;

-    long j = m_clusterCount;

-

-    while (i < j)

-    {

-        //INVARIANT:

-        //[0, i) <= time_ns

-        //[i, j) ?

-        //[j, m_clusterCount)  > time_ns

-

-        const long k = i + (j - i) / 2;

-        assert(k < m_clusterCount);

-

-        Cluster* const pCluster = m_clusters[k];

-        assert(pCluster);

-        assert(pCluster->m_index == k);

-

-        const long long t = pCluster->GetTime();

-

-        if (t <= time_ns)

-            i = k + 1;

-        else

-            j = k;

-

-        assert(i <= j);

-    }

-

-    assert(i == j);

-    assert(i > 0);

-    assert(i <= m_clusterCount);

-

-    const long k = i - 1;

-

-    Cluster* const pCluster = m_clusters[k];

-    assert(pCluster);

-    assert(pCluster->m_index == k);

-    assert(pCluster->GetTime() <= time_ns);

-

-    return pCluster;

-}

-

-

-const BlockEntry* Segment::Seek(

-    long long time_ns,

-    const Track* pTrack)

-{

-    assert(pTrack);

-

-    if ((m_clusters == NULL) || (m_clusterCount <= 0))

-        return pTrack->GetEOS();

-

-    Cluster** const i = m_clusters;

-    assert(i);

-

-    {

-        Cluster* const pCluster = *i;

-        assert(pCluster);

-        assert(pCluster->m_index == 0);  //m_clusterCount > 0

-        assert(pCluster->m_pSegment == this);

-

-        if (time_ns <= pCluster->GetTime())

-            return pCluster->GetEntry(pTrack);

-    }

-

-    Cluster** const j = i + m_clusterCount;

-

-    if (pTrack->GetType() == 2)  //audio

-    {

-        //TODO: we could decide to use cues for this, as we do for video.

-        //But we only use it for video because looking around for a keyframe

-        //can get expensive.  Audio doesn't require anything special so a

-        //straight cluster search is good enough (we assume).

-

-        Cluster** lo = i;

-        Cluster** hi = j;

-

-        while (lo < hi)

-        {

-            //INVARIANT:

-            //[i, lo) <= time_ns

-            //[lo, hi) ?

-            //[hi, j)  > time_ns

-

-            Cluster** const mid = lo + (hi - lo) / 2;

-            assert(mid < hi);

-

-            Cluster* const pCluster = *mid;

-            assert(pCluster);

-            assert(pCluster->m_index == long(mid - m_clusters));

-            assert(pCluster->m_pSegment == this);

-

-            const long long t = pCluster->GetTime();

-

-            if (t <= time_ns)

-                lo = mid + 1;

-            else

-                hi = mid;

-

-            assert(lo <= hi);

-        }

-

-        assert(lo == hi);

-        assert(lo > i);

-        assert(lo <= j);

-

-        Cluster* const pCluster = *--lo;

-        assert(pCluster);

-        assert(pCluster->GetTime() <= time_ns);

-

-        return pCluster->GetEntry(pTrack);

-    }

-

-    assert(pTrack->GetType() == 1);  //video

-

-    Cluster** lo = i;

-    Cluster** hi = j;

-

-    while (lo < hi)

-    {

-        //INVARIANT:

-        //[i, lo) <= time_ns

-        //[lo, hi) ?

-        //[hi, j)  > time_ns

-

-        Cluster** const mid = lo + (hi - lo) / 2;

-        assert(mid < hi);

-

-        Cluster* const pCluster = *mid;

-        assert(pCluster);

-

-        const long long t = pCluster->GetTime();

-

-        if (t <= time_ns)

-            lo = mid + 1;

-        else

-            hi = mid;

-

-        assert(lo <= hi);

-    }

-

-    assert(lo == hi);

-    assert(lo > i);

-    assert(lo <= j);

-

-    Cluster* pCluster = *--lo;

-    assert(pCluster);

-    assert(pCluster->GetTime() <= time_ns);

-

-    {

-        const BlockEntry* const pBlockEntry = pCluster->GetEntry(pTrack);

-        assert(pBlockEntry);

-

-        if (!pBlockEntry->EOS())  //found a keyframe

-        {

-            const Block* const pBlock = pBlockEntry->GetBlock();

-            assert(pBlock);

-

-            //TODO: this isn't necessarily the keyframe we want,

-            //since there might be another keyframe on this same

-            //cluster with a greater timecode but that is

-            //still less than the requested time.  For now we

-            //simply return the first keyframe we find.

-

-            if (pBlock->GetTime(pCluster) <= time_ns)

-                return pBlockEntry;

-        }

-    }

-

-    const VideoTrack* const pVideo = static_cast<const VideoTrack*>(pTrack);

-

-    while (lo != i)

-    {

-        pCluster = *--lo;

-        assert(pCluster);

-        assert(pCluster->GetTime() <= time_ns);

-

-        const BlockEntry* const pBlockEntry = pCluster->GetMaxKey(pVideo);

-        assert(pBlockEntry);

-

-        if (!pBlockEntry->EOS())

-            return pBlockEntry;

-    }

-

-    //weird: we're on the first cluster, but no keyframe found

-    //should never happen but we must return something anyway

-

-    return pTrack->GetEOS();

-}

-

-

-#if 0

-bool Segment::SearchCues(

-    long long time_ns,

-    Track* pTrack,

-    Cluster*& pCluster,

-    const BlockEntry*& pBlockEntry,

-    const CuePoint*& pCP,

-    const CuePoint::TrackPosition*& pTP)

-{

-    if (pTrack->GetType() != 1)  //not video

-        return false;  //TODO: for now, just handle video stream

-

-    if (m_pCues == NULL)

-        return false;

-

-    if (!m_pCues->Find(time_ns, pTrack, pCP, pTP))

-        return false;  //weird

-

-    assert(pCP);

-    assert(pTP);

-    assert(pTP->m_track == pTrack->GetNumber());

-

-    //We have the cue point and track position we want,

-    //so we now need to search for the cluster having

-    //the indicated position.

-

-    return GetCluster(pCP, pTP, pCluster, pBlockEntry);

-}

-#endif

-

-

-Tracks* Segment::GetTracks() const

-{

-    return m_pTracks;

-}

-

-

-const SegmentInfo* Segment::GetInfo() const

-{

-    return m_pInfo;

-}

-

-

-const Cues* Segment::GetCues() const

-{

-    return m_pCues;

-}

-

-

-long long Segment::GetDuration() const

-{

-    assert(m_pInfo);

-    return m_pInfo->GetDuration();

-}

-

-

-SegmentInfo::SegmentInfo(Segment* pSegment, long long start, long long size_) :

-    m_pSegment(pSegment),

-    m_start(start),

-    m_size(size_),

-    m_pMuxingAppAsUTF8(NULL),

-    m_pWritingAppAsUTF8(NULL),

-    m_pTitleAsUTF8(NULL)

-{

-    IMkvReader* const pReader = m_pSegment->m_pReader;

-

-    long long pos = start;

-    const long long stop = start + size_;

-

-    m_timecodeScale = 1000000;

-    m_duration = -1;

-

-    while (pos < stop)

-    {

-        if (Match(pReader, pos, 0x0AD7B1, m_timecodeScale))

-            assert(m_timecodeScale > 0);

-

-        else if (Match(pReader, pos, 0x0489, m_duration))

-            assert(m_duration >= 0);

-

-        else if (Match(pReader, pos, 0x0D80, m_pMuxingAppAsUTF8))   //[4D][80]

-            assert(m_pMuxingAppAsUTF8);

-

-        else if (Match(pReader, pos, 0x1741, m_pWritingAppAsUTF8))  //[57][41]

-            assert(m_pWritingAppAsUTF8);

-

-        else if (Match(pReader, pos, 0x3BA9, m_pTitleAsUTF8))       //[7B][A9]

-            assert(m_pTitleAsUTF8);

-

-        else

-        {

-            long len;

-

-            const long long id = ReadUInt(pReader, pos, len);

-            //id;

-            assert(id >= 0);

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume id

-            assert((stop - pos) > 0);

-

-            const long long size = ReadUInt(pReader, pos, len);

-            assert(size >= 0);

-            assert((pos + len) <= stop);

-

-            pos += len + size;  //consume size and payload

-            assert(pos <= stop);

-        }

-    }

-

-    assert(pos == stop);

-}

-

-SegmentInfo::~SegmentInfo()

-{

-    if (m_pMuxingAppAsUTF8)

-    {

-        delete[] m_pMuxingAppAsUTF8;

-        m_pMuxingAppAsUTF8 = NULL;

-    }

-

-    if (m_pWritingAppAsUTF8)

-    {

-        delete[] m_pWritingAppAsUTF8;

-        m_pWritingAppAsUTF8 = NULL;

-    }

-

-    if (m_pTitleAsUTF8)

-    {

-        delete[] m_pTitleAsUTF8;

-        m_pTitleAsUTF8 = NULL;

-    }

-}

-

-long long SegmentInfo::GetTimeCodeScale() const

-{

-    return m_timecodeScale;

-}

-

-

-long long SegmentInfo::GetDuration() const

-{

-    if (m_duration < 0)

-        return -1;

-

-    assert(m_timecodeScale >= 1);

-

-    const double dd = double(m_duration) * double(m_timecodeScale);

-    const long long d = static_cast<long long>(dd);

-

-    return d;

-}

-

-const char* SegmentInfo::GetMuxingAppAsUTF8() const

-{

-    return m_pMuxingAppAsUTF8;

-}

-

-

-const char* SegmentInfo::GetWritingAppAsUTF8() const

-{

-    return m_pWritingAppAsUTF8;

-}

-

-const char* SegmentInfo::GetTitleAsUTF8() const

-{

-    return m_pTitleAsUTF8;

-}

-

-Track::Track(Segment* pSegment, const Info& i) :

-    m_pSegment(pSegment),

-    m_info(i)

-{

-}

-

-Track::~Track()

-{

-    Info& info = const_cast<Info&>(m_info);

-    info.Clear();

-}

-

-Track::Info::Info():

-    type(-1),

-    number(-1),

-    uid(-1),

-    nameAsUTF8(NULL),

-    codecId(NULL),

-    codecPrivate(NULL),

-    codecPrivateSize(0),

-    codecNameAsUTF8(NULL)

-{

-}

-

-

-void Track::Info::Clear()

-{

-    delete[] nameAsUTF8;

-    nameAsUTF8 = NULL;

-

-    delete[] codecId;

-    codecId = NULL;

-

-    delete[] codecPrivate;

-    codecPrivate = NULL;

-

-    codecPrivateSize = 0;

-

-    delete[] codecNameAsUTF8;

-    codecNameAsUTF8 = NULL;

-}

-

-const BlockEntry* Track::GetEOS() const

-{

-    return &m_eos;

-}

-

-long long Track::GetType() const

-{

-    return m_info.type;

-}

-

-long long Track::GetNumber() const

-{

-    return m_info.number;

-}

-

-const char* Track::GetNameAsUTF8() const

-{

-    return m_info.nameAsUTF8;

-}

-

-const char* Track::GetCodecNameAsUTF8() const

-{

-    return m_info.codecNameAsUTF8;

-}

-

-

-const char* Track::GetCodecId() const

-{

-    return m_info.codecId;

-}

-

-const unsigned char* Track::GetCodecPrivate(size_t& size) const

-{

-    size = m_info.codecPrivateSize;

-    return m_info.codecPrivate;

-}

-

-

-long Track::GetFirst(const BlockEntry*& pBlockEntry) const

-{

-    Cluster* pCluster = m_pSegment->GetFirst();

-

-    //If Segment::GetFirst returns NULL, then this must be a network

-    //download, and we haven't loaded any clusters yet.  In this case,

-    //returning NULL from Track::GetFirst means the same thing.

-

-    for (int i = 0; i < 100; ++i)  //arbitrary upper bound

-    {

-        if (pCluster == NULL)

-        {

-            pBlockEntry = GetEOS();

-            return 1;

-        }

-

-        if (pCluster->EOS())

-        {

-            if (m_pSegment->Unparsed() <= 0)  //all clusters have been loaded

-            {

-                pBlockEntry = GetEOS();

-                return 1;

-            }

-

-            pBlockEntry = 0;

-            return E_BUFFER_NOT_FULL;

-        }

-

-        pBlockEntry = pCluster->GetFirst();

-

-        while (pBlockEntry)

-        {

-            const Block* const pBlock = pBlockEntry->GetBlock();

-            assert(pBlock);

-

-            if (pBlock->GetTrackNumber() == m_info.number)

-                return 0;

-

-            pBlockEntry = pCluster->GetNext(pBlockEntry);

-        }

-

-        pCluster = m_pSegment->GetNext(pCluster);

-    }

-

-    //NOTE: if we get here, it means that we didn't find a block with

-    //a matching track number.  We interpret that as an error (which

-    //might be too conservative).

-

-    pBlockEntry = GetEOS();  //so we can return a non-NULL value

-    return 1;

-}

-

-

-long Track::GetNext(

-    const BlockEntry* pCurrEntry,

-    const BlockEntry*& pNextEntry) const

-{

-    assert(pCurrEntry);

-    assert(!pCurrEntry->EOS());  //?

-

-    const Block* const pCurrBlock = pCurrEntry->GetBlock();

-    assert(pCurrBlock->GetTrackNumber() == m_info.number);

-

-    Cluster* pCluster = pCurrEntry->GetCluster();

-    assert(pCluster);

-    assert(!pCluster->EOS());

-

-    pNextEntry = pCluster->GetNext(pCurrEntry);

-

-    for (int i = 0; i < 100; ++i)  //arbitrary upper bound to search

-    {

-        while (pNextEntry)

-        {

-            const Block* const pNextBlock = pNextEntry->GetBlock();

-            assert(pNextBlock);

-

-            if (pNextBlock->GetTrackNumber() == m_info.number)

-                return 0;

-

-            pNextEntry = pCluster->GetNext(pNextEntry);

-        }

-

-        pCluster = m_pSegment->GetNext(pCluster);

-

-        if (pCluster == NULL)

-        {

-            pNextEntry = GetEOS();

-            return 1;

-        }

-

-        if (pCluster->EOS())

-        {

-            if (m_pSegment->Unparsed() <= 0)   //all clusters have been loaded

-            {

-                pNextEntry = GetEOS();

-                return 1;

-            }

-

-            //TODO: there is a potential O(n^2) problem here: we tell the

-            //caller to (pre)load another cluster, which he does, but then he

-            //calls GetNext again, which repeats the same search.  This is

-            //a pathological case, since the only way it can happen is if

-            //there exists a long sequence of clusters, none of which contain a

-            // block from this track.  One way around this problem is for the

-            //caller to be smarter when he loads another cluster: don't call

-            //us back until you have a cluster that contains a block from this

-            //track. (Of course, that's not cheap either, since our caller

-            //would have to scan each cluster as it's loaded, so that

-            //would just push back the problem.)

-

-            pNextEntry = NULL;

-            return E_BUFFER_NOT_FULL;

-        }

-

-        pNextEntry = pCluster->GetFirst();

-    }

-

-    //NOTE: if we get here, it means that we didn't find a block with

-    //a matching track number after lots of searching, so we give

-    //up trying.

-

-    pNextEntry = GetEOS();  //so we can return a non-NULL value

-    return 1;

-}

-

-

-Track::EOSBlock::EOSBlock()

-{

-}

-

-

-bool Track::EOSBlock::EOS() const

-{

-    return true;

-}

-

-

-Cluster* Track::EOSBlock::GetCluster() const

-{

-    return NULL;

-}

-

-

-size_t Track::EOSBlock::GetIndex() const

-{

-    return 0;

-}

-

-

-const Block* Track::EOSBlock::GetBlock() const

-{

-    return NULL;

-}

-

-

-bool Track::EOSBlock::IsBFrame() const

-{

-    return false;

-}

-

-

-VideoTrack::VideoTrack(Segment* pSegment, const Info& i) :

-    Track(pSegment, i),

-    m_width(-1),

-    m_height(-1),

-    m_rate(-1)

-{

-    assert(i.type == 1);

-    assert(i.number > 0);

-

-    IMkvReader* const pReader = pSegment->m_pReader;

-

-    const Settings& s = i.settings;

-    assert(s.start >= 0);

-    assert(s.size >= 0);

-

-    long long pos = s.start;

-    assert(pos >= 0);

-

-    const long long stop = pos + s.size;

-

-    while (pos < stop)

-    {

-#ifdef _DEBUG

-        long len;

-        const long long id = ReadUInt(pReader, pos, len);

-        assert(id >= 0);  //TODO: handle error case

-        assert((pos + len) <= stop);

-#endif

-        if (Match(pReader, pos, 0x30, m_width))

-            ;

-        else if (Match(pReader, pos, 0x3A, m_height))

-            ;

-        else if (Match(pReader, pos, 0x0383E3, m_rate))

-            ;

-        else

-        {

-            long len;

-            const long long id = ReadUInt(pReader, pos, len);

-            assert(id >= 0);  //TODO: handle error case

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume id

-

-            const long long size = ReadUInt(pReader, pos, len);

-            assert(size >= 0);  //TODO: handle error case

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume length of size

-            assert((pos + size) <= stop);

-

-            //pos now designates start of payload

-

-            pos += size;  //consume payload

-            assert(pos <= stop);

-        }

-    }

-

-    return;

-}

-

-

-bool VideoTrack::VetEntry(const BlockEntry* pBlockEntry) const

-{

-    assert(pBlockEntry);

-

-    const Block* const pBlock = pBlockEntry->GetBlock();

-    assert(pBlock);

-    assert(pBlock->GetTrackNumber() == m_info.number);

-

-    return pBlock->IsKey();

-}

-

-

-long long VideoTrack::GetWidth() const

-{

-    return m_width;

-}

-

-

-long long VideoTrack::GetHeight() const

-{

-    return m_height;

-}

-

-

-double VideoTrack::GetFrameRate() const

-{

-    return m_rate;

-}

-

-

-AudioTrack::AudioTrack(Segment* pSegment, const Info& i) :

-    Track(pSegment, i),

-    m_rate(0.0),

-    m_channels(0),

-    m_bitDepth(-1)

-{

-    assert(i.type == 2);

-    assert(i.number > 0);

-

-    IMkvReader* const pReader = pSegment->m_pReader;

-

-    const Settings& s = i.settings;

-    assert(s.start >= 0);

-    assert(s.size >= 0);

-

-    long long pos = s.start;

-    assert(pos >= 0);

-

-    const long long stop = pos + s.size;

-

-    while (pos < stop)

-    {

-#ifdef _DEBUG

-        long len;

-        const long long id = ReadUInt(pReader, pos, len);

-        assert(id >= 0);  //TODO: handle error case

-        assert((pos + len) <= stop);

-#endif

-        if (Match(pReader, pos, 0x35, m_rate))

-            ;

-        else if (Match(pReader, pos, 0x1F, m_channels))

-            ;

-        else if (Match(pReader, pos, 0x2264, m_bitDepth))

-            ;

-        else

-        {

-            long len;

-            const long long id = ReadUInt(pReader, pos, len);

-            assert(id >= 0);  //TODO: handle error case

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume id

-

-            const long long size = ReadUInt(pReader, pos, len);

-            assert(size >= 0);  //TODO: handle error case

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume length of size

-            assert((pos + size) <= stop);

-

-            //pos now designates start of payload

-

-            pos += size;  //consume payload

-            assert(pos <= stop);

-        }

-    }

-

-    return;

-}

-

-

-bool AudioTrack::VetEntry(const BlockEntry* pBlockEntry) const

-{

-    assert(pBlockEntry);

-

-    const Block* const pBlock = pBlockEntry->GetBlock();

-    assert(pBlock);

-    assert(pBlock->GetTrackNumber() == m_info.number);

-

-    return true;

-}

-

-

-double AudioTrack::GetSamplingRate() const

-{

-    return m_rate;

-}

-

-

-long long AudioTrack::GetChannels() const

-{

-    return m_channels;

-}

-

-long long AudioTrack::GetBitDepth() const

-{

-    return m_bitDepth;

-}

-

-Tracks::Tracks(Segment* pSegment, long long start, long long size_) :

-    m_pSegment(pSegment),

-    m_start(start),

-    m_size(size_),

-    m_trackEntries(NULL),

-    m_trackEntriesEnd(NULL)

-{

-    long long stop = m_start + m_size;

-    IMkvReader* const pReader = m_pSegment->m_pReader;

-

-    long long pos1 = m_start;

-    int count = 0;

-

-    while (pos1 < stop)

-    {

-        long len;

-        const long long id = ReadUInt(pReader, pos1, len);

-        assert(id >= 0);

-        assert((pos1 + len) <= stop);

-

-        pos1 += len;  //consume id

-

-        const long long size = ReadUInt(pReader, pos1, len);

-        assert(size >= 0);

-        assert((pos1 + len) <= stop);

-

-        pos1 += len;  //consume length of size

-

-        //pos now designates start of element

-        if (id == 0x2E)  //TrackEntry ID

-            ++count;

-

-        pos1 += size;  //consume payload

-        assert(pos1 <= stop);

-    }

-

-    if (count <= 0)

-        return;

-

-    m_trackEntries = new Track*[count];

-    m_trackEntriesEnd = m_trackEntries;

-

-    long long pos = m_start;

-

-    while (pos < stop)

-    {

-        long len;

-        const long long id = ReadUInt(pReader, pos, len);

-        assert(id >= 0);

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume id

-

-        const long long size1 = ReadUInt(pReader, pos, len);

-        assert(size1 >= 0);

-        assert((pos + len) <= stop);

-

-        pos += len;  //consume length of size

-

-        //pos now designates start of element

-

-        if (id == 0x2E)  //TrackEntry ID

-            ParseTrackEntry(pos, size1, *m_trackEntriesEnd++);

-

-        pos += size1;  //consume payload

-        assert(pos <= stop);

-    }

-}

-

-

-unsigned long Tracks::GetTracksCount() const

-{

-    const ptrdiff_t result = m_trackEntriesEnd - m_trackEntries;

-    assert(result >= 0);

-

-    return static_cast<unsigned long>(result);

-}

-

-

-void Tracks::ParseTrackEntry(

-    long long start,

-    long long size,

-    Track*& pTrack)

-{

-    IMkvReader* const pReader = m_pSegment->m_pReader;

-

-    long long pos = start;

-    const long long stop = start + size;

-

-    Track::Info i;

-

-    Track::Settings videoSettings;

-    videoSettings.start = -1;

-

-    Track::Settings audioSettings;

-    audioSettings.start = -1;

-

-    while (pos < stop)

-    {

-#ifdef _DEBUG

-        long len;

-        const long long id = ReadUInt(pReader, pos, len);

-        len;

-        id;

-#endif

-        if (Match(pReader, pos, 0x57, i.number))

-            assert(i.number > 0);

-        else if (Match(pReader, pos, 0x33C5, i.uid))

-            ;

-        else if (Match(pReader, pos, 0x03, i.type))

-            ;

-        else if (Match(pReader, pos, 0x136E, i.nameAsUTF8))

-            assert(i.nameAsUTF8);

-        else if (Match(pReader, pos, 0x06, i.codecId))

-            ;

-        else if (Match(pReader,

-                       pos,

-                       0x23A2,

-                       i.codecPrivate,

-                       i.codecPrivateSize))

-            ;

-        else if (Match(pReader, pos, 0x058688, i.codecNameAsUTF8))

-            assert(i.codecNameAsUTF8);

-        else

-        {

-            long len;

-

-            const long long id = ReadUInt(pReader, pos, len);

-            assert(id >= 0);  //TODO: handle error case

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume id

-

-            const long long size = ReadUInt(pReader, pos, len);

-            assert(size >= 0);  //TODO: handle error case

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume length of size

-            const long long start = pos;

-

-            pos += size;  //consume payload

-            assert(pos <= stop);

-

-            if (id == 0x60)

-            {

-                videoSettings.start = start;

-                videoSettings.size = size;

-            }

-            else if (id == 0x61)

-            {

-                audioSettings.start = start;

-                audioSettings.size = size;

-            }

-        }

-    }

-

-    assert(pos == stop);

-    //TODO: properly vet info.number, to ensure both its existence,

-    //and that it is unique among all tracks.

-    assert(i.number > 0);

-

-    //TODO: vet settings, to ensure that video settings (0x60)

-    //were specified when type = 1, and that audio settings (0x61)

-    //were specified when type = 2.

-    if (i.type == 1)  //video

-    {

-        assert(audioSettings.start < 0);

-        assert(videoSettings.start >= 0);

-

-        i.settings = videoSettings;

-

-        VideoTrack* const t = new VideoTrack(m_pSegment, i);

-        assert(t);  //TODO

-        pTrack = t;

-    }

-    else if (i.type == 2)  //audio

-    {

-        assert(videoSettings.start < 0);

-        assert(audioSettings.start >= 0);

-

-        i.settings = audioSettings;

-

-        AudioTrack* const t = new AudioTrack(m_pSegment, i);

-        assert(t);  //TODO

-        pTrack = t;

-    }

-    else

-    {

-        // For now, we do not support other track types.

-        // TODO: support other track types

-        i.Clear();

-

-        pTrack = NULL;

-    }

-

-    return;

-}

-

-

-Tracks::~Tracks()

-{

-    Track** i = m_trackEntries;

-    Track** const j = m_trackEntriesEnd;

-

-    while (i != j)

-    {

-        Track* const pTrack = *i++;

-        delete pTrack;

-    }

-

-    delete[] m_trackEntries;

-}

-

-

-Track* Tracks::GetTrackByNumber(unsigned long tn_) const

-{

-    const long long tn = tn_;

-

-    Track** i = m_trackEntries;

-    Track** const j = m_trackEntriesEnd;

-

-    while (i != j)

-    {

-        Track* const pTrack = *i++;

-

-        if (pTrack == NULL)

-            continue;

-

-        if (tn == pTrack->GetNumber())

-            return pTrack;

-    }

-

-    return NULL;  //not found

-}

-

-

-Track* Tracks::GetTrackByIndex(unsigned long idx) const

-{

-    const ptrdiff_t count = m_trackEntriesEnd - m_trackEntries;

-

-    if (idx >= static_cast<unsigned long>(count))

-         return NULL;

-

-    return m_trackEntries[idx];

-}

-

-

-void Cluster::Load()

-{

-    assert(m_pSegment);

-    assert(m_pos);

-    assert(m_size);

-

-    if (m_pos > 0)  //loaded

-    {

-        assert(m_size > 0);

-        assert(m_timecode >= 0);

-        return;

-    }

-

-    assert(m_pos < 0);  //not loaded yet

-    assert(m_size < 0);

-    assert(m_timecode < 0);

-

-    IMkvReader* const pReader = m_pSegment->m_pReader;

-

-    m_pos *= -1;                                  //relative to segment

-    long long pos = m_pSegment->m_start + m_pos;  //absolute

-

-    long len;

-

-    const long long id_ = ReadUInt(pReader, pos, len);

-    assert(id_ >= 0);

-    assert(id_ == 0x0F43B675);  //Cluster ID

-

-    pos += len;  //consume id

-

-    const long long size_ = ReadUInt(pReader, pos, len);

-    assert(size_ >= 0);

-

-    pos += len;  //consume size

-

-    m_size = size_;

-    const long long stop = pos + size_;

-

-    long long timecode = -1;

-

-    while (pos < stop)

-    {

-        if (Match(pReader, pos, 0x67, timecode))

-            break;

-        else

-        {

-            const long long id = ReadUInt(pReader, pos, len);

-            assert(id >= 0);  //TODO

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume id

-

-            const long long size = ReadUInt(pReader, pos, len);

-            assert(size >= 0);  //TODO

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume size

-

-            if (id == 0x20)  //BlockGroup ID

-                break;

-

-            if (id == 0x23)  //SimpleBlock ID

-                break;

-

-            pos += size;  //consume payload

-            assert(pos <= stop);

-        }

-    }

-

-    assert(pos <= stop);

-    assert(timecode >= 0);

-

-    m_timecode = timecode;

-}

-

-

-Cluster* Cluster::Parse(

-    Segment* pSegment,

-    long idx,

-    long long off)

-{

-    assert(pSegment);

-    assert(off >= 0);

-    assert(off < pSegment->m_size);

-

-    Cluster* const pCluster = new Cluster(pSegment, idx, -off);

-    assert(pCluster);

-

-    return pCluster;

-}

-

-

-Cluster::Cluster() :

-    m_pSegment(NULL),

-    m_index(0),

-    m_pos(0),

-    m_size(0),

-    m_timecode(0),

-    m_entries(NULL),

-    m_entriesCount(0)

-{

-}

-

-

-Cluster::Cluster(

-    Segment* pSegment,

-    long idx,

-    long long off) :

-    m_pSegment(pSegment),

-    m_index(idx),

-    m_pos(off),

-    m_size(-1),

-    m_timecode(-1),

-    m_entries(NULL),

-    m_entriesCount(0)

-{

-}

-

-

-Cluster::~Cluster()

-{

-    BlockEntry** i = m_entries;

-    BlockEntry** const j = m_entries + m_entriesCount;

-

-    while (i != j)

-    {

-         BlockEntry* p = *i++;

-         assert(p);

-

-         delete p;

-    }

-

-    delete[] m_entries;

-}

-

-

-bool Cluster::EOS() const

-{

-    return (m_pSegment == NULL);

-}

-

-

-void Cluster::LoadBlockEntries()

-{

-    if (m_entries)

-        return;

-

-    assert(m_pSegment);

-    assert(m_pos);

-    assert(m_size);

-    assert(m_entriesCount == 0);

-

-    IMkvReader* const pReader = m_pSegment->m_pReader;

-

-    if (m_pos < 0)

-        m_pos *= -1;  //relative to segment

-

-    long long pos = m_pSegment->m_start + m_pos;  //absolute

-

-    {

-        long len;

-

-        const long long id = ReadUInt(pReader, pos, len);

-        id;

-        assert(id >= 0);

-        assert(id == 0x0F43B675);  //Cluster ID

-

-        pos += len;  //consume id

-

-        const long long size = ReadUInt(pReader, pos, len);

-        assert(size > 0);

-

-        pos += len;  //consume size

-

-        //pos now points to start of payload

-

-        if (m_size >= 0)

-            assert(size == m_size);

-        else

-            m_size = size;

-    }

-

-    const long long stop = pos + m_size;

-    long long timecode = -1;  //of cluster itself

-

-    //First count the number of entries

-

-    long long idx = pos;  //points to start of payload

-    m_entriesCount = 0;

-

-    while (idx < stop)

-    {

-        if (Match(pReader, idx, 0x67, timecode))

-        {

-            if (m_timecode >= 0)

-                assert(timecode == m_timecode);

-            else

-                m_timecode = timecode;

-        }

-        else

-        {

-            long len;

-

-            const long long id = ReadUInt(pReader, idx, len);

-            assert(id >= 0);  //TODO

-            assert((idx + len) <= stop);

-

-            idx += len;  //consume id

-

-            const long long size = ReadUInt(pReader, idx, len);

-            assert(size >= 0);  //TODO

-            assert((idx + len) <= stop);

-

-            idx += len;  //consume size

-

-            if (id == 0x20)  //BlockGroup ID

-                ++m_entriesCount;

-            else if (id == 0x23)  //SimpleBlock ID

-                ++m_entriesCount;

-

-            idx += size;  //consume payload

-            assert(idx <= stop);

-        }

-    }

-

-    assert(idx == stop);

-    assert(m_timecode >= 0);

-

-    if (m_entriesCount == 0)  //TODO: handle empty clusters

-        return;

-

-    m_entries = new BlockEntry*[m_entriesCount];

-    size_t index = 0;

-

-    while (pos < stop)

-    {

-        if (Match(pReader, pos, 0x67, timecode))

-            assert(timecode == m_timecode);

-        else

-        {

-            long len;

-            const long long id = ReadUInt(pReader, pos, len);

-            assert(id >= 0);  //TODO

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume id

-

-            const long long size = ReadUInt(pReader, pos, len);

-            assert(size >= 0);  //TODO

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume size

-

-            if (id == 0x20)  //BlockGroup ID

-                ParseBlockGroup(pos, size, index++);

-            else if (id == 0x23)  //SimpleBlock ID

-                ParseSimpleBlock(pos, size, index++);

-

-            pos += size;  //consume payload

-            assert(pos <= stop);

-        }

-    }

-

-    assert(pos == stop);

-    assert(timecode >= 0);

-    assert(index == m_entriesCount);

-}

-

-

-

-long long Cluster::GetTimeCode()

-{

-    Load();

-    return m_timecode;

-}

-

-

-long long Cluster::GetTime()

-{

-    const long long tc = GetTimeCode();

-    assert(tc >= 0);

-

-    const SegmentInfo* const pInfo = m_pSegment->GetInfo();

-    assert(pInfo);

-

-    const long long scale = pInfo->GetTimeCodeScale();

-    assert(scale >= 1);

-

-    const long long t = m_timecode * scale;

-

-    return t;

-}

-

-

-long long Cluster::GetFirstTime()

-{

-    const BlockEntry* const pEntry = GetFirst();

-

-    if (pEntry == NULL)  //empty cluster

-        return GetTime();

-

-    const Block* const pBlock = pEntry->GetBlock();

-    assert(pBlock);

-

-    return pBlock->GetTime(this);

-}

-

-

-long long Cluster::GetLastTime()

-{

-    const BlockEntry* const pEntry = GetLast();

-

-    if (pEntry == NULL)  //empty cluster

-        return GetTime();

-

-    const Block* const pBlock = pEntry->GetBlock();

-    assert(pBlock);

-

-    return pBlock->GetTime(this);

-}

-

-

-void Cluster::ParseBlockGroup(long long start, long long size, size_t index)

-{

-    assert(m_entries);

-    assert(m_entriesCount);

-    assert(index < m_entriesCount);

-

-    BlockGroup* const pGroup =

-        new (std::nothrow) BlockGroup(this, index, start, size);

-    assert(pGroup);  //TODO

-

-    m_entries[index] = pGroup;

-}

-

-

-

-void Cluster::ParseSimpleBlock(long long start, long long size, size_t index)

-{

-    assert(m_entries);

-    assert(m_entriesCount);

-    assert(index < m_entriesCount);

-

-    SimpleBlock* const pSimpleBlock =

-        new (std::nothrow) SimpleBlock(this, index, start, size);

-    assert(pSimpleBlock);  //TODO

-

-    m_entries[index] = pSimpleBlock;

-}

-

-

-const BlockEntry* Cluster::GetFirst()

-{

-    LoadBlockEntries();

-    //assert(m_entries);

-    //assert(m_entriesCount >= 1);

-

-    if ((m_entries == NULL) || (m_entriesCount == 0))

-        return NULL;

-

-    const BlockEntry* const pFirst = m_entries[0];

-    assert(pFirst);

-

-    return pFirst;

-}

-

-

-const BlockEntry* Cluster::GetLast()

-{

-    LoadBlockEntries();

-    //assert(m_entries);

-    //assert(m_entriesCount >= 1);

-

-    if ((m_entries == NULL) || (m_entriesCount == 0))

-        return NULL;

-

-    const size_t idx = m_entriesCount - 1;

-

-    const BlockEntry* const pLast = m_entries[idx];

-    assert(pLast);

-

-    return pLast;

-}

-

-

-const BlockEntry* Cluster::GetNext(const BlockEntry* pEntry) const

-{

-    assert(pEntry);

-    assert(m_entries);

-    assert(m_entriesCount);

-

-    size_t idx = pEntry->GetIndex();

-    assert(idx < m_entriesCount);

-    assert(m_entries[idx] == pEntry);

-

-    ++idx;

-

-    if (idx >= m_entriesCount)

-        return NULL;

-

-    return m_entries[idx];

-}

-

-

-const BlockEntry* Cluster::GetEntry(const Track* pTrack)

-{

-    assert(pTrack);

-

-    if (m_pSegment == NULL)  //EOS

-        return pTrack->GetEOS();

-

-    LoadBlockEntries();

-

-    if ((m_entries == NULL) || (m_entriesCount == 0))

-        return NULL;

-

-    BlockEntry** i = m_entries;

-    assert(i);

-

-    BlockEntry** const j = i + m_entriesCount;

-

-    while (i != j)

-    {

-        const BlockEntry* const pEntry = *i++;

-        assert(pEntry);

-        assert(!pEntry->EOS());

-

-        const Block* const pBlock = pEntry->GetBlock();

-        assert(pBlock);

-

-        if (pBlock->GetTrackNumber() != pTrack->GetNumber())

-            continue;

-

-        if (pTrack->VetEntry(pEntry))

-            return pEntry;

-    }

-

-    return pTrack->GetEOS();  //no satisfactory block found

-}

-

-

-const BlockEntry*

-Cluster::GetEntry(

-    const CuePoint& cp,

-    const CuePoint::TrackPosition& tp)

-{

-    assert(m_pSegment);

-

-    LoadBlockEntries();

-

-    if (m_entries == NULL)

-        return NULL;

-

-    const long long count = m_entriesCount;

-

-    if (count <= 0)

-        return NULL;

-

-    const long long tc = cp.GetTimeCode();

-

-    if ((tp.m_block > 0) && (tp.m_block <= count))

-    {

-        const size_t block = static_cast<size_t>(tp.m_block);

-        const size_t index = block - 1;

-

-        const BlockEntry* const pEntry = m_entries[index];

-        assert(pEntry);

-        assert(!pEntry->EOS());

-

-        const Block* const pBlock = pEntry->GetBlock();

-        assert(pBlock);

-

-        if ((pBlock->GetTrackNumber() == tp.m_track) &&

-            (pBlock->GetTimeCode(this) == tc))

-        {

-            return pEntry;

-        }

-    }

-

-    const BlockEntry* const* i = m_entries;

-    const BlockEntry* const* const j = i + count;

-

-    while (i != j)

-    {

-        const BlockEntry* const pEntry = *i++;

-        assert(pEntry);

-        assert(!pEntry->EOS());

-

-        const Block* const pBlock = pEntry->GetBlock();

-        assert(pBlock);

-

-        if (pBlock->GetTrackNumber() != tp.m_track)

-            continue;

-

-        const long long tc_ = pBlock->GetTimeCode(this);

-

-        if (tc_ < tc)

-            continue;

-

-        if (tc_ > tc)

-            return NULL;

-

-        const Tracks* const pTracks = m_pSegment->GetTracks();

-        assert(pTracks);

-

-        const long tn = static_cast<long>(tp.m_track);

-        const Track* const pTrack = pTracks->GetTrackByNumber(tn);

-

-        if (pTrack == NULL)

-            return NULL;

-

-        const long long type = pTrack->GetType();

-

-        if (type == 2)  //audio

-            return pEntry;

-

-        if (type != 1)  //not video

-            return NULL;

-

-        if (!pBlock->IsKey())

-            return NULL;

-

-        return pEntry;

-    }

-

-    return NULL;

-}

-

-

-const BlockEntry* Cluster::GetMaxKey(const VideoTrack* pTrack)

-{

-    assert(pTrack);

-

-    if (m_pSegment == NULL)  //EOS

-        return pTrack->GetEOS();

-

-    LoadBlockEntries();

-    //assert(m_entries);

-

-    BlockEntry** i = m_entries + m_entriesCount;

-    BlockEntry** const j = m_entries;

-

-    while (i != j)

-    {

-        const BlockEntry* const pEntry = *--i;

-        assert(pEntry);

-        assert(!pEntry->EOS());

-

-        const Block* const pBlock = pEntry->GetBlock();

-        assert(pBlock);

-

-        if (pBlock->GetTrackNumber() != pTrack->GetNumber())

-            continue;

-

-        if (pBlock->IsKey())

-            return pEntry;

-    }

-

-    return pTrack->GetEOS();  //no satisfactory block found

-}

-

-

-

-BlockEntry::BlockEntry()

-{

-}

-

-

-BlockEntry::~BlockEntry()

-{

-}

-

-

-SimpleBlock::SimpleBlock(

-    Cluster* pCluster,

-    size_t idx,

-    long long start,

-    long long size) :

-    m_pCluster(pCluster),

-    m_index(idx),

-    m_block(start, size, pCluster->m_pSegment->m_pReader)

-{

-}

-

-

-bool SimpleBlock::EOS() const

-{

-    return false;

-}

-

-

-Cluster* SimpleBlock::GetCluster() const

-{

-    return m_pCluster;

-}

-

-

-size_t SimpleBlock::GetIndex() const

-{

-    return m_index;

-}

-

-

-const Block* SimpleBlock::GetBlock() const

-{

-    return &m_block;

-}

-

-

-bool SimpleBlock::IsBFrame() const

-{

-    return false;

-}

-

-

-BlockGroup::BlockGroup(

-    Cluster* pCluster,

-    size_t idx,

-    long long start,

-    long long size_) :

-    m_pCluster(pCluster),

-    m_index(idx),

-    m_prevTimeCode(0),

-    m_nextTimeCode(0),

-    m_pBlock(NULL)  //TODO: accept multiple blocks within a block group

-{

-    IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;

-

-    long long pos = start;

-    const long long stop = start + size_;

-

-    bool bSimpleBlock = false;

-    bool bReferenceBlock = false;

-

-    while (pos < stop)

-    {

-        short t;

-

-        if (Match(pReader, pos, 0x7B, t))

-        {

-            if (t < 0)

-                m_prevTimeCode = t;

-            else if (t > 0)

-                m_nextTimeCode = t;

-            else

-                assert(false);

-

-            bReferenceBlock = true;

-        }

-        else

-        {

-            long len;

-            const long long id = ReadUInt(pReader, pos, len);

-            assert(id >= 0);  //TODO

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume ID

-

-            const long long size = ReadUInt(pReader, pos, len);

-            assert(size >= 0);  //TODO

-            assert((pos + len) <= stop);

-

-            pos += len;  //consume size

-

-            switch (id)

-            {

-                case 0x23:  //SimpleBlock ID

-                    bSimpleBlock = true;

-                    //YES, FALL THROUGH TO NEXT CASE

-

-                case 0x21:  //Block ID

-                    ParseBlock(pos, size);

-                    break;

-

-                default:

-                    break;

-            }

-

-            pos += size;  //consume payload

-            assert(pos <= stop);

-        }

-    }

-

-    assert(pos == stop);

-    assert(m_pBlock);

-

-    if (!bSimpleBlock)

-        m_pBlock->SetKey(!bReferenceBlock);

-}

-

-

-BlockGroup::~BlockGroup()

-{

-    delete m_pBlock;

-}

-

-

-void BlockGroup::ParseBlock(long long start, long long size)

-{

-    IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;

-

-    Block* const pBlock = new Block(start, size, pReader);

-    assert(pBlock);  //TODO

-

-    //TODO: the Matroska spec says you can have multiple blocks within the

-    //same block group, with blocks ranked by priority (the flag bits).

-

-    assert(m_pBlock == NULL);

-    m_pBlock = pBlock;

-}

-

-

-bool BlockGroup::EOS() const

-{

-    return false;

-}

-

-

-Cluster* BlockGroup::GetCluster() const

-{

-    return m_pCluster;

-}

-

-

-size_t BlockGroup::GetIndex() const

-{

-    return m_index;

-}

-

-

-const Block* BlockGroup::GetBlock() const

-{

-    return m_pBlock;

-}

-

-

-short BlockGroup::GetPrevTimeCode() const

-{

-    return m_prevTimeCode;

-}

-

-

-short BlockGroup::GetNextTimeCode() const

-{

-    return m_nextTimeCode;

-}

-

-

-bool BlockGroup::IsBFrame() const

-{

-    return (m_nextTimeCode > 0);

-}

-

-

-

-Block::Block(long long start, long long size_, IMkvReader* pReader) :

-    m_start(start),

-    m_size(size_)

-{

-    long long pos = start;

-    const long long stop = start + size_;

-

-    long len;

-

-    m_track = ReadUInt(pReader, pos, len);

-    assert(m_track > 0);

-    assert((pos + len) <= stop);

-

-    pos += len;  //consume track number

-    assert((stop - pos) >= 2);

-

-    m_timecode = Unserialize2SInt(pReader, pos);

-

-    pos += 2;

-    assert((stop - pos) >= 1);

-

-    const long hr = pReader->Read(pos, 1, &m_flags);

-    assert(hr == 0L);

-

-    ++pos;

-    assert(pos <= stop);

-

-    m_frameOff = pos;

-

-    const long long frame_size = stop - pos;

-

-    assert(frame_size <= 2147483647L);

-

-    m_frameSize = static_cast<long>(frame_size);

-}

-

-

-long long Block::GetTimeCode(Cluster* pCluster) const

-{

-    assert(pCluster);

-

-    const long long tc0 = pCluster->GetTimeCode();

-    assert(tc0 >= 0);

-

-    const long long tc = tc0 + static_cast<long long>(m_timecode);

-    assert(tc >= 0);

-

-    return tc;  //unscaled timecode units

-}

-

-

-long long Block::GetTime(Cluster* pCluster) const

-{

-    assert(pCluster);

-

-    const long long tc = GetTimeCode(pCluster);

-

-    const Segment* const pSegment = pCluster->m_pSegment;

-    const SegmentInfo* const pInfo = pSegment->GetInfo();

-    assert(pInfo);

-

-    const long long scale = pInfo->GetTimeCodeScale();

-    assert(scale >= 1);

-

-    const long long ns = tc * scale;

-

-    return ns;

-}

-

-

-long long Block::GetTrackNumber() const

-{

-    return m_track;

-}

-

-

-bool Block::IsKey() const

-{

-    return ((m_flags & static_cast<unsigned char>(1 << 7)) != 0);

-}

-

-unsigned char Block::Flags() const {

-    return m_flags;

-}

-

-void Block::SetKey(bool bKey)

-{

-    if (bKey)

-        m_flags |= static_cast<unsigned char>(1 << 7);

-    else

-        m_flags &= 0x7F;

-}

-

-

-long long Block::GetOffset() const

-{

-    return m_frameOff;

-}

-

-

-long Block::GetSize() const

-{

-    return m_frameSize;

-}

-

-

-long Block::Read(IMkvReader* pReader, unsigned char* buf) const

-{

-

-    assert(pReader);

-    assert(buf);

-

-    const long hr = pReader->Read(m_frameOff, m_frameSize, buf);

-

-    return hr;

-}

-

-

-}  //end namespace mkvparser

diff --git a/media/libstagefright/matroska/mkvparser.hpp b/media/libstagefright/matroska/mkvparser.hpp
deleted file mode 100644
index f7d8948..0000000
--- a/media/libstagefright/matroska/mkvparser.hpp
+++ /dev/null
@@ -1,556 +0,0 @@
-// Copyright (c) 2010 The WebM project authors. All Rights Reserved.

-//

-// Use of this source code is governed by a BSD-style license

-// that can be found in the LICENSE file in the root of the source

-// tree. An additional intellectual property rights grant can be found

-// in the file PATENTS.  All contributing project authors may

-// be found in the AUTHORS file in the root of the source tree.

-

-#ifndef MKVPARSER_HPP

-#define MKVPARSER_HPP

-

-#include <cstdlib>

-#include <cstdio>

-

-namespace mkvparser

-{

-

-const int E_FILE_FORMAT_INVALID = -2;

-const int E_BUFFER_NOT_FULL = -3;

-

-class IMkvReader

-{

-public:

-    virtual int Read(long long pos, long len, unsigned char* buf) = 0;

-    virtual int Length(long long* total, long long* available) = 0;

-protected:

-    virtual ~IMkvReader();

-};

-

-long long GetUIntLength(IMkvReader*, long long, long&);

-long long ReadUInt(IMkvReader*, long long, long&);

-long long SyncReadUInt(IMkvReader*, long long pos, long long stop, long&);

-long long UnserializeUInt(IMkvReader*, long long pos, long long size);

-float Unserialize4Float(IMkvReader*, long long);

-double Unserialize8Double(IMkvReader*, long long);

-short Unserialize2SInt(IMkvReader*, long long);

-signed char Unserialize1SInt(IMkvReader*, long long);

-bool Match(IMkvReader*, long long&, unsigned long, long long&);

-bool Match(IMkvReader*, long long&, unsigned long, char*&);

-bool Match(IMkvReader*, long long&, unsigned long,unsigned char*&, size_t&);

-bool Match(IMkvReader*, long long&, unsigned long, double&);

-bool Match(IMkvReader*, long long&, unsigned long, short&);

-

-void GetVersion(int& major, int& minor, int& build, int& revision);

-

-struct EBMLHeader

-{

-    EBMLHeader();

-    ~EBMLHeader();

-    long long m_version;

-    long long m_readVersion;

-    long long m_maxIdLength;

-    long long m_maxSizeLength;

-    char* m_docType;

-    long long m_docTypeVersion;

-    long long m_docTypeReadVersion;

-

-    long long Parse(IMkvReader*, long long&);

-};

-

-

-class Segment;

-class Track;

-class Cluster;

-

-class Block

-{

-    Block(const Block&);

-    Block& operator=(const Block&);

-

-public:

-    const long long m_start;

-    const long long m_size;

-

-    Block(long long start, long long size, IMkvReader*);

-

-    long long GetTrackNumber() const;

-    long long GetTimeCode(Cluster*) const;  //absolute, but not scaled

-    long long GetTime(Cluster*) const;      //absolute, and scaled (ns units)

-    bool IsKey() const;

-    void SetKey(bool);

-

-    unsigned char Flags() const;

-

-    long long GetOffset() const;

-    long GetSize() const;

-    long Read(IMkvReader*, unsigned char*) const;

-

-private:

-    long long m_track;   //Track::Number()

-    short m_timecode;  //relative to cluster

-    unsigned char m_flags;

-    long long m_frameOff;

-    long m_frameSize;

-

-};

-

-

-class BlockEntry

-{

-    BlockEntry(const BlockEntry&);

-    BlockEntry& operator=(const BlockEntry&);

-

-public:

-    virtual ~BlockEntry();

-    virtual bool EOS() const = 0;

-    virtual Cluster* GetCluster() const = 0;

-    virtual size_t GetIndex() const = 0;

-    virtual const Block* GetBlock() const = 0;

-    virtual bool IsBFrame() const = 0;

-

-protected:

-    BlockEntry();

-

-};

-

-

-class SimpleBlock : public BlockEntry

-{

-    SimpleBlock(const SimpleBlock&);

-    SimpleBlock& operator=(const SimpleBlock&);

-

-public:

-    SimpleBlock(Cluster*, size_t, long long start, long long size);

-

-    bool EOS() const;

-    Cluster* GetCluster() const;

-    size_t GetIndex() const;

-    const Block* GetBlock() const;

-    bool IsBFrame() const;

-

-protected:

-    Cluster* const m_pCluster;

-    const size_t m_index;

-    Block m_block;

-

-};

-

-

-class BlockGroup : public BlockEntry

-{

-    BlockGroup(const BlockGroup&);

-    BlockGroup& operator=(const BlockGroup&);

-

-public:

-    BlockGroup(Cluster*, size_t, long long, long long);

-    ~BlockGroup();

-

-    bool EOS() const;

-    Cluster* GetCluster() const;

-    size_t GetIndex() const;

-    const Block* GetBlock() const;

-    bool IsBFrame() const;

-

-    short GetPrevTimeCode() const;  //relative to block's time

-    short GetNextTimeCode() const;  //as above

-

-protected:

-    Cluster* const m_pCluster;

-    const size_t m_index;

-

-private:

-    BlockGroup(Cluster*, size_t, unsigned long);
-    void ParseBlock(long long start, long long size);
-
-    short m_prevTimeCode;
-    short m_nextTimeCode;
-
-    //TODO: the Matroska spec says you can have multiple blocks within the
-    //same block group, with blocks ranked by priority (the flag bits).
-    //For now we just cache a single block.
-#if 0
-    typedef std::deque<Block*> blocks_t;
-    blocks_t m_blocks;  //In practice should contain only a single element.
-#else
-    Block* m_pBlock;
-#endif
-
-};
-
-
-class Track
-{
-    Track(const Track&);
-    Track& operator=(const Track&);
-
-public:
-    Segment* const m_pSegment;
-    virtual ~Track();
-
-    long long GetType() const;
-    long long GetNumber() const;
-    const char* GetNameAsUTF8() const;
-    const char* GetCodecNameAsUTF8() const;
-    const char* GetCodecId() const;
-    const unsigned char* GetCodecPrivate(size_t&) const;
-
-    const BlockEntry* GetEOS() const;
-
-    struct Settings
-    {
-        long long start;
-        long long size;
-    };
-
-    struct Info
-    {
-        long long type;
-        long long number;
-        long long uid;
-        char* nameAsUTF8;
-        char* codecId;
-        unsigned char* codecPrivate;
-        size_t codecPrivateSize;
-        char* codecNameAsUTF8;
-        Settings settings;
-        Info();
-        void Clear();
-    };
-
-    long GetFirst(const BlockEntry*&) const;
-    long GetNext(const BlockEntry* pCurr, const BlockEntry*& pNext) const;
-    virtual bool VetEntry(const BlockEntry*) const = 0;
-
-protected:
-    Track(Segment*, const Info&);
-    const Info m_info;
-
-    class EOSBlock : public BlockEntry
-    {
-    public:
-        EOSBlock();
-
-        bool EOS() const;
-        Cluster* GetCluster() const;
-        size_t GetIndex() const;
-        const Block* GetBlock() const;
-        bool IsBFrame() const;
-    };
-
-    EOSBlock m_eos;
-
-};
-
-
-class VideoTrack : public Track
-{
-    VideoTrack(const VideoTrack&);
-    VideoTrack& operator=(const VideoTrack&);
-
-public:
-    VideoTrack(Segment*, const Info&);
-    long long GetWidth() const;
-    long long GetHeight() const;
-    double GetFrameRate() const;
-
-    bool VetEntry(const BlockEntry*) const;
-
-private:
-    long long m_width;
-    long long m_height;
-    double m_rate;
-
-};
-
-
-class AudioTrack : public Track
-{
-    AudioTrack(const AudioTrack&);
-    AudioTrack& operator=(const AudioTrack&);
-
-public:
-    AudioTrack(Segment*, const Info&);
-    double GetSamplingRate() const;
-    long long GetChannels() const;
-    long long GetBitDepth() const;
-    bool VetEntry(const BlockEntry*) const;
-
-private:
-    double m_rate;
-    long long m_channels;
-    long long m_bitDepth;
-};
-
-
-class Tracks
-{
-    Tracks(const Tracks&);
-    Tracks& operator=(const Tracks&);
-
-public:
-    Segment* const m_pSegment;
-    const long long m_start;
-    const long long m_size;
-
-    Tracks(Segment*, long long start, long long size);
-    virtual ~Tracks();
-
-    Track* GetTrackByNumber(unsigned long tn) const;
-    Track* GetTrackByIndex(unsigned long idx) const;
-
-private:
-    Track** m_trackEntries;
-    Track** m_trackEntriesEnd;
-
-    void ParseTrackEntry(long long, long long, Track*&);
-
-public:
-    unsigned long GetTracksCount() const;
-};
-
-
-class SegmentInfo
-{
-    SegmentInfo(const SegmentInfo&);
-    SegmentInfo& operator=(const SegmentInfo&);
-
-public:
-    Segment* const m_pSegment;
-    const long long m_start;
-    const long long m_size;
-
-    SegmentInfo(Segment*, long long start, long long size);
-    ~SegmentInfo();
-    long long GetTimeCodeScale() const;
-    long long GetDuration() const;  //scaled
-    const char* GetMuxingAppAsUTF8() const;
-    const char* GetWritingAppAsUTF8() const;
-    const char* GetTitleAsUTF8() const;
-
-private:
-    long long m_timecodeScale;
-    double m_duration;
-    char* m_pMuxingAppAsUTF8;
-    char* m_pWritingAppAsUTF8;
-    char* m_pTitleAsUTF8;
-};
-
-class Cues;
-class CuePoint
-{
-    friend class Cues;
-
-    CuePoint(size_t, long long);
-    ~CuePoint();
-
-    CuePoint(const CuePoint&);
-    CuePoint& operator=(const CuePoint&);
-
-public:
-    void Load(IMkvReader*);
-
-    long long GetTimeCode() const;      //absolute but unscaled
-    long long GetTime(Segment*) const;  //absolute and scaled (ns units)
-
-    struct TrackPosition
-    {
-        long long m_track;
-        long long m_pos;  //of cluster
-        long long m_block;
-        //codec_state  //defaults to 0
-        //reference = clusters containing req'd referenced blocks
-        //  reftime = timecode of the referenced block
-
-        void Parse(IMkvReader*, long long, long long);
-    };
-
-    const TrackPosition* Find(const Track*) const;
-
-private:
-    const size_t m_index;
-    long long m_timecode;
-    TrackPosition* m_track_positions;
-    size_t m_track_positions_count;
-
-};
-
-
-class Cues
-{
-    friend class Segment;
-
-    Cues(Segment*, long long start, long long size);
-    ~Cues();
-
-    Cues(const Cues&);
-    Cues& operator=(const Cues&);
-
-public:
-    Segment* const m_pSegment;
-    const long long m_start;
-    const long long m_size;
-
-    bool Find(  //lower bound of time_ns
-        long long time_ns,
-        const Track*,
-        const CuePoint*&,
-        const CuePoint::TrackPosition*&) const;
-
-#if 0
-    bool FindNext(  //upper_bound of time_ns
-        long long time_ns,
-        const Track*,
-        const CuePoint*&,
-        const CuePoint::TrackPosition*&) const;
-#endif
-
-    const CuePoint* GetFirst() const;
-    const CuePoint* GetLast() const;
-
-    const CuePoint* GetNext(const CuePoint*) const;
-
-    const BlockEntry* GetBlock(
-                        const CuePoint*,
-                        const CuePoint::TrackPosition*) const;
-
-private:
-    void Init() const;
-    bool LoadCuePoint() const;
-    void PreloadCuePoint(size_t&, long long) const;
-
-    mutable CuePoint** m_cue_points;
-    mutable size_t m_count;
-    mutable size_t m_preload_count;
-    mutable long long m_pos;
-
-};
-
-
-class Cluster
-{
-    Cluster(const Cluster&);
-    Cluster& operator=(const Cluster&);
-
-public:
-    Segment* const m_pSegment;
-
-public:
-    static Cluster* Parse(Segment*, long, long long off);
-
-    Cluster();  //EndOfStream
-    ~Cluster();
-
-    bool EOS() const;
-
-    long long GetTimeCode();   //absolute, but not scaled
-    long long GetTime();       //absolute, and scaled (nanosecond units)
-    long long GetFirstTime();  //time (ns) of first (earliest) block
-    long long GetLastTime();   //time (ns) of last (latest) block
-
-    const BlockEntry* GetFirst();
-    const BlockEntry* GetLast();
-    const BlockEntry* GetNext(const BlockEntry*) const;
-    const BlockEntry* GetEntry(const Track*);
-    const BlockEntry* GetEntry(
-        const CuePoint&,
-        const CuePoint::TrackPosition&);
-    const BlockEntry* GetMaxKey(const VideoTrack*);
-
-protected:
-    Cluster(Segment*, long, long long off);
-
-public:
-    //TODO: these should all be private, with public selector functions
-    long m_index;
-    long long m_pos;
-    long long m_size;
-
-private:
-    long long m_timecode;
-    BlockEntry** m_entries;
-    size_t m_entriesCount;
-
-    void Load();
-    void LoadBlockEntries();
-    void ParseBlockGroup(long long, long long, size_t);
-    void ParseSimpleBlock(long long, long long, size_t);
-
-};
-
-
-class Segment
-{
-    friend class Cues;
-
-    Segment(const Segment&);
-    Segment& operator=(const Segment&);
-
-private:
-    Segment(IMkvReader*, long long pos, long long size);
-
-public:
-    IMkvReader* const m_pReader;
-    const long long m_start;  //posn of segment payload
-    const long long m_size;   //size of segment payload
-    Cluster m_eos;  //TODO: make private?
-
-    static long long CreateInstance(IMkvReader*, long long, Segment*&);
-    ~Segment();
-
-    long Load();  //loads headers and all clusters
-
-    //for incremental loading (splitter)
-    long long Unparsed() const;
-    long long ParseHeaders();  //stops when first cluster is found
-    long LoadCluster();        //loads one cluster
-
-#if 0
-    //This pair parses one cluster, but only changes the state of the
-    //segment object when the cluster is actually added to the index.
-    long ParseCluster(Cluster*&, long long& newpos) const;
-    bool AddCluster(Cluster*, long long);
-#endif
-
-    Tracks* GetTracks() const;
-    const SegmentInfo* GetInfo() const;
-    const Cues* GetCues() const;
-
-    long long GetDuration() const;
-
-    unsigned long GetCount() const;
-    Cluster* GetFirst();
-    Cluster* GetLast();
-    Cluster* GetNext(const Cluster*);
-
-    Cluster* FindCluster(long long time_nanoseconds);
-    const BlockEntry* Seek(long long time_nanoseconds, const Track*);
-
-private:
-
-    long long m_pos;  //absolute file posn; what has been consumed so far
-    SegmentInfo* m_pInfo;
-    Tracks* m_pTracks;
-    Cues* m_pCues;
-    Cluster** m_clusters;
-    long m_clusterCount;         //number of entries for which m_index >= 0
-    long m_clusterPreloadCount;  //number of entries for which m_index < 0
-    long m_clusterSize;          //array size
-
-    void AppendCluster(Cluster*);
-    void PreloadCluster(Cluster*, ptrdiff_t);
-
-    void ParseSeekHead(long long pos, long long size);
-    void ParseSeekEntry(long long pos, long long size);
-    void ParseCues(long long);
-
-    const BlockEntry* GetBlock(
-        const CuePoint&,
-        const CuePoint::TrackPosition&);
-
-};
-
-
-}  //end namespace mkvparser
-
-#endif  //MKVPARSER_HPP
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index c88c6c1..7d4bc6e 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -32,6 +32,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
+#include <media/IStreamSource.h>
 #include <utils/KeyedVector.h>
 
 namespace android {
@@ -43,19 +44,27 @@
 static const size_t kTSPacketSize = 188;
 
 struct ATSParser::Program : public RefBase {
-    Program(unsigned programMapPID);
+    Program(ATSParser *parser, unsigned programMapPID);
 
     bool parsePID(
             unsigned pid, unsigned payload_unit_start_indicator,
             ABitReader *br);
 
-    void signalDiscontinuity(bool isASeek);
+    void signalDiscontinuity(
+            DiscontinuityType type, const sp<AMessage> &extra);
+
+    void signalEOS(status_t finalResult);
 
     sp<MediaSource> getSource(SourceType type);
 
     int64_t convertPTSToTimestamp(uint64_t PTS);
 
+    bool PTSTimeDeltaEstablished() const {
+        return mFirstPTSValid;
+    }
+
 private:
+    ATSParser *mParser;
     unsigned mProgramMapPID;
     KeyedVector<unsigned, sp<Stream> > mStreams;
     bool mFirstPTSValid;
@@ -69,11 +78,18 @@
 struct ATSParser::Stream : public RefBase {
     Stream(Program *program, unsigned elementaryPID, unsigned streamType);
 
+    unsigned type() const { return mStreamType; }
+    unsigned pid() const { return mElementaryPID; }
+    void setPID(unsigned pid) { mElementaryPID = pid; }
+
     void parse(
             unsigned payload_unit_start_indicator,
             ABitReader *br);
 
-    void signalDiscontinuity(bool isASeek);
+    void signalDiscontinuity(
+            DiscontinuityType type, const sp<AMessage> &extra);
+
+    void signalEOS(status_t finalResult);
 
     sp<MediaSource> getSource(SourceType type);
 
@@ -88,6 +104,8 @@
     sp<ABuffer> mBuffer;
     sp<AnotherPacketSource> mSource;
     bool mPayloadStarted;
+    DiscontinuityType mPendingDiscontinuity;
+    sp<AMessage> mPendingDiscontinuityExtra;
 
     ElementaryStreamQueue mQueue;
 
@@ -100,13 +118,17 @@
 
     void extractAACFrames(const sp<ABuffer> &buffer);
 
+    void deferDiscontinuity(
+            DiscontinuityType type, const sp<AMessage> &extra);
+
     DISALLOW_EVIL_CONSTRUCTORS(Stream);
 };
 
 ////////////////////////////////////////////////////////////////////////////////
 
-ATSParser::Program::Program(unsigned programMapPID)
-    : mProgramMapPID(programMapPID),
+ATSParser::Program::Program(ATSParser *parser, unsigned programMapPID)
+    : mParser(parser),
+      mProgramMapPID(programMapPID),
       mFirstPTSValid(false),
       mFirstPTS(0) {
 }
@@ -135,12 +157,24 @@
     return true;
 }
 
-void ATSParser::Program::signalDiscontinuity(bool isASeek) {
+void ATSParser::Program::signalDiscontinuity(
+        DiscontinuityType type, const sp<AMessage> &extra) {
     for (size_t i = 0; i < mStreams.size(); ++i) {
-        mStreams.editValueAt(i)->signalDiscontinuity(isASeek);
+        mStreams.editValueAt(i)->signalDiscontinuity(type, extra);
     }
 }
 
+void ATSParser::Program::signalEOS(status_t finalResult) {
+    for (size_t i = 0; i < mStreams.size(); ++i) {
+        mStreams.editValueAt(i)->signalEOS(finalResult);
+    }
+}
+
+struct StreamInfo {
+    unsigned mType;
+    unsigned mPID;
+};
+
 void ATSParser::Program::parseProgramMap(ABitReader *br) {
     unsigned table_id = br->getBits(8);
     LOGV("  table_id = %u", table_id);
@@ -155,7 +189,7 @@
 
     unsigned section_length = br->getBits(12);
     LOGV("  section_length = %u", section_length);
-    CHECK((section_length & 0xc00) == 0);
+    CHECK_EQ(section_length & 0xc00, 0u);
     CHECK_LE(section_length, 1021u);
 
     MY_LOGV("  program_number = %u", br->getBits(16));
@@ -170,10 +204,12 @@
 
     unsigned program_info_length = br->getBits(12);
     LOGV("  program_info_length = %u", program_info_length);
-    CHECK((program_info_length & 0xc00) == 0);
+    CHECK_EQ(program_info_length & 0xc00, 0u);
 
     br->skipBits(program_info_length * 8);  // skip descriptors
 
+    Vector<StreamInfo> infos;
+
     // infoBytesRemaining is the number of bytes that make up the
     // variable length section of ES_infos. It does not include the
     // final CRC.
@@ -194,7 +230,7 @@
 
         unsigned ES_info_length = br->getBits(12);
         LOGV("    ES_info_length = %u", ES_info_length);
-        CHECK((ES_info_length & 0xc00) == 0);
+        CHECK_EQ(ES_info_length & 0xc00, 0u);
 
         CHECK_GE(infoBytesRemaining - 5, ES_info_length);
 
@@ -217,31 +253,61 @@
         CHECK_EQ(info_bytes_remaining, 0u);
 #endif
 
-        ssize_t index = mStreams.indexOfKey(elementaryPID);
-#if 0  // XXX revisit
-        CHECK_LT(index, 0);
-        mStreams.add(elementaryPID,
-                     new Stream(this, elementaryPID, streamType));
-#else
-        if (index < 0) {
-            mStreams.add(elementaryPID,
-                         new Stream(this, elementaryPID, streamType));
-        }
-#endif
+        StreamInfo info;
+        info.mType = streamType;
+        info.mPID = elementaryPID;
+        infos.push(info);
 
         infoBytesRemaining -= 5 + ES_info_length;
     }
 
     CHECK_EQ(infoBytesRemaining, 0u);
-
     MY_LOGV("  CRC = 0x%08x", br->getBits(32));
+
+    bool PIDsChanged = false;
+    for (size_t i = 0; i < infos.size(); ++i) {
+        StreamInfo &info = infos.editItemAt(i);
+
+        ssize_t index = mStreams.indexOfKey(info.mPID);
+
+        if (index >= 0 && mStreams.editValueAt(index)->type() != info.mType) {
+            LOGI("uh oh. stream PIDs have changed.");
+            PIDsChanged = true;
+            break;
+        }
+    }
+
+    if (PIDsChanged) {
+        mStreams.clear();
+    }
+
+    for (size_t i = 0; i < infos.size(); ++i) {
+        StreamInfo &info = infos.editItemAt(i);
+
+        ssize_t index = mStreams.indexOfKey(info.mPID);
+
+        if (index < 0) {
+            sp<Stream> stream = new Stream(this, info.mPID, info.mType);
+            mStreams.add(info.mPID, stream);
+
+            if (PIDsChanged) {
+                sp<AMessage> extra;
+                stream->signalDiscontinuity(DISCONTINUITY_FORMATCHANGE, extra);
+            }
+        }
+    }
 }
 
 sp<MediaSource> ATSParser::Program::getSource(SourceType type) {
+    size_t index = (type == MPEG2ADTS_AUDIO) ? 0 : 0;
+
     for (size_t i = 0; i < mStreams.size(); ++i) {
         sp<MediaSource> source = mStreams.editValueAt(i)->getSource(type);
         if (source != NULL) {
-            return source;
+            if (index == 0) {
+                return source;
+            }
+            --index;
         }
     }
 
@@ -269,11 +335,14 @@
     : mProgram(program),
       mElementaryPID(elementaryPID),
       mStreamType(streamType),
-      mBuffer(new ABuffer(128 * 1024)),
+      mBuffer(new ABuffer(192 * 1024)),
       mPayloadStarted(false),
+      mPendingDiscontinuity(DISCONTINUITY_NONE),
       mQueue(streamType == 0x1b
               ? ElementaryStreamQueue::H264 : ElementaryStreamQueue::AAC) {
     mBuffer->setRange(0, 0);
+
+    LOGV("new stream PID 0x%02x, type 0x%02x", elementaryPID, streamType);
 }
 
 ATSParser::Stream::~Stream() {
@@ -298,32 +367,75 @@
     }
 
     size_t payloadSizeBits = br->numBitsLeft();
-    CHECK((payloadSizeBits % 8) == 0);
+    CHECK_EQ(payloadSizeBits % 8, 0u);
 
-    CHECK_LE(mBuffer->size() + payloadSizeBits / 8, mBuffer->capacity());
+    size_t neededSize = mBuffer->size() + payloadSizeBits / 8;
+    if (mBuffer->capacity() < neededSize) {
+        // Increment in multiples of 64K.
+        neededSize = (neededSize + 65535) & ~65535;
+
+        LOGI("resizing buffer to %d bytes", neededSize);
+
+        sp<ABuffer> newBuffer = new ABuffer(neededSize);
+        memcpy(newBuffer->data(), mBuffer->data(), mBuffer->size());
+        newBuffer->setRange(0, mBuffer->size());
+        mBuffer = newBuffer;
+    }
 
     memcpy(mBuffer->data() + mBuffer->size(), br->data(), payloadSizeBits / 8);
     mBuffer->setRange(0, mBuffer->size() + payloadSizeBits / 8);
 }
 
-void ATSParser::Stream::signalDiscontinuity(bool isASeek) {
-    LOGV("Stream discontinuity");
+void ATSParser::Stream::signalDiscontinuity(
+        DiscontinuityType type, const sp<AMessage> &extra) {
     mPayloadStarted = false;
     mBuffer->setRange(0, 0);
 
-    mQueue.clear();
+    switch (type) {
+        case DISCONTINUITY_SEEK:
+        case DISCONTINUITY_FORMATCHANGE:
+        {
+            bool isASeek = (type == DISCONTINUITY_SEEK);
 
-    if (isASeek) {
-        // This is only a "minor" discontinuity, we stay within the same
-        // bitstream.
+            mQueue.clear(!isASeek);
 
-        mSource->clear();
-        return;
+            uint64_t resumeAtPTS;
+            if (extra != NULL
+                    && extra->findInt64(
+                        IStreamListener::kKeyResumeAtPTS,
+                        (int64_t *)&resumeAtPTS)) {
+                int64_t resumeAtMediaTimeUs =
+                    mProgram->convertPTSToTimestamp(resumeAtPTS);
+
+                extra->setInt64("resume-at-mediatimeUs", resumeAtMediaTimeUs);
+            }
+
+            if (mSource != NULL) {
+                mSource->queueDiscontinuity(type, extra);
+            } else {
+                deferDiscontinuity(type, extra);
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
     }
+}
 
-    if (mStreamType == 0x1b && mSource != NULL) {
-        // Don't signal discontinuities on audio streams.
-        mSource->queueDiscontinuity();
+void ATSParser::Stream::deferDiscontinuity(
+        DiscontinuityType type, const sp<AMessage> &extra) {
+    if (type > mPendingDiscontinuity) {
+        // Only upgrade discontinuities.
+        mPendingDiscontinuity = type;
+        mPendingDiscontinuityExtra = extra;
+    }
+}
+
+void ATSParser::Stream::signalEOS(status_t finalResult) {
+    if (mSource != NULL) {
+        mSource->signalEOS(finalResult);
     }
 }
 
@@ -468,7 +580,7 @@
                     br->data(), br->numBitsLeft() / 8);
 
             size_t payloadSizeBits = br->numBitsLeft();
-            CHECK((payloadSizeBits % 8) == 0);
+            CHECK_EQ(payloadSizeBits % 8, 0u);
 
             LOGV("There's %d bytes of payload.", payloadSizeBits / 8);
         }
@@ -503,7 +615,10 @@
     int64_t timeUs = mProgram->convertPTSToTimestamp(PTS);
 
     status_t err = mQueue.appendData(data, size, timeUs);
-    CHECK_EQ(err, (status_t)OK);
+
+    if (err != OK) {
+        return;
+    }
 
     sp<ABuffer> accessUnit;
     while ((accessUnit = mQueue.dequeueAccessUnit()) != NULL) {
@@ -513,12 +628,24 @@
             if (meta != NULL) {
                 LOGV("created source!");
                 mSource = new AnotherPacketSource(meta);
+
+                if (mPendingDiscontinuity != DISCONTINUITY_NONE) {
+                    mSource->queueDiscontinuity(
+                            mPendingDiscontinuity, mPendingDiscontinuityExtra);
+                    mPendingDiscontinuity = DISCONTINUITY_NONE;
+                    mPendingDiscontinuityExtra.clear();
+                }
+
                 mSource->queueAccessUnit(accessUnit);
             }
         } else if (mQueue.getFormat() != NULL) {
             // After a discontinuity we invalidate the queue's format
             // and won't enqueue any access units to the source until
             // the queue has reestablished the new format.
+
+            if (mSource->getFormat() == NULL) {
+                mSource->setFormat(mQueue.getFormat());
+            }
             mSource->queueAccessUnit(accessUnit);
         }
     }
@@ -548,9 +675,18 @@
     parseTS(&br);
 }
 
-void ATSParser::signalDiscontinuity(bool isASeek) {
+void ATSParser::signalDiscontinuity(
+        DiscontinuityType type, const sp<AMessage> &extra) {
     for (size_t i = 0; i < mPrograms.size(); ++i) {
-        mPrograms.editItemAt(i)->signalDiscontinuity(isASeek);
+        mPrograms.editItemAt(i)->signalDiscontinuity(type, extra);
+    }
+}
+
+void ATSParser::signalEOS(status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    for (size_t i = 0; i < mPrograms.size(); ++i) {
+        mPrograms.editItemAt(i)->signalEOS(finalResult);
     }
 }
 
@@ -568,7 +704,7 @@
 
     unsigned section_length = br->getBits(12);
     LOGV("  section_length = %u", section_length);
-    CHECK((section_length & 0xc00) == 0);
+    CHECK_EQ(section_length & 0xc00, 0u);
 
     MY_LOGV("  transport_stream_id = %u", br->getBits(16));
     MY_LOGV("  reserved = %u", br->getBits(2));
@@ -593,7 +729,7 @@
 
             LOGV("    program_map_PID = 0x%04x", programMapPID);
 
-            mPrograms.push(new Program(programMapPID));
+            mPrograms.push(new Program(this, programMapPID));
         }
     }
 
@@ -680,4 +816,12 @@
     return NULL;
 }
 
+bool ATSParser::PTSTimeDeltaEstablished() {
+    if (mPrograms.isEmpty()) {
+        return false;
+    }
+
+    return mPrograms.editItemAt(0)->PTSTimeDeltaEstablished();
+}
+
 }  // namespace android
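Note: the Stream::parse() change above replaces the hard CHECK on buffer capacity with on-demand growth in 64 KiB steps. A minimal standalone sketch of that rounding policy follows; the std::vector substitute and function names are mine, not part of the patch.

// Illustrative only: standalone analogue of the buffer-growth policy above.
#include <cstddef>
#include <cstdio>
#include <cstring>
#include <vector>

static void appendPayload(std::vector<unsigned char> &buf,
                          const unsigned char *data, size_t size) {
    size_t neededSize = buf.size() + size;
    if (buf.capacity() < neededSize) {
        // Round the new capacity up to the next multiple of 64 KiB,
        // mirroring "(neededSize + 65535) & ~65535" in the patch.
        neededSize = (neededSize + 65535) & ~static_cast<size_t>(65535);
        std::printf("resizing buffer to %zu bytes\n", neededSize);
        buf.reserve(neededSize);
    }
    buf.insert(buf.end(), data, data + size);
}

int main() {
    std::vector<unsigned char> buffer;
    unsigned char payload[184];
    std::memset(payload, 0, sizeof(payload));
    for (int i = 0; i < 1000; ++i) {
        appendPayload(buffer, payload, sizeof(payload));  // grows in 64 KiB steps
    }
    std::printf("final size: %zu\n", buffer.size());
    return 0;
}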
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 11b1de4..3936f05 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -21,19 +21,31 @@
 #include <sys/types.h>
 
 #include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <utils/Vector.h>
 #include <utils/RefBase.h>
 
 namespace android {
 
 struct ABitReader;
+struct ABuffer;
 struct MediaSource;
 
 struct ATSParser : public RefBase {
+    enum DiscontinuityType {
+        DISCONTINUITY_NONE,
+        DISCONTINUITY_SEEK,
+        DISCONTINUITY_FORMATCHANGE
+    };
+
     ATSParser();
 
     void feedTSPacket(const void *data, size_t size);
-    void signalDiscontinuity(bool isASeek = false);
+
+    void signalDiscontinuity(
+            DiscontinuityType type, const sp<AMessage> &extra);
+
+    void signalEOS(status_t finalResult);
 
     enum SourceType {
         AVC_VIDEO,
@@ -41,6 +53,8 @@
     };
     sp<MediaSource> getSource(SourceType type);
 
+    bool PTSTimeDeltaEstablished();
+
 protected:
     virtual ~ATSParser();
 
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index ea747c8..59de17e 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -33,6 +33,11 @@
       mEOSResult(OK) {
 }
 
+void AnotherPacketSource::setFormat(const sp<MetaData> &meta) {
+    CHECK(mFormat == NULL);
+    mFormat = meta;
+}
+
 AnotherPacketSource::~AnotherPacketSource() {
 }
 
@@ -48,6 +53,34 @@
     return mFormat;
 }
 
+status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) {
+    buffer->clear();
+
+    Mutex::Autolock autoLock(mLock);
+    while (mEOSResult == OK && mBuffers.empty()) {
+        mCondition.wait(mLock);
+    }
+
+    if (!mBuffers.empty()) {
+        *buffer = *mBuffers.begin();
+        mBuffers.erase(mBuffers.begin());
+
+        int32_t discontinuity;
+        if ((*buffer)->meta()->findInt32("discontinuity", &discontinuity)) {
+
+            if (discontinuity == ATSParser::DISCONTINUITY_FORMATCHANGE) {
+                mFormat.clear();
+            }
+
+            return INFO_DISCONTINUITY;
+        }
+
+        return OK;
+    }
+
+    return mEOSResult;
+}
+
 status_t AnotherPacketSource::read(
         MediaBuffer **out, const ReadOptions *) {
     *out = NULL;
@@ -62,13 +95,15 @@
         mBuffers.erase(mBuffers.begin());
 
         int32_t discontinuity;
-        if (buffer->meta()->findInt32("discontinuity", &discontinuity)
-                && discontinuity) {
+        if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+            if (discontinuity == ATSParser::DISCONTINUITY_FORMATCHANGE) {
+                mFormat.clear();
+            }
+
             return INFO_DISCONTINUITY;
         } else {
-            uint64_t timeUs;
-            CHECK(buffer->meta()->findInt64(
-                        "time", (int64_t *)&timeUs));
+            int64_t timeUs;
+            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
             MediaBuffer *mediaBuffer = new MediaBuffer(buffer->size());
             mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
@@ -92,7 +127,7 @@
     }
 
     int64_t timeUs;
-    CHECK(buffer->meta()->findInt64("time", &timeUs));
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
     LOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", timeUs, timeUs / 1E6);
 
     Mutex::Autolock autoLock(mLock);
@@ -100,22 +135,32 @@
     mCondition.signal();
 }
 
-void AnotherPacketSource::queueDiscontinuity() {
+void AnotherPacketSource::queueDiscontinuity(
+        ATSParser::DiscontinuityType type,
+        const sp<AMessage> &extra) {
     sp<ABuffer> buffer = new ABuffer(0);
-    buffer->meta()->setInt32("discontinuity", true);
+    buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type));
+    buffer->meta()->setMessage("extra", extra);
 
     Mutex::Autolock autoLock(mLock);
 
+#if 0
+    if (type == ATSParser::DISCONTINUITY_SEEK
+            || type == ATSParser::DISCONTINUITY_FORMATCHANGE) {
+        // XXX Fix this: This will also clear any pending discontinuities,
+        // If there's a pending DISCONTINUITY_FORMATCHANGE and the new
+        // discontinuity is "just" a DISCONTINUITY_SEEK, this will effectively
+        // downgrade the type of discontinuity received by the client.
+
+        mBuffers.clear();
+        mEOSResult = OK;
+    }
+#endif
+
     mBuffers.push_back(buffer);
     mCondition.signal();
 }
 
-void AnotherPacketSource::clear() {
-    Mutex::Autolock autoLock(mLock);
-    mBuffers.clear();
-    mEOSResult = OK;
-}
-
 void AnotherPacketSource::signalEOS(status_t result) {
     CHECK(result != OK);
 
@@ -134,4 +179,19 @@
     return false;
 }
 
+status_t AnotherPacketSource::nextBufferTime(int64_t *timeUs) {
+    *timeUs = 0;
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mBuffers.empty()) {
+        return mEOSResult != OK ? mEOSResult : -EWOULDBLOCK;
+    }
+
+    sp<ABuffer> buffer = *mBuffers.begin();
+    CHECK(buffer->meta()->findInt64("timeUs", timeUs));
+
+    return OK;
+}
+
 }  // namespace android
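Note: the new dequeueAccessUnit() above blocks on a condition variable until either a buffer arrives or EOS has been signalled. The following standalone sketch shows the same producer/consumer pattern with the C++ standard library; the class name and int payload are placeholders, not part of the patch.

// Illustrative only: blocking dequeue with EOS, modeled on dequeueAccessUnit().
#include <condition_variable>
#include <deque>
#include <mutex>
#include <optional>

class PacketQueue {
public:
    // Blocks until a packet is available or EOS has been signalled.
    // Returns std::nullopt once the queue is drained after EOS.
    std::optional<int> dequeue() {
        std::unique_lock<std::mutex> lock(mLock);
        mCondition.wait(lock, [this] { return mEOS || !mBuffers.empty(); });
        if (mBuffers.empty()) {
            return std::nullopt;  // EOS and nothing left to hand out
        }
        int packet = mBuffers.front();
        mBuffers.pop_front();
        return packet;
    }

    void queue(int packet) {
        std::lock_guard<std::mutex> lock(mLock);
        mBuffers.push_back(packet);
        mCondition.notify_one();
    }

    void signalEOS() {
        std::lock_guard<std::mutex> lock(mLock);
        mEOS = true;
        mCondition.notify_all();
    }

private:
    std::mutex mLock;
    std::condition_variable mCondition;
    std::deque<int> mBuffers;
    bool mEOS = false;
};

int main() {
    PacketQueue q;
    q.queue(42);
    q.signalEOS();
    while (auto p = q.dequeue()) { /* consume *p */ }
    return 0;
}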
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index 6999175..439c785 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -23,6 +23,8 @@
 #include <utils/threads.h>
 #include <utils/List.h>
 
+#include "ATSParser.h"
+
 namespace android {
 
 struct ABuffer;
@@ -30,6 +32,8 @@
 struct AnotherPacketSource : public MediaSource {
     AnotherPacketSource(const sp<MetaData> &meta);
 
+    void setFormat(const sp<MetaData> &meta);
+
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
     virtual sp<MetaData> getFormat();
@@ -39,11 +43,16 @@
 
     bool hasBufferAvailable(status_t *finalResult);
 
+    status_t nextBufferTime(int64_t *timeUs);
+
     void queueAccessUnit(const sp<ABuffer> &buffer);
-    void queueDiscontinuity();
+
+    void queueDiscontinuity(
+            ATSParser::DiscontinuityType type, const sp<AMessage> &extra);
+
     void signalEOS(status_t result);
 
-    void clear();
+    status_t dequeueAccessUnit(sp<ABuffer> *buffer);
 
 protected:
     virtual ~AnotherPacketSource();
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index b0b9e66..dcaf9f7 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -40,10 +40,43 @@
     return mFormat;
 }
 
-void ElementaryStreamQueue::clear() {
-    mBuffer->setRange(0, 0);
-    mTimestamps.clear();
-    mFormat.clear();
+void ElementaryStreamQueue::clear(bool clearFormat) {
+    if (mBuffer != NULL) {
+        mBuffer->setRange(0, 0);
+    }
+
+    mRangeInfos.clear();
+
+    if (clearFormat) {
+        mFormat.clear();
+    }
+}
+
+static bool IsSeeminglyValidADTSHeader(const uint8_t *ptr, size_t size) {
+    if (size < 3) {
+        // Not enough data to verify header.
+        return false;
+    }
+
+    if (ptr[0] != 0xff || (ptr[1] >> 4) != 0x0f) {
+        return false;
+    }
+
+    unsigned layer = (ptr[1] >> 1) & 3;
+
+    if (layer != 0) {
+        return false;
+    }
+
+    unsigned ID = (ptr[1] >> 3) & 1;
+    unsigned profile_ObjectType = ptr[2] >> 6;
+
+    if (ID == 1 && profile_ObjectType == 3) {
+        // MPEG-2 profile 3 is reserved.
+        return false;
+    }
+
+    return true;
 }
 
 status_t ElementaryStreamQueue::appendData(
@@ -52,9 +85,34 @@
         switch (mMode) {
             case H264:
             {
+#if 0
                 if (size < 4 || memcmp("\x00\x00\x00\x01", data, 4)) {
                     return ERROR_MALFORMED;
                 }
+#else
+                uint8_t *ptr = (uint8_t *)data;
+
+                ssize_t startOffset = -1;
+                for (size_t i = 0; i + 3 < size; ++i) {
+                    if (!memcmp("\x00\x00\x00\x01", &ptr[i], 4)) {
+                        startOffset = i;
+                        break;
+                    }
+                }
+
+                if (startOffset < 0) {
+                    return ERROR_MALFORMED;
+                }
+
+                if (startOffset > 0) {
+                    LOGI("found something resembling an H.264 syncword at "
+                         "offset %ld",
+                         startOffset);
+                }
+
+                data = &ptr[startOffset];
+                size -= startOffset;
+#endif
                 break;
             }
 
@@ -62,9 +120,31 @@
             {
                 uint8_t *ptr = (uint8_t *)data;
 
+#if 0
                 if (size < 2 || ptr[0] != 0xff || (ptr[1] >> 4) != 0x0f) {
                     return ERROR_MALFORMED;
                 }
+#else
+                ssize_t startOffset = -1;
+                for (size_t i = 0; i < size; ++i) {
+                    if (IsSeeminglyValidADTSHeader(&ptr[i], size - i)) {
+                        startOffset = i;
+                        break;
+                    }
+                }
+
+                if (startOffset < 0) {
+                    return ERROR_MALFORMED;
+                }
+
+                if (startOffset > 0) {
+                    LOGI("found something resembling an AAC syncword at offset %ld",
+                         startOffset);
+                }
+
+                data = &ptr[startOffset];
+                size -= startOffset;
+#endif
                 break;
             }
 
@@ -94,7 +174,17 @@
     memcpy(mBuffer->data() + mBuffer->size(), data, size);
     mBuffer->setRange(0, mBuffer->size() + size);
 
-    mTimestamps.push_back(timeUs);
+    RangeInfo info;
+    info.mLength = size;
+    info.mTimestampUs = timeUs;
+    mRangeInfos.push_back(info);
+
+#if 0
+    if (mMode == AAC) {
+        LOGI("size = %d, timeUs = %.2f secs", size, timeUs / 1E6);
+        hexdump(data, size);
+    }
+#endif
 
     return OK;
 }
@@ -109,6 +199,7 @@
 }
 
 sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
+    Vector<size_t> ranges;
     Vector<size_t> frameOffsets;
     Vector<size_t> frameSizes;
     size_t auSize = 0;
@@ -134,6 +225,14 @@
 
             mFormat = MakeAACCodecSpecificData(
                     profile, sampling_freq_index, channel_configuration);
+
+            int32_t sampleRate;
+            int32_t numChannels;
+            CHECK(mFormat->findInt32(kKeySampleRate, &sampleRate));
+            CHECK(mFormat->findInt32(kKeyChannelCount, &numChannels));
+
+            LOGI("found AAC codec config (%d Hz, %d channels)",
+                 sampleRate, numChannels);
         } else {
             // profile_ObjectType, sampling_frequency_index, private_bits,
             // channel_configuration, original_copy, home
@@ -162,6 +261,7 @@
 
         size_t headerSize = protection_absent ? 7 : 9;
 
+        ranges.push(aac_frame_length);
         frameOffsets.push(offset + headerSize);
         frameSizes.push(aac_frame_length - headerSize);
         auSize += aac_frame_length - headerSize;
@@ -173,11 +273,23 @@
         return NULL;
     }
 
+    int64_t timeUs = -1;
+
+    for (size_t i = 0; i < ranges.size(); ++i) {
+        int64_t tmpUs = fetchTimestamp(ranges.itemAt(i));
+
+        if (i == 0) {
+            timeUs = tmpUs;
+        }
+    }
+
     sp<ABuffer> accessUnit = new ABuffer(auSize);
     size_t dstOffset = 0;
     for (size_t i = 0; i < frameOffsets.size(); ++i) {
+        size_t frameOffset = frameOffsets.itemAt(i);
+
         memcpy(accessUnit->data() + dstOffset,
-               mBuffer->data() + frameOffsets.itemAt(i),
+               mBuffer->data() + frameOffset,
                frameSizes.itemAt(i));
 
         dstOffset += frameSizes.itemAt(i);
@@ -187,61 +299,46 @@
             mBuffer->size() - offset);
     mBuffer->setRange(0, mBuffer->size() - offset);
 
-    CHECK_GT(mTimestamps.size(), 0u);
-    int64_t timeUs = *mTimestamps.begin();
-    mTimestamps.erase(mTimestamps.begin());
-
-    accessUnit->meta()->setInt64("time", timeUs);
+    if (timeUs >= 0) {
+        accessUnit->meta()->setInt64("timeUs", timeUs);
+    } else {
+        LOGW("no time for AAC access unit");
+    }
 
     return accessUnit;
 }
 
-// static
-sp<MetaData> ElementaryStreamQueue::MakeAACCodecSpecificData(
-        unsigned profile, unsigned sampling_freq_index,
-        unsigned channel_configuration) {
-    sp<MetaData> meta = new MetaData;
-    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) {
+    int64_t timeUs = -1;
+    bool first = true;
 
-    CHECK_LE(sampling_freq_index, 11u);
-    static const int32_t kSamplingFreq[] = {
-        96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
-        16000, 12000, 11025, 8000
-    };
-    meta->setInt32(kKeySampleRate, kSamplingFreq[sampling_freq_index]);
-    meta->setInt32(kKeyChannelCount, channel_configuration);
+    while (size > 0) {
+        CHECK(!mRangeInfos.empty());
 
-    static const uint8_t kStaticESDS[] = {
-        0x03, 22,
-        0x00, 0x00,     // ES_ID
-        0x00,           // streamDependenceFlag, URL_Flag, OCRstreamFlag
+        RangeInfo *info = &*mRangeInfos.begin();
 
-        0x04, 17,
-        0x40,                       // Audio ISO/IEC 14496-3
-        0x00, 0x00, 0x00, 0x00,
-        0x00, 0x00, 0x00, 0x00,
-        0x00, 0x00, 0x00, 0x00,
+        if (first) {
+            timeUs = info->mTimestampUs;
+            first = false;
+        }
 
-        0x05, 2,
-        // AudioSpecificInfo follows
+        if (info->mLength > size) {
+            info->mLength -= size;
 
-        // oooo offf fccc c000
-        // o - audioObjectType
-        // f - samplingFreqIndex
-        // c - channelConfig
-    };
-    sp<ABuffer> csd = new ABuffer(sizeof(kStaticESDS) + 2);
-    memcpy(csd->data(), kStaticESDS, sizeof(kStaticESDS));
+            if (first) {
+                info->mTimestampUs = -1;
+            }
 
-    csd->data()[sizeof(kStaticESDS)] =
-        ((profile + 1) << 3) | (sampling_freq_index >> 1);
+            size = 0;
+        } else {
+            size -= info->mLength;
 
-    csd->data()[sizeof(kStaticESDS) + 1] =
-        ((sampling_freq_index << 7) & 0x80) | (channel_configuration << 3);
+            mRangeInfos.erase(mRangeInfos.begin());
+            info = NULL;
+        }
+    }
 
-    meta->setData(kKeyESDS, 0, csd->data(), csd->size());
-
-    return meta;
+    return timeUs;
 }
 
 struct NALPosition {
@@ -333,11 +430,10 @@
 
             mBuffer->setRange(0, mBuffer->size() - nextScan);
 
-            CHECK_GT(mTimestamps.size(), 0u);
-            int64_t timeUs = *mTimestamps.begin();
-            mTimestamps.erase(mTimestamps.begin());
+            int64_t timeUs = fetchTimestamp(nextScan);
+            CHECK_GE(timeUs, 0ll);
 
-            accessUnit->meta()->setInt64("time", timeUs);
+            accessUnit->meta()->setInt64("timeUs", timeUs);
 
             if (mFormat == NULL) {
                 mFormat = MakeAVCCodecSpecificData(accessUnit);
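Note: IsSeeminglyValidADTSHeader() above lets the AAC path resynchronize on a plausible ADTS header instead of failing hard on malformed input. A standalone copy of that check plus a scan loop resembling the resync path; the test vector in main() is made up.

// Illustrative only: ADTS header plausibility check and resync scan.
#include <cstdint>
#include <cstdio>
#include <cstring>

static bool IsSeeminglyValidADTSHeader(const uint8_t *ptr, size_t size) {
    if (size < 3) {
        return false;           // not enough data to verify the header
    }
    if (ptr[0] != 0xff || (ptr[1] >> 4) != 0x0f) {
        return false;           // 12-bit syncword 0xfff missing
    }
    unsigned layer = (ptr[1] >> 1) & 3;
    if (layer != 0) {
        return false;           // layer must be '00' for ADTS
    }
    unsigned ID = (ptr[1] >> 3) & 1;
    unsigned profile_ObjectType = ptr[2] >> 6;
    if (ID == 1 && profile_ObjectType == 3) {
        return false;           // MPEG-2 profile 3 is reserved
    }
    return true;
}

int main() {
    // Garbage bytes followed by something that looks like an ADTS header.
    const uint8_t data[] = { 0x47, 0x00, 0xff, 0xf1, 0x50, 0x80, 0x00 };
    for (size_t i = 0; i < sizeof(data); ++i) {
        if (IsSeeminglyValidADTSHeader(&data[i], sizeof(data) - i)) {
            std::printf("possible ADTS syncword at offset %zu\n", i);
            break;
        }
    }
    return 0;
}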
diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/ESQueue.h
index 9eaf834..153cfe6 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.h
+++ b/media/libstagefright/mpeg2ts/ESQueue.h
@@ -19,6 +19,7 @@
 #define ES_QUEUE_H_
 
 #include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
 #include <utils/List.h>
 #include <utils/RefBase.h>
 
@@ -35,26 +36,31 @@
     ElementaryStreamQueue(Mode mode);
 
     status_t appendData(const void *data, size_t size, int64_t timeUs);
-    void clear();
+    void clear(bool clearFormat);
 
     sp<ABuffer> dequeueAccessUnit();
 
     sp<MetaData> getFormat();
 
 private:
+    struct RangeInfo {
+        int64_t mTimestampUs;
+        size_t mLength;
+    };
+
     Mode mMode;
 
     sp<ABuffer> mBuffer;
-    List<int64_t> mTimestamps;
+    List<RangeInfo> mRangeInfos;
 
     sp<MetaData> mFormat;
 
     sp<ABuffer> dequeueAccessUnitH264();
     sp<ABuffer> dequeueAccessUnitAAC();
 
-    static sp<MetaData> MakeAACCodecSpecificData(
-            unsigned profile, unsigned sampling_freq_index,
-            unsigned channel_configuration);
+    // consume a logical (compressed) access unit of size "size",
+    // returns its timestamp in us (or -1 if no time information).
+    int64_t fetchTimestamp(size_t size);
 
     DISALLOW_EVIL_CONSTRUCTORS(ElementaryStreamQueue);
 };
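Note: the RangeInfo list replaces the old one-timestamp-per-append list so that a single dequeued access unit can span several appended byte ranges. A standalone reduction of fetchTimestamp() over a std::list, returning the timestamp recorded for the first byte consumed; the free-function form is mine.

// Illustrative only: RangeInfo bookkeeping behind fetchTimestamp().
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <list>

struct RangeInfo {
    int64_t mTimestampUs;
    size_t mLength;
};

static int64_t fetchTimestamp(std::list<RangeInfo> &ranges, size_t size) {
    int64_t timeUs = -1;
    bool first = true;
    while (size > 0) {
        assert(!ranges.empty());
        RangeInfo &info = ranges.front();
        if (first) {
            timeUs = info.mTimestampUs;
            first = false;
        }
        if (info.mLength > size) {
            info.mLength -= size;
            if (first) {
                info.mTimestampUs = -1;  // kept to mirror the patch ('first' is already false here)
            }
            size = 0;
        } else {
            size -= info.mLength;
            ranges.pop_front();          // range fully consumed
        }
    }
    return timeUs;
}

int main() {
    std::list<RangeInfo> ranges = { {1000, 100}, {2000, 100} };
    assert(fetchTimestamp(ranges, 150) == 1000);  // spans both ranges
    assert(fetchTimestamp(ranges, 50) == 2000);   // remainder of 2nd range
    assert(ranges.empty());
    return 0;
}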
diff --git a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
index 3176810..dfec47f 100644
--- a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
+++ b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
@@ -19,7 +19,7 @@
 #include <utils/Log.h>
 
 #include "include/MPEG2TSExtractor.h"
-#include "include/LiveSource.h"
+#include "include/LiveSession.h"
 #include "include/NuCachedSource2.h"
 
 #include <media/stagefright/DataSource.h>
@@ -82,8 +82,8 @@
     sp<MetaData> meta = mImpl->getFormat();
 
     int64_t durationUs;
-    if (mExtractor->mLiveSource != NULL
-            && mExtractor->mLiveSource->getDuration(&durationUs)) {
+    if (mExtractor->mLiveSession != NULL
+            && mExtractor->mLiveSession->getDuration(&durationUs) == OK) {
         meta->setInt64(kKeyDuration, durationUs);
     }
 
@@ -202,20 +202,13 @@
     LOGI("haveAudio=%d, haveVideo=%d", haveAudio, haveVideo);
 }
 
-static bool isDiscontinuity(const uint8_t *data, ssize_t size) {
-    return size == 188 && data[0] == 0x00;
-}
-
 status_t MPEG2TSExtractor::feedMore() {
     Mutex::Autolock autoLock(mLock);
 
     uint8_t packet[kTSPacketSize];
     ssize_t n = mDataSource->readAt(mOffset, packet, kTSPacketSize);
 
-    if (isDiscontinuity(packet, n)) {
-        LOGI("XXX discontinuity detected");
-        mParser->signalDiscontinuity();
-    } else if (n < (ssize_t)kTSPacketSize) {
+    if (n < (ssize_t)kTSPacketSize) {
         return (n < 0) ? (status_t)n : ERROR_END_OF_STREAM;
     } else {
         mParser->feedTSPacket(packet, kTSPacketSize);
@@ -226,32 +219,20 @@
     return OK;
 }
 
-void MPEG2TSExtractor::setLiveSource(const sp<LiveSource> &liveSource) {
+void MPEG2TSExtractor::setLiveSession(const sp<LiveSession> &liveSession) {
     Mutex::Autolock autoLock(mLock);
 
-    mLiveSource = liveSource;
+    mLiveSession = liveSession;
 }
 
 void MPEG2TSExtractor::seekTo(int64_t seekTimeUs) {
     Mutex::Autolock autoLock(mLock);
 
-    if (mLiveSource == NULL) {
+    if (mLiveSession == NULL) {
         return;
     }
 
-    if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
-        static_cast<NuCachedSource2 *>(mDataSource.get())->suspend();
-    }
-
-    if (mLiveSource->seekTo(seekTimeUs)) {
-        mParser->signalDiscontinuity(true  /* isSeek */);
-        mOffset = 0;
-    }
-
-    if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
-        static_cast<NuCachedSource2 *>(mDataSource.get())
-            ->clearCacheAndResume();
-    }
+    mLiveSession->seekTo(seekTimeUs);
 }
 
 uint32_t MPEG2TSExtractor::flags() const {
@@ -259,7 +240,7 @@
 
     uint32_t flags = CAN_PAUSE;
 
-    if (mLiveSource != NULL && mLiveSource->isSeekable()) {
+    if (mLiveSession != NULL && mLiveSession->isSeekable()) {
         flags |= CAN_SEEK_FORWARD | CAN_SEEK_BACKWARD | CAN_SEEK;
     }
 
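Note: with the discontinuity-marker hack removed, feedMore() is now a plain fixed-size packet pump: read exactly 188 bytes, feed them to the parser, repeat until a short read. A standalone sketch of that loop against std::ifstream; feedTSPacket() is a stand-in for the ATSParser call and the file name is made up.

// Illustrative only: fixed-size TS packet pump, modeled on feedMore().
#include <cstdint>
#include <cstdio>
#include <fstream>

static const size_t kTSPacketSize = 188;

static void feedTSPacket(const uint8_t *packet, size_t size) {
    // A real implementation would hand the packet to ATSParser::feedTSPacket().
    (void)packet;
    (void)size;
}

int main() {
    std::ifstream in("stream.ts", std::ios::binary);  // hypothetical input
    uint8_t packet[kTSPacketSize];
    int64_t offset = 0;
    while (in.read(reinterpret_cast<char *>(packet), kTSPacketSize)) {
        // Transport streams are consumed in whole 188-byte packets; a short
        // read means end of stream (or an error), as in the patched feedMore().
        feedTSPacket(packet, kTSPacketSize);
        offset += kTSPacketSize;
    }
    std::printf("fed %lld bytes\n", static_cast<long long>(offset));
    return 0;
}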
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index ead1675..6e069c8 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -31,7 +31,6 @@
         libutils                \
         libui                   \
         libcutils               \
-        libstagefright_color_conversion
 
 ifneq ($(BUILD_WITHOUT_PV),true)
 LOCAL_SHARED_LIBRARIES += \
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 2ba63f7..4b1c3a7 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -24,14 +24,11 @@
 #include <sys/resource.h>
 
 #include "../include/OMX.h"
-#include "OMXRenderer.h"
 
 #include "../include/OMXNodeInstance.h"
-#include "../include/SoftwareRenderer.h"
 
 #include <binder/IMemory.h>
 #include <media/stagefright/MediaDebug.h>
-#include <media/stagefright/VideoRenderer.h>
 #include <utils/threads.h>
 
 #include "OMXMaster.h"
@@ -295,6 +292,21 @@
             index, params, size);
 }
 
+status_t OMX::enableGraphicBuffers(
+        node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+    return findInstance(node)->enableGraphicBuffers(port_index, enable);
+}
+
+status_t OMX::getGraphicBufferUsage(
+        node_id node, OMX_U32 port_index, OMX_U32* usage) {
+    return findInstance(node)->getGraphicBufferUsage(port_index, usage);
+}
+
+status_t OMX::storeMetaDataInBuffers(
+        node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+    return findInstance(node)->storeMetaDataInBuffers(port_index, enable);
+}
+
 status_t OMX::useBuffer(
         node_id node, OMX_U32 port_index, const sp<IMemory> &params,
         buffer_id *buffer) {
@@ -302,6 +314,13 @@
             port_index, params, buffer);
 }
 
+status_t OMX::useGraphicBuffer(
+        node_id node, OMX_U32 port_index,
+        const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+    return findInstance(node)->useGraphicBuffer(
+            port_index, graphicBuffer, buffer);
+}
+
 status_t OMX::allocateBuffer(
         node_id node, OMX_U32 port_index, size_t size,
         buffer_id *buffer, void **buffer_data) {
@@ -431,135 +450,4 @@
     mNodeIDToInstance.removeItem(node);
 }
 
-////////////////////////////////////////////////////////////////////////////////
-
-struct SharedVideoRenderer : public VideoRenderer {
-    SharedVideoRenderer(void *libHandle, VideoRenderer *obj)
-        : mLibHandle(libHandle),
-          mObj(obj) {
-    }
-
-    virtual ~SharedVideoRenderer() {
-        delete mObj;
-        mObj = NULL;
-
-        dlclose(mLibHandle);
-        mLibHandle = NULL;
-    }
-
-    virtual void render(
-            const void *data, size_t size, void *platformPrivate) {
-        return mObj->render(data, size, platformPrivate);
-    }
-
-private:
-    void *mLibHandle;
-    VideoRenderer *mObj;
-
-    SharedVideoRenderer(const SharedVideoRenderer &);
-    SharedVideoRenderer &operator=(const SharedVideoRenderer &);
-};
-
-sp<IOMXRenderer> OMX::createRenderer(
-        const sp<ISurface> &surface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t encodedWidth, size_t encodedHeight,
-        size_t displayWidth, size_t displayHeight,
-        int32_t rotationDegrees) {
-    Mutex::Autolock autoLock(mLock);
-
-    VideoRenderer *impl = NULL;
-
-    void *libHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
-
-    if (libHandle) {
-        typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
-                const sp<ISurface> &surface,
-                const char *componentName,
-                OMX_COLOR_FORMATTYPE colorFormat,
-                size_t displayWidth, size_t displayHeight,
-                size_t decodedWidth, size_t decodedHeight,
-                int32_t rotationDegrees);
-
-        typedef VideoRenderer *(*CreateRendererFunc)(
-                const sp<ISurface> &surface,
-                const char *componentName,
-                OMX_COLOR_FORMATTYPE colorFormat,
-                size_t displayWidth, size_t displayHeight,
-                size_t decodedWidth, size_t decodedHeight);
-
-        CreateRendererWithRotationFunc funcWithRotation =
-            (CreateRendererWithRotationFunc)dlsym(
-                    libHandle,
-                    "_Z26createRendererWithRotationRKN7android2spINS_8"
-                    "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");
-
-        if (funcWithRotation) {
-            impl = (*funcWithRotation)(
-                    surface, componentName, colorFormat,
-                    displayWidth, displayHeight, encodedWidth, encodedHeight,
-                    rotationDegrees);
-        } else {
-            CreateRendererFunc func =
-                (CreateRendererFunc)dlsym(
-                        libHandle,
-                        "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
-                        "OMX_COLOR_FORMATTYPEjjjj");
-
-            if (func) {
-                impl = (*func)(surface, componentName, colorFormat,
-                        displayWidth, displayHeight, encodedWidth, encodedHeight);
-            }
-        }
-
-        if (impl) {
-            impl = new SharedVideoRenderer(libHandle, impl);
-            libHandle = NULL;
-        }
-
-        if (libHandle) {
-            dlclose(libHandle);
-            libHandle = NULL;
-        }
-    }
-
-    if (!impl) {
-        LOGW("Using software renderer.");
-        impl = new SoftwareRenderer(
-                colorFormat,
-                surface,
-                displayWidth, displayHeight,
-                encodedWidth, encodedHeight);
-
-        if (((SoftwareRenderer *)impl)->initCheck() != OK) {
-            delete impl;
-            impl = NULL;
-
-            return NULL;
-        }
-    }
-
-    return new OMXRenderer(impl);
-}
-
-OMXRenderer::OMXRenderer(VideoRenderer *impl)
-    : mImpl(impl) {
-}
-
-OMXRenderer::~OMXRenderer() {
-    delete mImpl;
-    mImpl = NULL;
-}
-
-void OMXRenderer::render(IOMX::buffer_id buffer) {
-    OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
-
-    mImpl->render(
-            header->pBuffer + header->nOffset,
-            header->nFilledLen,
-            header->pPlatformPrivate);
-}
-
 }  // namespace android
-
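Note: the createRenderer() path deleted above located a vendor renderer factory at runtime via dlopen()/dlsym() and fell back to a software renderer when the lookup failed. A generic sketch of that lookup-with-fallback pattern; the library and symbol names here are made up, not the mangled names from the removed code.

// Illustrative only: dlopen()/dlsym() factory lookup with fallback.
// Build with -ldl on Linux.
#include <dlfcn.h>
#include <cstdio>

typedef void *(*CreateFunc)(int width, int height);

int main() {
    void *libHandle = dlopen("libvendor_renderer.so", RTLD_NOW);  // hypothetical
    if (libHandle == nullptr) {
        std::printf("falling back to a software path: %s\n", dlerror());
        return 0;
    }

    // Look the factory up by name; a null result simply means this
    // vendor library does not provide it.
    CreateFunc create =
        reinterpret_cast<CreateFunc>(dlsym(libHandle, "createRenderer"));
    void *impl = create ? create(1280, 720) : nullptr;

    if (impl == nullptr) {
        dlclose(libHandle);   // nothing usable in the library
        return 0;
    }

    // ... use impl; the owner must eventually destroy impl and then
    // dlclose(libHandle), which is what SharedVideoRenderer wrapped up.
    return 0;
}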
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5db516e..6cbd599 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-//#define LOG_NDEBUG 0
+#define LOG_NDEBUG 0
 #define LOG_TAG "OMXNodeInstance"
 #include <utils/Log.h>
 
@@ -24,6 +24,7 @@
 #include <OMX_Component.h>
 
 #include <binder/IMemory.h>
+#include <media/stagefright/HardwareAPI.h>
 #include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaErrors.h>
 
@@ -40,6 +41,11 @@
           mIsBackup(false) {
     }
 
+    BufferMeta(const sp<GraphicBuffer> &graphicBuffer)
+        : mGraphicBuffer(graphicBuffer),
+          mIsBackup(false) {
+    }
+
     void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) {
         if (!mIsBackup) {
             return;
@@ -61,6 +67,7 @@
     }
 
 private:
+    sp<GraphicBuffer> mGraphicBuffer;
     sp<IMemory> mMem;
     size_t mSize;
     bool mIsBackup;
@@ -117,6 +124,8 @@
 }
 
 status_t OMXNodeInstance::freeNode(OMXMaster *master) {
+    static int32_t kMaxNumIterations = 10;
+
     // Transition the node from its current state all the way down
     // to "Loaded".
     // This ensures that all active buffers are properly freed even
@@ -136,9 +145,16 @@
             LOGV("forcing Executing->Idle");
             sendCommand(OMX_CommandStateSet, OMX_StateIdle);
             OMX_ERRORTYPE err;
+            int32_t iteration = 0;
             while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
                    && state != OMX_StateIdle
                    && state != OMX_StateInvalid) {
+                if (++iteration > kMaxNumIterations) {
+                    LOGE("component failed to enter Idle state, aborting.");
+                    state = OMX_StateInvalid;
+                    break;
+                }
+
                 usleep(100000);
             }
             CHECK_EQ(err, OMX_ErrorNone);
@@ -158,9 +174,16 @@
             freeActiveBuffers();
 
             OMX_ERRORTYPE err;
+            int32_t iteration = 0;
             while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
                    && state != OMX_StateLoaded
                    && state != OMX_StateInvalid) {
+                if (++iteration > kMaxNumIterations) {
+                    LOGE("component failed to enter Loaded state, aborting.");
+                    state = OMX_StateInvalid;
+                    break;
+                }
+
                 LOGV("waiting for Loaded state...");
                 usleep(100000);
             }
@@ -178,8 +201,10 @@
             break;
     }
 
+    LOGV("calling destroyComponentInstance");
     OMX_ERRORTYPE err = master->destroyComponentInstance(
             static_cast<OMX_COMPONENTTYPE *>(mHandle));
+    LOGV("destroyComponentInstance returned err %d", err);
 
     mHandle = NULL;
 
@@ -240,6 +265,113 @@
     return StatusFromOMXError(err);
 }
 
+status_t OMXNodeInstance::enableGraphicBuffers(
+        OMX_U32 portIndex, OMX_BOOL enable) {
+    Mutex::Autolock autoLock(mLock);
+
+    OMX_INDEXTYPE index;
+    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+            mHandle,
+            const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"),
+            &index);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_GetExtensionIndex failed");
+
+        return StatusFromOMXError(err);
+    }
+
+    OMX_VERSIONTYPE ver;
+    ver.s.nVersionMajor = 1;
+    ver.s.nVersionMinor = 0;
+    ver.s.nRevision = 0;
+    ver.s.nStep = 0;
+    EnableAndroidNativeBuffersParams params = {
+        sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable,
+    };
+
+    err = OMX_SetParameter(mHandle, index, &params);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)",
+                err, err);
+
+        return UNKNOWN_ERROR;
+    }
+
+    return OK;
+}
+
+status_t OMXNodeInstance::getGraphicBufferUsage(
+        OMX_U32 portIndex, OMX_U32* usage) {
+    Mutex::Autolock autoLock(mLock);
+
+    OMX_INDEXTYPE index;
+    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+            mHandle,
+            const_cast<OMX_STRING>(
+                    "OMX.google.android.index.getAndroidNativeBufferUsage"),
+            &index);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_GetExtensionIndex failed");
+
+        return StatusFromOMXError(err);
+    }
+
+    OMX_VERSIONTYPE ver;
+    ver.s.nVersionMajor = 1;
+    ver.s.nVersionMinor = 0;
+    ver.s.nRevision = 0;
+    ver.s.nStep = 0;
+    GetAndroidNativeBufferUsageParams params = {
+        sizeof(GetAndroidNativeBufferUsageParams), ver, portIndex, 0,
+    };
+
+    err = OMX_GetParameter(mHandle, index, &params);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_GetAndroidNativeBufferUsage failed with error %d (0x%08x)",
+                err, err);
+        return UNKNOWN_ERROR;
+    }
+
+    *usage = params.nUsage;
+
+    return OK;
+}
+
+status_t OMXNodeInstance::storeMetaDataInBuffers(
+        OMX_U32 portIndex,
+        OMX_BOOL enable) {
+    Mutex::Autolock autolock(mLock);
+
+    OMX_INDEXTYPE index;
+    OMX_STRING name = const_cast<OMX_STRING>(
+            "OMX.google.android.index.storeMetaDataInBuffers");
+
+    OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_GetExtensionIndex %s failed", name);
+        return StatusFromOMXError(err);
+    }
+
+    StoreMetaDataInBuffersParams params;
+    memset(&params, 0, sizeof(params));
+    params.nSize = sizeof(params);
+
+    // Version: 1.0.0.0
+    params.nVersion.s.nVersionMajor = 1;
+
+    params.nPortIndex = portIndex;
+    params.bStoreMetaData = enable;
+    if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
+        LOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
+        return UNKNOWN_ERROR;
+    }
+    return err;
+}
+
 status_t OMXNodeInstance::useBuffer(
         OMX_U32 portIndex, const sp<IMemory> &params,
         OMX::buffer_id *buffer) {
@@ -273,6 +405,60 @@
     return OK;
 }
 
+status_t OMXNodeInstance::useGraphicBuffer(
+        OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
+        OMX::buffer_id *buffer) {
+    Mutex::Autolock autoLock(mLock);
+
+    OMX_INDEXTYPE index;
+    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+            mHandle,
+            const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"),
+            &index);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_GetExtensionIndex failed");
+
+        return StatusFromOMXError(err);
+    }
+
+    BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);
+
+    OMX_BUFFERHEADERTYPE *header;
+
+    OMX_VERSIONTYPE ver;
+    ver.s.nVersionMajor = 1;
+    ver.s.nVersionMinor = 0;
+    ver.s.nRevision = 0;
+    ver.s.nStep = 0;
+    UseAndroidNativeBufferParams params = {
+        sizeof(UseAndroidNativeBufferParams), ver, portIndex, bufferMeta,
+        &header, graphicBuffer,
+    };
+
+    err = OMX_SetParameter(mHandle, index, &params);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err,
+                err);
+
+        delete bufferMeta;
+        bufferMeta = NULL;
+
+        *buffer = 0;
+
+        return UNKNOWN_ERROR;
+    }
+
+    CHECK_EQ(header->pAppPrivate, bufferMeta);
+
+    *buffer = header;
+
+    addActiveBuffer(portIndex, *buffer);
+
+    return OK;
+}
+
 status_t OMXNodeInstance::allocateBuffer(
         OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
         void **buffer_data) {
@@ -498,4 +684,3 @@
 }
 
 }  // namespace android
-
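Note: the freeNode() change above caps the Idle/Loaded state-polling loops at kMaxNumIterations so a misbehaving component can no longer hang teardown. A standalone sketch of that bounded-polling idiom; the fake getState() stands in for OMX_GetState().

// Illustrative only: bounded state polling with a retry cap.
#include <cstdint>
#include <cstdio>
#include <unistd.h>

enum State { STATE_EXECUTING, STATE_IDLE, STATE_INVALID };

static State getState() {
    static int calls = 0;
    return (++calls < 3) ? STATE_EXECUTING : STATE_IDLE;  // pretend hardware
}

int main() {
    static const int32_t kMaxNumIterations = 10;

    State state;
    int32_t iteration = 0;
    while ((state = getState()) != STATE_IDLE && state != STATE_INVALID) {
        if (++iteration > kMaxNumIterations) {
            std::printf("component failed to enter Idle state, aborting.\n");
            state = STATE_INVALID;
            break;
        }
        usleep(100000);  // 100 ms between polls, as in the patch
    }
    std::printf("final state = %d\n", state);
    return 0;
}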
diff --git a/media/libstagefright/omx/OMXRenderer.h b/media/libstagefright/omx/OMXRenderer.h
deleted file mode 100644
index 4d194ce..0000000
--- a/media/libstagefright/omx/OMXRenderer.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OMX_RENDERER_H_
-
-#define OMX_RENDERER_H_
-
-#include <media/IOMX.h>
-
-namespace android {
-
-class VideoRenderer;
-
-class OMXRenderer : public BnOMXRenderer {
-public:
-    // Assumes ownership of "impl".
-    OMXRenderer(VideoRenderer *impl);
-    virtual ~OMXRenderer();
-
-    virtual void render(IOMX::buffer_id buffer);
-
-private:
-    VideoRenderer *mImpl;
-
-    OMXRenderer(const OMXRenderer &);
-    OMXRenderer &operator=(const OMXRenderer &);
-};
-
-}  // namespace android
-
-#endif  // OMX_RENDERER_H_
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
index 8bfe285..11d9c22 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
@@ -14,6 +14,9 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AMPEG4AudioAssembler"
+
 #include "AMPEG4AudioAssembler.h"
 
 #include "ARTPSource.h"
@@ -139,7 +142,10 @@
     return OK;
 }
 
-static status_t parseAudioSpecificConfig(ABitReader *bits) {
+static status_t parseAudioSpecificConfig(ABitReader *bits, sp<ABuffer> *asc) {
+    const uint8_t *dataStart = bits->data();
+    size_t totalNumBits = bits->numBitsLeft();
+
     unsigned audioObjectType;
     CHECK_EQ(parseAudioObjectType(bits, &audioObjectType), (status_t)OK);
 
@@ -185,13 +191,13 @@
         }
     }
 
-#if 0
-    // This is not supported here as the upper layers did not explicitly
-    // signal the length of AudioSpecificConfig.
-
     if (extensionAudioObjectType != 5 && bits->numBitsLeft() >= 16) {
+        size_t numBitsLeftAtStart = bits->numBitsLeft();
+
         unsigned syncExtensionType = bits->getBits(11);
         if (syncExtensionType == 0x2b7) {
+            LOGI("found syncExtension");
+
             CHECK_EQ(parseAudioObjectType(bits, &extensionAudioObjectType),
                      (status_t)OK);
 
@@ -203,9 +209,45 @@
                     /* unsigned extensionSamplingFrequency = */bits->getBits(24);
                 }
             }
+
+            size_t numBitsInExtension =
+                numBitsLeftAtStart - bits->numBitsLeft();
+
+            if (numBitsInExtension & 7) {
+                // Apparently an extension is always considered an even
+                // multiple of 8 bits long.
+
+                LOGI("Skipping %d bits after sync extension",
+                     8 - (numBitsInExtension & 7));
+
+                bits->skipBits(8 - (numBitsInExtension & 7));
+            }
+        } else {
+            bits->putBits(syncExtensionType, 11);
         }
     }
-#endif
+
+    if (asc != NULL) {
+        size_t bitpos = totalNumBits & 7;
+
+        ABitReader bs(dataStart, (totalNumBits + 7) / 8);
+
+        totalNumBits -= bits->numBitsLeft();
+
+        size_t numBytes = (totalNumBits + 7) / 8;
+
+        *asc = new ABuffer(numBytes);
+
+        if (bitpos & 7) {
+            bs.skipBits(8 - (bitpos & 7));
+        }
+
+        uint8_t *dstPtr = (*asc)->data();
+        while (numBytes > 0) {
+            *dstPtr++ = bs.getBits(8);
+            --numBytes;
+        }
+    }
 
     return OK;
 }
@@ -214,6 +256,7 @@
         ABitReader *bits,
         unsigned *numSubFrames,
         unsigned *frameLengthType,
+        ssize_t *fixedFrameLength,
         bool *otherDataPresent,
         unsigned *otherDataLenBits) {
     unsigned audioMuxVersion = bits->getBits(1);
@@ -242,12 +285,14 @@
 
     if (audioMuxVersion == 0) {
         // AudioSpecificConfig
-        CHECK_EQ(parseAudioSpecificConfig(bits), (status_t)OK);
+        CHECK_EQ(parseAudioSpecificConfig(bits, NULL /* asc */), (status_t)OK);
     } else {
         TRESPASS();  // XXX to be implemented
     }
 
     *frameLengthType = bits->getBits(3);
+    *fixedFrameLength = -1;
+
     switch (*frameLengthType) {
         case 0:
         {
@@ -260,7 +305,14 @@
 
         case 1:
         {
-            /* unsigned frameLength = */bits->getBits(9);
+            *fixedFrameLength = bits->getBits(9);
+            break;
+        }
+
+        case 2:
+        {
+            // reserved
+            TRESPASS();
             break;
         }
 
@@ -338,9 +390,21 @@
                 break;
             }
 
-            default:
-                TRESPASS();  // XXX to be implemented
+            case 2:
+            {
+                // reserved
+
+                TRESPASS();
                 break;
+            }
+
+            default:
+            {
+                CHECK_GE(mFixedFrameLength, 0);
+
+                payloadLength = mFixedFrameLength;
+                break;
+            }
         }
 
         CHECK_LE(offset + payloadLength, buffer->size());
@@ -393,6 +457,7 @@
     ABitReader bits(config->data(), config->size());
     status_t err = parseStreamMuxConfig(
             &bits, &mNumSubFrames, &mFrameLengthType,
+            &mFixedFrameLength,
             &mOtherDataPresent, &mOtherDataLenBits);
 
     CHECK_EQ(err, (status_t)NO_ERROR);
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
index 9cef94c..1361cd2 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
@@ -46,6 +46,7 @@
     bool mMuxConfigPresent;
     unsigned mNumSubFrames;
     unsigned mFrameLengthType;
+    ssize_t mFixedFrameLength;
     bool mOtherDataPresent;
     unsigned mOtherDataLenBits;
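
The skipBits(8 - (numBitsInExtension & 7)) call in the assembler hunk above pads the parsed sync extension out to a whole number of bytes. As a standalone illustration of that rounding (a sketch, not part of the patch):

    #include <stddef.h>

    // Number of padding bits needed to reach the next byte boundary after
    // consuming numBitsConsumed bits; 0 when the count is already aligned,
    // in which case the code above simply skips nothing.
    static size_t bitsToByteAlign(size_t numBitsConsumed) {
        size_t rem = numBitsConsumed & 7;
        return rem ? 8 - rem : 0;
    }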
 
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 7f09248..6819fef 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -20,6 +20,7 @@
 
 #include "APacketSource.h"
 
+#include "ARawAudioAssembler.h"
 #include "ASessionDescription.h"
 
 #include "avc_utils.h"
@@ -373,7 +374,17 @@
         br.skipBits(2);  // chroma_format
         br.skipBits(1);  // low_delay
         if (br.getBits(1)) {  // vbv_parameters
-            TRESPASS();
+            br.skipBits(15);  // first_half_bit_rate
+            CHECK(br.getBits(1));  // marker_bit
+            br.skipBits(15);  // latter_half_bit_rate
+            CHECK(br.getBits(1));  // marker_bit
+            br.skipBits(15);  // first_half_vbv_buffer_size
+            CHECK(br.getBits(1));  // marker_bit
+            br.skipBits(3);  // latter_half_vbv_buffer_size
+            br.skipBits(11);  // first_half_vbv_occupancy
+            CHECK(br.getBits(1));  // marker_bit
+            br.skipBits(15);  // latter_half_vbv_occupancy
+            CHECK(br.getBits(1));  // marker_bit
         }
     }
     unsigned video_object_layer_shape = br.getBits(2);
@@ -651,6 +662,8 @@
         mFormat->setData(
                 kKeyESDS, 0,
                 codecSpecificData->data(), codecSpecificData->size());
+    } else if (ARawAudioAssembler::Supports(desc.c_str())) {
+        ARawAudioAssembler::MakeFormat(desc.c_str(), mFormat);
     } else {
         mInitCheck = ERROR_UNSUPPORTED;
     }
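
The vbv_parameters block parsed above is a run of fixed-width fields punctuated by marker bits that must read as 1 in a conforming stream. The repeated skip-then-check pattern could be factored as below; a sketch only, using the same ABitReader calls the hunk already uses (the include paths are assumed, they are not shown in this diff):

    #include <media/stagefright/foundation/ABitReader.h>  // assumed path
    #include <media/stagefright/foundation/ADebug.h>      // for CHECK()

    // Skip a fixed-width field whose value is not needed, then verify the
    // trailing marker_bit (always 1 in a valid MPEG-4 VOL header).
    static void skipFieldAndMarker(android::ABitReader *br, size_t numBits) {
        br->skipBits(numBits);
        CHECK(br->getBits(1));
    }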
diff --git a/media/libstagefright/rtsp/ARTPAssembler.cpp b/media/libstagefright/rtsp/ARTPAssembler.cpp
index 9ba2b37..a897c10 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.cpp
+++ b/media/libstagefright/rtsp/ARTPAssembler.cpp
@@ -65,13 +65,9 @@
 
 // static
 void ARTPAssembler::CopyTimes(const sp<ABuffer> &to, const sp<ABuffer> &from) {
-    uint64_t ntpTime;
-    CHECK(from->meta()->findInt64("ntp-time", (int64_t *)&ntpTime));
-
     uint32_t rtpTime;
     CHECK(from->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
 
-    to->meta()->setInt64("ntp-time", ntpTime);
     to->meta()->setInt32("rtp-time", rtpTime);
 
     // Copy the seq number.
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 72943ff..47de4e0 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -169,12 +169,6 @@
             break;
         }
 
-        case kWhatFakeTimestamps:
-        {
-            onFakeTimestamps();
-            break;
-        }
-
         default:
         {
             TRESPASS();
@@ -463,12 +457,6 @@
     buffer->setInt32Data(u16at(&data[2]));
     buffer->setRange(payloadOffset, size - payloadOffset);
 
-    if ((mFlags & kFakeTimestamps) && !source->timeEstablished()) {
-        source->timeUpdate(rtpTime, 0);
-        source->timeUpdate(rtpTime + 90000, 0x100000000ll);
-        CHECK(source->timeEstablished());
-    }
-
     source->processRTPPacket(buffer);
 
     return OK;
@@ -594,9 +582,7 @@
 
     sp<ARTPSource> source = findSource(s, id);
 
-    if ((mFlags & kFakeTimestamps) == 0) {
-        source->timeUpdate(rtpTime, ntpTime);
-    }
+    source->timeUpdate(rtpTime, ntpTime);
 
     return 0;
 }
@@ -654,27 +640,5 @@
     }
 }
 
-void ARTPConnection::fakeTimestamps() {
-    (new AMessage(kWhatFakeTimestamps, id()))->post();
-}
-
-void ARTPConnection::onFakeTimestamps() {
-    List<StreamInfo>::iterator it = mStreams.begin();
-    while (it != mStreams.end()) {
-        StreamInfo &info = *it++;
-
-        for (size_t j = 0; j < info.mSources.size(); ++j) {
-            sp<ARTPSource> source = info.mSources.valueAt(j);
-
-            if (!source->timeEstablished()) {
-                source->timeUpdate(0, 0);
-                source->timeUpdate(0 + 90000, 0x100000000ll);
-
-                mFlags |= kFakeTimestamps;
-            }
-        }
-    }
-}
-
 }  // namespace android
 
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index a17b382..edbcc35 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -29,7 +29,6 @@
 
 struct ARTPConnection : public AHandler {
     enum Flags {
-        kFakeTimestamps      = 1,
         kRegularlyRequestFIR = 2,
     };
 
@@ -51,8 +50,6 @@
     static void MakePortPair(
             int *rtpSocket, int *rtcpSocket, unsigned *rtpPort);
 
-    void fakeTimestamps();
-
 protected:
     virtual ~ARTPConnection();
     virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -63,7 +60,6 @@
         kWhatRemoveStream,
         kWhatPollStreams,
         kWhatInjectPacket,
-        kWhatFakeTimestamps,
     };
 
     static const int64_t kSelectTimeoutUs;
@@ -81,7 +77,6 @@
     void onPollStreams();
     void onInjectPacket(const sp<AMessage> &msg);
     void onSendReceiverReports();
-    void onFakeTimestamps();
 
     status_t receive(StreamInfo *info, bool receiveRTP);
 
diff --git a/media/libstagefright/rtsp/ARTPSession.cpp b/media/libstagefright/rtsp/ARTPSession.cpp
index 39c6619..c6bcb12 100644
--- a/media/libstagefright/rtsp/ARTPSession.cpp
+++ b/media/libstagefright/rtsp/ARTPSession.cpp
@@ -44,9 +44,7 @@
 
     mDesc = desc;
 
-    mRTPConn = new ARTPConnection(
-            ARTPConnection::kFakeTimestamps
-                | ARTPConnection::kRegularlyRequestFIR);
+    mRTPConn = new ARTPConnection(ARTPConnection::kRegularlyRequestFIR);
 
     looper()->registerHandler(mRTPConn);
 
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 87b5a7e..3aa07ce 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -25,6 +25,7 @@
 #include "AH263Assembler.h"
 #include "AMPEG4AudioAssembler.h"
 #include "AMPEG4ElementaryAssembler.h"
+#include "ARawAudioAssembler.h"
 #include "ASessionDescription.h"
 
 #include <media/stagefright/foundation/ABuffer.h>
@@ -42,12 +43,12 @@
     : mID(id),
       mHighestSeqNumber(0),
       mNumBuffersReceived(0),
-      mNumTimes(0),
       mLastNTPTime(0),
       mLastNTPTimeUpdateUs(0),
       mIssueFIRRequests(false),
       mLastFIRRequestUs(-1),
-      mNextFIRSeqNo((rand() * 256.0) / RAND_MAX) {
+      mNextFIRSeqNo((rand() * 256.0) / RAND_MAX),
+      mNotify(notify) {
     unsigned long PT;
     AString desc;
     AString params;
@@ -70,6 +71,8 @@
             || !strncasecmp(desc.c_str(), "mpeg4-generic/", 14)) {
         mAssembler = new AMPEG4ElementaryAssembler(notify, desc, params);
         mIssueFIRRequests = true;
+    } else if (ARawAudioAssembler::Supports(desc.c_str())) {
+        mAssembler = new ARawAudioAssembler(notify, desc.c_str(), params);
     } else {
         TRESPASS();
     }
@@ -80,52 +83,25 @@
 }
 
 void ARTPSource::processRTPPacket(const sp<ABuffer> &buffer) {
-    if (queuePacket(buffer)
-            && mNumTimes == 2
-            && mAssembler != NULL) {
+    if (queuePacket(buffer) && mAssembler != NULL) {
         mAssembler->onPacketReceived(this);
     }
 }
 
 void ARTPSource::timeUpdate(uint32_t rtpTime, uint64_t ntpTime) {
-    LOGV("timeUpdate");
-
     mLastNTPTime = ntpTime;
     mLastNTPTimeUpdateUs = ALooper::GetNowUs();
 
-    if (mNumTimes == 2) {
-        mNTPTime[0] = mNTPTime[1];
-        mRTPTime[0] = mRTPTime[1];
-        mNumTimes = 1;
-    }
-    mNTPTime[mNumTimes] = ntpTime;
-    mRTPTime[mNumTimes++] = rtpTime;
-
-    if (timeEstablished()) {
-        for (List<sp<ABuffer> >::iterator it = mQueue.begin();
-             it != mQueue.end(); ++it) {
-            sp<AMessage> meta = (*it)->meta();
-
-            uint32_t rtpTime;
-            CHECK(meta->findInt32("rtp-time", (int32_t *)&rtpTime));
-
-            meta->setInt64("ntp-time", RTP2NTP(rtpTime));
-        }
-    }
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("time-update", true);
+    notify->setInt32("rtp-time", rtpTime);
+    notify->setInt64("ntp-time", ntpTime);
+    notify->post();
 }
 
 bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
     uint32_t seqNum = (uint32_t)buffer->int32Data();
 
-    if (mNumTimes == 2) {
-        sp<AMessage> meta = buffer->meta();
-
-        uint32_t rtpTime;
-        CHECK(meta->findInt32("rtp-time", (int32_t *)&rtpTime));
-
-        meta->setInt64("ntp-time", RTP2NTP(rtpTime));
-    }
-
     if (mNumBuffersReceived++ == 0) {
         mHighestSeqNumber = seqNum;
         mQueue.push_back(buffer);
@@ -180,14 +156,6 @@
     return true;
 }
 
-uint64_t ARTPSource::RTP2NTP(uint32_t rtpTime) const {
-    CHECK_EQ(mNumTimes, 2u);
-
-    return mNTPTime[0] + (double)(mNTPTime[1] - mNTPTime[0])
-            * ((double)rtpTime - (double)mRTPTime[0])
-            / (double)(mRTPTime[1] - mRTPTime[0]);
-}
-
 void ARTPSource::byeReceived() {
     mAssembler->onByeReceived();
 }
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index e62c3f1..b70f94e 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -46,10 +46,6 @@
     void addReceiverReport(const sp<ABuffer> &buffer);
     void addFIR(const sp<ABuffer> &buffer);
 
-    bool timeEstablished() const {
-        return mNumTimes == 2;
-    }
-
 private:
     uint32_t mID;
     uint32_t mHighestSeqNumber;
@@ -58,10 +54,6 @@
     List<sp<ABuffer> > mQueue;
     sp<ARTPAssembler> mAssembler;
 
-    size_t mNumTimes;
-    uint64_t mNTPTime[2];
-    uint32_t mRTPTime[2];
-
     uint64_t mLastNTPTime;
     int64_t mLastNTPTimeUpdateUs;
 
@@ -69,7 +61,7 @@
     int64_t mLastFIRRequestUs;
     uint8_t mNextFIRSeqNo;
 
-    uint64_t RTP2NTP(uint32_t rtpTime) const;
+    sp<AMessage> mNotify;
 
     bool queuePacket(const sp<ABuffer> &buffer);
 
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 155fd96..5a033e1 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -46,7 +46,7 @@
 
 ARTPWriter::ARTPWriter(int fd)
     : mFlags(0),
-      mFd(fd),
+      mFd(dup(fd)),
       mLooper(new ALooper),
       mReflector(new AHandlerReflector<ARTPWriter>(this)) {
     CHECK_GE(fd, 0);
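
ARTPWriter now stores a dup() of the descriptor it is handed, so it presumably owns a private copy that stays valid even if the caller closes the original. A minimal sketch of that idiom (the matching close() would live in the destructor, which is not part of this hunk):

    #include <unistd.h>

    struct FdOwner {
        explicit FdOwner(int fd) : mFd(dup(fd)) {}    // keep a private duplicate
        ~FdOwner() { if (mFd >= 0) close(mFd); }      // release only our copy
        int mFd;
    };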
diff --git a/media/libstagefright/rtsp/ARTSPController.cpp b/media/libstagefright/rtsp/ARTSPController.cpp
index a7563ff..1328d2e 100644
--- a/media/libstagefright/rtsp/ARTSPController.cpp
+++ b/media/libstagefright/rtsp/ARTSPController.cpp
@@ -69,7 +69,14 @@
 void ARTSPController::disconnect() {
     Mutex::Autolock autoLock(mLock);
 
-    if (mState != CONNECTED) {
+    if (mState == CONNECTING) {
+        mState = DISCONNECTED;
+        mConnectionResult = ERROR_IO;
+        mCondition.broadcast();
+
+        mHandler.clear();
+        return;
+    } else if (mState != CONNECTED) {
         return;
     }
 
diff --git a/media/libstagefright/rtsp/ARawAudioAssembler.cpp b/media/libstagefright/rtsp/ARawAudioAssembler.cpp
new file mode 100644
index 0000000..dd47ea3
--- /dev/null
+++ b/media/libstagefright/rtsp/ARawAudioAssembler.cpp
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ARawAudioAssembler"
+#include <utils/Log.h>
+
+#include "ARawAudioAssembler.h"
+
+#include "ARTPSource.h"
+#include "ASessionDescription.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+ARawAudioAssembler::ARawAudioAssembler(
+        const sp<AMessage> &notify, const char *desc, const AString &params)
+    : mNotifyMsg(notify),
+      mNextExpectedSeqNoValid(false),
+      mNextExpectedSeqNo(0) {
+}
+
+ARawAudioAssembler::~ARawAudioAssembler() {
+}
+
+ARTPAssembler::AssemblyStatus ARawAudioAssembler::assembleMore(
+        const sp<ARTPSource> &source) {
+    return addPacket(source);
+}
+
+ARTPAssembler::AssemblyStatus ARawAudioAssembler::addPacket(
+        const sp<ARTPSource> &source) {
+    List<sp<ABuffer> > *queue = source->queue();
+
+    if (queue->empty()) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (mNextExpectedSeqNoValid) {
+        List<sp<ABuffer> >::iterator it = queue->begin();
+        while (it != queue->end()) {
+            if ((uint32_t)(*it)->int32Data() >= mNextExpectedSeqNo) {
+                break;
+            }
+
+            it = queue->erase(it);
+        }
+
+        if (queue->empty()) {
+            return NOT_ENOUGH_DATA;
+        }
+    }
+
+    sp<ABuffer> buffer = *queue->begin();
+
+    if (!mNextExpectedSeqNoValid) {
+        mNextExpectedSeqNoValid = true;
+        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
+    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
+        LOGV("Not the sequence number I expected");
+
+        return WRONG_SEQUENCE_NUMBER;
+    }
+
+    // hexdump(buffer->data(), buffer->size());
+
+    if (buffer->size() < 1) {
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        LOGV("raw audio packet too short.");
+
+        return MALFORMED_PACKET;
+    }
+
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setObject("access-unit", buffer);
+    msg->post();
+
+    queue->erase(queue->begin());
+    ++mNextExpectedSeqNo;
+
+    return OK;
+}
+
+void ARawAudioAssembler::packetLost() {
+    CHECK(mNextExpectedSeqNoValid);
+    ++mNextExpectedSeqNo;
+}
+
+void ARawAudioAssembler::onByeReceived() {
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setInt32("eos", true);
+    msg->post();
+}
+
+// static
+bool ARawAudioAssembler::Supports(const char *desc) {
+    return !strncmp(desc, "PCMU/", 5)
+        || !strncmp(desc, "PCMA/", 5);
+}
+
+// static
+void ARawAudioAssembler::MakeFormat(
+        const char *desc, const sp<MetaData> &format) {
+    if (!strncmp(desc, "PCMU/", 5)) {
+        format->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_G711_MLAW);
+    } else if (!strncmp(desc, "PCMA/", 5)) {
+        format->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_G711_ALAW);
+    } else {
+        TRESPASS();
+    }
+
+    int32_t sampleRate, numChannels;
+    ASessionDescription::ParseFormatDesc(
+            desc, &sampleRate, &numChannels);
+
+    format->setInt32(kKeySampleRate, sampleRate);
+    format->setInt32(kKeyChannelCount, numChannels);
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/rtsp/ARawAudioAssembler.h b/media/libstagefright/rtsp/ARawAudioAssembler.h
new file mode 100644
index 0000000..ed7af08
--- /dev/null
+++ b/media/libstagefright/rtsp/ARawAudioAssembler.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_RAW_AUDIO_ASSEMBLER_H_
+
+#define A_RAW_AUDIO_ASSEMBLER_H_
+
+#include "ARTPAssembler.h"
+
+namespace android {
+
+struct AMessage;
+struct AString;
+struct MetaData;
+
+struct ARawAudioAssembler : public ARTPAssembler {
+    ARawAudioAssembler(
+            const sp<AMessage> &notify,
+            const char *desc, const AString &params);
+
+    static bool Supports(const char *desc);
+
+    static void MakeFormat(
+            const char *desc, const sp<MetaData> &format);
+
+protected:
+    virtual ~ARawAudioAssembler();
+
+    virtual AssemblyStatus assembleMore(const sp<ARTPSource> &source);
+    virtual void onByeReceived();
+    virtual void packetLost();
+
+private:
+    bool mIsWide;
+
+    sp<AMessage> mNotifyMsg;
+    bool mNextExpectedSeqNoValid;
+    uint32_t mNextExpectedSeqNo;
+
+    AssemblyStatus addPacket(const sp<ARTPSource> &source);
+
+    DISALLOW_EVIL_CONSTRUCTORS(ARawAudioAssembler);
+};
+
+}  // namespace android
+
+#endif  // A_RAW_AUDIO_ASSEMBLER_H_
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 0bbadc1..8530ff3 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -9,15 +9,14 @@
         AMPEG4AudioAssembler.cpp    \
         AMPEG4ElementaryAssembler.cpp \
         APacketSource.cpp           \
+        ARawAudioAssembler.cpp      \
         ARTPAssembler.cpp           \
         ARTPConnection.cpp          \
-        ARTPSession.cpp             \
         ARTPSource.cpp              \
         ARTPWriter.cpp              \
         ARTSPConnection.cpp         \
         ARTSPController.cpp         \
         ASessionDescription.cpp     \
-        UDPPusher.cpp               \
 
 LOCAL_C_INCLUDES:= \
 	$(JNI_H_INCLUDE) \
@@ -57,4 +56,4 @@
 
 LOCAL_MODULE:= rtp_test
 
-include $(BUILD_EXECUTABLE)
+# include $(BUILD_EXECUTABLE)
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 72a2fdb..d15d9c5 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -40,9 +40,9 @@
 #include <sys/socket.h>
 #include <netdb.h>
 
-// If no access units are received within 3 secs, assume that the rtp
+// If no access units are received within 5 secs, assume that the rtp
 // stream has ended and signal end of stream.
-static int64_t kAccessUnitTimeoutUs = 3000000ll;
+static int64_t kAccessUnitTimeoutUs = 5000000ll;
 
 // If no access units arrive for the first 10 secs after starting the
 // stream, assume none ever will and signal EOS or switch transports.
@@ -102,7 +102,9 @@
           mSetupTracksSuccessful(false),
           mSeekPending(false),
           mFirstAccessUnit(true),
-          mFirstAccessUnitNTP(0),
+          mNTPAnchorUs(-1),
+          mMediaAnchorUs(-1),
+          mLastMediaTimeUs(0),
           mNumAccessUnitsReceived(0),
           mCheckPending(false),
           mCheckGeneration(0),
@@ -618,7 +620,8 @@
                 mSetupTracksSuccessful = false;
                 mSeekPending = false;
                 mFirstAccessUnit = true;
-                mFirstAccessUnitNTP = 0;
+                mNTPAnchorUs = -1;
+                mMediaAnchorUs = -1;
                 mNumAccessUnitsReceived = 0;
                 mReceivedFirstRTCPPacket = false;
                 mReceivedFirstRTPPacket = false;
@@ -699,6 +702,20 @@
 
             case 'accu':
             {
+                int32_t timeUpdate;
+                if (msg->findInt32("time-update", &timeUpdate) && timeUpdate) {
+                    size_t trackIndex;
+                    CHECK(msg->findSize("track-index", &trackIndex));
+
+                    uint32_t rtpTime;
+                    uint64_t ntpTime;
+                    CHECK(msg->findInt32("rtp-time", (int32_t *)&rtpTime));
+                    CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));
+
+                    onTimeUpdate(trackIndex, rtpTime, ntpTime);
+                    break;
+                }
+
                 int32_t first;
                 if (msg->findInt32("first-rtcp", &first)) {
                     mReceivedFirstRTCPPacket = true;
@@ -750,51 +767,11 @@
                     break;
                 }
 
-                uint64_t ntpTime;
-                CHECK(accessUnit->meta()->findInt64(
-                            "ntp-time", (int64_t *)&ntpTime));
-
-                uint32_t rtpTime;
-                CHECK(accessUnit->meta()->findInt32(
-                            "rtp-time", (int32_t *)&rtpTime));
-
                 if (track->mNewSegment) {
                     track->mNewSegment = false;
-
-                    LOGV("first segment unit ntpTime=0x%016llx rtpTime=%u seq=%d",
-                         ntpTime, rtpTime, seqNum);
                 }
 
-                if (mFirstAccessUnit) {
-                    mDoneMsg->setInt32("result", OK);
-                    mDoneMsg->post();
-                    mDoneMsg = NULL;
-
-                    mFirstAccessUnit = false;
-                    mFirstAccessUnitNTP = ntpTime;
-                }
-
-                if (ntpTime >= mFirstAccessUnitNTP) {
-                    ntpTime -= mFirstAccessUnitNTP;
-                } else {
-                    ntpTime = 0;
-                }
-
-                int64_t timeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));
-
-                accessUnit->meta()->setInt64("timeUs", timeUs);
-
-#if 0
-                int32_t damaged;
-                if (accessUnit->meta()->findInt32("damaged", &damaged)
-                        && damaged != 0) {
-                    LOGI("ignoring damaged AU");
-                } else
-#endif
-                {
-                    TrackInfo *track = &mTracks.editItemAt(trackIndex);
-                    track->mPacketSource->queueAccessUnit(accessUnit);
-                }
+                onAccessUnitComplete(trackIndex, accessUnit);
                 break;
             }
 
@@ -845,9 +822,15 @@
             {
                 // Session is paused now.
                 for (size_t i = 0; i < mTracks.size(); ++i) {
-                    mTracks.editItemAt(i).mPacketSource->flushQueue();
+                    TrackInfo *info = &mTracks.editItemAt(i);
+
+                    info->mPacketSource->flushQueue();
+                    info->mRTPAnchor = 0;
+                    info->mNTPAnchorUs = -1;
                 }
 
+                mNTPAnchorUs = -1;
+
                 int64_t timeUs;
                 CHECK(msg->findInt64("time", &timeUs));
 
@@ -898,6 +881,11 @@
                     } else {
                         parsePlayResponse(response);
 
+                        ssize_t i = response->mHeaders.indexOfKey("rtp-info");
+                        CHECK_GE(i, 0);
+
+                        LOGV("rtp-info: %s", response->mHeaders.valueAt(i).c_str());
+
                         LOGI("seek completed.");
                     }
                 }
@@ -939,7 +927,8 @@
                         mTryFakeRTCP = true;
 
                         mReceivedFirstRTCPPacket = true;
-                        mRTPConn->fakeTimestamps();
+
+                        fakeTimestamps();
                     } else if (!mReceivedFirstRTPPacket && !mTryTCPInterleaving) {
                         LOGW("Never received any data, switching transports.");
 
@@ -1047,7 +1036,7 @@
 
             uint32_t rtpTime = strtoul(val.c_str(), &end, 10);
 
-            LOGV("track #%d: rtpTime=%u <=> ntp=%.2f", n, rtpTime, npt1);
+            LOGV("track #%d: rtpTime=%u <=> npt=%.2f", n, rtpTime, npt1);
 
             info->mPacketSource->setNormalPlayTimeMapping(
                     rtpTime, (int64_t)(npt1 * 1E6));
@@ -1070,6 +1059,25 @@
     }
 
 private:
+    struct TrackInfo {
+        AString mURL;
+        int mRTPSocket;
+        int mRTCPSocket;
+        bool mUsingInterleavedTCP;
+        uint32_t mFirstSeqNumInSegment;
+        bool mNewSegment;
+
+        uint32_t mRTPAnchor;
+        int64_t mNTPAnchorUs;
+        int32_t mTimeScale;
+
+        sp<APacketSource> mPacketSource;
+
+        // Stores packets temporarily while no notion of time
+        // has been established yet.
+        List<sp<ABuffer> > mPackets;
+    };
+
     sp<ALooper> mLooper;
     sp<ALooper> mNetLooper;
     sp<ARTSPConnection> mConn;
@@ -1083,7 +1091,11 @@
     bool mSetupTracksSuccessful;
     bool mSeekPending;
     bool mFirstAccessUnit;
-    uint64_t mFirstAccessUnitNTP;
+
+    int64_t mNTPAnchorUs;
+    int64_t mMediaAnchorUs;
+    int64_t mLastMediaTimeUs;
+
     int64_t mNumAccessUnitsReceived;
     bool mCheckPending;
     int32_t mCheckGeneration;
@@ -1093,16 +1105,6 @@
     bool mReceivedFirstRTPPacket;
     bool mSeekable;
 
-    struct TrackInfo {
-        AString mURL;
-        int mRTPSocket;
-        int mRTCPSocket;
-        bool mUsingInterleavedTCP;
-        uint32_t mFirstSeqNumInSegment;
-        bool mNewSegment;
-
-        sp<APacketSource> mPacketSource;
-    };
     Vector<TrackInfo> mTracks;
 
     sp<AMessage> mDoneMsg;
@@ -1134,6 +1136,20 @@
         info->mUsingInterleavedTCP = false;
         info->mFirstSeqNumInSegment = 0;
         info->mNewSegment = true;
+        info->mRTPAnchor = 0;
+        info->mNTPAnchorUs = -1;
+
+        unsigned long PT;
+        AString formatDesc;
+        AString formatParams;
+        mSessionDesc->getFormatType(index, &PT, &formatDesc, &formatParams);
+
+        int32_t timescale;
+        int32_t numChannels;
+        ASessionDescription::ParseFormatDesc(
+                formatDesc.c_str(), &timescale, &numChannels);
+
+        info->mTimeScale = timescale;
 
         LOGV("track #%d URL=%s", mTracks.size(), trackURL.c_str());
 
@@ -1212,6 +1228,96 @@
         return true;
     }
 
+    void fakeTimestamps() {
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            onTimeUpdate(i, 0, 0ll);
+        }
+    }
+
+    void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
+        LOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = 0x%016llx",
+             trackIndex, rtpTime, ntpTime);
+
+        int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));
+
+        TrackInfo *track = &mTracks.editItemAt(trackIndex);
+
+        track->mRTPAnchor = rtpTime;
+        track->mNTPAnchorUs = ntpTimeUs;
+
+        if (mNTPAnchorUs < 0) {
+            mNTPAnchorUs = ntpTimeUs;
+            mMediaAnchorUs = mLastMediaTimeUs;
+        }
+    }
+
+    void onAccessUnitComplete(
+            int32_t trackIndex, const sp<ABuffer> &accessUnit) {
+        LOGV("onAccessUnitComplete track %d", trackIndex);
+
+        if (mFirstAccessUnit) {
+            mDoneMsg->setInt32("result", OK);
+            mDoneMsg->post();
+            mDoneMsg = NULL;
+
+            mFirstAccessUnit = false;
+        }
+
+        TrackInfo *track = &mTracks.editItemAt(trackIndex);
+
+        if (mNTPAnchorUs < 0 || mMediaAnchorUs < 0 || track->mNTPAnchorUs < 0) {
+            LOGV("storing accessUnit, no time established yet");
+            track->mPackets.push_back(accessUnit);
+            return;
+        }
+
+        while (!track->mPackets.empty()) {
+            sp<ABuffer> accessUnit = *track->mPackets.begin();
+            track->mPackets.erase(track->mPackets.begin());
+
+            if (addMediaTimestamp(trackIndex, track, accessUnit)) {
+                track->mPacketSource->queueAccessUnit(accessUnit);
+            }
+        }
+
+        if (addMediaTimestamp(trackIndex, track, accessUnit)) {
+            track->mPacketSource->queueAccessUnit(accessUnit);
+        }
+    }
+
+    bool addMediaTimestamp(
+            int32_t trackIndex, const TrackInfo *track,
+            const sp<ABuffer> &accessUnit) {
+        uint32_t rtpTime;
+        CHECK(accessUnit->meta()->findInt32(
+                    "rtp-time", (int32_t *)&rtpTime));
+
+        int64_t relRtpTimeUs =
+            (((int64_t)rtpTime - (int64_t)track->mRTPAnchor) * 1000000ll)
+                / track->mTimeScale;
+
+        int64_t ntpTimeUs = track->mNTPAnchorUs + relRtpTimeUs;
+
+        int64_t mediaTimeUs = mMediaAnchorUs + ntpTimeUs - mNTPAnchorUs;
+
+        if (mediaTimeUs > mLastMediaTimeUs) {
+            mLastMediaTimeUs = mediaTimeUs;
+        }
+
+        if (mediaTimeUs < 0) {
+            LOGV("dropping early accessUnit.");
+            return false;
+        }
+
+        LOGV("track %d rtpTime=%d mediaTimeUs = %lld us (%.2f secs)",
+             trackIndex, rtpTime, mediaTimeUs, mediaTimeUs / 1E6);
+
+        accessUnit->meta()->setInt64("timeUs", mediaTimeUs);
+
+        return true;
+    }
+
+
     DISALLOW_EVIL_CONSTRUCTORS(MyHandler);
 };
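
addMediaTimestamp() above anchors each track's RTP clock to an NTP wall-clock time (recorded by onTimeUpdate()) and then shifts that onto the shared media timeline via the session-wide anchors. The same arithmetic, reduced to one standalone function for illustration:

    #include <stdint.h>

    // rtpTime/rtpAnchor are in RTP ticks, timeScale in ticks per second;
    // the anchor values are the ones onTimeUpdate() records above.
    static int64_t rtpToMediaTimeUs(
            uint32_t rtpTime, uint32_t rtpAnchor, int32_t timeScale,
            int64_t trackNtpAnchorUs, int64_t sessionNtpAnchorUs,
            int64_t mediaAnchorUs) {
        int64_t relRtpTimeUs =
            (((int64_t)rtpTime - (int64_t)rtpAnchor) * 1000000ll) / timeScale;
        int64_t ntpTimeUs = trackNtpAnchorUs + relRtpTimeUs;
        return mediaAnchorUs + ntpTimeUs - sessionNtpAnchorUs;
    }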
 
diff --git a/media/libstagefright/string.cpp b/media/libstagefright/string.cpp
deleted file mode 100644
index 8b2c36c..0000000
--- a/media/libstagefright/string.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "include/stagefright_string.h"
-
-#include <media/stagefright/MediaDebug.h>
-
-namespace android {
-
-// static
-string::size_type string::npos = (string::size_type)-1;
-
-string::string() {
-}
-
-string::string(const char *s, size_t length)
-    : mString(s, length) {
-}
-
-string::string(const string &from, size_type start, size_type length) {
-    CHECK(start <= from.size());
-    if (length == npos) {
-        length = from.size() - start;
-    } else {
-        CHECK(start + length <= from.size());
-    }
-
-    mString.setTo(from.c_str() + start, length);
-}
-
-string::string(const char *s)
-    : mString(s) {
-}
-
-const char *string::c_str() const {
-    return mString.string();
-}
-
-string::size_type string::size() const {
-    return mString.length();
-}
-
-void string::clear() {
-    mString = String8();
-}
-
-string::size_type string::find(char c) const {
-    char s[2];
-    s[0] = c;
-    s[1] = '\0';
-
-    ssize_t index = mString.find(s);
-
-    return index < 0 ? npos : (size_type)index;
-}
-
-bool string::operator<(const string &other) const {
-    return mString < other.mString;
-}
-
-bool string::operator==(const string &other) const {
-    return mString == other.mString;
-}
-
-string &string::operator+=(char c) {
-    mString.append(&c, 1);
-
-    return *this;
-}
-
-void string::erase(size_t from, size_t length) {
-    String8 s(mString.string(), from);
-    s.append(mString.string() + from + length);
-    
-    mString = s;
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk
new file mode 100644
index 0000000..7697e3c
--- /dev/null
+++ b/media/libstagefright/yuv/Android.mk
@@ -0,0 +1,15 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=               \
+        YUVImage.cpp            \
+        YUVCanvas.cpp
+
+LOCAL_SHARED_LIBRARIES :=       \
+        libcutils
+
+LOCAL_MODULE:= libstagefright_yuv
+
+LOCAL_PRELINK_MODULE := false
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/yuv/YUVCanvas.cpp b/media/libstagefright/yuv/YUVCanvas.cpp
new file mode 100644
index 0000000..38aa779
--- /dev/null
+++ b/media/libstagefright/yuv/YUVCanvas.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVCanvas"
+
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+
+namespace android {
+
+YUVCanvas::YUVCanvas(YUVImage &yuvImage)
+    : mYUVImage(yuvImage) {
+}
+
+YUVCanvas::~YUVCanvas() {
+}
+
+void YUVCanvas::FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+    for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+        for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+            mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+        }
+    }
+}
+
+void YUVCanvas::FillYUVRectangle(const Rect& rect,
+        uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+    for (int32_t y = rect.top; y < rect.bottom; ++y) {
+        for (int32_t x = rect.left; x < rect.right; ++x) {
+            mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+        }
+    }
+}
+
+void YUVCanvas::CopyImageRect(
+        const Rect& srcRect,
+        int32_t destStartX, int32_t destStartY,
+        const YUVImage &srcImage) {
+
+    // Try fast copy first
+    if (YUVImage::fastCopyRectangle(
+                srcRect,
+                destStartX, destStartY,
+                srcImage, mYUVImage)) {
+        return;
+    }
+
+    int32_t srcStartX = srcRect.left;
+    int32_t srcStartY = srcRect.top;
+    for (int32_t offsetY = 0; offsetY < srcRect.height(); ++offsetY) {
+        for (int32_t offsetX = 0; offsetX < srcRect.width(); ++offsetX) {
+            int32_t srcX = srcStartX + offsetX;
+            int32_t srcY = srcStartY + offsetY;
+
+            int32_t destX = destStartX + offsetX;
+            int32_t destY = destStartY + offsetY;
+
+            uint8_t yValue;
+            uint8_t uValue;
+            uint8_t vValue;
+
+            srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+            mYUVImage.setPixelValue(destX, destY, yValue, uValue, vValue);
+        }
+    }
+}
+
+void YUVCanvas::downsample(
+        int32_t srcOffsetX, int32_t srcOffsetY,
+        int32_t skipX, int32_t skipY,
+        const YUVImage &srcImage) {
+    // TODO: Add a low pass filter for downsampling.
+
+    // Check that srcImage is big enough to fill mYUVImage.
+    CHECK((srcOffsetX + (mYUVImage.width() - 1) * skipX) < srcImage.width());
+    CHECK((srcOffsetY + (mYUVImage.height() - 1) * skipY) < srcImage.height());
+
+    uint8_t yValue;
+    uint8_t uValue;
+    uint8_t vValue;
+
+    int32_t srcY = srcOffsetY;
+    for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+        int32_t srcX = srcOffsetX;
+        for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+            srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+            mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+
+            srcX += skipX;
+        }
+        srcY += skipY;
+    }
+}
+
+}  // namespace android
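
For orientation, a minimal way the two new YUV classes might be exercised together, using only the constructors and methods visible in this patch (the YUVImage::YUV420Planar scoping of the format enum is assumed from the header, which is not shown here):

    #include <media/stagefright/YUVCanvas.h>
    #include <media/stagefright/YUVImage.h>
    #include <ui/Rect.h>

    using namespace android;

    static void yuvExample() {
        YUVImage src(YUVImage::YUV420Planar, 320, 240);
        YUVImage dst(YUVImage::YUV420Planar, 320, 240);

        YUVCanvas srcCanvas(src);
        srcCanvas.FillYUV(16, 128, 128);       // video-range black

        YUVCanvas dstCanvas(dst);
        dstCanvas.CopyImageRect(
                Rect(0, 0, 160, 120),          // top-left quadrant of src
                0, 0, src);                    // pasted at dst origin
    }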
diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp
new file mode 100644
index 0000000..b712062
--- /dev/null
+++ b/media/libstagefright/yuv/YUVImage.cpp
@@ -0,0 +1,413 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVImage"
+
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+#include <media/stagefright/MediaDebug.h>
+
+namespace android {
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height) {
+    mYUVFormat = yuvFormat;
+    mWidth = width;
+    mHeight = height;
+
+    size_t numberOfBytes = bufferSize(yuvFormat, width, height);
+    uint8_t *buffer = new uint8_t[numberOfBytes];
+    mBuffer = buffer;
+    mOwnBuffer = true;
+
+    initializeYUVPointers();
+}
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer) {
+    mYUVFormat = yuvFormat;
+    mWidth = width;
+    mHeight = height;
+    mBuffer = buffer;
+    mOwnBuffer = false;
+
+    initializeYUVPointers();
+}
+
+//static
+size_t YUVImage::bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height) {
+    int32_t numberOfPixels = width*height;
+    size_t numberOfBytes = 0;
+    if (yuvFormat == YUV420Planar || yuvFormat == YUV420SemiPlanar) {
+        // Y takes numberOfPixels bytes and U/V take numberOfPixels/4 bytes each.
+        numberOfBytes = (size_t)(numberOfPixels + (numberOfPixels >> 1));
+    } else {
+        LOGE("Format not supported");
+    }
+    return numberOfBytes;
+}
+
+bool YUVImage::initializeYUVPointers() {
+    int32_t numberOfPixels = mWidth * mHeight;
+
+    if (mYUVFormat == YUV420Planar) {
+        mYdata = (uint8_t *)mBuffer;
+        mUdata = mYdata + numberOfPixels;
+        mVdata = mUdata + (numberOfPixels >> 2);
+    } else if (mYUVFormat == YUV420SemiPlanar) {
+        // U and V channels are interleaved as VUVUVU.
+        // So V data starts at the end of Y channel and
+        // U data starts right after V's start.
+        mYdata = (uint8_t *)mBuffer;
+        mVdata = mYdata + numberOfPixels;
+        mUdata = mVdata + 1;
+    } else {
+        LOGE("Format not supported");
+        return false;
+    }
+    return true;
+}
+
+YUVImage::~YUVImage() {
+    if (mOwnBuffer) delete[] mBuffer;
+}
+
+bool YUVImage::getOffsets(int32_t x, int32_t y,
+        int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const {
+    *yOffset = y*mWidth + x;
+
+    int32_t uvOffset = (y >> 1) * (mWidth >> 1) + (x >> 1);
+    if (mYUVFormat == YUV420Planar) {
+        *uOffset = uvOffset;
+        *vOffset = uvOffset;
+    } else if (mYUVFormat == YUV420SemiPlanar) {
+        // Since U and V channels are interleaved, offsets need
+        // to be doubled.
+        *uOffset = 2*uvOffset;
+        *vOffset = 2*uvOffset;
+    } else {
+        LOGE("Format not supported");
+        return false;
+    }
+
+    return true;
+}
+
+bool YUVImage::getOffsetIncrementsPerDataRow(
+        int32_t *yDataOffsetIncrement,
+        int32_t *uDataOffsetIncrement,
+        int32_t *vDataOffsetIncrement) const {
+    *yDataOffsetIncrement = mWidth;
+
+    int32_t uvDataOffsetIncrement = mWidth >> 1;
+
+    if (mYUVFormat == YUV420Planar) {
+        *uDataOffsetIncrement = uvDataOffsetIncrement;
+        *vDataOffsetIncrement = uvDataOffsetIncrement;
+    } else if (mYUVFormat == YUV420SemiPlanar) {
+        // Since U and V channels are interleaved, offsets need
+        // to be doubled.
+        *uDataOffsetIncrement = 2*uvDataOffsetIncrement;
+        *vDataOffsetIncrement = 2*uvDataOffsetIncrement;
+    } else {
+        LOGE("Format not supported");
+        return false;
+    }
+
+    return true;
+}
+
+uint8_t* YUVImage::getYAddress(int32_t offset) const {
+    return mYdata + offset;
+}
+
+uint8_t* YUVImage::getUAddress(int32_t offset) const {
+    return mUdata + offset;
+}
+
+uint8_t* YUVImage::getVAddress(int32_t offset) const {
+    return mVdata + offset;
+}
+
+bool YUVImage::getYUVAddresses(int32_t x, int32_t y,
+        uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const {
+    int32_t yOffset;
+    int32_t uOffset;
+    int32_t vOffset;
+    if (!getOffsets(x, y, &yOffset, &uOffset, &vOffset)) return false;
+
+    *yAddr = getYAddress(yOffset);
+    *uAddr = getUAddress(uOffset);
+    *vAddr = getVAddress(vOffset);
+
+    return true;
+}
+
+bool YUVImage::validPixel(int32_t x, int32_t y) const {
+    return (x >= 0 && x < mWidth &&
+            y >= 0 && y < mHeight);
+}
+
+bool YUVImage::getPixelValue(int32_t x, int32_t y,
+        uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const {
+    CHECK(validPixel(x, y));
+
+    uint8_t *yAddr;
+    uint8_t *uAddr;
+    uint8_t *vAddr;
+    if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+    *yPtr = *yAddr;
+    *uPtr = *uAddr;
+    *vPtr = *vAddr;
+
+    return true;
+}
+
+bool YUVImage::setPixelValue(int32_t x, int32_t y,
+        uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+    CHECK(validPixel(x, y));
+
+    uint8_t *yAddr;
+    uint8_t *uAddr;
+    uint8_t *vAddr;
+    if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+    *yAddr = yValue;
+    *uAddr = uValue;
+    *vAddr = vValue;
+
+    return true;
+}
+
+void YUVImage::fastCopyRectangle420Planar(
+        const Rect& srcRect,
+        int32_t destStartX, int32_t destStartY,
+        const YUVImage &srcImage, YUVImage &destImage) {
+    CHECK(srcImage.mYUVFormat == YUV420Planar);
+    CHECK(destImage.mYUVFormat == YUV420Planar);
+
+    int32_t srcStartX = srcRect.left;
+    int32_t srcStartY = srcRect.top;
+    int32_t width = srcRect.width();
+    int32_t height = srcRect.height();
+
+    // Get source and destination start addresses
+    uint8_t *ySrcAddrBase;
+    uint8_t *uSrcAddrBase;
+    uint8_t *vSrcAddrBase;
+    srcImage.getYUVAddresses(srcStartX, srcStartY,
+            &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+    uint8_t *yDestAddrBase;
+    uint8_t *uDestAddrBase;
+    uint8_t *vDestAddrBase;
+    destImage.getYUVAddresses(destStartX, destStartY,
+            &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+    // Get source and destination offset increments incurred in going
+    // from one data row to next.
+    int32_t ySrcOffsetIncrement;
+    int32_t uSrcOffsetIncrement;
+    int32_t vSrcOffsetIncrement;
+    srcImage.getOffsetIncrementsPerDataRow(
+            &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+    int32_t yDestOffsetIncrement;
+    int32_t uDestOffsetIncrement;
+    int32_t vDestOffsetIncrement;
+    destImage.getOffsetIncrementsPerDataRow(
+            &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+    // Copy Y
+    {
+        size_t numberOfYBytesPerRow = (size_t) width;
+        uint8_t *ySrcAddr = ySrcAddrBase;
+        uint8_t *yDestAddr = yDestAddrBase;
+        for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+            memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+            ySrcAddr += ySrcOffsetIncrement;
+            yDestAddr += yDestOffsetIncrement;
+        }
+    }
+
+    // Copy U
+    {
+        size_t numberOfUBytesPerRow = (size_t) (width >> 1);
+        uint8_t *uSrcAddr = uSrcAddrBase;
+        uint8_t *uDestAddr = uDestAddrBase;
+        // Every other row has an entry for U/V channel values. Hence only
+        // go half the height.
+        for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+            memcpy(uDestAddr, uSrcAddr, numberOfUBytesPerRow);
+
+            uSrcAddr += uSrcOffsetIncrement;
+            uDestAddr += uDestOffsetIncrement;
+        }
+    }
+
+    // Copy V
+    {
+        size_t numberOfVBytesPerRow = (size_t) (width >> 1);
+        uint8_t *vSrcAddr = vSrcAddrBase;
+        uint8_t *vDestAddr = vDestAddrBase;
+        // Every other pixel row has a U/V data row. Hence only go half the height.
+        for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+            memcpy(vDestAddr, vSrcAddr, numberOfVBytesPerRow);
+
+            vSrcAddr += vSrcOffsetIncrement;
+            vDestAddr += vDestOffsetIncrement;
+        }
+    }
+}
+
+void YUVImage::fastCopyRectangle420SemiPlanar(
+        const Rect& srcRect,
+        int32_t destStartX, int32_t destStartY,
+        const YUVImage &srcImage, YUVImage &destImage) {
+    CHECK(srcImage.mYUVFormat == YUV420SemiPlanar);
+    CHECK(destImage.mYUVFormat == YUV420SemiPlanar);
+
+    int32_t srcStartX = srcRect.left;
+    int32_t srcStartY = srcRect.top;
+    int32_t width = srcRect.width();
+    int32_t height = srcRect.height();
+
+    // Get source and destination start addresses
+    uint8_t *ySrcAddrBase;
+    uint8_t *uSrcAddrBase;
+    uint8_t *vSrcAddrBase;
+    srcImage.getYUVAddresses(srcStartX, srcStartY,
+            &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+    uint8_t *yDestAddrBase;
+    uint8_t *uDestAddrBase;
+    uint8_t *vDestAddrBase;
+    destImage.getYUVAddresses(destStartX, destStartY,
+            &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+    // Get source and destination offset increments incurred in going
+    // from one data row to next.
+    int32_t ySrcOffsetIncrement;
+    int32_t uSrcOffsetIncrement;
+    int32_t vSrcOffsetIncrement;
+    srcImage.getOffsetIncrementsPerDataRow(
+            &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+    int32_t yDestOffsetIncrement;
+    int32_t uDestOffsetIncrement;
+    int32_t vDestOffsetIncrement;
+    destImage.getOffsetIncrementsPerDataRow(
+            &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+    // Copy Y
+    {
+        size_t numberOfYBytesPerRow = (size_t) width;
+        uint8_t *ySrcAddr = ySrcAddrBase;
+        uint8_t *yDestAddr = yDestAddrBase;
+        for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+            memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+            ySrcAddr = ySrcAddr + ySrcOffsetIncrement;
+            yDestAddr = yDestAddr + yDestOffsetIncrement;
+        }
+    }
+
+    // Copy UV
+    {
+        // UV are interleaved. So number of UV bytes per row is 2*(width/2).
+        size_t numberOfUVBytesPerRow = (size_t) width;
+        uint8_t *vSrcAddr = vSrcAddrBase;
+        uint8_t *vDestAddr = vDestAddrBase;
+        // Every other pixel row has a U/V data row. Hence only go half the height.
+        for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+            memcpy(vDestAddr, vSrcAddr, numberOfUVBytesPerRow);
+
+            vSrcAddr += vSrcOffsetIncrement;
+            vDestAddr += vDestOffsetIncrement;
+        }
+    }
+}
+
+// static
+bool YUVImage::fastCopyRectangle(
+        const Rect& srcRect,
+        int32_t destStartX, int32_t destStartY,
+        const YUVImage &srcImage, YUVImage &destImage) {
+    if (srcImage.mYUVFormat == destImage.mYUVFormat) {
+        if (srcImage.mYUVFormat == YUV420Planar) {
+            fastCopyRectangle420Planar(
+                    srcRect,
+                    destStartX, destStartY,
+                    srcImage, destImage);
+        } else if (srcImage.mYUVFormat == YUV420SemiPlanar) {
+            fastCopyRectangle420SemiPlanar(
+                    srcRect,
+                    destStartX, destStartY,
+                    srcImage, destImage);
+        }
+        return true;
+    }
+    return false;
+}
+
+uint8_t clamp(uint8_t v, uint8_t minValue, uint8_t maxValue) {
+    CHECK(maxValue >= minValue);
+
+    if (v < minValue) return minValue;
+    else if (v > maxValue) return maxValue;
+    else return v;
+}
+
+void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+        uint8_t *r, uint8_t *g, uint8_t *b) const {
+    *r = yValue + (1.370705 * (vValue-128));
+    *g = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128));
+    *b = yValue + (1.732446 * (uValue-128));
+
+    *r = clamp(*r, 0, 255);
+    *g = clamp(*g, 0, 255);
+    *b = clamp(*b, 0, 255);
+}
+
+bool YUVImage::writeToPPM(const char *filename) const {
+    FILE *fp = fopen(filename, "w");
+    if (fp == NULL) {
+        return false;
+    }
+    fprintf(fp, "P3\n");
+    fprintf(fp, "%d %d\n", mWidth, mHeight);
+    fprintf(fp, "255\n");
+    for (int32_t y = 0; y < mHeight; ++y) {
+        for (int32_t x = 0; x < mWidth; ++x) {
+            uint8_t yValue;
+            uint8_t uValue;
+            uint8_t vValue;
+            getPixelValue(x, y, &yValue, &uValue, & vValue);
+
+            uint8_t rValue;
+            uint8_t gValue;
+            uint8_t bValue;
+            yuv2rgb(yValue, uValue, vValue, &rValue, &gValue, &bValue);
+
+            fprintf(fp, "%d %d %d\n", (int32_t)rValue, (int32_t)gValue, (int32_t)bValue);
+        }
+    }
+    fclose(fp);
+    return true;
+}
+
+}  // namespace android
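
Note on yuv2rgb() above: each converted component is narrowed to uint8_t as it is assigned through *r/*g/*b, so an out-of-range result has already been lost by the time clamp() sees it. A variant that clamps in a wider type first, for illustration only (same coefficients, not what the patch does):

    #include <stdint.h>

    static uint8_t clampToByte(int v) {
        return v < 0 ? 0 : (v > 255 ? 255 : (uint8_t)v);
    }

    static void yuv2rgbClamped(uint8_t y, uint8_t u, uint8_t v,
                               uint8_t *r, uint8_t *g, uint8_t *b) {
        *r = clampToByte((int)(y + 1.370705 * (v - 128)));
        *g = clampToByte((int)(y - 0.698001 * (v - 128) - 0.337633 * (u - 128)));
        *b = clampToByte((int)(y + 1.732446 * (u - 128)));
    }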
diff --git a/media/mtp/Android.mk b/media/mtp/Android.mk
new file mode 100644
index 0000000..c25285e
--- /dev/null
+++ b/media/mtp/Android.mk
@@ -0,0 +1,76 @@
+#
+# Copyright (C) 2010 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
+ifneq ($(TARGET_SIMULATOR),true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=                                       \
+                  MtpDataPacket.cpp                     \
+                  MtpDebug.cpp                          \
+                  MtpDevice.cpp                         \
+                  MtpEventPacket.cpp                    \
+                  MtpDeviceInfo.cpp                     \
+                  MtpObjectInfo.cpp                     \
+                  MtpPacket.cpp                         \
+                  MtpProperty.cpp                       \
+                  MtpRequestPacket.cpp                  \
+                  MtpResponsePacket.cpp                 \
+                  MtpServer.cpp                         \
+                  MtpStorageInfo.cpp                    \
+                  MtpStringBuffer.cpp                   \
+                  MtpStorage.cpp                        \
+                  MtpUtils.cpp                          \
+
+LOCAL_MODULE:= libmtp
+
+LOCAL_CFLAGS := -DMTP_DEVICE -DMTP_HOST
+
+LOCAL_SHARED_LIBRARIES := libutils libcutils libusbhost libbinder
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
+
+ifeq ($(HOST_OS),linux)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=                                       \
+                  MtpDataPacket.cpp                     \
+                  MtpDebug.cpp                          \
+                  MtpDevice.cpp                         \
+                  MtpEventPacket.cpp                    \
+                  MtpDeviceInfo.cpp                     \
+                  MtpObjectInfo.cpp                     \
+                  MtpPacket.cpp                         \
+                  MtpProperty.cpp                       \
+                  MtpRequestPacket.cpp                  \
+                  MtpResponsePacket.cpp                 \
+                  MtpStorageInfo.cpp                    \
+                  MtpStringBuffer.cpp                   \
+                  MtpStorage.cpp                        \
+                  MtpUtils.cpp                          \
+
+LOCAL_MODULE:= libmtp
+
+LOCAL_CFLAGS := -DMTP_HOST
+
+include $(BUILD_HOST_STATIC_LIBRARY)
+
+endif
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
new file mode 100644
index 0000000..0b0c80d
--- /dev/null
+++ b/media/mtp/MtpDataPacket.cpp
@@ -0,0 +1,506 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDataPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include <usbhost/usbhost.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDataPacket::MtpDataPacket()
+    :   MtpPacket(16384),   // MAX_USBFS_BUFFER_SIZE
+        mOffset(MTP_CONTAINER_HEADER_SIZE)
+{
+}
+
+MtpDataPacket::~MtpDataPacket() {
+}
+
+void MtpDataPacket::reset() {
+    MtpPacket::reset();
+    mOffset = MTP_CONTAINER_HEADER_SIZE;
+}
+
+void MtpDataPacket::setOperationCode(MtpOperationCode code) {
+    MtpPacket::putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+void MtpDataPacket::setTransactionID(MtpTransactionID id) {
+    MtpPacket::putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
+uint16_t MtpDataPacket::getUInt16() {
+    int offset = mOffset;
+    uint16_t result = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
+    mOffset += 2;
+    return result;
+}
+
+uint32_t MtpDataPacket::getUInt32() {
+    int offset = mOffset;
+    uint32_t result = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
+           ((uint32_t)mBuffer[offset + 2] << 16)  | ((uint32_t)mBuffer[offset + 3] << 24);
+    mOffset += 4;
+    return result;
+}
+
+uint64_t MtpDataPacket::getUInt64() {
+    int offset = mOffset;
+    uint64_t result = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
+           ((uint64_t)mBuffer[offset + 2] << 16) | ((uint64_t)mBuffer[offset + 3] << 24) |
+           ((uint64_t)mBuffer[offset + 4] << 32) | ((uint64_t)mBuffer[offset + 5] << 40) |
+           ((uint64_t)mBuffer[offset + 6] << 48)  | ((uint64_t)mBuffer[offset + 7] << 56);
+    mOffset += 8;
+    return result;
+}
+
+void MtpDataPacket::getUInt128(uint128_t& value) {
+    value[0] = getUInt32();
+    value[1] = getUInt32();
+    value[2] = getUInt32();
+    value[3] = getUInt32();
+}
+
+void MtpDataPacket::getString(MtpStringBuffer& string)
+{
+    string.readFromPacket(this);
+}
+
+Int8List* MtpDataPacket::getAInt8() {
+    Int8List* result = new Int8List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getInt8());
+    return result;
+}
+
+UInt8List* MtpDataPacket::getAUInt8() {
+    UInt8List* result = new UInt8List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getUInt8());
+    return result;
+}
+
+Int16List* MtpDataPacket::getAInt16() {
+    Int16List* result = new Int16List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getInt16());
+    return result;
+}
+
+UInt16List* MtpDataPacket::getAUInt16() {
+    UInt16List* result = new UInt16List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getUInt16());
+    return result;
+}
+
+Int32List* MtpDataPacket::getAInt32() {
+    Int32List* result = new Int32List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getInt32());
+    return result;
+}
+
+UInt32List* MtpDataPacket::getAUInt32() {
+    UInt32List* result = new UInt32List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getUInt32());
+    return result;
+}
+
+Int64List* MtpDataPacket::getAInt64() {
+    Int64List* result = new Int64List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getInt64());
+    return result;
+}
+
+UInt64List* MtpDataPacket::getAUInt64() {
+    UInt64List* result = new UInt64List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getUInt64());
+    return result;
+}
+
+void MtpDataPacket::putInt8(int8_t value) {
+    allocate(mOffset + 1);
+    mBuffer[mOffset++] = (uint8_t)value;
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt8(uint8_t value) {
+    allocate(mOffset + 1);
+    mBuffer[mOffset++] = (uint8_t)value;
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt16(int16_t value) {
+    allocate(mOffset + 2);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt16(uint16_t value) {
+    allocate(mOffset + 2);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt32(int32_t value) {
+    allocate(mOffset + 4);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt32(uint32_t value) {
+    allocate(mOffset + 4);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt64(int64_t value) {
+    allocate(mOffset + 8);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt64(uint64_t value) {
+    allocate(mOffset + 8);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt128(const int128_t& value) {
+    putInt32(value[0]);
+    putInt32(value[1]);
+    putInt32(value[2]);
+    putInt32(value[3]);
+}
+
+void MtpDataPacket::putUInt128(const uint128_t& value) {
+    putUInt32(value[0]);
+    putUInt32(value[1]);
+    putUInt32(value[2]);
+    putUInt32(value[3]);
+}
+
+void MtpDataPacket::putInt128(int64_t value) {
+    putInt64(value);
+    putInt64(value < 0 ? -1 : 0);
+}
+
+void MtpDataPacket::putUInt128(uint64_t value) {
+    putUInt64(value);
+    putUInt64(0);
+}
+
+void MtpDataPacket::putAInt8(const int8_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putInt8(*values++);
+}
+
+void MtpDataPacket::putAUInt8(const uint8_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putUInt8(*values++);
+}
+
+void MtpDataPacket::putAInt16(const int16_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const uint16_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putUInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const UInt16List* values) {
+    size_t count = (values ? values->size() : 0);
+    putUInt32(count);
+    for (size_t i = 0; i < count; i++)
+        putUInt16((*values)[i]);
+}
+
+void MtpDataPacket::putAInt32(const int32_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const uint32_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putUInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const UInt32List* list) {
+    if (!list) {
+        putEmptyArray();
+    } else {
+        size_t size = list->size();
+        putUInt32(size);
+        for (size_t i = 0; i < size; i++)
+            putUInt32((*list)[i]);
+    }
+}
+
+void MtpDataPacket::putAInt64(const int64_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putInt64(*values++);
+}
+
+void MtpDataPacket::putAUInt64(const uint64_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putUInt64(*values++);
+}
+
+void MtpDataPacket::putString(const MtpStringBuffer& string) {
+    string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const char* s) {
+    MtpStringBuffer string(s);
+    string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const uint16_t* string) {
+    int count = 0;
+    for (int i = 0; i < 256; i++) {
+        if (string[i])
+            count++;
+        else
+            break;
+    }
+    putUInt8(count > 0 ? count + 1 : 0);
+    for (int i = 0; i < count; i++)
+        putUInt16(string[i]);
+    // only terminate with zero if string is not empty
+    if (count > 0)
+        putUInt16(0);
+}
+
+#ifdef MTP_DEVICE
+int MtpDataPacket::read(int fd) {
+    // first read the header
+    int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+    if (ret != MTP_CONTAINER_HEADER_SIZE)
+        return -1;
+    // then the following data
+    int total = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+    allocate(total);
+    int remaining = total - MTP_CONTAINER_HEADER_SIZE;
+    ret = ::read(fd, &mBuffer[0] + MTP_CONTAINER_HEADER_SIZE, remaining);
+    if (ret != remaining)
+        return -1;
+
+    mPacketSize = total;
+    mOffset = MTP_CONTAINER_HEADER_SIZE;
+    return total;
+}
+
+int MtpDataPacket::readDataHeader(int fd) {
+    int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+    if (ret > 0)
+        mPacketSize = ret;
+    else
+        mPacketSize = 0;
+    return ret;
+}
+
+int MtpDataPacket::write(int fd) {
+    MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+    // send header separately from data
+    int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+    if (ret == MTP_CONTAINER_HEADER_SIZE)
+        ret = ::write(fd, mBuffer + MTP_CONTAINER_HEADER_SIZE,
+                        mPacketSize - MTP_CONTAINER_HEADER_SIZE);
+    return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::writeDataHeader(int fd, uint32_t length) {
+    MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+    MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+    int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+    return (ret < 0 ? ret : 0);
+}
+#endif // MTP_DEVICE
+
+#ifdef MTP_HOST
+int MtpDataPacket::read(struct usb_request *request) {
+    // first read the header
+    request->buffer = mBuffer;
+    request->buffer_length = mBufferSize;
+    int length = transfer(request);
+    if (length >= MTP_CONTAINER_HEADER_SIZE) {
+        // look at the length field to see if the data spans multiple packets
+        uint32_t totalLength = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+        allocate(totalLength);
+        while (totalLength > length) {
+            request->buffer = mBuffer + length;
+            request->buffer_length = totalLength - length;
+            int ret = transfer(request);
+            if (ret >= 0)
+                length += ret;
+            else {
+                length = ret;
+                break;
+            }
+        }
+    }
+    if (length >= 0)
+        mPacketSize = length;
+    return length;
+}
+
+int MtpDataPacket::readData(struct usb_request *request, void* buffer, int length) {
+    int read = 0;
+    while (read < length) {
+        request->buffer = (char *)buffer + read;
+        request->buffer_length = length - read;
+        int ret = transfer(request);
+        if (ret < 0) {
+            return ret;
+        }
+        read += ret;
+    }
+    return read;
+}
+
+// Queue a read request.  Call readDataWait to wait for result
+int MtpDataPacket::readDataAsync(struct usb_request *req) {
+    if (usb_request_queue(req)) {
+        LOGE("usb_endpoint_queue failed, errno: %d", errno);
+        return -1;
+    }
+    return 0;
+}
+
+// Wait for result of readDataAsync
+int MtpDataPacket::readDataWait(struct usb_device *device) {
+    struct usb_request *req = usb_request_wait(device);
+    return (req ? req->actual_length : -1);
+}
+
+int MtpDataPacket::readDataHeader(struct usb_request *request) {
+    request->buffer = mBuffer;
+    request->buffer_length = request->max_packet_size;
+    int length = transfer(request);
+    if (length >= 0)
+        mPacketSize = length;
+    return length;
+}
+
+int MtpDataPacket::writeDataHeader(struct usb_request *request, uint32_t length) {
+    MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+    MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+    request->buffer = mBuffer;
+    request->buffer_length = MTP_CONTAINER_HEADER_SIZE;
+    int ret = transfer(request);
+    return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_request *request) {
+    MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+
+    // send header separately from data
+    request->buffer = mBuffer;
+    request->buffer_length = MTP_CONTAINER_HEADER_SIZE;
+    int ret = transfer(request);
+    if (ret == MTP_CONTAINER_HEADER_SIZE) {
+        request->buffer = mBuffer + MTP_CONTAINER_HEADER_SIZE;
+        request->buffer_length = mPacketSize - MTP_CONTAINER_HEADER_SIZE;
+        ret = transfer(request);
+    }
+    return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_request *request, void* buffer, uint32_t length) {
+    request->buffer = buffer;
+    request->buffer_length = length;
+    int ret = transfer(request);
+    return (ret < 0 ? ret : 0);
+}
+
+#endif // MTP_HOST
+
+void* MtpDataPacket::getData(int& outLength) const {
+    int length = mPacketSize - MTP_CONTAINER_HEADER_SIZE;
+    if (length > 0) {
+        void* result = malloc(length);
+        if (result) {
+            memcpy(result, mBuffer + MTP_CONTAINER_HEADER_SIZE, length);
+            outLength = length;
+            return result;
+        }
+    }
+    outLength = 0;
+    return NULL;
+}
+
+}  // namespace android
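
The put/get helpers above write every integer little-endian, as MTP requires, and getData() returns a malloc'd copy of everything after the container header. Below is a minimal serialization sketch, assuming the media/mtp headers from this patch are on the include path; the function name and the example values are illustrative only.

    #include <stdio.h>
    #include <stdlib.h>
    #include "MtpDataPacket.h"

    using namespace android;

    // Build a small payload and extract it; free() matches the malloc() done
    // inside getData().  Names and values here are purely illustrative.
    static void buildPayload() {
        MtpDataPacket packet;
        packet.putUInt16(0x1001);        // written little-endian: 01 10
        packet.putUInt32(0xCAFEBABE);    // written little-endian: BE BA FE CA
        packet.putString("hello");       // count-prefixed 16-bit MTP string

        int length = 0;
        void* data = packet.getData(length);
        printf("payload is %d bytes\n", length);
        free(data);
    }
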
diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h
new file mode 100644
index 0000000..577cea1
--- /dev/null
+++ b/media/mtp/MtpDataPacket.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATA_PACKET_H
+#define _MTP_DATA_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+struct usb_device;
+struct usb_request;
+
+namespace android {
+
+class MtpStringBuffer;
+
+class MtpDataPacket : public MtpPacket {
+private:
+    // current offset for get/put methods
+    int                 mOffset;
+
+public:
+                        MtpDataPacket();
+    virtual             ~MtpDataPacket();
+
+    virtual void        reset();
+
+    void                setOperationCode(MtpOperationCode code);
+    void                setTransactionID(MtpTransactionID id);
+
+    inline uint8_t      getUInt8() { return (uint8_t)mBuffer[mOffset++]; }
+    inline int8_t       getInt8() { return (int8_t)mBuffer[mOffset++]; }
+    uint16_t            getUInt16();
+    inline int16_t      getInt16() { return (int16_t)getUInt16(); }
+    uint32_t            getUInt32();
+    inline int32_t      getInt32() { return (int32_t)getUInt32(); }
+    uint64_t            getUInt64();
+    inline int64_t      getInt64() { return (int64_t)getUInt64(); }
+    void                getUInt128(uint128_t& value);
+    inline void         getInt128(int128_t& value) { getUInt128((uint128_t&)value); }
+    void                getString(MtpStringBuffer& string);
+
+    Int8List*           getAInt8();
+    UInt8List*          getAUInt8();
+    Int16List*          getAInt16();
+    UInt16List*         getAUInt16();
+    Int32List*          getAInt32();
+    UInt32List*         getAUInt32();
+    Int64List*          getAInt64();
+    UInt64List*         getAUInt64();
+
+    void                putInt8(int8_t value);
+    void                putUInt8(uint8_t value);
+    void                putInt16(int16_t value);
+    void                putUInt16(uint16_t value);
+    void                putInt32(int32_t value);
+    void                putUInt32(uint32_t value);
+    void                putInt64(int64_t value);
+    void                putUInt64(uint64_t value);
+    void                putInt128(const int128_t& value);
+    void                putUInt128(const uint128_t& value);
+    void                putInt128(int64_t value);
+    void                putUInt128(uint64_t value);
+
+    void                putAInt8(const int8_t* values, int count);
+    void                putAUInt8(const uint8_t* values, int count);
+    void                putAInt16(const int16_t* values, int count);
+    void                putAUInt16(const uint16_t* values, int count);
+    void                putAUInt16(const UInt16List* values);
+    void                putAInt32(const int32_t* values, int count);
+    void                putAUInt32(const uint32_t* values, int count);
+    void                putAUInt32(const UInt32List* list);
+    void                putAInt64(const int64_t* values, int count);
+    void                putAUInt64(const uint64_t* values, int count);
+    void                putString(const MtpStringBuffer& string);
+    void                putString(const char* string);
+    void                putString(const uint16_t* string);
+    inline void         putEmptyString() { putUInt8(0); }
+    inline void         putEmptyArray() { putUInt32(0); }
+
+
+#ifdef MTP_DEVICE
+    // fill our buffer with data from the given file descriptor
+    int                 read(int fd);
+    int                 readDataHeader(int fd);
+
+    // write our data to the given file descriptor
+    int                 write(int fd);
+    int                 writeDataHeader(int fd, uint32_t length);
+#endif
+
+#ifdef MTP_HOST
+    int                 read(struct usb_request *request);
+    int                 readData(struct usb_request *request, void* buffer, int length);
+    int                 readDataAsync(struct usb_request *req);
+    int                 readDataWait(struct usb_device *device);
+    int                 readDataHeader(struct usb_request *ep);
+
+    int                 writeDataHeader(struct usb_request *ep, uint32_t length);
+    int                 write(struct usb_request *ep);
+    int                 write(struct usb_request *ep, void* buffer, uint32_t length);
+#endif
+
+    inline bool         hasData() const { return mPacketSize > MTP_CONTAINER_HEADER_SIZE; }
+    inline uint32_t     getContainerLength() const { return MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET); }
+    void*               getData(int& outLength) const;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATA_PACKET_H
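
The container offsets used throughout (MTP_CONTAINER_LENGTH_OFFSET and friends) come from mtp.h, which is not shown in this hunk; in the standard PTP/MTP container the header carries a 32-bit length, a 16-bit type, a 16-bit code, and a 32-bit transaction ID. The sketch below shows the device-side write path exposed by the MTP_DEVICE block; the fd argument and helper name are assumptions, not part of this patch.

    #include "MtpDataPacket.h"

    using namespace android;

    // Device-side only (requires the MTP_DEVICE define): fill a data packet
    // and push it to the MTP endpoint file descriptor.  write() sends the
    // container header first and then the payload.
    static int sendStringData(int fd, MtpOperationCode op, MtpTransactionID id) {
        MtpDataPacket packet;
        packet.setOperationCode(op);   // echo the request's operation code
        packet.setTransactionID(id);   // echo the request's transaction ID
        packet.putString("example");
        return packet.write(fd);       // 0 on success, negative on error
    }
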
diff --git a/media/mtp/MtpDatabase.h b/media/mtp/MtpDatabase.h
new file mode 100644
index 0000000..4d9a1ae
--- /dev/null
+++ b/media/mtp/MtpDatabase.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATABASE_H
+#define _MTP_DATABASE_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+class MtpProperty;
+
+class MtpDatabase {
+public:
+    virtual ~MtpDatabase() {}
+
+    // called from SendObjectInfo to reserve a database entry for the incoming file
+    virtual MtpObjectHandle         beginSendObject(const char* path,
+                                            MtpObjectFormat format,
+                                            MtpObjectHandle parent,
+                                            MtpStorageID storage,
+                                            uint64_t size,
+                                            time_t modified) = 0;
+
+    // called to report success or failure of the SendObject file transfer
+    // success should signal a notification of the new object's creation,
+    // failure should remove the database entry created in beginSendObject
+    virtual void                    endSendObject(const char* path,
+                                            MtpObjectHandle handle,
+                                            MtpObjectFormat format,
+                                            bool succeeded) = 0;
+
+    virtual MtpObjectHandleList*    getObjectList(MtpStorageID storageID,
+                                            MtpObjectFormat format,
+                                            MtpObjectHandle parent) = 0;
+
+    virtual int                     getNumObjects(MtpStorageID storageID,
+                                            MtpObjectFormat format,
+                                            MtpObjectHandle parent) = 0;
+
+    // the caller is responsible for deleting the results returned from these
+    // results can be NULL
+    virtual MtpObjectFormatList*    getSupportedPlaybackFormats() = 0;
+    virtual MtpObjectFormatList*    getSupportedCaptureFormats() = 0;
+    virtual MtpObjectPropertyList*  getSupportedObjectProperties(MtpObjectFormat format) = 0;
+    virtual MtpDevicePropertyList*  getSupportedDeviceProperties() = 0;
+
+    virtual MtpResponseCode         getObjectPropertyValue(MtpObjectHandle handle,
+                                            MtpObjectProperty property,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         setObjectPropertyValue(MtpObjectHandle handle,
+                                            MtpObjectProperty property,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         getDevicePropertyValue(MtpDeviceProperty property,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         setDevicePropertyValue(MtpDeviceProperty property,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         resetDeviceProperty(MtpDeviceProperty property) = 0;
+
+    virtual MtpResponseCode         getObjectPropertyList(MtpObjectHandle handle,
+                                            uint32_t format, uint32_t property,
+                                            int groupCode, int depth,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         getObjectInfo(MtpObjectHandle handle,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         getObjectFilePath(MtpObjectHandle handle,
+                                            MtpString& outFilePath,
+                                            int64_t& outFileLength,
+                                            MtpObjectFormat& outFormat) = 0;
+
+    virtual MtpResponseCode         deleteFile(MtpObjectHandle handle) = 0;
+
+    virtual MtpObjectHandleList*    getObjectReferences(MtpObjectHandle handle) = 0;
+
+    virtual MtpResponseCode         setObjectReferences(MtpObjectHandle handle,
+                                            MtpObjectHandleList* references) = 0;
+
+    virtual MtpProperty*            getObjectPropertyDesc(MtpObjectProperty property,
+                                            MtpObjectFormat format) = 0;
+
+    virtual MtpProperty*            getDevicePropertyDesc(MtpDeviceProperty property) = 0;
+
+    virtual void                    sessionStarted() = 0;
+
+    virtual void                    sessionEnded() = 0;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATABASE_H
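
MtpDatabase is a pure-virtual interface; the concrete implementation is supplied outside this hunk. The skeleton below only shows the shape of the SendObject pair described in the comments above; it remains abstract because the other pure virtuals are omitted, and the handle it returns is a placeholder.

    #include "MtpDatabase.h"

    using namespace android;

    // Skeleton only: every pure virtual in MtpDatabase must be overridden
    // before a subclass can be instantiated.  Values here are placeholders.
    class SketchMtpDatabase : public MtpDatabase {
    public:
        virtual MtpObjectHandle beginSendObject(const char* path,
                MtpObjectFormat format, MtpObjectHandle parent,
                MtpStorageID storage, uint64_t size, time_t modified) {
            // Reserve a database entry for the incoming file and return its handle.
            return 1;
        }

        virtual void endSendObject(const char* path, MtpObjectHandle handle,
                MtpObjectFormat format, bool succeeded) {
            // Commit the reserved entry on success, or remove it on failure.
        }

        // ... the remaining pure virtuals go here in a real implementation.
    };
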
diff --git a/media/mtp/MtpDebug.cpp b/media/mtp/MtpDebug.cpp
new file mode 100644
index 0000000..1668ecf
--- /dev/null
+++ b/media/mtp/MtpDebug.cpp
@@ -0,0 +1,396 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MtpDebug.h"
+
+namespace android {
+
+struct CodeEntry {
+    const char* name;
+    uint16_t code;
+};
+
+static const CodeEntry sOperationCodes[] = {
+    { "MTP_OPERATION_GET_DEVICE_INFO",              0x1001 },
+    { "MTP_OPERATION_OPEN_SESSION",                 0x1002 },
+    { "MTP_OPERATION_CLOSE_SESSION",                0x1003 },
+    { "MTP_OPERATION_GET_STORAGE_IDS",              0x1004 },
+    { "MTP_OPERATION_GET_STORAGE_INFO",             0x1005 },
+    { "MTP_OPERATION_GET_NUM_OBJECTS",              0x1006 },
+    { "MTP_OPERATION_GET_OBJECT_HANDLES",           0x1007 },
+    { "MTP_OPERATION_GET_OBJECT_INFO",              0x1008 },
+    { "MTP_OPERATION_GET_OBJECT",                   0x1009 },
+    { "MTP_OPERATION_GET_THUMB",                    0x100A },
+    { "MTP_OPERATION_DELETE_OBJECT",                0x100B },
+    { "MTP_OPERATION_SEND_OBJECT_INFO",             0x100C },
+    { "MTP_OPERATION_SEND_OBJECT",                  0x100D },
+    { "MTP_OPERATION_INITIATE_CAPTURE",             0x100E },
+    { "MTP_OPERATION_FORMAT_STORE",                 0x100F },
+    { "MTP_OPERATION_RESET_DEVICE",                 0x1010 },
+    { "MTP_OPERATION_SELF_TEST",                    0x1011 },
+    { "MTP_OPERATION_SET_OBJECT_PROTECTION",        0x1012 },
+    { "MTP_OPERATION_POWER_DOWN",                   0x1013 },
+    { "MTP_OPERATION_GET_DEVICE_PROP_DESC",         0x1014 },
+    { "MTP_OPERATION_GET_DEVICE_PROP_VALUE",        0x1015 },
+    { "MTP_OPERATION_SET_DEVICE_PROP_VALUE",        0x1016 },
+    { "MTP_OPERATION_RESET_DEVICE_PROP_VALUE",      0x1017 },
+    { "MTP_OPERATION_TERMINATE_OPEN_CAPTURE",       0x1018 },
+    { "MTP_OPERATION_MOVE_OBJECT",                  0x1019 },
+    { "MTP_OPERATION_COPY_OBJECT",                  0x101A },
+    { "MTP_OPERATION_GET_PARTIAL_OBJECT",           0x101B },
+    { "MTP_OPERATION_INITIATE_OPEN_CAPTURE",        0x101C },
+    { "MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED",   0x9801 },
+    { "MTP_OPERATION_GET_OBJECT_PROP_DESC",         0x9802 },
+    { "MTP_OPERATION_GET_OBJECT_PROP_VALUE",        0x9803 },
+    { "MTP_OPERATION_SET_OBJECT_PROP_VALUE",        0x9804 },
+    { "MTP_OPERATION_GET_OBJECT_PROP_LIST",         0x9805 },
+    { "MTP_OPERATION_SET_OBJECT_PROP_LIST",         0x9806 },
+    { "MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC", 0x9807 },
+    { "MTP_OPERATION_SEND_OBJECT_PROP_LIST",        0x9808 },
+    { "MTP_OPERATION_GET_OBJECT_REFERENCES",        0x9810 },
+    { "MTP_OPERATION_SET_OBJECT_REFERENCES",        0x9811 },
+    { "MTP_OPERATION_SKIP",                         0x9820 },
+    { 0,                                            0      },
+};
+
+static const CodeEntry sFormatCodes[] = {
+    { "MTP_FORMAT_UNDEFINED",                       0x3000 },
+    { "MTP_FORMAT_ASSOCIATION",                     0x3001 },
+    { "MTP_FORMAT_SCRIPT",                          0x3002 },
+    { "MTP_FORMAT_EXECUTABLE",                      0x3003 },
+    { "MTP_FORMAT_TEXT",                            0x3004 },
+    { "MTP_FORMAT_HTML",                            0x3005 },
+    { "MTP_FORMAT_DPOF",                            0x3006 },
+    { "MTP_FORMAT_AIFF",                            0x3007 },
+    { "MTP_FORMAT_WAV",                             0x3008 },
+    { "MTP_FORMAT_MP3",                             0x3009 },
+    { "MTP_FORMAT_AVI",                             0x300A },
+    { "MTP_FORMAT_MPEG",                            0x300B },
+    { "MTP_FORMAT_ASF",                             0x300C },
+    { "MTP_FORMAT_DEFINED",                         0x3800 },
+    { "MTP_FORMAT_EXIF_JPEG",                       0x3801 },
+    { "MTP_FORMAT_TIFF_EP",                         0x3802 },
+    { "MTP_FORMAT_FLASHPIX",                        0x3803 },
+    { "MTP_FORMAT_BMP",                             0x3804 },
+    { "MTP_FORMAT_CIFF",                            0x3805 },
+    { "MTP_FORMAT_GIF",                             0x3807 },
+    { "MTP_FORMAT_JFIF",                            0x3808 },
+    { "MTP_FORMAT_CD",                              0x3809 },
+    { "MTP_FORMAT_PICT",                            0x380A },
+    { "MTP_FORMAT_PNG",                             0x380B },
+    { "MTP_FORMAT_TIFF",                            0x380D },
+    { "MTP_FORMAT_TIFF_IT",                         0x380E },
+    { "MTP_FORMAT_JP2",                             0x380F },
+    { "MTP_FORMAT_JPX",                             0x3810 },
+    { "MTP_FORMAT_UNDEFINED_FIRMWARE",              0xB802 },
+    { "MTP_FORMAT_WINDOWS_IMAGE_FORMAT",            0xB881 },
+    { "MTP_FORMAT_UNDEFINED_AUDIO",                 0xB900 },
+    { "MTP_FORMAT_WMA",                             0xB901 },
+    { "MTP_FORMAT_OGG",                             0xB902 },
+    { "MTP_FORMAT_AAC",                             0xB903 },
+    { "MTP_FORMAT_AUDIBLE",                         0xB904 },
+    { "MTP_FORMAT_FLAC",                            0xB906 },
+    { "MTP_FORMAT_UNDEFINED_VIDEO",                 0xB980 },
+    { "MTP_FORMAT_WMV",                             0xB981 },
+    { "MTP_FORMAT_MP4_CONTAINER",                   0xB982 },
+    { "MTP_FORMAT_MP2",                             0xB983 },
+    { "MTP_FORMAT_3GP_CONTAINER",                   0xB984 },
+    { "MTP_FORMAT_UNDEFINED_COLLECTION",            0xBA00 },
+    { "MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM",       0xBA01 },
+    { "MTP_FORMAT_ABSTRACT_IMAGE_ALBUM",            0xBA02 },
+    { "MTP_FORMAT_ABSTRACT_AUDIO_ALBUM",            0xBA03 },
+    { "MTP_FORMAT_ABSTRACT_VIDEO_ALBUM",            0xBA04 },
+    { "MTP_FORMAT_ABSTRACT_AV_PLAYLIST",            0xBA05 },
+    { "MTP_FORMAT_ABSTRACT_CONTACT_GROUP",          0xBA06 },
+    { "MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER",         0xBA07 },
+    { "MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION",   0xBA08 },
+    { "MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST",         0xBA09 },
+    { "MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST",         0xBA0A },
+    { "MTP_FORMAT_ABSTRACT_MEDIACAST",              0xBA0B },
+    { "MTP_FORMAT_WPL_PLAYLIST",                    0xBA10 },
+    { "MTP_FORMAT_M3U_PLAYLIST",                    0xBA11 },
+    { "MTP_FORMAT_MPL_PLAYLIST",                    0xBA12 },
+    { "MTP_FORMAT_ASX_PLAYLIST",                    0xBA13 },
+    { "MTP_FORMAT_PLS_PLAYLIST",                    0xBA14 },
+    { "MTP_FORMAT_UNDEFINED_DOCUMENT",              0xBA80 },
+    { "MTP_FORMAT_ABSTRACT_DOCUMENT",               0xBA81 },
+    { "MTP_FORMAT_XML_DOCUMENT",                    0xBA82 },
+    { "MTP_FORMAT_MS_WORD_DOCUMENT",                0xBA83 },
+    { "MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT",      0xBA84 },
+    { "MTP_FORMAT_MS_EXCEL_SPREADSHEET",            0xBA85 },
+    { "MTP_FORMAT_MS_POWERPOINT_PRESENTATION",      0xBA86 },
+    { "MTP_FORMAT_UNDEFINED_MESSAGE",               0xBB00 },
+    { "MTP_FORMAT_ABSTRACT_MESSSAGE",               0xBB01 },
+    { "MTP_FORMAT_UNDEFINED_CONTACT",               0xBB80 },
+    { "MTP_FORMAT_ABSTRACT_CONTACT",                0xBB81 },
+    { "MTP_FORMAT_VCARD_2",                         0xBB82 },
+    { 0,                                            0      },
+};
+
+static const CodeEntry sObjectPropCodes[] = {
+    { "MTP_PROPERTY_STORAGE_ID",                             0xDC01 },
+    { "MTP_PROPERTY_OBJECT_FORMAT",                          0xDC02 },
+    { "MTP_PROPERTY_PROTECTION_STATUS",                      0xDC03 },
+    { "MTP_PROPERTY_OBJECT_SIZE",                            0xDC04 },
+    { "MTP_PROPERTY_ASSOCIATION_TYPE",                       0xDC05 },
+    { "MTP_PROPERTY_ASSOCIATION_DESC",                       0xDC06 },
+    { "MTP_PROPERTY_OBJECT_FILE_NAME",                       0xDC07 },
+    { "MTP_PROPERTY_DATE_CREATED",                           0xDC08 },
+    { "MTP_PROPERTY_DATE_MODIFIED",                          0xDC09 },
+    { "MTP_PROPERTY_KEYWORDS",                               0xDC0A },
+    { "MTP_PROPERTY_PARENT_OBJECT",                          0xDC0B },
+    { "MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS",                0xDC0C },
+    { "MTP_PROPERTY_HIDDEN",                                 0xDC0D },
+    { "MTP_PROPERTY_SYSTEM_OBJECT",                          0xDC0E },
+    { "MTP_PROPERTY_PERSISTENT_UID",                         0xDC41 },
+    { "MTP_PROPERTY_SYNC_ID",                                0xDC42 },
+    { "MTP_PROPERTY_PROPERTY_BAG",                           0xDC43 },
+    { "MTP_PROPERTY_NAME",                                   0xDC44 },
+    { "MTP_PROPERTY_CREATED_BY",                             0xDC45 },
+    { "MTP_PROPERTY_ARTIST",                                 0xDC46 },
+    { "MTP_PROPERTY_DATE_AUTHORED",                          0xDC47 },
+    { "MTP_PROPERTY_DESCRIPTION",                            0xDC48 },
+    { "MTP_PROPERTY_URL_REFERENCE",                          0xDC49 },
+    { "MTP_PROPERTY_LANGUAGE_LOCALE",                        0xDC4A },
+    { "MTP_PROPERTY_COPYRIGHT_INFORMATION",                  0xDC4B },
+    { "MTP_PROPERTY_SOURCE",                                 0xDC4C },
+    { "MTP_PROPERTY_ORIGIN_LOCATION",                        0xDC4D },
+    { "MTP_PROPERTY_DATE_ADDED",                             0xDC4E },
+    { "MTP_PROPERTY_NON_CONSUMABLE",                         0xDC4F },
+    { "MTP_PROPERTY_CORRUPT_UNPLAYABLE",                     0xDC50 },
+    { "MTP_PROPERTY_PRODUCER_SERIAL_NUMBER",                 0xDC51 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT",           0xDC81 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE",             0xDC82 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT",           0xDC83 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH",            0xDC84 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION",         0xDC85 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA",             0xDC86 },
+    { "MTP_PROPERTY_WIDTH",                                  0xDC87 },
+    { "MTP_PROPERTY_HEIGHT",                                 0xDC88 },
+    { "MTP_PROPERTY_DURATION",                               0xDC89 },
+    { "MTP_PROPERTY_RATING",                                 0xDC8A },
+    { "MTP_PROPERTY_TRACK",                                  0xDC8B },
+    { "MTP_PROPERTY_GENRE",                                  0xDC8C },
+    { "MTP_PROPERTY_CREDITS",                                0xDC8D },
+    { "MTP_PROPERTY_LYRICS",                                 0xDC8E },
+    { "MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID",                0xDC8F },
+    { "MTP_PROPERTY_PRODUCED_BY",                            0xDC90 },
+    { "MTP_PROPERTY_USE_COUNT",                              0xDC91 },
+    { "MTP_PROPERTY_SKIP_COUNT",                             0xDC92 },
+    { "MTP_PROPERTY_LAST_ACCESSED",                          0xDC93 },
+    { "MTP_PROPERTY_PARENTAL_RATING",                        0xDC94 },
+    { "MTP_PROPERTY_META_GENRE",                             0xDC95 },
+    { "MTP_PROPERTY_COMPOSER",                               0xDC96 },
+    { "MTP_PROPERTY_EFFECTIVE_RATING",                       0xDC97 },
+    { "MTP_PROPERTY_SUBTITLE",                               0xDC98 },
+    { "MTP_PROPERTY_ORIGINAL_RELEASE_DATE",                  0xDC99 },
+    { "MTP_PROPERTY_ALBUM_NAME",                             0xDC9A },
+    { "MTP_PROPERTY_ALBUM_ARTIST",                           0xDC9B },
+    { "MTP_PROPERTY_MOOD",                                   0xDC9C },
+    { "MTP_PROPERTY_DRM_STATUS",                             0xDC9D },
+    { "MTP_PROPERTY_SUB_DESCRIPTION",                        0xDC9E },
+    { "MTP_PROPERTY_IS_CROPPED",                             0xDCD1 },
+    { "MTP_PROPERTY_IS_COLOUR_CORRECTED",                    0xDCD2 },
+    { "MTP_PROPERTY_IMAGE_BIT_DEPTH",                        0xDCD3 },
+    { "MTP_PROPERTY_F_NUMBER",                               0xDCD4 },
+    { "MTP_PROPERTY_EXPOSURE_TIME",                          0xDCD5 },
+    { "MTP_PROPERTY_EXPOSURE_INDEX",                         0xDCD6 },
+    { "MTP_PROPERTY_TOTAL_BITRATE",                          0xDE91 },
+    { "MTP_PROPERTY_BITRATE_TYPE",                           0xDE92 },
+    { "MTP_PROPERTY_SAMPLE_RATE",                            0xDE93 },
+    { "MTP_PROPERTY_NUMBER_OF_CHANNELS",                     0xDE94 },
+    { "MTP_PROPERTY_AUDIO_BIT_DEPTH",                        0xDE95 },
+    { "MTP_PROPERTY_SCAN_TYPE",                              0xDE97 },
+    { "MTP_PROPERTY_AUDIO_WAVE_CODEC",                       0xDE99 },
+    { "MTP_PROPERTY_AUDIO_BITRATE",                          0xDE9A },
+    { "MTP_PROPERTY_VIDEO_FOURCC_CODEC",                     0xDE9B },
+    { "MTP_PROPERTY_VIDEO_BITRATE",                          0xDE9C },
+    { "MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS",            0xDE9D },
+    { "MTP_PROPERTY_KEYFRAME_DISTANCE",                      0xDE9E },
+    { "MTP_PROPERTY_BUFFER_SIZE",                            0xDE9F },
+    { "MTP_PROPERTY_ENCODING_QUALITY",                       0xDEA0 },
+    { "MTP_PROPERTY_ENCODING_PROFILE",                       0xDEA1 },
+    { "MTP_PROPERTY_DISPLAY_NAME",                           0xDCE0 },
+    { "MTP_PROPERTY_BODY_TEXT",                              0xDCE1 },
+    { "MTP_PROPERTY_SUBJECT",                                0xDCE2 },
+    { "MTP_PROPERTY_PRIORITY",                               0xDCE3 },
+    { "MTP_PROPERTY_GIVEN_NAME",                             0xDD00 },
+    { "MTP_PROPERTY_MIDDLE_NAMES",                           0xDD01 },
+    { "MTP_PROPERTY_FAMILY_NAME",                            0xDD02 },
+    { "MTP_PROPERTY_PREFIX",                                 0xDD03 },
+    { "MTP_PROPERTY_SUFFIX",                                 0xDD04 },
+    { "MTP_PROPERTY_PHONETIC_GIVEN_NAME",                    0xDD05 },
+    { "MTP_PROPERTY_PHONETIC_FAMILY_NAME",                   0xDD06 },
+    { "MTP_PROPERTY_EMAIL_PRIMARY",                          0xDD07 },
+    { "MTP_PROPERTY_EMAIL_PERSONAL_1",                       0xDD08 },
+    { "MTP_PROPERTY_EMAIL_PERSONAL_2",                       0xDD09 },
+    { "MTP_PROPERTY_EMAIL_BUSINESS_1",                       0xDD0A },
+    { "MTP_PROPERTY_EMAIL_BUSINESS_2",                       0xDD0B },
+    { "MTP_PROPERTY_EMAIL_OTHERS",                           0xDD0C },
+    { "MTP_PROPERTY_PHONE_NUMBER_PRIMARY",                   0xDD0D },
+    { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL",                  0xDD0E },
+    { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2",                0xDD0F },
+    { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS",                  0xDD10 },
+    { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2",                0xDD11 },
+    { "MTP_PROPERTY_PHONE_NUMBER_MOBILE",                    0xDD12 },
+    { "MTP_PROPERTY_PHONE_NUMBER_MOBILE_2",                  0xDD13 },
+    { "MTP_PROPERTY_FAX_NUMBER_PRIMARY",                     0xDD14 },
+    { "MTP_PROPERTY_FAX_NUMBER_PERSONAL",                    0xDD15 },
+    { "MTP_PROPERTY_FAX_NUMBER_BUSINESS",                    0xDD16 },
+    { "MTP_PROPERTY_PAGER_NUMBER",                           0xDD17 },
+    { "MTP_PROPERTY_PHONE_NUMBER_OTHERS",                    0xDD18 },
+    { "MTP_PROPERTY_PRIMARY_WEB_ADDRESS",                    0xDD19 },
+    { "MTP_PROPERTY_PERSONAL_WEB_ADDRESS",                   0xDD1A },
+    { "MTP_PROPERTY_BUSINESS_WEB_ADDRESS",                   0xDD1B },
+    { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS",              0xDD1C },
+    { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2",            0xDD1D },
+    { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3",            0xDD1E },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL",           0xDD1F },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1",         0xDD20 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2",         0xDD21 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY",           0xDD22 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION",         0xDD23 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE",    0xDD24 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY",        0xDD25 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL",           0xDD26 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1",         0xDD27 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2",         0xDD28 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY",           0xDD29 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION",         0xDD2A },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE",    0xDD2B },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY",        0xDD2C },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL",              0xDD2D },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1",            0xDD2E },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2",            0xDD2F },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY",              0xDD30 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION",            0xDD31 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE",       0xDD32 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY",           0xDD33 },
+    { "MTP_PROPERTY_ORGANIZATION_NAME",                      0xDD34 },
+    { "MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME",             0xDD35 },
+    { "MTP_PROPERTY_ROLE",                                   0xDD36 },
+    { "MTP_PROPERTY_BIRTHDATE",                              0xDD37 },
+    { "MTP_PROPERTY_MESSAGE_TO",                             0xDD40 },
+    { "MTP_PROPERTY_MESSAGE_CC",                             0xDD41 },
+    { "MTP_PROPERTY_MESSAGE_BCC",                            0xDD42 },
+    { "MTP_PROPERTY_MESSAGE_READ",                           0xDD43 },
+    { "MTP_PROPERTY_MESSAGE_RECEIVED_TIME",                  0xDD44 },
+    { "MTP_PROPERTY_MESSAGE_SENDER",                         0xDD45 },
+    { "MTP_PROPERTY_ACTIVITY_BEGIN_TIME",                    0xDD50 },
+    { "MTP_PROPERTY_ACTIVITY_END_TIME",                      0xDD51 },
+    { "MTP_PROPERTY_ACTIVITY_LOCATION",                      0xDD52 },
+    { "MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES",            0xDD54 },
+    { "MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES",            0xDD55 },
+    { "MTP_PROPERTY_ACTIVITY_RESOURCES",                     0xDD56 },
+    { "MTP_PROPERTY_ACTIVITY_ACCEPTED",                      0xDD57 },
+    { "MTP_PROPERTY_ACTIVITY_TENTATIVE",                     0xDD58 },
+    { "MTP_PROPERTY_ACTIVITY_DECLINED",                      0xDD59 },
+    { "MTP_PROPERTY_ACTIVITY_REMAINDER_TIME",                0xDD5A },
+    { "MTP_PROPERTY_ACTIVITY_OWNER",                         0xDD5B },
+    { "MTP_PROPERTY_ACTIVITY_STATUS",                        0xDD5C },
+    { "MTP_PROPERTY_OWNER",                                  0xDD5D },
+    { "MTP_PROPERTY_EDITOR",                                 0xDD5E },
+    { "MTP_PROPERTY_WEBMASTER",                              0xDD5F },
+    { "MTP_PROPERTY_URL_SOURCE",                             0xDD60 },
+    { "MTP_PROPERTY_URL_DESTINATION",                        0xDD61 },
+    { "MTP_PROPERTY_TIME_BOOKMARK",                          0xDD62 },
+    { "MTP_PROPERTY_OBJECT_BOOKMARK",                        0xDD63 },
+    { "MTP_PROPERTY_BYTE_BOOKMARK",                          0xDD64 },
+    { "MTP_PROPERTY_LAST_BUILD_DATE",                        0xDD70 },
+    { "MTP_PROPERTY_TIME_TO_LIVE",                           0xDD71 },
+    { "MTP_PROPERTY_MEDIA_GUID",                             0xDD72 },
+    { 0,                                                     0      },
+};
+
+static const CodeEntry sDevicePropCodes[] = {
+    { "MTP_DEVICE_PROPERTY_UNDEFINED",                       0x5000 },
+    { "MTP_DEVICE_PROPERTY_BATTERY_LEVEL",                   0x5001 },
+    { "MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE",                 0x5002 },
+    { "MTP_DEVICE_PROPERTY_IMAGE_SIZE",                      0x5003 },
+    { "MTP_DEVICE_PROPERTY_COMPRESSION_SETTING",             0x5004 },
+    { "MTP_DEVICE_PROPERTY_WHITE_BALANCE",                   0x5005 },
+    { "MTP_DEVICE_PROPERTY_RGB_GAIN",                        0x5006 },
+    { "MTP_DEVICE_PROPERTY_F_NUMBER",                        0x5007 },
+    { "MTP_DEVICE_PROPERTY_FOCAL_LENGTH",                    0x5008 },
+    { "MTP_DEVICE_PROPERTY_FOCUS_DISTANCE",                  0x5009 },
+    { "MTP_DEVICE_PROPERTY_FOCUS_MODE",                      0x500A },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE",          0x500B },
+    { "MTP_DEVICE_PROPERTY_FLASH_MODE",                      0x500C },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_TIME",                   0x500D },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE",           0x500E },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_INDEX",                  0x500F },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION",      0x5010 },
+    { "MTP_DEVICE_PROPERTY_DATETIME",                        0x5011 },
+    { "MTP_DEVICE_PROPERTY_CAPTURE_DELAY",                   0x5012 },
+    { "MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE",              0x5013 },
+    { "MTP_DEVICE_PROPERTY_CONTRAST",                        0x5014 },
+    { "MTP_DEVICE_PROPERTY_SHARPNESS",                       0x5015 },
+    { "MTP_DEVICE_PROPERTY_DIGITAL_ZOOM",                    0x5016 },
+    { "MTP_DEVICE_PROPERTY_EFFECT_MODE",                     0x5017 },
+    { "MTP_DEVICE_PROPERTY_BURST_NUMBER",                    0x5018 },
+    { "MTP_DEVICE_PROPERTY_BURST_INTERVAL",                  0x5019 },
+    { "MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER",                0x501A },
+    { "MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL",              0x501B },
+    { "MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE",             0x501C },
+    { "MTP_DEVICE_PROPERTY_UPLOAD_URL",                      0x501D },
+    { "MTP_DEVICE_PROPERTY_ARTIST",                          0x501E },
+    { "MTP_DEVICE_PROPERTY_COPYRIGHT_INFO",                  0x501F },
+    { "MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER",         0xD401 },
+    { "MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME",            0xD402 },
+    { "MTP_DEVICE_PROPERTY_VOLUME",                          0xD403 },
+    { "MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED",       0xD404 },
+    { "MTP_DEVICE_PROPERTY_DEVICE_ICON",                     0xD405 },
+    { "MTP_DEVICE_PROPERTY_PLAYBACK_RATE",                   0xD410 },
+    { "MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT",                 0xD411 },
+    { "MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX",        0xD412 },
+    { "MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO",  0xD406 },
+    { "MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE",           0xD407 },
+    { 0,                                                     0      },
+};
+
+static const char* getCodeName(uint16_t code, const CodeEntry* table) {
+    const CodeEntry* entry = table;
+    while (entry->name) {
+        if (entry->code == code)
+            return entry->name;
+        entry++;
+    }
+    return "UNKNOWN";
+}
+
+const char* MtpDebug::getOperationCodeName(MtpOperationCode code) {
+    return getCodeName(code, sOperationCodes);
+}
+
+const char* MtpDebug::getFormatCodeName(MtpObjectFormat code) {
+    if (code == 0)
+        return "NONE";
+    return getCodeName(code, sFormatCodes);
+}
+
+const char* MtpDebug::getObjectPropCodeName(MtpPropertyCode code) {
+    if (code == 0)
+        return "NONE";
+    return getCodeName(code, sObjectPropCodes);
+}
+
+const char* MtpDebug::getDevicePropCodeName(MtpPropertyCode code) {
+    if (code == 0)
+        return "NONE";
+    return getCodeName(code, sDevicePropCodes);
+}
+
+}  // namespace android
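
The lookup helpers above make log output readable; unknown codes resolve to "UNKNOWN", and a code of 0 is reported as "NONE" for formats and properties. A small usage sketch follows; the expected names in the comments come directly from the tables above.

    #include <stdio.h>
    #include "MtpDebug.h"

    using namespace android;

    // Resolve a few raw MTP codes to their symbolic names.
    static void dumpCodeNames() {
        printf("%s\n", MtpDebug::getOperationCodeName(0x1001));   // MTP_OPERATION_GET_DEVICE_INFO
        printf("%s\n", MtpDebug::getFormatCodeName(0x3009));      // MTP_FORMAT_MP3
        printf("%s\n", MtpDebug::getObjectPropCodeName(0xDC44));  // MTP_PROPERTY_NAME
        printf("%s\n", MtpDebug::getDevicePropCodeName(0x5001));  // MTP_DEVICE_PROPERTY_BATTERY_LEVEL
    }
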
diff --git a/media/mtp/MtpDebug.h b/media/mtp/MtpDebug.h
new file mode 100644
index 0000000..5b53e31
--- /dev/null
+++ b/media/mtp/MtpDebug.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEBUG_H
+#define _MTP_DEBUG_H
+
+// #define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDebug {
+public:
+    static const char* getOperationCodeName(MtpOperationCode code);
+    static const char* getFormatCodeName(MtpObjectFormat code);
+    static const char* getObjectPropCodeName(MtpPropertyCode code);
+    static const char* getDevicePropCodeName(MtpPropertyCode code);
+};
+
+}; // namespace android
+
+#endif // _MTP_DEBUG_H
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
new file mode 100644
index 0000000..2e86159
--- /dev/null
+++ b/media/mtp/MtpDevice.cpp
@@ -0,0 +1,835 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDevice"
+
+#include "MtpDebug.h"
+#include "MtpDevice.h"
+#include "MtpDeviceInfo.h"
+#include "MtpObjectInfo.h"
+#include "MtpProperty.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <endian.h>
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+#if 0
+static bool isMtpDevice(uint16_t vendor, uint16_t product) {
+    // Sandisk Sansa Fuze
+    if (vendor == 0x0781 && product == 0x74c2)
+        return true;
+    // Samsung YP-Z5
+    if (vendor == 0x04e8 && product == 0x503c)
+        return true;
+    return false;
+}
+#endif
+
+MtpDevice* MtpDevice::open(const char* deviceName, int fd) {
+    struct usb_device *device = usb_device_new(deviceName, fd);
+    if (!device) {
+        LOGE("usb_device_new failed for %s", deviceName);
+        return NULL;
+    }
+
+    struct usb_descriptor_header* desc;
+    struct usb_descriptor_iter iter;
+
+    usb_descriptor_iter_init(device, &iter);
+
+    while ((desc = usb_descriptor_iter_next(&iter)) != NULL) {
+        if (desc->bDescriptorType == USB_DT_INTERFACE) {
+            struct usb_interface_descriptor *interface = (struct usb_interface_descriptor *)desc;
+
+            if (interface->bInterfaceClass == USB_CLASS_STILL_IMAGE &&
+                interface->bInterfaceSubClass == 1 && // Still Image Capture
+                interface->bInterfaceProtocol == 1)     // Picture Transfer Protocol (PIMA 15470)
+            {
+                char* manufacturerName = usb_device_get_manufacturer_name(device);
+                char* productName = usb_device_get_product_name(device);
+                LOGD("Found camera: \"%s\" \"%s\"\n", manufacturerName, productName);
+                free(manufacturerName);
+                free(productName);
+            } else if (interface->bInterfaceClass == 0xFF &&
+                    interface->bInterfaceSubClass == 0xFF &&
+                    interface->bInterfaceProtocol == 0) {
+                char* interfaceName = usb_device_get_string(device, interface->iInterface);
+                if (!interfaceName) {
+                    continue;
+                } else if (strcmp(interfaceName, "MTP")) {
+                    free(interfaceName);
+                    continue;
+                }
+                free(interfaceName);
+
+                // Looks like an Android-style MTP device
+                char* manufacturerName = usb_device_get_manufacturer_name(device);
+                char* productName = usb_device_get_product_name(device);
+                LOGD("Found MTP device: \"%s\" \"%s\"\n", manufacturerName, productName);
+                free(manufacturerName);
+                free(productName);
+            }
+#if 0
+             else {
+                // look for special cased devices based on vendor/product ID
+                // we are doing this mainly for testing purposes
+                uint16_t vendor = usb_device_get_vendor_id(device);
+                uint16_t product = usb_device_get_product_id(device);
+                if (!isMtpDevice(vendor, product)) {
+                    // not an MTP or PTP device
+                    continue;
+                }
+                // request MTP OS string and descriptor
+                // some music players need to see this before entering MTP mode.
+                char buffer[256];
+                memset(buffer, 0, sizeof(buffer));
+                int ret = usb_device_control_transfer(device,
+                        USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_STANDARD,
+                        USB_REQ_GET_DESCRIPTOR, (USB_DT_STRING << 8) | 0xEE,
+                        0, buffer, sizeof(buffer), 0);
+                printf("usb_device_control_transfer returned %d errno: %d\n", ret, errno);
+                if (ret > 0) {
+                    printf("got MTP string %s\n", buffer);
+                    ret = usb_device_control_transfer(device,
+                            USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_VENDOR, 1,
+                            0, 4, buffer, sizeof(buffer), 0);
+                    printf("OS descriptor got %d\n", ret);
+                } else {
+                    printf("no MTP string\n");
+                }
+            }
+#endif
+            // if we got here, then we have a likely MTP or PTP device
+
+            // interface should be followed by three endpoints
+            struct usb_endpoint_descriptor *ep;
+            struct usb_endpoint_descriptor *ep_in_desc = NULL;
+            struct usb_endpoint_descriptor *ep_out_desc = NULL;
+            struct usb_endpoint_descriptor *ep_intr_desc = NULL;
+            for (int i = 0; i < 3; i++) {
+                ep = (struct usb_endpoint_descriptor *)usb_descriptor_iter_next(&iter);
+                if (!ep || ep->bDescriptorType != USB_DT_ENDPOINT) {
+                    LOGE("endpoints not found\n");
+                    usb_device_close(device);
+                    return NULL;
+                }
+                if (ep->bmAttributes == USB_ENDPOINT_XFER_BULK) {
+                    if (ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK)
+                        ep_in_desc = ep;
+                    else
+                        ep_out_desc = ep;
+                } else if (ep->bmAttributes == USB_ENDPOINT_XFER_INT &&
+                    ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK) {
+                    ep_intr_desc = ep;
+                }
+            }
+            if (!ep_in_desc || !ep_out_desc || !ep_intr_desc) {
+                LOGE("endpoints not found\n");
+                usb_device_close(device);
+                return NULL;
+            }
+
+            if (usb_device_claim_interface(device, interface->bInterfaceNumber)) {
+                LOGE("usb_device_claim_interface failed errno: %d\n", errno);
+                usb_device_close(device);
+                return NULL;
+            }
+
+            MtpDevice* mtpDevice = new MtpDevice(device, interface->bInterfaceNumber,
+                        ep_in_desc, ep_out_desc, ep_intr_desc);
+            mtpDevice->initialize();
+            return mtpDevice;
+        }
+    }
+
+    usb_device_close(device);
+    LOGE("device not found");
+    return NULL;
+}
+
+MtpDevice::MtpDevice(struct usb_device* device, int interface,
+            const struct usb_endpoint_descriptor *ep_in,
+            const struct usb_endpoint_descriptor *ep_out,
+            const struct usb_endpoint_descriptor *ep_intr)
+    :   mDevice(device),
+        mInterface(interface),
+        mRequestIn1(NULL),
+        mRequestIn2(NULL),
+        mRequestOut(NULL),
+        mRequestIntr(NULL),
+        mDeviceInfo(NULL),
+        mSessionID(0),
+        mTransactionID(0),
+        mReceivedResponse(false)
+{
+    mRequestIn1 = usb_request_new(device, ep_in);
+    mRequestIn2 = usb_request_new(device, ep_in);
+    mRequestOut = usb_request_new(device, ep_out);
+    mRequestIntr = usb_request_new(device, ep_intr);
+}
+
+MtpDevice::~MtpDevice() {
+    close();
+    for (int i = 0; i < mDeviceProperties.size(); i++)
+        delete mDeviceProperties[i];
+    usb_request_free(mRequestIn1);
+    usb_request_free(mRequestIn2);
+    usb_request_free(mRequestOut);
+    usb_request_free(mRequestIntr);
+}
+
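+// opens an MTP session, fetches the DeviceInfo dataset and caches the
+// descriptors of the device properties the device reports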
+void MtpDevice::initialize() {
+    openSession();
+    mDeviceInfo = getDeviceInfo();
+    if (mDeviceInfo) {
+        if (mDeviceInfo->mDeviceProperties) {
+            int count = mDeviceInfo->mDeviceProperties->size();
+            for (int i = 0; i < count; i++) {
+                MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i];
+                MtpProperty* property = getDevicePropDesc(propCode);
+                if (property)
+                    mDeviceProperties.push(property);
+            }
+        }
+    }
+}
+
+void MtpDevice::close() {
+    if (mDevice) {
+        usb_device_release_interface(mDevice, mInterface);
+        usb_device_close(mDevice);
+        mDevice = NULL;
+    }
+}
+
+void MtpDevice::print() {
+    if (mDeviceInfo) {
+        mDeviceInfo->print();
+
+        if (mDeviceInfo->mDeviceProperties) {
+            LOGI("***** DEVICE PROPERTIES *****\n");
+            int count = mDeviceInfo->mDeviceProperties->size();
+            for (int i = 0; i < count; i++) {
+                MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i];
+                MtpProperty* property = getDevicePropDesc(propCode);
+                if (property) {
+                    property->print();
+                    delete property;
+                }
+            }
+        }
+    }
+
+    if (mDeviceInfo && mDeviceInfo->mPlaybackFormats) {
+        LOGI("***** OBJECT PROPERTIES *****\n");
+        int count = mDeviceInfo->mPlaybackFormats->size();
+        for (int i = 0; i < count; i++) {
+            MtpObjectFormat format = (*mDeviceInfo->mPlaybackFormats)[i];
+            LOGI("*** FORMAT: %s\n", MtpDebug::getFormatCodeName(format));
+            MtpObjectPropertyList* props = getObjectPropsSupported(format);
+            if (props) {
+                for (int j = 0; j < props->size(); j++) {
+                    MtpObjectProperty prop = (*props)[j];
+                    MtpProperty* property = getObjectPropDesc(prop, format);
+                    if (property) {
+                        property->print();
+                        delete property;
+                    } else {
+                        LOGE("could not fetch property: %s",
+                                MtpDebug::getObjectPropCodeName(prop));
+                    }
+                }
+            }
+        }
+    }
+}
+
+const char* MtpDevice::getDeviceName() {
+    if (mDevice)
+        return usb_device_get_name(mDevice);
+    else
+        return "???";
+}
+
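+// sends an OpenSession request with session ID 1; if the device answers
+// SESSION_ALREADY_OPEN we adopt the session ID it returns instead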
+bool MtpDevice::openSession() {
+    Mutex::Autolock autoLock(mMutex);
+
+    mSessionID = 0;
+    mTransactionID = 0;
+    MtpSessionID newSession = 1;
+    mRequest.reset();
+    mRequest.setParameter(1, newSession);
+    if (!sendRequest(MTP_OPERATION_OPEN_SESSION))
+        return false;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_SESSION_ALREADY_OPEN)
+        newSession = mResponse.getParameter(1);
+    else if (ret != MTP_RESPONSE_OK)
+        return false;
+
+    mSessionID = newSession;
+    mTransactionID = 1;
+    return true;
+}
+
+bool MtpDevice::closeSession() {
+    // FIXME
+    return true;
+}
+
+MtpDeviceInfo* MtpDevice::getDeviceInfo() {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    if (!sendRequest(MTP_OPERATION_GET_DEVICE_INFO))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpDeviceInfo* info = new MtpDeviceInfo;
+        info->read(mData);
+        return info;
+    }
+    return NULL;
+}
+
+MtpStorageIDList* MtpDevice::getStorageIDs() {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    if (!sendRequest(MTP_OPERATION_GET_STORAGE_IDS))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        return mData.getAUInt32();
+    }
+    return NULL;
+}
+
+MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, storageID);
+    if (!sendRequest(MTP_OPERATION_GET_STORAGE_INFO))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpStorageInfo* info = new MtpStorageInfo(storageID);
+        info->read(mData);
+        return info;
+    }
+    return NULL;
+}
+
+MtpObjectHandleList* MtpDevice::getObjectHandles(MtpStorageID storageID,
+            MtpObjectFormat format, MtpObjectHandle parent) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, storageID);
+    mRequest.setParameter(2, format);
+    mRequest.setParameter(3, parent);
+    if (!sendRequest(MTP_OPERATION_GET_OBJECT_HANDLES))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        return mData.getAUInt32();
+    }
+    return NULL;
+}
+
+MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) {
+    Mutex::Autolock autoLock(mMutex);
+
+    // FIXME - we might want to add some caching here
+
+    mRequest.reset();
+    mRequest.setParameter(1, handle);
+    if (!sendRequest(MTP_OPERATION_GET_OBJECT_INFO))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpObjectInfo* info = new MtpObjectInfo(handle);
+        info->read(mData);
+        return info;
+    }
+    return NULL;
+}
+
+void* MtpDevice::getThumbnail(MtpObjectHandle handle, int& outLength) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, handle);
+    if (sendRequest(MTP_OPERATION_GET_THUMB) && readData()) {
+        MtpResponseCode ret = readResponse();
+        if (ret == MTP_RESPONSE_OK) {
+            return mData.getData(outLength);
+        }
+    }
+    outLength = 0;
+    return NULL;
+}
+
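+// sends the ObjectInfo dataset that must precede SendObject; on success the
+// response parameters carry the storage ID, parent and handle the device
+// assigned to the new object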
+MtpObjectHandle MtpDevice::sendObjectInfo(MtpObjectInfo* info) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    MtpObjectHandle parent = info->mParent;
+    if (parent == 0)
+        parent = MTP_PARENT_ROOT;
+
+    mRequest.setParameter(1, info->mStorageID);
+    mRequest.setParameter(2, parent);
+
+    mData.putUInt32(info->mStorageID);
+    mData.putUInt16(info->mFormat);
+    mData.putUInt16(info->mProtectionStatus);
+    mData.putUInt32(info->mCompressedSize);
+    mData.putUInt16(info->mThumbFormat);
+    mData.putUInt32(info->mThumbCompressedSize);
+    mData.putUInt32(info->mThumbPixWidth);
+    mData.putUInt32(info->mThumbPixHeight);
+    mData.putUInt32(info->mImagePixWidth);
+    mData.putUInt32(info->mImagePixHeight);
+    mData.putUInt32(info->mImagePixDepth);
+    mData.putUInt32(info->mParent);
+    mData.putUInt16(info->mAssociationType);
+    mData.putUInt32(info->mAssociationDesc);
+    mData.putUInt32(info->mSequenceNumber);
+    mData.putString(info->mName);
+
+    char created[100], modified[100];
+    formatDateTime(info->mDateCreated, created, sizeof(created));
+    formatDateTime(info->mDateModified, modified, sizeof(modified));
+
+    mData.putString(created);
+    mData.putString(modified);
+    if (info->mKeywords)
+        mData.putString(info->mKeywords);
+    else
+        mData.putEmptyString();
+
+    if (sendRequest(MTP_OPERATION_SEND_OBJECT_INFO) && sendData()) {
+        MtpResponseCode ret = readResponse();
+        if (ret == MTP_RESPONSE_OK) {
+            info->mStorageID = mResponse.getParameter(1);
+            info->mParent = mResponse.getParameter(2);
+            info->mHandle = mResponse.getParameter(3);
+            return info->mHandle;
+        }
+    }
+    return (MtpObjectHandle)-1;
+}
+
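+// streams the object data announced by a previous SendObjectInfo: a
+// SendObject request and data header, then the contents of srcFD copied
+// to the bulk-out endpoint in 64K chunks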
+bool MtpDevice::sendObject(MtpObjectInfo* info, int srcFD) {
+    Mutex::Autolock autoLock(mMutex);
+
+    int remaining = info->mCompressedSize;
+    mRequest.reset();
+    mRequest.setParameter(1, info->mHandle);
+    if (sendRequest(MTP_OPERATION_SEND_OBJECT)) {
+        // send data header
+        writeDataHeader(MTP_OPERATION_SEND_OBJECT, remaining);
+
+        char buffer[65536];
+        while (remaining > 0) {
+            int count = read(srcFD, buffer, sizeof(buffer));
+            if (count > 0) {
+                int written = mData.write(mRequestOut, buffer, count);
+                if (written < 0)
+                    break;      // bail out on a USB write error
+                remaining -= count;
+            } else {
+                break;
+            }
+        }
+    }
+    MtpResponseCode ret = readResponse();
+    return (remaining == 0 && ret == MTP_RESPONSE_OK);
+}
+
+bool MtpDevice::deleteObject(MtpObjectHandle handle) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, handle);
+    if (sendRequest(MTP_OPERATION_DELETE_OBJECT)) {
+        MtpResponseCode ret = readResponse();
+        if (ret == MTP_RESPONSE_OK)
+            return true;
+    }
+    return false;
+}
+
+MtpObjectHandle MtpDevice::getParent(MtpObjectHandle handle) {
+    MtpObjectInfo* info = getObjectInfo(handle);
+    if (info) {
+        MtpObjectHandle parent = info->mParent;
+        delete info;
+        return parent;
+    } else {
+        return -1;
+    }
+}
+
+MtpObjectHandle MtpDevice::getStorageID(MtpObjectHandle handle) {
+    MtpObjectInfo* info = getObjectInfo(handle);
+    if (info) {
+        MtpObjectHandle storageId = info->mStorageID;
+        delete info;
+        return storageId;
+    } else {
+        return -1;
+    }
+}
+
+MtpObjectPropertyList* MtpDevice::getObjectPropsSupported(MtpObjectFormat format) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, format);
+    if (!sendRequest(MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        return mData.getAUInt16();
+    }
+    return NULL;
+
+}
+
+MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, code);
+    if (!sendRequest(MTP_OPERATION_GET_DEVICE_PROP_DESC))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpProperty* property = new MtpProperty;
+        property->read(mData);
+        return property;
+    }
+    return NULL;
+}
+
+MtpProperty* MtpDevice::getObjectPropDesc(MtpObjectProperty code, MtpObjectFormat format) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, code);
+    mRequest.setParameter(2, format);
+    if (!sendRequest(MTP_OPERATION_GET_OBJECT_PROP_DESC))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpProperty* property = new MtpProperty;
+        property->read(mData);
+        return property;
+    }
+    return NULL;
+}
+
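+// reads the object's data and hands it to the supplied callback in chunks,
+// alternating between two 16K USB requests so the next read can be queued
+// while the previous chunk is being written out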
+bool MtpDevice::readObject(MtpObjectHandle handle,
+        bool (* callback)(void* data, int offset, int length, void* clientData),
+        int objectSize, void* clientData) {
+    Mutex::Autolock autoLock(mMutex);
+    bool result = false;
+
+    mRequest.reset();
+    mRequest.setParameter(1, handle);
+    if (sendRequest(MTP_OPERATION_GET_OBJECT)
+            && mData.readDataHeader(mRequestIn1)) {
+        uint32_t length = mData.getContainerLength();
+        if (length - MTP_CONTAINER_HEADER_SIZE != objectSize) {
+            LOGE("readObject error objectSize: %d, length: %d",
+                    objectSize, length);
+            goto fail;
+        }
+        length -= MTP_CONTAINER_HEADER_SIZE;
+        uint32_t remaining = length;
+        int offset = 0;
+
+        int initialDataLength = 0;
+        void* initialData = mData.getData(initialDataLength);
+        if (initialData) {
+            if (initialDataLength > 0) {
+                if (!callback(initialData, 0, initialDataLength, clientData))
+                    goto fail;
+                remaining -= initialDataLength;
+                offset += initialDataLength;
+            }
+            free(initialData);
+        }
+
+        // USB reads greater than 16K don't work
+        char buffer1[16384], buffer2[16384];
+        mRequestIn1->buffer = buffer1;
+        mRequestIn2->buffer = buffer2;
+        struct usb_request* req = mRequestIn1;
+        void* writeBuffer = NULL;
+        int writeLength = 0;
+
+        while (remaining > 0 || writeBuffer) {
+            if (remaining > 0) {
+                // queue up a read request
+                req->buffer_length = (remaining > sizeof(buffer1) ? sizeof(buffer1) : remaining);
+                if (mData.readDataAsync(req)) {
+                    LOGE("readDataAsync failed");
+                    goto fail;
+                }
+            } else {
+                req = NULL;
+            }
+
+            if (writeBuffer) {
+                // write previous buffer
+                if (!callback(writeBuffer, offset, writeLength, clientData)) {
+                    LOGE("write failed");
+                    // wait for pending read before failing
+                    if (req)
+                        mData.readDataWait(mDevice);
+                    goto fail;
+                }
+                offset += writeLength;
+                writeBuffer = NULL;
+            }
+
+            // wait for read to complete
+            if (req) {
+                int read = mData.readDataWait(mDevice);
+                if (read < 0)
+                    goto fail;
+
+                if (read > 0) {
+                    writeBuffer = req->buffer;
+                    writeLength = read;
+                    remaining -= read;
+                    req = (req == mRequestIn1 ? mRequestIn2 : mRequestIn1);
+                } else {
+                    writeBuffer = NULL;
+                }
+            }
+        }
+
+        MtpResponseCode response = readResponse();
+        if (response == MTP_RESPONSE_OK)
+            result = true;
+    }
+
+fail:
+    return result;
+}
+
+
+// reads the object's data and writes it to the specified file path
+bool MtpDevice::readObject(MtpObjectHandle handle, const char* destPath, int group, int perm) {
+    LOGD("readObject: %s", destPath);
+    int fd = ::open(destPath, O_RDWR | O_CREAT | O_TRUNC, perm);
+    if (fd < 0) {
+        LOGE("open failed for %s", destPath);
+        return false;
+    }
+
+    fchown(fd, getuid(), group);
+    // set permissions
+    int mask = umask(0);
+    fchmod(fd, perm);
+    umask(mask);
+
+    Mutex::Autolock autoLock(mMutex);
+    bool result = false;
+
+    mRequest.reset();
+    mRequest.setParameter(1, handle);
+    if (sendRequest(MTP_OPERATION_GET_OBJECT)
+            && mData.readDataHeader(mRequestIn1)) {
+        uint32_t length = mData.getContainerLength();
+        if (length < MTP_CONTAINER_HEADER_SIZE)
+            goto fail;
+        length -= MTP_CONTAINER_HEADER_SIZE;
+        uint32_t remaining = length;
+
+        int initialDataLength = 0;
+        void* initialData = mData.getData(initialDataLength);
+        if (initialData) {
+            if (initialDataLength > 0) {
+                if (write(fd, initialData, initialDataLength) != initialDataLength) {
+                    free(initialData);
+                    goto fail;
+                }
+                remaining -= initialDataLength;
+            }
+            free(initialData);
+        }
+
+        // USB reads greater than 16K don't work
+        char buffer1[16384], buffer2[16384];
+        mRequestIn1->buffer = buffer1;
+        mRequestIn2->buffer = buffer2;
+        struct usb_request* req = mRequestIn1;
+        void* writeBuffer = NULL;
+        int writeLength = 0;
+
+        while (remaining > 0 || writeBuffer) {
+            if (remaining > 0) {
+                // queue up a read request
+                req->buffer_length = (remaining > sizeof(buffer1) ? sizeof(buffer1) : remaining);
+                if (mData.readDataAsync(req)) {
+                    LOGE("readDataAsync failed");
+                    goto fail;
+                }
+            } else {
+                req = NULL;
+            }
+
+            if (writeBuffer) {
+                // write previous buffer
+                if (write(fd, writeBuffer, writeLength) != writeLength) {
+                    LOGE("write failed");
+                    // wait for pending read before failing
+                    if (req)
+                        mData.readDataWait(mDevice);
+                    goto fail;
+                }
+                writeBuffer = NULL;
+            }
+
+            // wait for read to complete
+            if (req) {
+                int read = mData.readDataWait(mDevice);
+                if (read < 0)
+                    goto fail;
+
+                if (read > 0) {
+                    writeBuffer = req->buffer;
+                    writeLength = read;
+                    remaining -= read;
+                    req = (req == mRequestIn1 ? mRequestIn2 : mRequestIn1);
+                } else {
+                    writeBuffer = NULL;
+                }
+            }
+        }
+
+        MtpResponseCode response = readResponse();
+        if (response == MTP_RESPONSE_OK)
+            result = true;
+    }
+
+fail:
+    ::close(fd);
+    return result;
+}
+
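+// writes a command container to the bulk-out endpoint, stamping it with
+// the next transaction ID once a session has been opened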
+bool MtpDevice::sendRequest(MtpOperationCode operation) {
+    LOGV("sendRequest: %s\n", MtpDebug::getOperationCodeName(operation));
+    mReceivedResponse = false;
+    mRequest.setOperationCode(operation);
+    if (mTransactionID > 0)
+        mRequest.setTransactionID(mTransactionID++);
+    int ret = mRequest.write(mRequestOut);
+    mRequest.dump();
+    return (ret > 0);
+}
+
+bool MtpDevice::sendData() {
+    LOGV("sendData\n");
+    mData.setOperationCode(mRequest.getOperationCode());
+    mData.setTransactionID(mRequest.getTransactionID());
+    int ret = mData.write(mRequestOut);
+    mData.dump();
+    return (ret > 0);
+}
+
+bool MtpDevice::readData() {
+    mData.reset();
+    int ret = mData.read(mRequestIn1);
+    LOGV("readData returned %d\n", ret);
+    if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+        if (mData.getContainerType() == MTP_CONTAINER_TYPE_RESPONSE) {
+            LOGD("got response packet instead of data packet");
+            // we got a response packet rather than data
+            // copy it to mResponse
+            mResponse.copyFrom(mData);
+            mReceivedResponse = true;
+            return false;
+        }
+        mData.dump();
+        return true;
+    }
+    else {
+        LOGV("readResponse failed\n");
+        return false;
+    }
+}
+
+bool MtpDevice::writeDataHeader(MtpOperationCode operation, int dataLength) {
+    mData.setOperationCode(operation);
+    mData.setTransactionID(mRequest.getTransactionID());
+    return (!mData.writeDataHeader(mRequestOut, dataLength));
+}
+
+MtpResponseCode MtpDevice::readResponse() {
+    LOGV("readResponse\n");
+    if (mReceivedResponse) {
+        mReceivedResponse = false;
+        return mResponse.getResponseCode();
+    }
+    int ret = mResponse.read(mRequestIn1);
+    // handle zero length packets, which might occur if the data transfer
+    // ends on a packet boundary
+    if (ret == 0)
+        ret = mResponse.read(mRequestIn1);
+    if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+        mResponse.dump();
+        return mResponse.getResponseCode();
+    } else {
+        LOGD("readResponse failed\n");
+        return -1;
+    }
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
new file mode 100644
index 0000000..b69203e
--- /dev/null
+++ b/media/mtp/MtpDevice.h
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_H
+#define _MTP_DEVICE_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpTypes.h"
+
+#include <utils/threads.h>
+
+struct usb_device;
+struct usb_request;
+struct usb_endpoint_descriptor;
+
+namespace android {
+
+class MtpDeviceInfo;
+class MtpObjectInfo;
+class MtpStorageInfo;
+
+class MtpDevice {
+private:
+    struct usb_device*      mDevice;
+    int                     mInterface;
+    struct usb_request*     mRequestIn1;
+    struct usb_request*     mRequestIn2;
+    struct usb_request*     mRequestOut;
+    struct usb_request*     mRequestIntr;
+    MtpDeviceInfo*          mDeviceInfo;
+    MtpPropertyList         mDeviceProperties;
+
+    // current session ID
+    MtpSessionID            mSessionID;
+    // current transaction ID
+    MtpTransactionID        mTransactionID;
+
+    MtpRequestPacket        mRequest;
+    MtpDataPacket           mData;
+    MtpResponsePacket       mResponse;
+    // set to true if we received a response packet instead of a data packet
+    bool                    mReceivedResponse;
+
+    // to ensure only one MTP transaction at a time
+    Mutex                   mMutex;
+
+public:
+                            MtpDevice(struct usb_device* device, int interface,
+                                    const struct usb_endpoint_descriptor *ep_in,
+                                    const struct usb_endpoint_descriptor *ep_out,
+                                    const struct usb_endpoint_descriptor *ep_intr);
+
+    static MtpDevice*       open(const char* deviceName, int fd);
+
+    virtual                 ~MtpDevice();
+
+    void                    initialize();
+    void                    close();
+    void                    print();
+    const char*             getDeviceName();
+
+    bool                    openSession();
+    bool                    closeSession();
+
+    MtpDeviceInfo*          getDeviceInfo();
+    MtpStorageIDList*       getStorageIDs();
+    MtpStorageInfo*         getStorageInfo(MtpStorageID storageID);
+    MtpObjectHandleList*    getObjectHandles(MtpStorageID storageID, MtpObjectFormat format,
+                                    MtpObjectHandle parent);
+    MtpObjectInfo*          getObjectInfo(MtpObjectHandle handle);
+    void*                   getThumbnail(MtpObjectHandle handle, int& outLength);
+    MtpObjectHandle         sendObjectInfo(MtpObjectInfo* info);
+    bool                    sendObject(MtpObjectInfo* info, int srcFD);
+    bool                    deleteObject(MtpObjectHandle handle);
+    MtpObjectHandle         getParent(MtpObjectHandle handle);
+    MtpObjectHandle         getStorageID(MtpObjectHandle handle);
+
+    MtpObjectPropertyList*  getObjectPropsSupported(MtpObjectFormat format);
+
+    MtpProperty*            getDevicePropDesc(MtpDeviceProperty code);
+    MtpProperty*            getObjectPropDesc(MtpObjectProperty code, MtpObjectFormat format);
+
+    bool                    readObject(MtpObjectHandle handle,
+                                    bool (* callback)(void* data, int offset,
+                                            int length, void* clientData),
+                                    int objectSize, void* clientData);
+    bool                    readObject(MtpObjectHandle handle, const char* destPath, int group,
+                                    int perm);
+
+private:
+    bool                    sendRequest(MtpOperationCode operation);
+    bool                    sendData();
+    bool                    readData();
+    bool                    writeDataHeader(MtpOperationCode operation, int dataLength);
+    MtpResponseCode         readResponse();
+
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_H
diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp
new file mode 100644
index 0000000..5a9322e
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDeviceInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpDeviceInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDeviceInfo::MtpDeviceInfo()
+    :   mStandardVersion(0),
+        mVendorExtensionID(0),
+        mVendorExtensionVersion(0),
+        mVendorExtensionDesc(NULL),
+        mFunctionalCode(0),
+        mOperations(NULL),
+        mEvents(NULL),
+        mDeviceProperties(NULL),
+        mCaptureFormats(NULL),
+        mPlaybackFormats(NULL),
+        mManufacturer(NULL),
+        mModel(NULL),
+        mVersion(NULL),
+        mSerial(NULL)
+{
+}
+
+MtpDeviceInfo::~MtpDeviceInfo() {
+    if (mVendorExtensionDesc)
+        free(mVendorExtensionDesc);
+    delete mOperations;
+    delete mEvents;
+    delete mDeviceProperties;
+    delete mCaptureFormats;
+    delete mPlaybackFormats;
+    if (mManufacturer)
+        free(mManufacturer);
+    if (mModel)
+        free(mModel);
+    if (mVersion)
+        free(mVersion);
+    if (mSerial)
+        free(mSerial);
+}
+
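+// parses the DeviceInfo dataset: standard and vendor extension versions,
+// the supported operation, event, device property and format arrays, and
+// the manufacturer, model, version and serial strings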
+void MtpDeviceInfo::read(MtpDataPacket& packet) {
+    MtpStringBuffer string;
+
+    // read the device info
+    mStandardVersion = packet.getUInt16();
+    mVendorExtensionID = packet.getUInt32();
+    mVendorExtensionVersion = packet.getUInt16();
+
+    packet.getString(string);
+    mVendorExtensionDesc = strdup((const char *)string);
+
+    mFunctionalCode = packet.getUInt16();
+    mOperations = packet.getAUInt16();
+    mEvents = packet.getAUInt16();
+    mDeviceProperties = packet.getAUInt16();
+    mCaptureFormats = packet.getAUInt16();
+    mPlaybackFormats = packet.getAUInt16();
+
+    packet.getString(string);
+    mManufacturer = strdup((const char *)string);
+    packet.getString(string);
+    mModel = strdup((const char *)string);
+    packet.getString(string);
+    mVersion = strdup((const char *)string);
+    packet.getString(string);
+    mSerial = strdup((const char *)string);
+}
+
+void MtpDeviceInfo::print() {
+    LOGV("Device Info:\n\tmStandardVersion: %d\n\tmVendorExtensionID: %d\n\tmVendorExtensionVersiony: %d\n",
+            mStandardVersion, mVendorExtensionID, mVendorExtensionVersion);
+    LOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalCode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
+            mVendorExtensionDesc, mFunctionalCode, mManufacturer, mModel, mVersion, mSerial);
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpDeviceInfo.h b/media/mtp/MtpDeviceInfo.h
new file mode 100644
index 0000000..2abaa10
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_INFO_H
+#define _MTP_DEVICE_INFO_H
+
+struct stat;
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpDeviceInfo {
+public:
+    uint16_t                mStandardVersion;
+    uint32_t                mVendorExtensionID;
+    uint16_t                mVendorExtensionVersion;
+    char*                   mVendorExtensionDesc;
+    uint16_t                mFunctionalCode;
+    UInt16List*             mOperations;
+    UInt16List*             mEvents;
+    MtpDevicePropertyList*  mDeviceProperties;
+    MtpObjectFormatList*    mCaptureFormats;
+    MtpObjectFormatList*    mPlaybackFormats;
+    char*                   mManufacturer;
+    char*                   mModel;
+    char*                   mVersion;
+    char*                   mSerial;
+
+public:
+                            MtpDeviceInfo();
+    virtual                 ~MtpDeviceInfo();
+
+    void                    read(MtpDataPacket& packet);
+
+    void                    print();
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_INFO_H
diff --git a/media/mtp/MtpEventPacket.cpp b/media/mtp/MtpEventPacket.cpp
new file mode 100644
index 0000000..d2fca42
--- /dev/null
+++ b/media/mtp/MtpEventPacket.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpEventPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+
+#ifdef MTP_DEVICE
+#include <linux/usb/f_mtp.h>
+#endif
+
+#include "MtpEventPacket.h"
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpEventPacket::MtpEventPacket()
+    :   MtpPacket(512)
+{
+}
+
+MtpEventPacket::~MtpEventPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpEventPacket::write(int fd) {
+    struct mtp_event    event;
+
+    putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_EVENT);
+
+    event.data = mBuffer;
+    event.length = mPacketSize;
+    int ret = ::ioctl(fd, MTP_SEND_EVENT, (unsigned long)&event);
+    return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+int MtpEventPacket::read(struct usb_request *request) {
+    request->buffer = mBuffer;
+    request->buffer_length = mBufferSize;
+    int ret = transfer(request);
+    if (ret >= 0)
+        mPacketSize = ret;
+    else
+        mPacketSize = 0;
+    return ret;
+}
+#endif
+
+}  // namespace android
+
diff --git a/media/mtp/MtpEventPacket.h b/media/mtp/MtpEventPacket.h
new file mode 100644
index 0000000..660baad
--- /dev/null
+++ b/media/mtp/MtpEventPacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_EVENT_PACKET_H
+#define _MTP_EVENT_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpEventPacket : public MtpPacket {
+
+public:
+                        MtpEventPacket();
+    virtual             ~MtpEventPacket();
+
+#ifdef MTP_DEVICE
+    // write our data to the given file descriptor
+    int                 write(int fd);
+#endif
+
+#ifdef MTP_HOST
+    // read our buffer with the given request
+    int                 read(struct usb_request *request);
+#endif
+
+    inline MtpEventCode     getEventCode() const { return getContainerCode(); }
+    inline void             setEventCode(MtpEventCode code)
+                                                     { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_EVENT_PACKET_H
diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp
new file mode 100644
index 0000000..ea68c3b
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpObjectInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpObjectInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpObjectInfo::MtpObjectInfo(MtpObjectHandle handle)
+    :   mHandle(handle),
+        mStorageID(0),
+        mFormat(0),
+        mProtectionStatus(0),
+        mCompressedSize(0),
+        mThumbFormat(0),
+        mThumbCompressedSize(0),
+        mThumbPixWidth(0),
+        mThumbPixHeight(0),
+        mImagePixWidth(0),
+        mImagePixHeight(0),
+        mImagePixDepth(0),
+        mParent(0),
+        mAssociationType(0),
+        mAssociationDesc(0),
+        mSequenceNumber(0),
+        mName(NULL),
+        mDateCreated(0),
+        mDateModified(0),
+        mKeywords(NULL)
+{
+}
+
+MtpObjectInfo::~MtpObjectInfo() {
+    if (mName)
+        free(mName);
+    if (mKeywords)
+        free(mKeywords);
+}
+
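+// parses the ObjectInfo dataset: the fixed-size fields first, followed by
+// the name, date created, date modified and keywords strings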
+void MtpObjectInfo::read(MtpDataPacket& packet) {
+    MtpStringBuffer string;
+    time_t time;
+
+    mStorageID = packet.getUInt32();
+    mFormat = packet.getUInt16();
+    mProtectionStatus = packet.getUInt16();
+    mCompressedSize = packet.getUInt32();
+    mThumbFormat = packet.getUInt16();
+    mThumbCompressedSize = packet.getUInt32();
+    mThumbPixWidth = packet.getUInt32();
+    mThumbPixHeight = packet.getUInt32();
+    mImagePixWidth = packet.getUInt32();
+    mImagePixHeight = packet.getUInt32();
+    mImagePixDepth = packet.getUInt32();
+    mParent = packet.getUInt32();
+    mAssociationType = packet.getUInt16();
+    mAssociationDesc = packet.getUInt32();
+    mSequenceNumber = packet.getUInt32();
+
+    packet.getString(string);
+    mName = strdup((const char *)string);
+
+    packet.getString(string);
+    if (parseDateTime((const char*)string, time))
+        mDateCreated = time;
+
+    packet.getString(string);
+    if (parseDateTime((const char*)string, time))
+        mDateModified = time;
+
+    packet.getString(string);
+    mKeywords = strdup((const char *)string);
+}
+
+void MtpObjectInfo::print() {
+    LOGD("MtpObject Info %08X: %s\n", mHandle, mName);
+    LOGD("  mStorageID: %08X mFormat: %04X mProtectionStatus: %d\n",
+            mStorageID, mFormat, mProtectionStatus);
+    LOGD("  mCompressedSize: %d mThumbFormat: %04X mThumbCompressedSize: %d\n",
+            mCompressedSize, mThumbFormat, mThumbCompressedSize);
+    LOGD("  mThumbPixWidth: %d mThumbPixHeight: %d\n", mThumbPixWidth, mThumbPixHeight);
+    LOGD("  mImagePixWidth: %d mImagePixHeight: %d mImagePixDepth: %d\n",
+            mImagePixWidth, mImagePixHeight, mImagePixDepth);
+    LOGD("  mParent: %08X mAssociationType: %04X mAssociationDesc: %04X\n",
+            mParent, mAssociationType, mAssociationDesc);
+    LOGD("  mSequenceNumber: %d mDateCreated: %ld mDateModified: %ld mKeywords: %s\n",
+            mSequenceNumber, mDateCreated, mDateModified, mKeywords);
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpObjectInfo.h b/media/mtp/MtpObjectInfo.h
new file mode 100644
index 0000000..c7a449c
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_OBJECT_INFO_H
+#define _MTP_OBJECT_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpObjectInfo {
+public:
+    MtpObjectHandle     mHandle;
+    MtpStorageID        mStorageID;
+    MtpObjectFormat     mFormat;
+    uint16_t            mProtectionStatus;
+    uint32_t            mCompressedSize;
+    MtpObjectFormat     mThumbFormat;
+    uint32_t            mThumbCompressedSize;
+    uint32_t            mThumbPixWidth;
+    uint32_t            mThumbPixHeight;
+    uint32_t            mImagePixWidth;
+    uint32_t            mImagePixHeight;
+    uint32_t            mImagePixDepth;
+    MtpObjectHandle     mParent;
+    uint16_t            mAssociationType;
+    uint32_t            mAssociationDesc;
+    uint32_t            mSequenceNumber;
+    char*               mName;
+    time_t              mDateCreated;
+    time_t              mDateModified;
+    char*               mKeywords;
+
+public:
+                        MtpObjectInfo(MtpObjectHandle handle);
+    virtual             ~MtpObjectInfo();
+
+    void                read(MtpDataPacket& packet);
+
+    void                print();
+};
+
+}; // namespace android
+
+#endif // _MTP_OBJECT_INFO_H
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
new file mode 100644
index 0000000..baf99e5
--- /dev/null
+++ b/media/mtp/MtpPacket.cpp
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpPacket"
+
+#include "MtpDebug.h"
+#include "MtpPacket.h"
+#include "mtp.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpPacket::MtpPacket(int bufferSize)
+    :   mBuffer(NULL),
+        mBufferSize(bufferSize),
+        mAllocationIncrement(bufferSize),
+        mPacketSize(0)
+{
+    mBuffer = (uint8_t *)malloc(bufferSize);
+    if (!mBuffer) {
+        LOGE("out of memory!");
+        abort();
+    }
+}
+
+MtpPacket::~MtpPacket() {
+    if (mBuffer)
+        free(mBuffer);
+}
+
+void MtpPacket::reset() {
+    allocate(MTP_CONTAINER_HEADER_SIZE);
+    mPacketSize = MTP_CONTAINER_HEADER_SIZE;
+    memset(mBuffer, 0, mBufferSize);
+}
+
+void MtpPacket::allocate(int length) {
+    if (length > mBufferSize) {
+        int newLength = length + mAllocationIncrement;
+        mBuffer = (uint8_t *)realloc(mBuffer, newLength);
+        if (!mBuffer) {
+            LOGE("out of memory!");
+            abort();
+        }
+        mBufferSize = newLength;
+    }
+}
+
+void MtpPacket::dump() {
+#define DUMP_BYTES_PER_ROW  16
+    char buffer[500];
+    char* bufptr = buffer;
+
+    for (int i = 0; i < mPacketSize; i++) {
+        sprintf(bufptr, "%02X ", mBuffer[i]);
+        bufptr += strlen(bufptr);
+        if (i % DUMP_BYTES_PER_ROW == (DUMP_BYTES_PER_ROW - 1)) {
+            LOGV("%s", buffer);
+            bufptr = buffer;
+        }
+    }
+    if (bufptr != buffer) {
+        // print last line
+        LOGV("%s", buffer);
+    }
+    LOGV("\n");
+}
+
+void MtpPacket::copyFrom(const MtpPacket& src) {
+    int length = src.mPacketSize;
+    allocate(length);
+    mPacketSize = length;
+    memcpy(mBuffer, src.mBuffer, length);
+}
+
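+// MTP containers are little-endian on the wire, so multi-byte values are
+// packed and unpacked one byte at a time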
+uint16_t MtpPacket::getUInt16(int offset) const {
+    return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+}
+
+uint32_t MtpPacket::getUInt32(int offset) const {
+    return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
+           ((uint32_t)mBuffer[offset + 1] << 8)  | (uint32_t)mBuffer[offset];
+}
+
+void MtpPacket::putUInt16(int offset, uint16_t value) {
+    mBuffer[offset++] = (uint8_t)(value & 0xFF);
+    mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+}
+
+void MtpPacket::putUInt32(int offset, uint32_t value) {
+    mBuffer[offset++] = (uint8_t)(value & 0xFF);
+    mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+}
+
+uint16_t MtpPacket::getContainerCode() const {
+    return getUInt16(MTP_CONTAINER_CODE_OFFSET);
+}
+
+void MtpPacket::setContainerCode(uint16_t code) {
+    putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+uint16_t MtpPacket::getContainerType() const {
+    return getUInt16(MTP_CONTAINER_TYPE_OFFSET);
+}
+
+MtpTransactionID MtpPacket::getTransactionID() const {
+    return getUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET);
+}
+
+void MtpPacket::setTransactionID(MtpTransactionID id) {
+    putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
+uint32_t MtpPacket::getParameter(int index) const {
+    if (index < 1 || index > 5) {
+        LOGE("index %d out of range in MtpPacket::getParameter", index);
+        return 0;
+    }
+    return getUInt32(MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t));
+}
+
+void MtpPacket::setParameter(int index, uint32_t value) {
+    if (index < 1 || index > 5) {
+        LOGE("index %d out of range in MtpPacket::setParameter", index);
+        return;
+    }
+    int offset = MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t);
+    if (mPacketSize < offset + sizeof(uint32_t))
+        mPacketSize = offset + sizeof(uint32_t);
+    putUInt32(offset, value);
+}
+
+#ifdef MTP_HOST
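+// performs a synchronous bulk transfer for the given request and records
+// the number of bytes actually transferred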
+int MtpPacket::transfer(struct usb_request* request) {
+    int result = usb_device_bulk_transfer(request->dev,
+                            request->endpoint,
+                            request->buffer,
+                            request->buffer_length,
+                            0);
+    request->actual_length = result;
+    return result;
+}
+#endif
+
+}  // namespace android
diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h
new file mode 100644
index 0000000..0ffb1d3
--- /dev/null
+++ b/media/mtp/MtpPacket.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PACKET_H
+#define _MTP_PACKET_H
+
+#include "MtpTypes.h"
+
+struct usb_request;
+
+namespace android {
+
+class MtpPacket {
+
+protected:
+    uint8_t*            mBuffer;
+    // current size of the buffer
+    int                 mBufferSize;
+    // number of bytes to add when resizing the buffer
+    int                 mAllocationIncrement;
+    // size of the data in the packet
+    int                 mPacketSize;
+
+public:
+                        MtpPacket(int bufferSize);
+    virtual             ~MtpPacket();
+
+    // sets packet size to the default container size and sets buffer to zero
+    virtual void        reset();
+
+    void                allocate(int length);
+    void                dump();
+    void                copyFrom(const MtpPacket& src);
+
+    uint16_t            getContainerCode() const;
+    void                setContainerCode(uint16_t code);
+
+    uint16_t            getContainerType() const;
+
+    MtpTransactionID    getTransactionID() const;
+    void                setTransactionID(MtpTransactionID id);
+
+    uint32_t            getParameter(int index) const;
+    void                setParameter(int index, uint32_t value);
+
+#ifdef MTP_HOST
+    int                 transfer(struct usb_request* request);
+#endif
+
+protected:
+    uint16_t            getUInt16(int offset) const;
+    uint32_t            getUInt32(int offset) const;
+    void                putUInt16(int offset, uint16_t value);
+    void                putUInt32(int offset, uint32_t value);
+};
+
+}; // namespace android
+
+#endif // _MTP_PACKET_H
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
new file mode 100644
index 0000000..8016c35
--- /dev/null
+++ b/media/mtp/MtpProperty.cpp
@@ -0,0 +1,534 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpProperty"
+
+#include "MtpDataPacket.h"
+#include "MtpDebug.h"
+#include "MtpProperty.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpProperty::MtpProperty()
+    :   mCode(0),
+        mType(0),
+        mWriteable(false),
+        mDefaultArrayLength(0),
+        mDefaultArrayValues(NULL),
+        mCurrentArrayLength(0),
+        mCurrentArrayValues(NULL),
+        mGroupCode(0),
+        mFormFlag(kFormNone),
+        mEnumLength(0),
+        mEnumValues(NULL)
+{
+    memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+    memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+    memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+    memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+}
+
+MtpProperty::MtpProperty(MtpPropertyCode propCode,
+                         MtpDataType type,
+                         bool writeable,
+                         int defaultValue)
+    :   mCode(propCode),
+        mType(type),
+        mWriteable(writeable),
+        mDefaultArrayLength(0),
+        mDefaultArrayValues(NULL),
+        mCurrentArrayLength(0),
+        mCurrentArrayValues(NULL),
+        mGroupCode(0),
+        mFormFlag(kFormNone),
+        mEnumLength(0),
+        mEnumValues(NULL)
+{
+    memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+    memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+    memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+    memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+
+    if (defaultValue) {
+        switch (type) {
+            case MTP_TYPE_INT8:
+                mDefaultValue.u.i8 = defaultValue;
+                break;
+            case MTP_TYPE_UINT8:
+                mDefaultValue.u.u8 = defaultValue;
+                break;
+            case MTP_TYPE_INT16:
+                mDefaultValue.u.i16 = defaultValue;
+                break;
+            case MTP_TYPE_UINT16:
+                mDefaultValue.u.u16 = defaultValue;
+                break;
+            case MTP_TYPE_INT32:
+                mDefaultValue.u.i32 = defaultValue;
+                break;
+            case MTP_TYPE_UINT32:
+                mDefaultValue.u.u32 = defaultValue;
+                break;
+            case MTP_TYPE_INT64:
+                mDefaultValue.u.i64 = defaultValue;
+                break;
+            case MTP_TYPE_UINT64:
+                mDefaultValue.u.u64 = defaultValue;
+                break;
+            default:
+                LOGE("unknown type %04X in MtpProperty::MtpProperty", type);
+        }
+    }
+}
+
+MtpProperty::~MtpProperty() {
+    if (mType == MTP_TYPE_STR) {
+        // free all strings
+        free(mDefaultValue.str);
+        free(mCurrentValue.str);
+        free(mMinimumValue.str);
+        free(mMaximumValue.str);
+        if (mDefaultArrayValues) {
+            for (int i = 0; i < mDefaultArrayLength; i++)
+                free(mDefaultArrayValues[i].str);
+        }
+        if (mCurrentArrayValues) {
+            for (int i = 0; i < mCurrentArrayLength; i++)
+                free(mCurrentArrayValues[i].str);
+        }
+        if (mEnumValues) {
+            for (int i = 0; i < mEnumLength; i++)
+                free(mEnumValues[i].str);
+        }
+    }
+    delete[] mDefaultArrayValues;
+    delete[] mCurrentArrayValues;
+    delete[] mEnumValues;
+}
+
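+// parses a DevicePropDesc or ObjectPropDesc dataset: device properties
+// include a current value but no group code, object properties the
+// reverse; both may end with a range or enumeration form description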
+void MtpProperty::read(MtpDataPacket& packet) {
+    mCode = packet.getUInt16();
+    bool deviceProp = isDeviceProperty();
+    mType = packet.getUInt16();
+    mWriteable = (packet.getUInt8() == 1);
+    switch (mType) {
+        case MTP_TYPE_AINT8:
+        case MTP_TYPE_AUINT8:
+        case MTP_TYPE_AINT16:
+        case MTP_TYPE_AUINT16:
+        case MTP_TYPE_AINT32:
+        case MTP_TYPE_AUINT32:
+        case MTP_TYPE_AINT64:
+        case MTP_TYPE_AUINT64:
+        case MTP_TYPE_AINT128:
+        case MTP_TYPE_AUINT128:
+            mDefaultArrayValues = readArrayValues(packet, mDefaultArrayLength);
+            if (deviceProp)
+                mCurrentArrayValues = readArrayValues(packet, mCurrentArrayLength);
+            break;
+        default:
+            readValue(packet, mDefaultValue);
+            if (deviceProp)
+                readValue(packet, mCurrentValue);
+    }
+    if (!deviceProp)
+        mGroupCode = packet.getUInt32();
+    mFormFlag = packet.getUInt8();
+
+    if (mFormFlag == kFormRange) {
+        readValue(packet, mMinimumValue);
+        readValue(packet, mMaximumValue);
+        readValue(packet, mStepSize);
+    } else if (mFormFlag == kFormEnum) {
+        mEnumLength = packet.getUInt16();
+        mEnumValues = new MtpPropertyValue[mEnumLength];
+        for (int i = 0; i < mEnumLength; i++)
+            readValue(packet, mEnumValues[i]);
+    }
+}
+
+void MtpProperty::write(MtpDataPacket& packet) {
+    bool deviceProp = isDeviceProperty();
+
+    packet.putUInt16(mCode);
+    packet.putUInt16(mType);
+    packet.putUInt8(mWriteable ? 1 : 0);
+
+    switch (mType) {
+        case MTP_TYPE_AINT8:
+        case MTP_TYPE_AUINT8:
+        case MTP_TYPE_AINT16:
+        case MTP_TYPE_AUINT16:
+        case MTP_TYPE_AINT32:
+        case MTP_TYPE_AUINT32:
+        case MTP_TYPE_AINT64:
+        case MTP_TYPE_AUINT64:
+        case MTP_TYPE_AINT128:
+        case MTP_TYPE_AUINT128:
+            writeArrayValues(packet, mDefaultArrayValues, mDefaultArrayLength);
+            if (deviceProp)
+                writeArrayValues(packet, mCurrentArrayValues, mCurrentArrayLength);
+            break;
+        default:
+            writeValue(packet, mDefaultValue);
+            if (deviceProp)
+                writeValue(packet, mCurrentValue);
+    }
+    if (!deviceProp)
+        packet.putUInt32(mGroupCode);
+    packet.putUInt8(mFormFlag);
+    if (mFormFlag == kFormRange) {
+        writeValue(packet, mMinimumValue);
+        writeValue(packet, mMaximumValue);
+        writeValue(packet, mStepSize);
+    } else if (mFormFlag == kFormEnum) {
+        packet.putUInt16(mEnumLength);
+        for (int i = 0; i < mEnumLength; i++)
+            writeValue(packet, mEnumValues[i]);
+    }
+}
+
+void MtpProperty::setDefaultValue(const uint16_t* string) {
+    free(mDefaultValue.str);
+    if (string) {
+        MtpStringBuffer buffer(string);
+        mDefaultValue.str = strdup(buffer);
+    }
+    else
+        mDefaultValue.str = NULL;
+}
+
+void MtpProperty::setCurrentValue(const uint16_t* string) {
+    free(mCurrentValue.str);
+    if (string) {
+        MtpStringBuffer buffer(string);
+        mCurrentValue.str = strdup(buffer);
+    }
+    else
+        mCurrentValue.str = NULL;
+}
+
+void MtpProperty::setFormRange(int min, int max, int step) {
+    mFormFlag = kFormRange;
+    switch (mType) {
+        case MTP_TYPE_INT8:
+            mMinimumValue.u.i8 = min;
+            mMaximumValue.u.i8 = max;
+            mStepSize.u.i8 = step;
+            break;
+        case MTP_TYPE_UINT8:
+            mMinimumValue.u.u8 = min;
+            mMaximumValue.u.u8 = max;
+            mStepSize.u.u8 = step;
+            break;
+        case MTP_TYPE_INT16:
+            mMinimumValue.u.i16 = min;
+            mMaximumValue.u.i16 = max;
+            mStepSize.u.i16 = step;
+            break;
+        case MTP_TYPE_UINT16:
+            mMinimumValue.u.u16 = min;
+            mMaximumValue.u.u16 = max;
+            mStepSize.u.u16 = step;
+            break;
+        case MTP_TYPE_INT32:
+            mMinimumValue.u.i32 = min;
+            mMaximumValue.u.i32 = max;
+            mStepSize.u.i32 = step;
+            break;
+        case MTP_TYPE_UINT32:
+            mMinimumValue.u.u32 = min;
+            mMaximumValue.u.u32 = max;
+            mStepSize.u.u32 = step;
+            break;
+        case MTP_TYPE_INT64:
+            mMinimumValue.u.i64 = min;
+            mMaximumValue.u.i64 = max;
+            mStepSize.u.i64 = step;
+            break;
+        case MTP_TYPE_UINT64:
+            mMinimumValue.u.u64 = min;
+            mMaximumValue.u.u64 = max;
+            mStepSize.u.u64 = step;
+            break;
+        default:
+            LOGE("unsupported type for MtpProperty::setRange");
+            break;
+    }
+}
+
+void MtpProperty::setFormEnum(const int* values, int count) {
+    mFormFlag = kFormEnum;
+    delete[] mEnumValues;
+    mEnumValues = new MtpPropertyValue[count];
+    mEnumLength = count;
+
+    for (int i = 0; i < count; i++) {
+        int value = *values++;
+        switch (mType) {
+            case MTP_TYPE_INT8:
+                mEnumValues[i].u.i8 = value;
+                break;
+            case MTP_TYPE_UINT8:
+                mEnumValues[i].u.u8 = value;
+                break;
+            case MTP_TYPE_INT16:
+                mEnumValues[i].u.i16 = value;
+                break;
+            case MTP_TYPE_UINT16:
+                mEnumValues[i].u.u16 = value;
+                break;
+            case MTP_TYPE_INT32:
+                mEnumValues[i].u.i32 = value;
+                break;
+            case MTP_TYPE_UINT32:
+                mEnumValues[i].u.u32 = value;
+                break;
+            case MTP_TYPE_INT64:
+                mEnumValues[i].u.i64 = value;
+                break;
+            case MTP_TYPE_UINT64:
+                mEnumValues[i].u.u64 = value;
+                break;
+            default:
+                LOGE("unsupported type for MtpProperty::setEnum");
+                break;
+        }
+    }
+}
+
+void MtpProperty::setFormDateTime() {
+    mFormFlag = kFormDateTime;
+}
+
+void MtpProperty::print() {
+    MtpString buffer;
+    bool deviceProp = isDeviceProperty();
+    if (deviceProp)
+        LOGI("    %s (%04X)", MtpDebug::getDevicePropCodeName(mCode), mCode);
+    else
+        LOGI("    %s (%04X)", MtpDebug::getObjectPropCodeName(mCode), mCode);
+    LOGI("    type %04X", mType);
+    LOGI("    writeable %s", (mWriteable ? "true" : "false"));
+    buffer = "    default value: ";
+    print(mDefaultValue, buffer);
+    LOGI("%s", (const char *)buffer);
+    if (deviceProp) {
+        buffer = "    current value: ";
+        print(mCurrentValue, buffer);
+        LOGI("%s", (const char *)buffer);
+    }
+    switch (mFormFlag) {
+        case kFormNone:
+            break;
+        case kFormRange:
+            buffer = "    Range (";
+            print(mMinimumValue, buffer);
+            buffer += ", ";
+            print(mMaximumValue, buffer);
+            buffer += ", ";
+            print(mStepSize, buffer);
+            buffer += ")";
+            LOGI("%s", (const char *)buffer);
+            break;
+        case kFormEnum:
+            buffer = "    Enum { ";
+            for (int i = 0; i < mEnumLength; i++) {
+                print(mEnumValues[i], buffer);
+                buffer += " ";
+            }
+            buffer += "}";
+            LOGI("%s", (const char *)buffer);
+            break;
+        case kFormDateTime:
+            LOGI("    DateTime\n");
+            break;
+        default:
+            LOGI("    form %d\n", mFormFlag);
+            break;
+    }
+}
+
+void MtpProperty::print(MtpPropertyValue& value, MtpString& buffer) {
+    switch (mType) {
+        case MTP_TYPE_INT8:
+            buffer.appendFormat("%d", value.u.i8);
+            break;
+        case MTP_TYPE_UINT8:
+            buffer.appendFormat("%u", value.u.u8);
+            break;
+        case MTP_TYPE_INT16:
+            buffer.appendFormat("%d", value.u.i16);
+            break;
+        case MTP_TYPE_UINT16:
+            buffer.appendFormat("%u", value.u.u16);
+            break;
+        case MTP_TYPE_INT32:
+            buffer.appendFormat("%d", value.u.i32);
+            break;
+        case MTP_TYPE_UINT32:
+            buffer.appendFormat("%u", value.u.u32);
+            break;
+        case MTP_TYPE_INT64:
+            buffer.appendFormat("%lld", value.u.i64);
+            break;
+        case MTP_TYPE_UINT64:
+            buffer.appendFormat("%llu", value.u.u64);
+            break;
+        case MTP_TYPE_INT128:
+            buffer.appendFormat("%08X%08X%08X%08X", value.u.i128[0], value.u.i128[1],
+                    value.u.i128[2], value.u.i128[3]);
+            break;
+        case MTP_TYPE_UINT128:
+            buffer.appendFormat("%08X%08X%08X%08X", value.u.u128[0], value.u.u128[1],
+                    value.u.u128[2], value.u.u128[3]);
+            break;
+        case MTP_TYPE_STR:
+            buffer.appendFormat("%s", value.str);
+            break;
+        default:
+            LOGE("unsupported type for MtpProperty::print\n");
+            break;
+    }
+}
+
+void MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+    MtpStringBuffer stringBuffer;
+
+    switch (mType) {
+        case MTP_TYPE_INT8:
+        case MTP_TYPE_AINT8:
+            value.u.i8 = packet.getInt8();
+            break;
+        case MTP_TYPE_UINT8:
+        case MTP_TYPE_AUINT8:
+            value.u.u8 = packet.getUInt8();
+            break;
+        case MTP_TYPE_INT16:
+        case MTP_TYPE_AINT16:
+            value.u.i16 = packet.getInt16();
+            break;
+        case MTP_TYPE_UINT16:
+        case MTP_TYPE_AUINT16:
+            value.u.u16 = packet.getUInt16();
+            break;
+        case MTP_TYPE_INT32:
+        case MTP_TYPE_AINT32:
+            value.u.i32 = packet.getInt32();
+            break;
+        case MTP_TYPE_UINT32:
+        case MTP_TYPE_AUINT32:
+            value.u.u32 = packet.getUInt32();
+            break;
+        case MTP_TYPE_INT64:
+        case MTP_TYPE_AINT64:
+            value.u.i64 = packet.getInt64();
+            break;
+        case MTP_TYPE_UINT64:
+        case MTP_TYPE_AUINT64:
+            value.u.u64 = packet.getUInt64();
+            break;
+        case MTP_TYPE_INT128:
+        case MTP_TYPE_AINT128:
+            packet.getInt128(value.u.i128);
+            break;
+        case MTP_TYPE_UINT128:
+        case MTP_TYPE_AUINT128:
+            packet.getUInt128(value.u.u128);
+            break;
+        case MTP_TYPE_STR:
+            packet.getString(stringBuffer);
+            value.str = strdup(stringBuffer);
+            break;
+        default:
+            LOGE("unknown type %04X in MtpProperty::readValue", mType);
+    }
+}
+
+void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+    MtpStringBuffer stringBuffer;
+
+    switch (mType) {
+        case MTP_TYPE_INT8:
+        case MTP_TYPE_AINT8:
+            packet.putInt8(value.u.i8);
+            break;
+        case MTP_TYPE_UINT8:
+        case MTP_TYPE_AUINT8:
+            packet.putUInt8(value.u.u8);
+            break;
+        case MTP_TYPE_INT16:
+        case MTP_TYPE_AINT16:
+            packet.putInt16(value.u.i16);
+            break;
+        case MTP_TYPE_UINT16:
+        case MTP_TYPE_AUINT16:
+            packet.putUInt16(value.u.u16);
+            break;
+        case MTP_TYPE_INT32:
+        case MTP_TYPE_AINT32:
+            packet.putInt32(value.u.i32);
+            break;
+        case MTP_TYPE_UINT32:
+        case MTP_TYPE_AUINT32:
+            packet.putUInt32(value.u.u32);
+            break;
+        case MTP_TYPE_INT64:
+        case MTP_TYPE_AINT64:
+            packet.putInt64(value.u.i64);
+            break;
+        case MTP_TYPE_UINT64:
+        case MTP_TYPE_AUINT64:
+            packet.putUInt64(value.u.u64);
+            break;
+        case MTP_TYPE_INT128:
+        case MTP_TYPE_AINT128:
+            packet.putInt128(value.u.i128);
+            break;
+        case MTP_TYPE_UINT128:
+        case MTP_TYPE_AUINT128:
+            packet.putUInt128(value.u.u128);
+            break;
+        case MTP_TYPE_STR:
+            if (value.str)
+                packet.putString(value.str);
+            else
+                packet.putEmptyString();
+            break;
+        default:
+            LOGE("unknown type %04X in MtpProperty::writeValue", mType);
+    }
+}
+
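+// Array-valued properties are encoded in MTP as a 32-bit element count followed
+// by the elements themselves; readArrayValues/writeArrayValues assume that layout.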
+MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) {
+    length = packet.getUInt32();
+    if (length == 0)
+        return NULL;
+    MtpPropertyValue* result = new MtpPropertyValue[length];
+    for (int i = 0; i < length; i++)
+        readValue(packet, result[i]);
+    return result;
+}
+
+void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, int length) {
+    packet.putUInt32(length);
+    for (int i = 0; i < length; i++)
+        writeValue(packet, values[i]);
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
new file mode 100644
index 0000000..06ca56e
--- /dev/null
+++ b/media/mtp/MtpProperty.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PROPERTY_H
+#define _MTP_PROPERTY_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
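+// Holds one property value: the union member that matches the property's
+// MtpDataType is the live one, and str (heap-allocated, UTF-8) is only used
+// for MTP_TYPE_STR values.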
+struct MtpPropertyValue {
+    union {
+        int8_t          i8;
+        uint8_t         u8;
+        int16_t         i16;
+        uint16_t        u16;
+        int32_t         i32;
+        uint32_t        u32;
+        int64_t         i64;
+        uint64_t        u64;
+        int128_t        i128;
+        uint128_t       u128;
+    } u;
+    // string in UTF8 format
+    char*               str;
+};
+
+class MtpProperty {
+public:
+    MtpPropertyCode     mCode;
+    MtpDataType         mType;
+    bool                mWriteable;
+    MtpPropertyValue    mDefaultValue;
+    MtpPropertyValue    mCurrentValue;
+
+    // for array types
+    int                 mDefaultArrayLength;
+    MtpPropertyValue*   mDefaultArrayValues;
+    int                 mCurrentArrayLength;
+    MtpPropertyValue*   mCurrentArrayValues;
+
+    enum {
+        kFormNone = 0,
+        kFormRange = 1,
+        kFormEnum = 2,
+        kFormDateTime = 3,
+    };
+
+    uint32_t            mGroupCode;
+    uint8_t             mFormFlag;
+
+    // for range form
+    MtpPropertyValue    mMinimumValue;
+    MtpPropertyValue    mMaximumValue;
+    MtpPropertyValue    mStepSize;
+
+    // for enum form
+    int                 mEnumLength;
+    MtpPropertyValue*   mEnumValues;
+
+public:
+                        MtpProperty();
+                        MtpProperty(MtpPropertyCode propCode,
+                                     MtpDataType type,
+                                     bool writeable = false,
+                                     int defaultValue = 0);
+    virtual             ~MtpProperty();
+
+    inline MtpPropertyCode getPropertyCode() const { return mCode; }
+
+    void                read(MtpDataPacket& packet);
+    void                write(MtpDataPacket& packet);
+
+    void                setDefaultValue(const uint16_t* string);
+    void                setCurrentValue(const uint16_t* string);
+
+    void                setFormRange(int min, int max, int step);
+    void                setFormEnum(const int* values, int count);
+    void                setFormDateTime();
+
+    void                print();
+    void                print(MtpPropertyValue& value, MtpString& buffer);
+
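+    // Device property codes occupy 0x5xxx and 0xD000-0xD7FF; object property
+    // codes (typically 0xDCxx) fall outside these masks.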
+    inline bool         isDeviceProperty() const {
+                            return (   ((mCode & 0xF000) == 0x5000)
+                                    || ((mCode & 0xF800) == 0xD000));
+                        }
+
+private:
+    void                readValue(MtpDataPacket& packet, MtpPropertyValue& value);
+    void                writeValue(MtpDataPacket& packet, MtpPropertyValue& value);
+    MtpPropertyValue*   readArrayValues(MtpDataPacket& packet, int& length);
+    void                writeArrayValues(MtpDataPacket& packet,
+                                            MtpPropertyValue* values, int length);
+};
+
+}; // namespace android
+
+#endif // _MTP_PROPERTY_H
diff --git a/media/mtp/MtpRequestPacket.cpp b/media/mtp/MtpRequestPacket.cpp
new file mode 100644
index 0000000..0e58e01
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.cpp
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpRequestPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include "MtpRequestPacket.h"
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpRequestPacket::MtpRequestPacket()
+    :   MtpPacket(512)
+{
+}
+
+MtpRequestPacket::~MtpRequestPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpRequestPacket::read(int fd) {
+    int ret = ::read(fd, mBuffer, mBufferSize);
+    if (ret >= 0)
+        mPacketSize = ret;
+    else
+        mPacketSize = 0;
+    return ret;
+}
+#endif
+
+#ifdef MTP_HOST
+// write our buffer to the given endpoint (host mode)
+int MtpRequestPacket::write(struct usb_request *request)
+{
+    putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_COMMAND);
+    request->buffer = mBuffer;
+    request->buffer_length = mPacketSize;
+    return transfer(request);
+}
+#endif
+
+}  // namespace android
diff --git a/media/mtp/MtpRequestPacket.h b/media/mtp/MtpRequestPacket.h
new file mode 100644
index 0000000..1201f11
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_REQUEST_PACKET_H
+#define _MTP_REQUEST_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+struct usb_request;
+
+namespace android {
+
+class MtpRequestPacket : public MtpPacket {
+
+public:
+                        MtpRequestPacket();
+    virtual             ~MtpRequestPacket();
+
+#ifdef MTP_DEVICE
+    // fill our buffer with data from the given file descriptor
+    int                 read(int fd);
+#endif
+
+#ifdef MTP_HOST
+    // write our buffer to the given endpoint
+    int                 write(struct usb_request *request);
+#endif
+
+    inline MtpOperationCode    getOperationCode() const { return getContainerCode(); }
+    inline void                setOperationCode(MtpOperationCode code)
+                                                    { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_REQUEST_PACKET_H
diff --git a/media/mtp/MtpResponsePacket.cpp b/media/mtp/MtpResponsePacket.cpp
new file mode 100644
index 0000000..c2b41e4
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.cpp
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpResponsePacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include "MtpResponsePacket.h"
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpResponsePacket::MtpResponsePacket()
+    :   MtpPacket(512)
+{
+}
+
+MtpResponsePacket::~MtpResponsePacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpResponsePacket::write(int fd) {
+    putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_RESPONSE);
+    int ret = ::write(fd, mBuffer, mPacketSize);
+    return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+int MtpResponsePacket::read(struct usb_request *request) {
+    request->buffer = mBuffer;
+    request->buffer_length = mBufferSize;
+    int ret = transfer(request);
+    if (ret >= 0)
+        mPacketSize = ret;
+    else
+        mPacketSize = 0;
+    return ret;
+}
+#endif
+
+}  // namespace android
+
diff --git a/media/mtp/MtpResponsePacket.h b/media/mtp/MtpResponsePacket.h
new file mode 100644
index 0000000..592ad4a
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_RESPONSE_PACKET_H
+#define _MTP_RESPONSE_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpResponsePacket : public MtpPacket {
+
+public:
+                        MtpResponsePacket();
+    virtual             ~MtpResponsePacket();
+
+#ifdef MTP_DEVICE
+    // write our data to the given file descriptor
+    int                 write(int fd);
+#endif
+
+#ifdef MTP_HOST
+    // fill our buffer with a response using the given request
+    int                 read(struct usb_request *request);
+#endif
+
+    inline MtpResponseCode      getResponseCode() const { return getContainerCode(); }
+    inline void                 setResponseCode(MtpResponseCode code)
+                                                     { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_RESPONSE_PACKET_H
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
new file mode 100644
index 0000000..37e02a3
--- /dev/null
+++ b/media/mtp/MtpServer.cpp
@@ -0,0 +1,936 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <sys/stat.h>
+#include <dirent.h>
+
+#include <cutils/properties.h>
+
+#define LOG_TAG "MtpServer"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpProperty.h"
+#include "MtpServer.h"
+#include "MtpStorage.h"
+#include "MtpStringBuffer.h"
+
+#include <linux/usb/f_mtp.h>
+
+namespace android {
+
+static const MtpOperationCode kSupportedOperationCodes[] = {
+    MTP_OPERATION_GET_DEVICE_INFO,
+    MTP_OPERATION_OPEN_SESSION,
+    MTP_OPERATION_CLOSE_SESSION,
+    MTP_OPERATION_GET_STORAGE_IDS,
+    MTP_OPERATION_GET_STORAGE_INFO,
+    MTP_OPERATION_GET_NUM_OBJECTS,
+    MTP_OPERATION_GET_OBJECT_HANDLES,
+    MTP_OPERATION_GET_OBJECT_INFO,
+    MTP_OPERATION_GET_OBJECT,
+//    MTP_OPERATION_GET_THUMB,
+    MTP_OPERATION_DELETE_OBJECT,
+    MTP_OPERATION_SEND_OBJECT_INFO,
+    MTP_OPERATION_SEND_OBJECT,
+//    MTP_OPERATION_INITIATE_CAPTURE,
+//    MTP_OPERATION_FORMAT_STORE,
+//    MTP_OPERATION_RESET_DEVICE,
+//    MTP_OPERATION_SELF_TEST,
+//    MTP_OPERATION_SET_OBJECT_PROTECTION,
+//    MTP_OPERATION_POWER_DOWN,
+    MTP_OPERATION_GET_DEVICE_PROP_DESC,
+    MTP_OPERATION_GET_DEVICE_PROP_VALUE,
+    MTP_OPERATION_SET_DEVICE_PROP_VALUE,
+    MTP_OPERATION_RESET_DEVICE_PROP_VALUE,
+//    MTP_OPERATION_TERMINATE_OPEN_CAPTURE,
+//    MTP_OPERATION_MOVE_OBJECT,
+//    MTP_OPERATION_COPY_OBJECT,
+    MTP_OPERATION_GET_PARTIAL_OBJECT,
+//    MTP_OPERATION_INITIATE_OPEN_CAPTURE,
+    MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED,
+    MTP_OPERATION_GET_OBJECT_PROP_DESC,
+    MTP_OPERATION_GET_OBJECT_PROP_VALUE,
+    MTP_OPERATION_SET_OBJECT_PROP_VALUE,
+    MTP_OPERATION_GET_OBJECT_PROP_LIST,
+//    MTP_OPERATION_SET_OBJECT_PROP_LIST,
+//    MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC,
+//    MTP_OPERATION_SEND_OBJECT_PROP_LIST,
+    MTP_OPERATION_GET_OBJECT_REFERENCES,
+    MTP_OPERATION_SET_OBJECT_REFERENCES,
+//    MTP_OPERATION_SKIP,
+};
+
+static const MtpEventCode kSupportedEventCodes[] = {
+    MTP_EVENT_OBJECT_ADDED,
+    MTP_EVENT_OBJECT_REMOVED,
+    MTP_EVENT_STORE_ADDED,
+    MTP_EVENT_STORE_REMOVED,
+};
+
+MtpServer::MtpServer(int fd, MtpDatabase* database,
+                    int fileGroup, int filePerm, int directoryPerm)
+    :   mFD(fd),
+        mDatabase(database),
+        mFileGroup(fileGroup),
+        mFilePermission(filePerm),
+        mDirectoryPermission(directoryPerm),
+        mSessionID(0),
+        mSessionOpen(false),
+        mSendObjectHandle(kInvalidObjectHandle),
+        mSendObjectFormat(0),
+        mSendObjectFileSize(0)
+{
+}
+
+MtpServer::~MtpServer() {
+}
+
+void MtpServer::addStorage(MtpStorage* storage) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mStorages.push(storage);
+    sendStoreAdded(storage->getStorageID());
+}
+
+void MtpServer::removeStorage(MtpStorage* storage) {
+    Mutex::Autolock autoLock(mMutex);
+
+    for (int i = 0; i < mStorages.size(); i++) {
+        if (mStorages[i] == storage) {
+            mStorages.removeAt(i);
+            sendStoreRemoved(storage->getStorageID());
+            break;
+        }
+    }
+}
+
+MtpStorage* MtpServer::getStorage(MtpStorageID id) {
+    if (id == 0)
+        return mStorages[0];
+    for (int i = 0; i < mStorages.size(); i++) {
+        MtpStorage* storage = mStorages[i];
+        if (storage->getStorageID() == id)
+            return storage;
+    }
+    return NULL;
+}
+
+bool MtpServer::hasStorage(MtpStorageID id) {
+    if (id == 0 || id == 0xFFFFFFFF)
+        return mStorages.size() > 0;
+    return (getStorage(id) != NULL);
+}
+
+void MtpServer::run() {
+    int fd = mFD;
+
+    LOGV("MtpServer::run fd: %d\n", fd);
+
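+    // Each iteration handles one MTP transaction: read the request, read a
+    // data phase for operations that send data to the device, dispatch it,
+    // then write any outgoing data followed by the response packet.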
+    while (1) {
+        int ret = mRequest.read(fd);
+        if (ret < 0) {
+            LOGV("request read returned %d, errno: %d", ret, errno);
+            if (errno == ECANCELED) {
+                // return to top of loop and wait for next command
+                continue;
+            }
+            break;
+        }
+        MtpOperationCode operation = mRequest.getOperationCode();
+        MtpTransactionID transaction = mRequest.getTransactionID();
+
+        LOGV("operation: %s", MtpDebug::getOperationCodeName(operation));
+        mRequest.dump();
+
+        // FIXME need to generalize this
+        bool dataIn = (operation == MTP_OPERATION_SEND_OBJECT_INFO
+                    || operation == MTP_OPERATION_SET_OBJECT_REFERENCES
+                    || operation == MTP_OPERATION_SET_OBJECT_PROP_VALUE
+                    || operation == MTP_OPERATION_SET_DEVICE_PROP_VALUE);
+        if (dataIn) {
+            int ret = mData.read(fd);
+            if (ret < 0) {
+                LOGE("data read returned %d, errno: %d", ret, errno);
+                if (errno == ECANCELED) {
+                    // return to top of loop and wait for next command
+                    continue;
+                }
+                break;
+            }
+            LOGV("received data:");
+            mData.dump();
+        } else {
+            mData.reset();
+        }
+
+        if (handleRequest()) {
+            if (!dataIn && mData.hasData()) {
+                mData.setOperationCode(operation);
+                mData.setTransactionID(transaction);
+                LOGV("sending data:");
+                mData.dump();
+                ret = mData.write(fd);
+                if (ret < 0) {
+                    LOGE("request write returned %d, errno: %d", ret, errno);
+                    if (errno == ECANCELED) {
+                        // return to top of loop and wait for next command
+                        continue;
+                    }
+                    break;
+                }
+            }
+
+            mResponse.setTransactionID(transaction);
+            LOGV("sending response %04X", mResponse.getResponseCode());
+            ret = mResponse.write(fd);
+            mResponse.dump();
+            if (ret < 0) {
+                LOGE("request write returned %d, errno: %d", ret, errno);
+                if (errno == ECANCELED) {
+                    // return to top of loop and wait for next command
+                    continue;
+                }
+                break;
+            }
+        } else {
+            LOGV("skipping response\n");
+        }
+    }
+
+    if (mSessionOpen)
+        mDatabase->sessionEnded();
+}
+
+void MtpServer::sendObjectAdded(MtpObjectHandle handle) {
+    LOGV("sendObjectAdded %d\n", handle);
+    sendEvent(MTP_EVENT_OBJECT_ADDED, handle);
+}
+
+void MtpServer::sendObjectRemoved(MtpObjectHandle handle) {
+    LOGV("sendObjectRemoved %d\n", handle);
+    sendEvent(MTP_EVENT_OBJECT_REMOVED, handle);
+}
+
+void MtpServer::sendStoreAdded(MtpStorageID id) {
+    LOGV("sendStoreAdded %08X\n", id);
+    sendEvent(MTP_EVENT_STORE_ADDED, id);
+}
+
+void MtpServer::sendStoreRemoved(MtpStorageID id) {
+    LOGV("sendStoreRemoved %08X\n", id);
+    sendEvent(MTP_EVENT_STORE_REMOVED, id);
+}
+
+void MtpServer::sendEvent(MtpEventCode code, uint32_t param1) {
+    if (mSessionOpen) {
+        mEvent.setEventCode(code);
+        mEvent.setTransactionID(mRequest.getTransactionID());
+        mEvent.setParameter(1, param1);
+        int ret = mEvent.write(mFD);
+        LOGV("mEvent.write returned %d\n", ret);
+    }
+}
+
+bool MtpServer::handleRequest() {
+    Mutex::Autolock autoLock(mMutex);
+
+    MtpOperationCode operation = mRequest.getOperationCode();
+    MtpResponseCode response;
+
+    mResponse.reset();
+
+    if (mSendObjectHandle != kInvalidObjectHandle && operation != MTP_OPERATION_SEND_OBJECT) {
+        // FIXME - need to delete mSendObjectHandle from the database
+        LOGE("expected SendObject after SendObjectInfo");
+        mSendObjectHandle = kInvalidObjectHandle;
+    }
+
+    switch (operation) {
+        case MTP_OPERATION_GET_DEVICE_INFO:
+            response = doGetDeviceInfo();
+            break;
+        case MTP_OPERATION_OPEN_SESSION:
+            response = doOpenSession();
+            break;
+        case MTP_OPERATION_CLOSE_SESSION:
+            response = doCloseSession();
+            break;
+        case MTP_OPERATION_GET_STORAGE_IDS:
+            response = doGetStorageIDs();
+            break;
+         case MTP_OPERATION_GET_STORAGE_INFO:
+            response = doGetStorageInfo();
+            break;
+        case MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED:
+            response = doGetObjectPropsSupported();
+            break;
+        case MTP_OPERATION_GET_OBJECT_HANDLES:
+            response = doGetObjectHandles();
+            break;
+        case MTP_OPERATION_GET_NUM_OBJECTS:
+            response = doGetNumObjects();
+            break;
+        case MTP_OPERATION_GET_OBJECT_REFERENCES:
+            response = doGetObjectReferences();
+            break;
+        case MTP_OPERATION_SET_OBJECT_REFERENCES:
+            response = doSetObjectReferences();
+            break;
+        case MTP_OPERATION_GET_OBJECT_PROP_VALUE:
+            response = doGetObjectPropValue();
+            break;
+        case MTP_OPERATION_SET_OBJECT_PROP_VALUE:
+            response = doSetObjectPropValue();
+            break;
+        case MTP_OPERATION_GET_DEVICE_PROP_VALUE:
+            response = doGetDevicePropValue();
+            break;
+        case MTP_OPERATION_SET_DEVICE_PROP_VALUE:
+            response = doSetDevicePropValue();
+            break;
+        case MTP_OPERATION_RESET_DEVICE_PROP_VALUE:
+            response = doResetDevicePropValue();
+            break;
+        case MTP_OPERATION_GET_OBJECT_PROP_LIST:
+            response = doGetObjectPropList();
+            break;
+        case MTP_OPERATION_GET_OBJECT_INFO:
+            response = doGetObjectInfo();
+            break;
+        case MTP_OPERATION_GET_OBJECT:
+            response = doGetObject();
+            break;
+        case MTP_OPERATION_GET_PARTIAL_OBJECT:
+            response = doGetPartialObject();
+            break;
+        case MTP_OPERATION_SEND_OBJECT_INFO:
+            response = doSendObjectInfo();
+            break;
+        case MTP_OPERATION_SEND_OBJECT:
+            response = doSendObject();
+            break;
+        case MTP_OPERATION_DELETE_OBJECT:
+            response = doDeleteObject();
+            break;
+        case MTP_OPERATION_GET_OBJECT_PROP_DESC:
+            response = doGetObjectPropDesc();
+            break;
+        case MTP_OPERATION_GET_DEVICE_PROP_DESC:
+            response = doGetDevicePropDesc();
+            break;
+        default:
+            LOGE("got unsupported command %s", MtpDebug::getOperationCodeName(operation));
+            response = MTP_RESPONSE_OPERATION_NOT_SUPPORTED;
+            break;
+    }
+
+    if (response == MTP_RESPONSE_TRANSACTION_CANCELLED)
+        return false;
+    mResponse.setResponseCode(response);
+    return true;
+}
+
+MtpResponseCode MtpServer::doGetDeviceInfo() {
+    MtpStringBuffer   string;
+    char prop_value[PROPERTY_VALUE_MAX];
+
+    MtpObjectFormatList* playbackFormats = mDatabase->getSupportedPlaybackFormats();
+    MtpObjectFormatList* captureFormats = mDatabase->getSupportedCaptureFormats();
+    MtpDevicePropertyList* deviceProperties = mDatabase->getSupportedDeviceProperties();
+
+    // fill in device info
+    mData.putUInt16(MTP_STANDARD_VERSION);
+    mData.putUInt32(6); // MTP Vendor Extension ID
+    mData.putUInt16(MTP_STANDARD_VERSION);
+    string.set("microsoft.com: 1.0;");
+    mData.putString(string); // MTP Extensions
+    mData.putUInt16(0); //Functional Mode
+    mData.putAUInt16(kSupportedOperationCodes,
+            sizeof(kSupportedOperationCodes) / sizeof(uint16_t)); // Operations Supported
+    mData.putAUInt16(kSupportedEventCodes,
+            sizeof(kSupportedEventCodes) / sizeof(uint16_t)); // Events Supported
+    mData.putAUInt16(deviceProperties); // Device Properties Supported
+    mData.putAUInt16(captureFormats); // Capture Formats
+    mData.putAUInt16(playbackFormats);  // Playback Formats
+
+    property_get("ro.product.manufacturer", prop_value, "unknown manufacturer");
+    string.set(prop_value);
+    mData.putString(string);   // Manufacturer
+
+    property_get("ro.product.model", prop_value, "MTP Device");
+    string.set(prop_value);
+    mData.putString(string);   // Model
+    string.set("1.0");
+    mData.putString(string);   // Device Version
+
+    property_get("ro.serialno", prop_value, "????????");
+    string.set(prop_value);
+    mData.putString(string);   // Serial Number
+
+    delete playbackFormats;
+    delete captureFormats;
+    delete deviceProperties;
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doOpenSession() {
+    if (mSessionOpen) {
+        mResponse.setParameter(1, mSessionID);
+        return MTP_RESPONSE_SESSION_ALREADY_OPEN;
+    }
+    mSessionID = mRequest.getParameter(1);
+    mSessionOpen = true;
+
+    mDatabase->sessionStarted();
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doCloseSession() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    mSessionID = 0;
+    mSessionOpen = false;
+    mDatabase->sessionEnded();
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageIDs() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+
+    int count = mStorages.size();
+    mData.putUInt32(count);
+    for (int i = 0; i < count; i++)
+        mData.putUInt32(mStorages[i]->getStorageID());
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageInfo() {
+    MtpStringBuffer   string;
+
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpStorageID id = mRequest.getParameter(1);
+    MtpStorage* storage = getStorage(id);
+    if (!storage)
+        return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+    mData.putUInt16(storage->getType());
+    mData.putUInt16(storage->getFileSystemType());
+    mData.putUInt16(storage->getAccessCapability());
+    mData.putUInt64(storage->getMaxCapacity());
+    mData.putUInt64(storage->getFreeSpace());
+    mData.putUInt32(1024*1024*1024); // Free Space in Objects
+    string.set(storage->getDescription());
+    mData.putString(string);
+    mData.putEmptyString();   // Volume Identifier
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropsSupported() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpObjectFormat format = mRequest.getParameter(1);
+    MtpObjectPropertyList* properties = mDatabase->getSupportedObjectProperties(format);
+    mData.putAUInt16(properties);
+    delete properties;
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectHandles() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpStorageID storageID = mRequest.getParameter(1);      // 0xFFFFFFFF for all storage
+    MtpObjectFormat format = mRequest.getParameter(2);      // 0 for all formats
+    MtpObjectHandle parent = mRequest.getParameter(3);      // 0xFFFFFFFF for objects with no parent
+                                                            // 0x00000000 for all objects?
+
+    if (!hasStorage(storageID))
+        return MTP_RESPONSE_INVALID_STORAGE_ID;
+    if (parent == 0xFFFFFFFF)
+        parent = 0;
+
+    MtpObjectHandleList* handles = mDatabase->getObjectList(storageID, format, parent);
+    mData.putAUInt32(handles);
+    delete handles;
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetNumObjects() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpStorageID storageID = mRequest.getParameter(1);      // 0xFFFFFFFF for all storage
+    MtpObjectFormat format = mRequest.getParameter(2);      // 0 for all formats
+    MtpObjectHandle parent = mRequest.getParameter(3);      // 0xFFFFFFFF for objects with no parent
+                                                            // 0x00000000 for all objects?
+    if (!hasStorage(storageID))
+        return MTP_RESPONSE_INVALID_STORAGE_ID;
+    if (parent == 0xFFFFFFFF)
+        parent = 0;
+
+    int count = mDatabase->getNumObjects(storageID, format, parent);
+    if (count >= 0) {
+        mResponse.setParameter(1, count);
+        return MTP_RESPONSE_OK;
+    } else {
+        mResponse.setParameter(1, 0);
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    }
+}
+
+MtpResponseCode MtpServer::doGetObjectReferences() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    if (!hasStorage())
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    MtpObjectHandle handle = mRequest.getParameter(1);
+
+    // FIXME - check for invalid object handle
+    MtpObjectHandleList* handles = mDatabase->getObjectReferences(handle);
+    if (handles) {
+        mData.putAUInt32(handles);
+        delete handles;
+    } else {
+        mData.putEmptyArray();
+    }
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSetObjectReferences() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    if (!hasStorage())
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    MtpStorageID handle = mRequest.getParameter(1);
+
+    MtpObjectHandleList* references = mData.getAUInt32();
+    MtpResponseCode result = mDatabase->setObjectReferences(handle, references);
+    delete references;
+    return result;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropValue() {
+    if (!hasStorage())
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    MtpObjectProperty property = mRequest.getParameter(2);
+    LOGV("GetObjectPropValue %d %s\n", handle,
+            MtpDebug::getObjectPropCodeName(property));
+
+    return mDatabase->getObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doSetObjectPropValue() {
+    if (!hasStorage())
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    MtpObjectProperty property = mRequest.getParameter(2);
+    LOGV("SetObjectPropValue %d %s\n", handle,
+            MtpDebug::getObjectPropCodeName(property));
+
+    return mDatabase->setObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doGetDevicePropValue() {
+    MtpDeviceProperty property = mRequest.getParameter(1);
+    LOGV("GetDevicePropValue %s\n",
+            MtpDebug::getDevicePropCodeName(property));
+
+    return mDatabase->getDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doSetDevicePropValue() {
+    MtpDeviceProperty property = mRequest.getParameter(1);
+    LOGV("SetDevicePropValue %s\n",
+            MtpDebug::getDevicePropCodeName(property));
+
+    return mDatabase->setDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doResetDevicePropValue() {
+    MtpDeviceProperty property = mRequest.getParameter(1);
+    LOGV("ResetDevicePropValue %s\n",
+            MtpDebug::getDevicePropCodeName(property));
+
+    return mDatabase->resetDeviceProperty(property);
+}
+
+MtpResponseCode MtpServer::doGetObjectPropList() {
+    if (!hasStorage())
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    // use uint32_t so we can support 0xFFFFFFFF
+    uint32_t format = mRequest.getParameter(2);
+    uint32_t property = mRequest.getParameter(3);
+    int groupCode = mRequest.getParameter(4);
+    int depth = mRequest.getParameter(5);
+    LOGV("GetObjectPropList %d format: %s property: %s group: %d depth: %d\n",
+            handle, MtpDebug::getFormatCodeName(format),
+            MtpDebug::getObjectPropCodeName(property), groupCode, depth);
+
+    return mDatabase->getObjectPropertyList(handle, format, property, groupCode, depth, mData);
+}
+
+MtpResponseCode MtpServer::doGetObjectInfo() {
+    if (!hasStorage())
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    return mDatabase->getObjectInfo(handle, mData);
+}
+
+MtpResponseCode MtpServer::doGetObject() {
+    if (!hasStorage())
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    MtpString pathBuf;
+    int64_t fileLength;
+    MtpObjectFormat format;
+    int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength, format);
+    if (result != MTP_RESPONSE_OK)
+        return result;
+
+    const char* filePath = (const char *)pathBuf;
+    mtp_file_range  mfr;
+    mfr.fd = open(filePath, O_RDONLY);
+    if (mfr.fd < 0) {
+        return MTP_RESPONSE_GENERAL_ERROR;
+    }
+    mfr.offset = 0;
+    mfr.length = fileLength;
+
+    // send data header
+    mData.setOperationCode(mRequest.getOperationCode());
+    mData.setTransactionID(mRequest.getTransactionID());
+    mData.writeDataHeader(mFD, fileLength + MTP_CONTAINER_HEADER_SIZE);
+
+    // then transfer the file
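+    // (the MTP_SEND_FILE ioctl has the f_mtp gadget driver stream the file
+    // contents itself, which is why only the container header is written above)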
+    int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr);
+    close(mfr.fd);
+    if (ret < 0) {
+        if (errno == ECANCELED)
+            return MTP_RESPONSE_TRANSACTION_CANCELLED;
+        else
+            return MTP_RESPONSE_GENERAL_ERROR;
+    }
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetPartialObject() {
+    if (!hasStorage())
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    uint32_t offset = mRequest.getParameter(2);
+    uint32_t length = mRequest.getParameter(3);
+    MtpString pathBuf;
+    int64_t fileLength;
+    MtpObjectFormat format;
+    int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength, format);
+    if (result != MTP_RESPONSE_OK)
+        return result;
+    if (offset + length > fileLength)
+        length = fileLength - offset;
+
+    const char* filePath = (const char *)pathBuf;
+    mtp_file_range  mfr;
+    mfr.fd = open(filePath, O_RDONLY);
+    if (mfr.fd < 0) {
+        return MTP_RESPONSE_GENERAL_ERROR;
+    }
+    mfr.offset = offset;
+    mfr.length = length;
+    mResponse.setParameter(1, length);
+
+    // send data header
+    mData.setOperationCode(mRequest.getOperationCode());
+    mData.setTransactionID(mRequest.getTransactionID());
+    mData.writeDataHeader(mFD, length + MTP_CONTAINER_HEADER_SIZE);
+
+    // then transfer the file
+    int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr);
+    close(mfr.fd);
+    if (ret < 0) {
+        if (errno == ECANCELED)
+            return MTP_RESPONSE_TRANSACTION_CANCELLED;
+        else
+            return MTP_RESPONSE_GENERAL_ERROR;
+    }
+    return MTP_RESPONSE_OK;
+}
+
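+// SendObjectInfo creates the database entry and records the destination path
+// and handle; the file contents arrive in the SendObject transaction that is
+// expected to follow immediately.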
+MtpResponseCode MtpServer::doSendObjectInfo() {
+    MtpString path;
+    MtpStorageID storageID = mRequest.getParameter(1);
+    MtpStorage* storage = getStorage(storageID);
+    MtpObjectHandle parent = mRequest.getParameter(2);
+    if (!storage)
+        return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+    // special case the root
+    if (parent == MTP_PARENT_ROOT) {
+        path = storage->getPath();
+        parent = 0;
+    } else {
+        int64_t length;
+        MtpObjectFormat format;
+        int result = mDatabase->getObjectFilePath(parent, path, length, format);
+        if (result != MTP_RESPONSE_OK)
+            return result;
+        if (format != MTP_FORMAT_ASSOCIATION)
+            return MTP_RESPONSE_INVALID_PARENT_OBJECT;
+    }
+
+    // read only the fields we need
+    mData.getUInt32();  // storage ID
+    MtpObjectFormat format = mData.getUInt16();
+    mData.getUInt16();  // protection status
+    mSendObjectFileSize = mData.getUInt32();
+    mData.getUInt16();  // thumb format
+    mData.getUInt32();  // thumb compressed size
+    mData.getUInt32();  // thumb pix width
+    mData.getUInt32();  // thumb pix height
+    mData.getUInt32();  // image pix width
+    mData.getUInt32();  // image pix height
+    mData.getUInt32();  // image bit depth
+    mData.getUInt32();  // parent
+    uint16_t associationType = mData.getUInt16();
+    uint32_t associationDesc = mData.getUInt32();   // association desc
+    mData.getUInt32();  // sequence number
+    MtpStringBuffer name, created, modified;
+    mData.getString(name);    // file name
+    mData.getString(created);      // date created
+    mData.getString(modified);     // date modified
+    // keywords follow
+
+    LOGV("name: %s format: %04X\n", (const char *)name, format);
+    time_t modifiedTime;
+    if (!parseDateTime(modified, modifiedTime))
+        modifiedTime = 0;
+
+    if (path[path.size() - 1] != '/')
+        path += "/";
+    path += (const char *)name;
+
+    // check space first
+    if (mSendObjectFileSize > storage->getFreeSpace())
+        return MTP_RESPONSE_STORAGE_FULL;
+
+LOGD("path: %s parent: %d storageID: %08X", (const char*)path, parent, storageID);
+    MtpObjectHandle handle = mDatabase->beginSendObject((const char*)path,
+            format, parent, storageID, mSendObjectFileSize, modifiedTime);
+    if (handle == kInvalidObjectHandle) {
+        return MTP_RESPONSE_GENERAL_ERROR;
+    }
+
+    if (format == MTP_FORMAT_ASSOCIATION) {
+        mode_t mask = umask(0);
+        int ret = mkdir((const char *)path, mDirectoryPermission);
+        umask(mask);
+        // mkdir() returns -1 and sets errno, so test errno to tolerate existing directories
+        if (ret && errno != EEXIST)
+            return MTP_RESPONSE_GENERAL_ERROR;
+        chown((const char *)path, getuid(), mFileGroup);
+
+        // SendObject does not get sent for directories, so call endSendObject here instead
+        mDatabase->endSendObject(path, handle, MTP_FORMAT_ASSOCIATION, MTP_RESPONSE_OK);
+    } else {
+        mSendObjectFilePath = path;
+        // save the handle for the SendObject call, which should follow
+        mSendObjectHandle = handle;
+        mSendObjectFormat = format;
+    }
+
+    mResponse.setParameter(1, storageID);
+    mResponse.setParameter(2, parent);
+    mResponse.setParameter(3, handle);
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSendObject() {
+    if (!hasStorage())
+        return MTP_RESPONSE_GENERAL_ERROR;
+    MtpResponseCode result = MTP_RESPONSE_OK;
+    mode_t mask;
+    int ret;
+
+    if (mSendObjectHandle == kInvalidObjectHandle) {
+        LOGE("Expected SendObjectInfo before SendObject");
+        result = MTP_RESPONSE_NO_VALID_OBJECT_INFO;
+        goto done;
+    }
+
+    // read the header
+    ret = mData.readDataHeader(mFD);
+    // FIXME - check for errors here.
+
+    // reset so we don't attempt to send this back
+    mData.reset();
+
+    mtp_file_range  mfr;
+    mfr.fd = open(mSendObjectFilePath, O_RDWR | O_CREAT | O_TRUNC, mFilePermission);
+    if (mfr.fd < 0) {
+        result = MTP_RESPONSE_GENERAL_ERROR;
+        goto done;
+    }
+    fchown(mfr.fd, getuid(), mFileGroup);
+    // set permissions
+    mask = umask(0);
+    fchmod(mfr.fd, mFilePermission);
+    umask(mask);
+
+    mfr.offset = 0;
+    mfr.length = mSendObjectFileSize;
+
+    LOGV("receiving %s\n", (const char *)mSendObjectFilePath);
+    // transfer the file
+    ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr);
+    close(mfr.fd);
+
+    LOGV("MTP_RECEIVE_FILE returned %d", ret);
+
+    if (ret < 0) {
+        unlink(mSendObjectFilePath);
+        if (errno == ECANCELED)
+            result = MTP_RESPONSE_TRANSACTION_CANCELLED;
+        else
+            result = MTP_RESPONSE_GENERAL_ERROR;
+    }
+
+done:
+    mDatabase->endSendObject(mSendObjectFilePath, mSendObjectHandle, mSendObjectFormat,
+            result == MTP_RESPONSE_OK);
+    mSendObjectHandle = kInvalidObjectHandle;
+    mSendObjectFormat = 0;
+    return result;
+}
+
+static void deleteRecursive(const char* path) {
+    char pathbuf[PATH_MAX];
+    int pathLength = strlen(path);
+    if (pathLength >= sizeof(pathbuf) - 1) {
+        LOGE("path too long: %s\n", path);
+        return;
+    }
+    strcpy(pathbuf, path);
+    if (pathbuf[pathLength - 1] != '/') {
+        pathbuf[pathLength++] = '/';
+    }
+    char* fileSpot = pathbuf + pathLength;
+    int pathRemaining = sizeof(pathbuf) - pathLength - 1;
+
+    DIR* dir = opendir(path);
+    if (!dir) {
+        LOGE("opendir %s failed: %s", path, strerror(errno));
+        return;
+    }
+
+    struct dirent* entry;
+    while ((entry = readdir(dir))) {
+        const char* name = entry->d_name;
+
+        // ignore "." and ".."
+        if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' && name[2] == 0))) {
+            continue;
+        }
+
+        int nameLength = strlen(name);
+        if (nameLength > pathRemaining) {
+            LOGE("path %s/%s too long\n", path, name);
+            continue;
+        }
+        strcpy(fileSpot, name);
+
+        if (entry->d_type == DT_DIR) {
+            deleteRecursive(pathbuf);
+            rmdir(pathbuf);
+        } else {
+            unlink(pathbuf);
+        }
+    }
+    closedir(dir);
+}
+
+static void deletePath(const char* path) {
+    struct stat statbuf;
+    if (stat(path, &statbuf) == 0) {
+        if (S_ISDIR(statbuf.st_mode)) {
+            deleteRecursive(path);
+            rmdir(path);
+        } else {
+            unlink(path);
+        }
+    } else {
+        LOGE("deletePath stat failed for %s: %s", path, strerror(errno));
+    }
+}
+
+MtpResponseCode MtpServer::doDeleteObject() {
+    if (!hasStorage())
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    MtpObjectFormat format = mRequest.getParameter(2);
+    // FIXME - support deleting all objects if handle is 0xFFFFFFFF
+    // FIXME - implement deleting objects by format
+
+    MtpString filePath;
+    int64_t fileLength;
+    int result = mDatabase->getObjectFilePath(handle, filePath, fileLength, format);
+    if (result == MTP_RESPONSE_OK) {
+        LOGV("deleting %s", (const char *)filePath);
+        deletePath((const char *)filePath);
+        return mDatabase->deleteFile(handle);
+    } else {
+        return result;
+    }
+}
+
+MtpResponseCode MtpServer::doGetObjectPropDesc() {
+    MtpObjectProperty propCode = mRequest.getParameter(1);
+    MtpObjectFormat format = mRequest.getParameter(2);
+    LOGV("GetObjectPropDesc %s %s\n", MtpDebug::getObjectPropCodeName(propCode),
+                                        MtpDebug::getFormatCodeName(format));
+    MtpProperty* property = mDatabase->getObjectPropertyDesc(propCode, format);
+    if (!property)
+        return MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED;
+    property->write(mData);
+    delete property;
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetDevicePropDesc() {
+    MtpDeviceProperty propCode = mRequest.getParameter(1);
+    LOGV("GetDevicePropDesc %s\n", MtpDebug::getDevicePropCodeName(propCode));
+    MtpProperty* property = mDatabase->getDevicePropertyDesc(propCode);
+    if (!property)
+        return MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED;
+    property->write(mData);
+    delete property;
+    return MTP_RESPONSE_OK;
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
new file mode 100644
index 0000000..1efa715
--- /dev/null
+++ b/media/mtp/MtpServer.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_SERVER_H
+#define _MTP_SERVER_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpEventPacket.h"
+#include "mtp.h"
+#include "MtpUtils.h"
+
+#include <utils/threads.h>
+
+namespace android {
+
+class MtpDatabase;
+class MtpStorage;
+
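+// Typical usage (sketch only): open the MTP kernel device, construct the
+// server with an MtpDatabase implementation, add storage, then call run()
+// on a dedicated thread. Roughly:
+//
+//   int fd = open("/dev/mtp_usb", O_RDWR);              // device node may vary
+//   MtpServer server(fd, database, fileGroup, 0664, 0775);
+//   server.addStorage(new MtpStorage(0x00010001, "/sdcard", 0));
+//   server.run();                                        // returns when the session ends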
+class MtpServer {
+
+private:
+    // file descriptor for MTP kernel driver
+    int                 mFD;
+
+    MtpDatabase*        mDatabase;
+
+    // group to own new files and folders
+    int                 mFileGroup;
+    // permissions for new files and directories
+    int                 mFilePermission;
+    int                 mDirectoryPermission;
+
+    // current session ID
+    MtpSessionID        mSessionID;
+    // true if we have an open session and mSessionID is valid
+    bool                mSessionOpen;
+
+    MtpRequestPacket    mRequest;
+    MtpDataPacket       mData;
+    MtpResponsePacket   mResponse;
+    MtpEventPacket      mEvent;
+
+    MtpStorageList      mStorages;
+
+    // handle for new object, set by SendObjectInfo and used by SendObject
+    MtpObjectHandle     mSendObjectHandle;
+    MtpObjectFormat     mSendObjectFormat;
+    MtpString           mSendObjectFilePath;
+    size_t              mSendObjectFileSize;
+
+    Mutex               mMutex;
+
+public:
+                        MtpServer(int fd, MtpDatabase* database,
+                                    int fileGroup, int filePerm, int directoryPerm);
+    virtual             ~MtpServer();
+
+    void                addStorage(MtpStorage* storage);
+    void                removeStorage(MtpStorage* storage);
+
+    void                run();
+
+    void                sendObjectAdded(MtpObjectHandle handle);
+    void                sendObjectRemoved(MtpObjectHandle handle);
+
+private:
+    MtpStorage*         getStorage(MtpStorageID id);
+    inline bool         hasStorage() { return mStorages.size() > 0; }
+    bool                hasStorage(MtpStorageID id);
+    void                sendStoreAdded(MtpStorageID id);
+    void                sendStoreRemoved(MtpStorageID id);
+    void                sendEvent(MtpEventCode code, uint32_t param1);
+
+    bool                handleRequest();
+
+    MtpResponseCode     doGetDeviceInfo();
+    MtpResponseCode     doOpenSession();
+    MtpResponseCode     doCloseSession();
+    MtpResponseCode     doGetStorageIDs();
+    MtpResponseCode     doGetStorageInfo();
+    MtpResponseCode     doGetObjectPropsSupported();
+    MtpResponseCode     doGetObjectHandles();
+    MtpResponseCode     doGetNumObjects();
+    MtpResponseCode     doGetObjectReferences();
+    MtpResponseCode     doSetObjectReferences();
+    MtpResponseCode     doGetObjectPropValue();
+    MtpResponseCode     doSetObjectPropValue();
+    MtpResponseCode     doGetDevicePropValue();
+    MtpResponseCode     doSetDevicePropValue();
+    MtpResponseCode     doResetDevicePropValue();
+    MtpResponseCode     doGetObjectPropList();
+    MtpResponseCode     doGetObjectInfo();
+    MtpResponseCode     doGetObject();
+    MtpResponseCode     doGetPartialObject();
+    MtpResponseCode     doSendObjectInfo();
+    MtpResponseCode     doSendObject();
+    MtpResponseCode     doDeleteObject();
+    MtpResponseCode     doGetObjectPropDesc();
+    MtpResponseCode     doGetDevicePropDesc();
+};
+
+}; // namespace android
+
+#endif // _MTP_SERVER_H
diff --git a/media/mtp/MtpStorage.cpp b/media/mtp/MtpStorage.cpp
new file mode 100644
index 0000000..6cb88b3
--- /dev/null
+++ b/media/mtp/MtpStorage.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorage"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpStorage.h"
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/statfs.h>
+#include <unistd.h>
+#include <dirent.h>
+#include <errno.h>
+#include <string.h>
+#include <stdio.h>
+#include <limits.h>
+
+namespace android {
+
+MtpStorage::MtpStorage(MtpStorageID id, const char* filePath, uint64_t reserveSpace)
+    :   mStorageID(id),
+        mFilePath(filePath),
+        mMaxCapacity(0),
+        mReserveSpace(reserveSpace)
+{
+    LOGV("MtpStorage id: %d path: %s\n", id, filePath);
+}
+
+MtpStorage::~MtpStorage() {
+}
+
+int MtpStorage::getType() const {
+    return MTP_STORAGE_FIXED_RAM;
+}
+
+int MtpStorage::getFileSystemType() const {
+    return MTP_STORAGE_FILESYSTEM_HIERARCHICAL;
+}
+
+int MtpStorage::getAccessCapability() const {
+    return MTP_STORAGE_READ_WRITE;
+}
+
+uint64_t MtpStorage::getMaxCapacity() {
+    if (mMaxCapacity == 0) {
+        struct statfs   stat;
+        if (statfs(getPath(), &stat))
+            return -1;
+        mMaxCapacity = (uint64_t)stat.f_blocks * (uint64_t)stat.f_bsize;
+    }
+    return mMaxCapacity;
+}
+
+uint64_t MtpStorage::getFreeSpace() {
+    struct statfs   stat;
+    if (statfs(getPath(), &stat))
+        return -1;
+    uint64_t freeSpace = (uint64_t)stat.f_bavail * (uint64_t)stat.f_bsize;
+    return (freeSpace > mReserveSpace ? freeSpace - mReserveSpace : 0);
+}
+
+const char* MtpStorage::getDescription() const {
+    return "Device Storage";
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpStorage.h b/media/mtp/MtpStorage.h
new file mode 100644
index 0000000..858c9d3
--- /dev/null
+++ b/media/mtp/MtpStorage.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_H
+#define _MTP_STORAGE_H
+
+#include "MtpTypes.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpDatabase;
+
+class MtpStorage {
+
+private:
+    MtpStorageID            mStorageID;
+    MtpString               mFilePath;
+    uint64_t                mMaxCapacity;
+    // amount of free space to leave unallocated
+    uint64_t                mReserveSpace;
+
+public:
+                            MtpStorage(MtpStorageID id, const char* filePath,
+                                    uint64_t reserveSpace);
+    virtual                 ~MtpStorage();
+
+    inline MtpStorageID     getStorageID() const { return mStorageID; }
+    int                     getType() const;
+    int                     getFileSystemType() const;
+    int                     getAccessCapability() const;
+    uint64_t                getMaxCapacity();
+    uint64_t                getFreeSpace();
+    const char*             getDescription() const;
+    inline const char*      getPath() const { return (const char *)mFilePath; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_H
diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp
new file mode 100644
index 0000000..ca64ac0
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorageInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStorageInfo::MtpStorageInfo(MtpStorageID id)
+    :   mStorageID(id),
+        mStorageType(0),
+        mFileSystemType(0),
+        mAccessCapability(0),
+        mMaxCapacity(0),
+        mFreeSpaceBytes(0),
+        mFreeSpaceObjects(0),
+        mStorageDescription(NULL),
+        mVolumeIdentifier(NULL)
+{
+}
+
+MtpStorageInfo::~MtpStorageInfo() {
+    if (mStorageDescription)
+        free(mStorageDescription);
+    if (mVolumeIdentifier)
+        free(mVolumeIdentifier);
+}
+
+void MtpStorageInfo::read(MtpDataPacket& packet) {
+    MtpStringBuffer string;
+
+    // read the device info
+    mStorageType = packet.getUInt16();
+    mFileSystemType = packet.getUInt16();
+    mAccessCapability = packet.getUInt16();
+    mMaxCapacity = packet.getUInt64();
+    mFreeSpaceBytes = packet.getUInt64();
+    mFreeSpaceObjects = packet.getUInt32();
+
+    packet.getString(string);
+    mStorageDescription = strdup((const char *)string);
+    packet.getString(string);
+    mVolumeIdentifier = strdup((const char *)string);
+}
+
+void MtpStorageInfo::print() {
+    LOGD("Storage Info %08X:\n\tmStorageType: %d\n\tmFileSystemType: %d\n\tmAccessCapability: %d\n",
+            mStorageID, mStorageType, mFileSystemType, mAccessCapability);
+    LOGD("\tmMaxCapacity: %llu\n\tmFreeSpaceBytes: %llu\n\tmFreeSpaceObjects: %u\n",
+            mMaxCapacity, mFreeSpaceBytes, mFreeSpaceObjects);
+    LOGD("\tmStorageDescription: %s\n\tmVolumeIdentifier: %s\n",
+            mStorageDescription, mVolumeIdentifier);
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpStorageInfo.h b/media/mtp/MtpStorageInfo.h
new file mode 100644
index 0000000..2cb626e
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_INFO_H
+#define _MTP_STORAGE_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpStorageInfo {
+public:
+    MtpStorageID        mStorageID;
+    uint16_t            mStorageType;
+    uint16_t            mFileSystemType;
+    uint16_t            mAccessCapability;
+    uint64_t            mMaxCapacity;
+    uint64_t            mFreeSpaceBytes;
+    uint32_t            mFreeSpaceObjects;
+    char*               mStorageDescription;
+    char*               mVolumeIdentifier;
+
+public:
+                        MtpStorageInfo(MtpStorageID id);
+    virtual             ~MtpStorageInfo();
+
+    void                read(MtpDataPacket& packet);
+
+    void                print();
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_INFO_H
diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp
new file mode 100644
index 0000000..fe8cf04
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.cpp
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStringBuffer"
+
+#include <string.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStringBuffer::MtpStringBuffer()
+    :   mCharCount(0),
+        mByteCount(1)
+{
+    mBuffer[0] = 0;
+}
+
+MtpStringBuffer::MtpStringBuffer(const char* src)
+    :   mCharCount(0),
+        mByteCount(1)
+{
+    set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const uint16_t* src)
+    :   mCharCount(0),
+        mByteCount(1)
+{
+    set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const MtpStringBuffer& src)
+    :   mCharCount(src.mCharCount),
+        mByteCount(src.mByteCount)
+{
+    memcpy(mBuffer, src.mBuffer, mByteCount);
+}
+
+
+MtpStringBuffer::~MtpStringBuffer() {
+}
+
+void MtpStringBuffer::set(const char* src) {
+    int length = strlen(src);
+    if (length >= sizeof(mBuffer))
+        length = sizeof(mBuffer) - 1;
+    memcpy(mBuffer, src, length);
+
+    // count the characters
+    int count = 0;
+    char ch;
+    while ((ch = *src++) != 0) {
+        if ((ch & 0x80) == 0) {
+            // single byte character
+        } else if ((ch & 0xE0) == 0xC0) {
+            // two byte character
+            if (! *src++) {
+                // last character was truncated, so ignore last byte
+                length--;
+                break;
+            }
+        } else if ((ch & 0xF0) == 0xE0) {
+            // 3 byte char
+            if (! *src++) {
+                // last character was truncated, so ignore last byte
+                length--;
+                break;
+            }
+            if (! *src++) {
+                // last character was truncated, so ignore last two bytes
+                length -= 2;
+                break;
+            }
+        }
+        count++;
+    }
+
+    mByteCount = length + 1;
+    mBuffer[length] = 0;
+    mCharCount = count;
+}
+
+void MtpStringBuffer::set(const uint16_t* src) {
+    int count = 0;
+    uint16_t ch;
+    uint8_t* dest = mBuffer;
+
+    while ((ch = *src++) != 0 && count < 255) {
+        if (ch >= 0x0800) {
+            *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+            *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+            *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+        } else if (ch >= 0x80) {
+            *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+            *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+        } else {
+            *dest++ = ch;
+        }
+        count++;
+    }
+    *dest++ = 0;
+    mCharCount = count;
+    mByteCount = dest - mBuffer;
+}
+
+void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
+    int count = packet->getUInt8();
+    uint8_t* dest = mBuffer;
+    for (int i = 0; i < count; i++) {
+        uint16_t ch = packet->getUInt16();
+        if (ch >= 0x0800) {
+            *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+            *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+            *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+        } else if (ch >= 0x80) {
+            *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+            *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+        } else {
+            *dest++ = ch;
+        }
+    }
+    *dest++ = 0;
+    mCharCount = count;
+    mByteCount = dest - mBuffer;
+}
+
+void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const {
+    int count = mCharCount;
+    const uint8_t* src = mBuffer;
+    packet->putUInt8(count > 0 ? count + 1 : 0);
+
+    // expand utf8 to 16 bit chars
+    for (int i = 0; i < count; i++) {
+        uint16_t ch;
+        uint16_t ch1 = *src++;
+        if ((ch1 & 0x80) == 0) {
+            // single byte character
+            ch = ch1;
+        } else if ((ch1 & 0xE0) == 0xC0) {
+            // two byte character
+            uint16_t ch2 = *src++;
+            ch = ((ch1 & 0x1F) << 6) | (ch2 & 0x3F);
+        } else {
+            // three byte character
+            uint16_t ch2 = *src++;
+            uint16_t ch3 = *src++;
+            ch = ((ch1 & 0x0F) << 12) | ((ch2 & 0x3F) << 6) | (ch3 & 0x3F);
+        }
+        packet->putUInt16(ch);
+    }
+    // only terminate with zero if string is not empty
+    if (count > 0)
+        packet->putUInt16(0);
+}
+
+}  // namespace android
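
On the wire MTP strings are 16-bit code units, while MtpStringBuffer stores UTF-8 internally and converts with the 1-3 byte encoding shown above. A minimal sketch of that per-code-unit rule, with the same limitation as the original (BMP only, no surrogate pairs):

#include <stdint.h>
#include <stdio.h>

// Encode one 16-bit code unit as UTF-8, mirroring the branches in
// MtpStringBuffer::set(const uint16_t*) and readFromPacket().
static int encodeUtf8(uint16_t ch, uint8_t out[3]) {
    if (ch < 0x80) {                 // 1 byte:  0xxxxxxx
        out[0] = (uint8_t)ch;
        return 1;
    } else if (ch < 0x800) {         // 2 bytes: 110xxxxx 10xxxxxx
        out[0] = (uint8_t)(0xC0 | (ch >> 6));
        out[1] = (uint8_t)(0x80 | (ch & 0x3F));
        return 2;
    } else {                         // 3 bytes: 1110xxxx 10xxxxxx 10xxxxxx
        out[0] = (uint8_t)(0xE0 | (ch >> 12));
        out[1] = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
        out[2] = (uint8_t)(0x80 | (ch & 0x3F));
        return 3;
    }
}

int main() {
    uint8_t buf[3];
    int n = encodeUtf8(0x20AC, buf);          // U+20AC EURO SIGN
    for (int i = 0; i < n; i++) printf("%02X ", buf[i]);
    printf("\n");                             // prints: E2 82 AC
    return 0;
}
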
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
new file mode 100644
index 0000000..cbc8307
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STRING_BUFFER_H
+#define _MTP_STRING_BUFFER_H
+
+#include <stdint.h>
+
+namespace android {
+
+class MtpDataPacket;
+
+// Represents a utf8 string, with a maximum of 255 characters
+class MtpStringBuffer {
+
+private:
+    // mBuffer contains string in UTF8 format
+    // maximum 3 bytes/character, with 1 extra for zero termination
+    uint8_t         mBuffer[255 * 3 + 1];
+    int             mCharCount;
+    int             mByteCount;
+
+public:
+                    MtpStringBuffer();
+                    MtpStringBuffer(const char* src);
+                    MtpStringBuffer(const uint16_t* src);
+                    MtpStringBuffer(const MtpStringBuffer& src);
+    virtual         ~MtpStringBuffer();
+
+    void            set(const char* src);
+    void            set(const uint16_t* src);
+
+    void            readFromPacket(MtpDataPacket* packet);
+    void            writeToPacket(MtpDataPacket* packet) const;
+
+    inline int      getCharCount() const { return mCharCount; }
+    inline int      getByteCount() const { return mByteCount; }
+
+    inline operator const char*() const { return (const char *)mBuffer; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STRING_BUFFER_H
diff --git a/media/mtp/MtpTypes.h b/media/mtp/MtpTypes.h
new file mode 100644
index 0000000..720c854
--- /dev/null
+++ b/media/mtp/MtpTypes.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_TYPES_H
+#define _MTP_TYPES_H
+
+#include <stdint.h>
+#include "utils/String8.h"
+#include "utils/Vector.h"
+
+namespace android {
+
+typedef int32_t int128_t[4];
+typedef uint32_t uint128_t[4];
+
+typedef uint16_t MtpOperationCode;
+typedef uint16_t MtpResponseCode;
+typedef uint16_t MtpEventCode;
+typedef uint32_t MtpSessionID;
+typedef uint32_t MtpStorageID;
+typedef uint32_t MtpTransactionID;
+typedef uint16_t MtpPropertyCode;
+typedef uint16_t MtpDataType;
+typedef uint16_t MtpObjectFormat;
+typedef MtpPropertyCode MtpDeviceProperty;
+typedef MtpPropertyCode MtpObjectProperty;
+
+// object handles are unique across all storage but only within a single session.
+// object handles cannot be reused after an object is deleted.
+// values 0x00000000 and 0xFFFFFFFF are reserved for special purposes.
+typedef uint32_t MtpObjectHandle;
+
+// Special values
+#define MTP_PARENT_ROOT         0xFFFFFFFF       // parent is root of the storage
+#define kInvalidObjectHandle    0xFFFFFFFF
+
+class MtpStorage;
+class MtpDevice;
+class MtpProperty;
+
+typedef Vector<MtpStorage *> MtpStorageList;
+typedef Vector<MtpDevice*> MtpDeviceList;
+typedef Vector<MtpProperty*> MtpPropertyList;
+
+typedef Vector<uint8_t> UInt8List;
+typedef Vector<uint16_t> UInt16List;
+typedef Vector<uint32_t> UInt32List;
+typedef Vector<uint64_t> UInt64List;
+typedef Vector<int8_t> Int8List;
+typedef Vector<int16_t> Int16List;
+typedef Vector<int32_t> Int32List;
+typedef Vector<int64_t> Int64List;
+
+typedef UInt16List MtpObjectPropertyList;
+typedef UInt16List MtpDevicePropertyList;
+typedef UInt16List MtpObjectFormatList;
+typedef UInt32List MtpObjectHandleList;
+typedef UInt16List MtpObjectPropertyList;
+typedef UInt32List MtpStorageIDList;
+
+typedef String8    MtpString;
+
+}; // namespace android
+
+#endif // _MTP_TYPES_H
diff --git a/media/mtp/MtpUtils.cpp b/media/mtp/MtpUtils.cpp
new file mode 100644
index 0000000..6ec8876
--- /dev/null
+++ b/media/mtp/MtpUtils.cpp
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpUtils"
+
+#include <stdio.h>
+#include <time.h>
+
+#include <cutils/tztime.h>
+#include "MtpUtils.h"
+
+namespace android {
+
+/*
+DateTime strings follow a compatible subset of the definition found in ISO 8601, and
+take the form of a Unicode string formatted as: "YYYYMMDDThhmmss.s". In this
+representation, YYYY shall be replaced by the year, MM replaced by the month (01-12),
+DD replaced by the day (01-31), T is a constant character 'T' delimiting time from date,
+hh is replaced by the hour (00-23), mm is replaced by the minute (00-59), and ss by the
+second (00-59). The ".s" is optional, and represents tenths of a second.
+*/
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds) {
+    int year, month, day, hour, minute, second;
+    struct tm tm;
+
+    if (sscanf(dateTime, "%04d%02d%02dT%02d%02d%02d",
+            &year, &month, &day, &hour, &minute, &second) != 6)
+        return false;
+    const char* tail = dateTime + 15;
+    // skip optional tenth of second
+    if (tail[0] == '.' && tail[1])
+        tail += 2;
+    //FIXME - support +/-hhmm
+    bool useUTC = (tail[0] == 'Z');
+
+    // hack to compute timezone
+    time_t dummy = time(NULL);  // initialize so localtime_r reads a valid time for the timezone fields
+    localtime_r(&dummy, &tm);
+
+    tm.tm_sec = second;
+    tm.tm_min = minute;
+    tm.tm_hour = hour;
+    tm.tm_mday = day;
+    tm.tm_mon = month - 1;  // mktime uses months in 0 - 11 range
+    tm.tm_year = year - 1900;
+    tm.tm_wday = 0;
+    tm.tm_isdst = -1;
+    if (useUTC)
+        outSeconds = mktime(&tm);
+    else
+        outSeconds = mktime_tz(&tm, tm.tm_zone);
+
+    return true;
+}
+
+void formatDateTime(time_t seconds, char* buffer, int bufferLength) {
+    struct tm tm;
+
+    localtime_r(&seconds, &tm);
+    snprintf(buffer, bufferLength, "%04d%02d%02dT%02d%02d%02d",
+        tm.tm_year + 1900, 
+        tm.tm_mon + 1, // localtime_r uses months in 0 - 11 range
+        tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec);
+}
+
+}  // namespace android
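
The comment block above pins down the DateTime shape ("YYYYMMDDThhmmss" with optional tenths of a second and a 'Z' suffix). A small sketch of the formatting side using only standard libc, since mktime_tz, used on the parsing side, is an Android cutils extension:

#include <stdio.h>
#include <time.h>

int main() {
    time_t now = time(NULL);
    struct tm tm;
    localtime_r(&now, &tm);

    char buf[32];
    snprintf(buf, sizeof(buf), "%04d%02d%02dT%02d%02d%02d",
             tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday,
             tm.tm_hour, tm.tm_min, tm.tm_sec);
    printf("%s\n", buf);   // e.g. 20101231T235959 (".s" tenths and "Z" are optional per the comment)
    return 0;
}
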
diff --git a/media/mtp/MtpUtils.h b/media/mtp/MtpUtils.h
new file mode 100644
index 0000000..61f9055
--- /dev/null
+++ b/media/mtp/MtpUtils.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_UTILS_H
+#define _MTP_UTILS_H
+
+#include <stdint.h>
+
+namespace android {
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds);
+void formatDateTime(time_t seconds, char* buffer, int bufferLength);
+
+}; // namespace android
+
+#endif // _MTP_UTILS_H
diff --git a/media/mtp/mtp.h b/media/mtp/mtp.h
new file mode 100644
index 0000000..6fedc16
--- /dev/null
+++ b/media/mtp/mtp.h
@@ -0,0 +1,481 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_H
+#define _MTP_H
+
+#include <stdint.h>
+#include <stdlib.h>
+
+#define MTP_STANDARD_VERSION            100
+
+#define MTP_FIRST_STORAGE_ID            0x00010001
+
+// Container Types
+#define MTP_CONTAINER_TYPE_UNDEFINED    0
+#define MTP_CONTAINER_TYPE_COMMAND      1
+#define MTP_CONTAINER_TYPE_DATA         2
+#define MTP_CONTAINER_TYPE_RESPONSE     3
+#define MTP_CONTAINER_TYPE_EVENT        4
+
+// Container Offsets
+#define MTP_CONTAINER_LENGTH_OFFSET             0
+#define MTP_CONTAINER_TYPE_OFFSET               4
+#define MTP_CONTAINER_CODE_OFFSET               6
+#define MTP_CONTAINER_TRANSACTION_ID_OFFSET     8
+#define MTP_CONTAINER_PARAMETER_OFFSET          12
+#define MTP_CONTAINER_HEADER_SIZE               12
+
+// MTP Data Types
+#define MTP_TYPE_UNDEFINED      0x0000          // Undefined
+#define MTP_TYPE_INT8           0x0001          // Signed 8-bit integer
+#define MTP_TYPE_UINT8          0x0002          // Unsigned 8-bit integer
+#define MTP_TYPE_INT16          0x0003          // Signed 16-bit integer
+#define MTP_TYPE_UINT16         0x0004          // Unsigned 16-bit integer
+#define MTP_TYPE_INT32          0x0005          // Signed 32-bit integer
+#define MTP_TYPE_UINT32         0x0006          // Unsigned 32-bit integer
+#define MTP_TYPE_INT64          0x0007          // Signed 64-bit integer
+#define MTP_TYPE_UINT64         0x0008          // Unsigned 64-bit integer
+#define MTP_TYPE_INT128         0x0009          // Signed 128-bit integer
+#define MTP_TYPE_UINT128        0x000A          // Unsigned 128-bit integer
+#define MTP_TYPE_AINT8          0x4001          // Array of signed 8-bit integers
+#define MTP_TYPE_AUINT8         0x4002          // Array of unsigned 8-bit integers
+#define MTP_TYPE_AINT16         0x4003          // Array of signed 16-bit integers
+#define MTP_TYPE_AUINT16        0x4004          // Array of unsigned 16-bit integers
+#define MTP_TYPE_AINT32         0x4005          // Array of signed 32-bit integers
+#define MTP_TYPE_AUINT32        0x4006          // Array of unsigned 32-bit integers
+#define MTP_TYPE_AINT64         0x4007          // Array of signed 64-bit integers
+#define MTP_TYPE_AUINT64        0x4008          // Array of unsigned 64-bit integers
+#define MTP_TYPE_AINT128        0x4009          // Array of signed 128-bit integers
+#define MTP_TYPE_AUINT128       0x400A          // Array of unsigned 128-bit integers
+#define MTP_TYPE_STR            0xFFFF          // Variable-length Unicode string
+
+// MTP Format Codes
+#define MTP_FORMAT_UNDEFINED                            0x3000   // Undefined object
+#define MTP_FORMAT_ASSOCIATION                          0x3001   // Association (for example, a folder)
+#define MTP_FORMAT_SCRIPT                               0x3002   // Device model-specific script
+#define MTP_FORMAT_EXECUTABLE                           0x3003   // Device model-specific binary executable
+#define MTP_FORMAT_TEXT                                 0x3004   // Text file
+#define MTP_FORMAT_HTML                                 0x3005   // Hypertext Markup Language file (text)
+#define MTP_FORMAT_DPOF                                 0x3006   // Digital Print Order Format file (text)
+#define MTP_FORMAT_AIFF                                 0x3007   // Audio clip
+#define MTP_FORMAT_WAV                                  0x3008   // Audio clip
+#define MTP_FORMAT_MP3                                  0x3009   // Audio clip
+#define MTP_FORMAT_AVI                                  0x300A   // Video clip
+#define MTP_FORMAT_MPEG                                 0x300B   // Video clip
+#define MTP_FORMAT_ASF                                  0x300C   // Microsoft Advanced Streaming Format (video)
+#define MTP_FORMAT_DEFINED                              0x3800   // Unknown image object
+#define MTP_FORMAT_EXIF_JPEG                            0x3801   // Exchangeable File Format, JEIDA standard
+#define MTP_FORMAT_TIFF_EP                              0x3802   // Tag Image File Format for Electronic Photography
+#define MTP_FORMAT_FLASHPIX                             0x3803   // Structured Storage Image Format
+#define MTP_FORMAT_BMP                                  0x3804   // Microsoft Windows Bitmap file
+#define MTP_FORMAT_CIFF                                 0x3805   // Canon Camera Image File Format
+#define MTP_FORMAT_GIF                                  0x3807   // Graphics Interchange Format
+#define MTP_FORMAT_JFIF                                 0x3808   // JPEG File Interchange Format
+#define MTP_FORMAT_CD                                   0x3809   // PhotoCD Image Pac
+#define MTP_FORMAT_PICT                                 0x380A   // Quickdraw Image Format
+#define MTP_FORMAT_PNG                                  0x380B   // Portable Network Graphics
+#define MTP_FORMAT_TIFF                                 0x380D   // Tag Image File Format
+#define MTP_FORMAT_TIFF_IT                              0x380E   // Tag Image File Format for Information Technology (graphic arts)
+#define MTP_FORMAT_JP2                                  0x380F   // JPEG2000 Baseline File Format
+#define MTP_FORMAT_JPX                                  0x3810   // JPEG2000 Extended File Format
+#define MTP_FORMAT_UNDEFINED_FIRMWARE                   0xB802
+#define MTP_FORMAT_WINDOWS_IMAGE_FORMAT                 0xB881
+#define MTP_FORMAT_UNDEFINED_AUDIO                      0xB900
+#define MTP_FORMAT_WMA                                  0xB901
+#define MTP_FORMAT_OGG                                  0xB902
+#define MTP_FORMAT_AAC                                  0xB903
+#define MTP_FORMAT_AUDIBLE                              0xB904
+#define MTP_FORMAT_FLAC                                 0xB906
+#define MTP_FORMAT_UNDEFINED_VIDEO                      0xB980
+#define MTP_FORMAT_WMV                                  0xB981
+#define MTP_FORMAT_MP4_CONTAINER                        0xB982  // ISO 14496-1
+#define MTP_FORMAT_MP2                                  0xB983
+#define MTP_FORMAT_3GP_CONTAINER                        0xB984  // 3GPP file format. Details: http://www.3gpp.org/ftp/Specs/html-info/26244.htm (page title - “Transparent end-to-end packet switched streaming service, 3GPP file format”).
+#define MTP_FORMAT_UNDEFINED_COLLECTION                 0xBA00
+#define MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM            0xBA01
+#define MTP_FORMAT_ABSTRACT_IMAGE_ALBUM                 0xBA02
+#define MTP_FORMAT_ABSTRACT_AUDIO_ALBUM                 0xBA03
+#define MTP_FORMAT_ABSTRACT_VIDEO_ALBUM                 0xBA04
+#define MTP_FORMAT_ABSTRACT_AV_PLAYLIST                 0xBA05
+#define MTP_FORMAT_ABSTRACT_CONTACT_GROUP               0xBA06
+#define MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER              0xBA07
+#define MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION        0xBA08
+#define MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST              0xBA09
+#define MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST              0xBA0A
+#define MTP_FORMAT_ABSTRACT_MEDIACAST                   0xBA0B // For use with mediacasts; references multimedia enclosures of RSS feeds or episodic content
+#define MTP_FORMAT_WPL_PLAYLIST                         0xBA10
+#define MTP_FORMAT_M3U_PLAYLIST                         0xBA11
+#define MTP_FORMAT_MPL_PLAYLIST                         0xBA12
+#define MTP_FORMAT_ASX_PLAYLIST                         0xBA13
+#define MTP_FORMAT_PLS_PLAYLIST                         0xBA14
+#define MTP_FORMAT_UNDEFINED_DOCUMENT                   0xBA80
+#define MTP_FORMAT_ABSTRACT_DOCUMENT                    0xBA81
+#define MTP_FORMAT_XML_DOCUMENT                         0xBA82
+#define MTP_FORMAT_MS_WORD_DOCUMENT                     0xBA83
+#define MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT           0xBA84
+#define MTP_FORMAT_MS_EXCEL_SPREADSHEET                 0xBA85
+#define MTP_FORMAT_MS_POWERPOINT_PRESENTATION           0xBA86
+#define MTP_FORMAT_UNDEFINED_MESSAGE                    0xBB00
+#define MTP_FORMAT_ABSTRACT_MESSSAGE                    0xBB01
+#define MTP_FORMAT_UNDEFINED_CONTACT                    0xBB80
+#define MTP_FORMAT_ABSTRACT_CONTACT                     0xBB81
+#define MTP_FORMAT_VCARD_2                              0xBB82
+
+// MTP Object Property Codes
+#define MTP_PROPERTY_STORAGE_ID                             0xDC01
+#define MTP_PROPERTY_OBJECT_FORMAT                          0xDC02
+#define MTP_PROPERTY_PROTECTION_STATUS                      0xDC03
+#define MTP_PROPERTY_OBJECT_SIZE                            0xDC04
+#define MTP_PROPERTY_ASSOCIATION_TYPE                       0xDC05
+#define MTP_PROPERTY_ASSOCIATION_DESC                       0xDC06
+#define MTP_PROPERTY_OBJECT_FILE_NAME                       0xDC07
+#define MTP_PROPERTY_DATE_CREATED                           0xDC08
+#define MTP_PROPERTY_DATE_MODIFIED                          0xDC09
+#define MTP_PROPERTY_KEYWORDS                               0xDC0A
+#define MTP_PROPERTY_PARENT_OBJECT                          0xDC0B
+#define MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS                0xDC0C
+#define MTP_PROPERTY_HIDDEN                                 0xDC0D
+#define MTP_PROPERTY_SYSTEM_OBJECT                          0xDC0E
+#define MTP_PROPERTY_PERSISTENT_UID                         0xDC41
+#define MTP_PROPERTY_SYNC_ID                                0xDC42
+#define MTP_PROPERTY_PROPERTY_BAG                           0xDC43
+#define MTP_PROPERTY_NAME                                   0xDC44
+#define MTP_PROPERTY_CREATED_BY                             0xDC45
+#define MTP_PROPERTY_ARTIST                                 0xDC46
+#define MTP_PROPERTY_DATE_AUTHORED                          0xDC47
+#define MTP_PROPERTY_DESCRIPTION                            0xDC48
+#define MTP_PROPERTY_URL_REFERENCE                          0xDC49
+#define MTP_PROPERTY_LANGUAGE_LOCALE                        0xDC4A
+#define MTP_PROPERTY_COPYRIGHT_INFORMATION                  0xDC4B
+#define MTP_PROPERTY_SOURCE                                 0xDC4C
+#define MTP_PROPERTY_ORIGIN_LOCATION                        0xDC4D
+#define MTP_PROPERTY_DATE_ADDED                             0xDC4E
+#define MTP_PROPERTY_NON_CONSUMABLE                         0xDC4F
+#define MTP_PROPERTY_CORRUPT_UNPLAYABLE                     0xDC50
+#define MTP_PROPERTY_PRODUCER_SERIAL_NUMBER                 0xDC51
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT           0xDC81
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE             0xDC82
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT           0xDC83
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH            0xDC84
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION         0xDC85
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA             0xDC86
+#define MTP_PROPERTY_WIDTH                                  0xDC87
+#define MTP_PROPERTY_HEIGHT                                 0xDC88
+#define MTP_PROPERTY_DURATION                               0xDC89
+#define MTP_PROPERTY_RATING                                 0xDC8A
+#define MTP_PROPERTY_TRACK                                  0xDC8B
+#define MTP_PROPERTY_GENRE                                  0xDC8C
+#define MTP_PROPERTY_CREDITS                                0xDC8D
+#define MTP_PROPERTY_LYRICS                                 0xDC8E
+#define MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID                0xDC8F
+#define MTP_PROPERTY_PRODUCED_BY                            0xDC90
+#define MTP_PROPERTY_USE_COUNT                              0xDC91
+#define MTP_PROPERTY_SKIP_COUNT                             0xDC92
+#define MTP_PROPERTY_LAST_ACCESSED                          0xDC93
+#define MTP_PROPERTY_PARENTAL_RATING                        0xDC94
+#define MTP_PROPERTY_META_GENRE                             0xDC95
+#define MTP_PROPERTY_COMPOSER                               0xDC96
+#define MTP_PROPERTY_EFFECTIVE_RATING                       0xDC97
+#define MTP_PROPERTY_SUBTITLE                               0xDC98
+#define MTP_PROPERTY_ORIGINAL_RELEASE_DATE                  0xDC99
+#define MTP_PROPERTY_ALBUM_NAME                             0xDC9A
+#define MTP_PROPERTY_ALBUM_ARTIST                           0xDC9B
+#define MTP_PROPERTY_MOOD                                   0xDC9C
+#define MTP_PROPERTY_DRM_STATUS                             0xDC9D
+#define MTP_PROPERTY_SUB_DESCRIPTION                        0xDC9E
+#define MTP_PROPERTY_IS_CROPPED                             0xDCD1
+#define MTP_PROPERTY_IS_COLOUR_CORRECTED                    0xDCD2
+#define MTP_PROPERTY_IMAGE_BIT_DEPTH                        0xDCD3
+#define MTP_PROPERTY_F_NUMBER                               0xDCD4
+#define MTP_PROPERTY_EXPOSURE_TIME                          0xDCD5
+#define MTP_PROPERTY_EXPOSURE_INDEX                         0xDCD6
+#define MTP_PROPERTY_TOTAL_BITRATE                          0xDE91
+#define MTP_PROPERTY_BITRATE_TYPE                           0xDE92
+#define MTP_PROPERTY_SAMPLE_RATE                            0xDE93
+#define MTP_PROPERTY_NUMBER_OF_CHANNELS                     0xDE94
+#define MTP_PROPERTY_AUDIO_BIT_DEPTH                        0xDE95
+#define MTP_PROPERTY_SCAN_TYPE                              0xDE97
+#define MTP_PROPERTY_AUDIO_WAVE_CODEC                       0xDE99
+#define MTP_PROPERTY_AUDIO_BITRATE                          0xDE9A
+#define MTP_PROPERTY_VIDEO_FOURCC_CODEC                     0xDE9B
+#define MTP_PROPERTY_VIDEO_BITRATE                          0xDE9C
+#define MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS            0xDE9D
+#define MTP_PROPERTY_KEYFRAME_DISTANCE                      0xDE9E
+#define MTP_PROPERTY_BUFFER_SIZE                            0xDE9F
+#define MTP_PROPERTY_ENCODING_QUALITY                       0xDEA0
+#define MTP_PROPERTY_ENCODING_PROFILE                       0xDEA1
+#define MTP_PROPERTY_DISPLAY_NAME                           0xDCE0
+#define MTP_PROPERTY_BODY_TEXT                              0xDCE1
+#define MTP_PROPERTY_SUBJECT                                0xDCE2
+#define MTP_PROPERTY_PRIORITY                               0xDCE3
+#define MTP_PROPERTY_GIVEN_NAME                             0xDD00
+#define MTP_PROPERTY_MIDDLE_NAMES                           0xDD01
+#define MTP_PROPERTY_FAMILY_NAME                            0xDD02
+#define MTP_PROPERTY_PREFIX                                 0xDD03
+#define MTP_PROPERTY_SUFFIX                                 0xDD04
+#define MTP_PROPERTY_PHONETIC_GIVEN_NAME                    0xDD05
+#define MTP_PROPERTY_PHONETIC_FAMILY_NAME                   0xDD06
+#define MTP_PROPERTY_EMAIL_PRIMARY                          0xDD07
+#define MTP_PROPERTY_EMAIL_PERSONAL_1                       0xDD08
+#define MTP_PROPERTY_EMAIL_PERSONAL_2                       0xDD09
+#define MTP_PROPERTY_EMAIL_BUSINESS_1                       0xDD0A
+#define MTP_PROPERTY_EMAIL_BUSINESS_2                       0xDD0B
+#define MTP_PROPERTY_EMAIL_OTHERS                           0xDD0C
+#define MTP_PROPERTY_PHONE_NUMBER_PRIMARY                   0xDD0D
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL                  0xDD0E
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2                0xDD0F
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS                  0xDD10
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2                0xDD11
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE                    0xDD12
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE_2                  0xDD13
+#define MTP_PROPERTY_FAX_NUMBER_PRIMARY                     0xDD14
+#define MTP_PROPERTY_FAX_NUMBER_PERSONAL                    0xDD15
+#define MTP_PROPERTY_FAX_NUMBER_BUSINESS                    0xDD16
+#define MTP_PROPERTY_PAGER_NUMBER                           0xDD17
+#define MTP_PROPERTY_PHONE_NUMBER_OTHERS                    0xDD18
+#define MTP_PROPERTY_PRIMARY_WEB_ADDRESS                    0xDD19
+#define MTP_PROPERTY_PERSONAL_WEB_ADDRESS                   0xDD1A
+#define MTP_PROPERTY_BUSINESS_WEB_ADDRESS                   0xDD1B
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS              0xDD1C
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2            0xDD1D
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3            0xDD1E
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL           0xDD1F
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1         0xDD20
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2         0xDD21
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY           0xDD22
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION         0xDD23
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE    0xDD24
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY        0xDD25
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL           0xDD26
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1         0xDD27
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2         0xDD28
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY           0xDD29
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION         0xDD2A
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE    0xDD2B
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY        0xDD2C
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL              0xDD2D
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1            0xDD2E
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2            0xDD2F
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY              0xDD30
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION            0xDD31
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE       0xDD32
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY           0xDD33
+#define MTP_PROPERTY_ORGANIZATION_NAME                      0xDD34
+#define MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME             0xDD35
+#define MTP_PROPERTY_ROLE                                   0xDD36
+#define MTP_PROPERTY_BIRTHDATE                              0xDD37
+#define MTP_PROPERTY_MESSAGE_TO                             0xDD40
+#define MTP_PROPERTY_MESSAGE_CC                             0xDD41
+#define MTP_PROPERTY_MESSAGE_BCC                            0xDD42
+#define MTP_PROPERTY_MESSAGE_READ                           0xDD43
+#define MTP_PROPERTY_MESSAGE_RECEIVED_TIME                  0xDD44
+#define MTP_PROPERTY_MESSAGE_SENDER                         0xDD45
+#define MTP_PROPERTY_ACTIVITY_BEGIN_TIME                    0xDD50
+#define MTP_PROPERTY_ACTIVITY_END_TIME                      0xDD51
+#define MTP_PROPERTY_ACTIVITY_LOCATION                      0xDD52
+#define MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES            0xDD54
+#define MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES            0xDD55
+#define MTP_PROPERTY_ACTIVITY_RESOURCES                     0xDD56
+#define MTP_PROPERTY_ACTIVITY_ACCEPTED                      0xDD57
+#define MTP_PROPERTY_ACTIVITY_TENTATIVE                     0xDD58
+#define MTP_PROPERTY_ACTIVITY_DECLINED                      0xDD59
+#define MTP_PROPERTY_ACTIVITY_REMAINDER_TIME                0xDD5A
+#define MTP_PROPERTY_ACTIVITY_OWNER                         0xDD5B
+#define MTP_PROPERTY_ACTIVITY_STATUS                        0xDD5C
+#define MTP_PROPERTY_OWNER                                  0xDD5D
+#define MTP_PROPERTY_EDITOR                                 0xDD5E
+#define MTP_PROPERTY_WEBMASTER                              0xDD5F
+#define MTP_PROPERTY_URL_SOURCE                             0xDD60
+#define MTP_PROPERTY_URL_DESTINATION                        0xDD61
+#define MTP_PROPERTY_TIME_BOOKMARK                          0xDD62
+#define MTP_PROPERTY_OBJECT_BOOKMARK                        0xDD63
+#define MTP_PROPERTY_BYTE_BOOKMARK                          0xDD64
+#define MTP_PROPERTY_LAST_BUILD_DATE                        0xDD70
+#define MTP_PROPERTY_TIME_TO_LIVE                           0xDD71
+#define MTP_PROPERTY_MEDIA_GUID                             0xDD72
+
+// MTP Device Property Codes
+#define MTP_DEVICE_PROPERTY_UNDEFINED                       0x5000
+#define MTP_DEVICE_PROPERTY_BATTERY_LEVEL                   0x5001
+#define MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE                 0x5002
+#define MTP_DEVICE_PROPERTY_IMAGE_SIZE                      0x5003
+#define MTP_DEVICE_PROPERTY_COMPRESSION_SETTING             0x5004
+#define MTP_DEVICE_PROPERTY_WHITE_BALANCE                   0x5005
+#define MTP_DEVICE_PROPERTY_RGB_GAIN                        0x5006
+#define MTP_DEVICE_PROPERTY_F_NUMBER                        0x5007
+#define MTP_DEVICE_PROPERTY_FOCAL_LENGTH                    0x5008
+#define MTP_DEVICE_PROPERTY_FOCUS_DISTANCE                  0x5009
+#define MTP_DEVICE_PROPERTY_FOCUS_MODE                      0x500A
+#define MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE          0x500B
+#define MTP_DEVICE_PROPERTY_FLASH_MODE                      0x500C
+#define MTP_DEVICE_PROPERTY_EXPOSURE_TIME                   0x500D
+#define MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE           0x500E
+#define MTP_DEVICE_PROPERTY_EXPOSURE_INDEX                  0x500F
+#define MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION      0x5010
+#define MTP_DEVICE_PROPERTY_DATETIME                        0x5011
+#define MTP_DEVICE_PROPERTY_CAPTURE_DELAY                   0x5012
+#define MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE              0x5013
+#define MTP_DEVICE_PROPERTY_CONTRAST                        0x5014
+#define MTP_DEVICE_PROPERTY_SHARPNESS                       0x5015
+#define MTP_DEVICE_PROPERTY_DIGITAL_ZOOM                    0x5016
+#define MTP_DEVICE_PROPERTY_EFFECT_MODE                     0x5017
+#define MTP_DEVICE_PROPERTY_BURST_NUMBER                    0x5018
+#define MTP_DEVICE_PROPERTY_BURST_INTERVAL                  0x5019
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER                0x501A
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL              0x501B
+#define MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE             0x501C
+#define MTP_DEVICE_PROPERTY_UPLOAD_URL                      0x501D
+#define MTP_DEVICE_PROPERTY_ARTIST                          0x501E
+#define MTP_DEVICE_PROPERTY_COPYRIGHT_INFO                  0x501F
+#define MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER         0xD401
+#define MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME            0xD402
+#define MTP_DEVICE_PROPERTY_VOLUME                          0xD403
+#define MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED       0xD404
+#define MTP_DEVICE_PROPERTY_DEVICE_ICON                     0xD405
+#define MTP_DEVICE_PROPERTY_PLAYBACK_RATE                   0xD410
+#define MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT                 0xD411
+#define MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX        0xD412
+#define MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO  0xD406
+#define MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE           0xD407
+
+// MTP Operation Codes
+#define MTP_OPERATION_GET_DEVICE_INFO                       0x1001
+#define MTP_OPERATION_OPEN_SESSION                          0x1002
+#define MTP_OPERATION_CLOSE_SESSION                         0x1003
+#define MTP_OPERATION_GET_STORAGE_IDS                       0x1004
+#define MTP_OPERATION_GET_STORAGE_INFO                      0x1005
+#define MTP_OPERATION_GET_NUM_OBJECTS                       0x1006
+#define MTP_OPERATION_GET_OBJECT_HANDLES                    0x1007
+#define MTP_OPERATION_GET_OBJECT_INFO                       0x1008
+#define MTP_OPERATION_GET_OBJECT                            0x1009
+#define MTP_OPERATION_GET_THUMB                             0x100A
+#define MTP_OPERATION_DELETE_OBJECT                         0x100B
+#define MTP_OPERATION_SEND_OBJECT_INFO                      0x100C
+#define MTP_OPERATION_SEND_OBJECT                           0x100D
+#define MTP_OPERATION_INITIATE_CAPTURE                      0x100E
+#define MTP_OPERATION_FORMAT_STORE                          0x100F
+#define MTP_OPERATION_RESET_DEVICE                          0x1010
+#define MTP_OPERATION_SELF_TEST                             0x1011
+#define MTP_OPERATION_SET_OBJECT_PROTECTION                 0x1012
+#define MTP_OPERATION_POWER_DOWN                            0x1013
+#define MTP_OPERATION_GET_DEVICE_PROP_DESC                  0x1014
+#define MTP_OPERATION_GET_DEVICE_PROP_VALUE                 0x1015
+#define MTP_OPERATION_SET_DEVICE_PROP_VALUE                 0x1016
+#define MTP_OPERATION_RESET_DEVICE_PROP_VALUE               0x1017
+#define MTP_OPERATION_TERMINATE_OPEN_CAPTURE                0x1018
+#define MTP_OPERATION_MOVE_OBJECT                           0x1019
+#define MTP_OPERATION_COPY_OBJECT                           0x101A
+#define MTP_OPERATION_GET_PARTIAL_OBJECT                    0x101B
+#define MTP_OPERATION_INITIATE_OPEN_CAPTURE                 0x101C
+#define MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED            0x9801
+#define MTP_OPERATION_GET_OBJECT_PROP_DESC                  0x9802
+#define MTP_OPERATION_GET_OBJECT_PROP_VALUE                 0x9803
+#define MTP_OPERATION_SET_OBJECT_PROP_VALUE                 0x9804
+#define MTP_OPERATION_GET_OBJECT_PROP_LIST                  0x9805
+#define MTP_OPERATION_SET_OBJECT_PROP_LIST                  0x9806
+#define MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC          0x9807
+#define MTP_OPERATION_SEND_OBJECT_PROP_LIST                 0x9808
+#define MTP_OPERATION_GET_OBJECT_REFERENCES                 0x9810
+#define MTP_OPERATION_SET_OBJECT_REFERENCES                 0x9811
+#define MTP_OPERATION_SKIP                                  0x9820
+
+// MTP Response Codes
+#define MTP_RESPONSE_UNDEFINED                                  0x2000
+#define MTP_RESPONSE_OK                                         0x2001
+#define MTP_RESPONSE_GENERAL_ERROR                              0x2002
+#define MTP_RESPONSE_SESSION_NOT_OPEN                           0x2003
+#define MTP_RESPONSE_INVALID_TRANSACTION_ID                     0x2004
+#define MTP_RESPONSE_OPERATION_NOT_SUPPORTED                    0x2005
+#define MTP_RESPONSE_PARAMETER_NOT_SUPPORTED                    0x2006
+#define MTP_RESPONSE_INCOMPLETE_TRANSFER                        0x2007
+#define MTP_RESPONSE_INVALID_STORAGE_ID                         0x2008
+#define MTP_RESPONSE_INVALID_OBJECT_HANDLE                      0x2009
+#define MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED                  0x200A
+#define MTP_RESPONSE_INVALID_OBJECT_FORMAT_CODE                 0x200B
+#define MTP_RESPONSE_STORAGE_FULL                               0x200C
+#define MTP_RESPONSE_OBJECT_WRITE_PROTECTED                     0x200D
+#define MTP_RESPONSE_STORE_READ_ONLY                            0x200E
+#define MTP_RESPONSE_ACCESS_DENIED                              0x200F
+#define MTP_RESPONSE_NO_THUMBNAIL_PRESENT                       0x2010
+#define MTP_RESPONSE_SELF_TEST_FAILED                           0x2011
+#define MTP_RESPONSE_PARTIAL_DELETION                           0x2012
+#define MTP_RESPONSE_STORE_NOT_AVAILABLE                        0x2013
+#define MTP_RESPONSE_SPECIFICATION_BY_FORMAT_UNSUPPORTED        0x2014
+#define MTP_RESPONSE_NO_VALID_OBJECT_INFO                       0x2015
+#define MTP_RESPONSE_INVALID_CODE_FORMAT                        0x2016
+#define MTP_RESPONSE_UNKNOWN_VENDOR_CODE                        0x2017
+#define MTP_RESPONSE_CAPTURE_ALREADY_TERMINATED                 0x2018
+#define MTP_RESPONSE_DEVICE_BUSY                                0x2019
+#define MTP_RESPONSE_INVALID_PARENT_OBJECT                      0x201A
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_FORMAT                 0x201B
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_VALUE                  0x201C
+#define MTP_RESPONSE_INVALID_PARAMETER                          0x201D
+#define MTP_RESPONSE_SESSION_ALREADY_OPEN                       0x201E
+#define MTP_RESPONSE_TRANSACTION_CANCELLED                      0x201F
+#define MTP_RESPONSE_SPECIFICATION_OF_DESTINATION_UNSUPPORTED   0x2020
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_CODE                   0xA801
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT                 0xA802
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_VALUE                  0xA803
+#define MTP_RESPONSE_INVALID_OBJECT_REFERENCE                   0xA804
+#define MTP_RESPONSE_GROUP_NOT_SUPPORTED                        0xA805
+#define MTP_RESPONSE_INVALID_DATASET                            0xA806
+#define MTP_RESPONSE_SPECIFICATION_BY_GROUP_UNSUPPORTED         0xA807
+#define MTP_RESPONSE_SPECIFICATION_BY_DEPTH_UNSUPPORTED         0xA808
+#define MTP_RESPONSE_OBJECT_TOO_LARGE                           0xA809
+#define MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED                  0xA80A
+
+// MTP Event Codes
+#define MTP_EVENT_UNDEFINED                         0x4000
+#define MTP_EVENT_CANCEL_TRANSACTION                0x4001
+#define MTP_EVENT_OBJECT_ADDED                      0x4002
+#define MTP_EVENT_OBJECT_REMOVED                    0x4003
+#define MTP_EVENT_STORE_ADDED                       0x4004
+#define MTP_EVENT_STORE_REMOVED                     0x4005
+#define MTP_EVENT_DEVICE_PROP_CHANGED               0x4006
+#define MTP_EVENT_OBJECT_INFO_CHANGED               0x4007
+#define MTP_EVENT_DEVICE_INFO_CHANGED               0x4008
+#define MTP_EVENT_REQUEST_OBJECT_TRANSFER           0x4009
+#define MTP_EVENT_STORE_FULL                        0x400A
+#define MTP_EVENT_DEVICE_RESET                      0x400B
+#define MTP_EVENT_STORAGE_INFO_CHANGED              0x400C
+#define MTP_EVENT_CAPTURE_COMPLETE                  0x400D
+#define MTP_EVENT_UNREPORTED_STATUS                 0x400E
+#define MTP_EVENT_OBJECT_PROP_CHANGED               0xC801
+#define MTP_EVENT_OBJECT_PROP_DESC_CHANGED          0xC802
+#define MTP_EVENT_OBJECT_REFERENCES_CHANGED         0xC803
+
+// Storage Type
+#define MTP_STORAGE_FIXED_ROM                       0x0001
+#define MTP_STORAGE_REMOVABLE_ROM                   0x0002
+#define MTP_STORAGE_FIXED_RAM                       0x0003
+#define MTP_STORAGE_REMOVABLE_RAM                   0x0004
+
+// Storage File System
+#define MTP_STORAGE_FILESYSTEM_FLAT                 0x0001
+#define MTP_STORAGE_FILESYSTEM_HIERARCHICAL         0x0002
+#define MTP_STORAGE_FILESYSTEM_DCF                  0x0003
+
+// Storage Access Capability
+#define MTP_STORAGE_READ_WRITE                      0x0000
+#define MTP_STORAGE_READ_ONLY_WITHOUT_DELETE        0x0001
+#define MTP_STORAGE_READ_ONLY_WITH_DELETE           0x0002
+
+// Association Type
+#define MTP_ASSOCIATION_TYPE_UNDEFINED              0x0000
+#define MTP_ASSOCIATION_TYPE_GENERIC_FOLDER         0x0001
+
+#endif // _MTP_H
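
The Container Offsets section of mtp.h fixes a 12-byte header at the front of every container: a 32-bit length, 16-bit type, 16-bit code and 32-bit transaction ID, with parameters starting at offset 12. A minimal sketch of packing an OK response header with those constants; the transaction ID is illustrative, and the memcpy trick assumes a little-endian host, which matches the little-endian MTP wire format:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#include "mtp.h"   // the header added above

static void put16(uint8_t* p, uint16_t v) { memcpy(p, &v, sizeof(v)); }
static void put32(uint8_t* p, uint32_t v) { memcpy(p, &v, sizeof(v)); }

int main() {
    // An OK response with no parameters is just the 12-byte header.
    uint8_t packet[MTP_CONTAINER_HEADER_SIZE];
    put32(packet + MTP_CONTAINER_LENGTH_OFFSET,         sizeof(packet));
    put16(packet + MTP_CONTAINER_TYPE_OFFSET,           MTP_CONTAINER_TYPE_RESPONSE);
    put16(packet + MTP_CONTAINER_CODE_OFFSET,           MTP_RESPONSE_OK);
    put32(packet + MTP_CONTAINER_TRANSACTION_ID_OFFSET, 1);   // illustrative transaction ID

    for (size_t i = 0; i < sizeof(packet); i++)
        printf("%02X ", packet[i]);
    printf("\n");   // 0C 00 00 00 03 00 01 20 01 00 00 00
    return 0;
}
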
diff --git a/services/audioflinger/A2dpAudioInterface.cpp b/services/audioflinger/A2dpAudioInterface.cpp
index 995e31c..d926cb1 100644
--- a/services/audioflinger/A2dpAudioInterface.cpp
+++ b/services/audioflinger/A2dpAudioInterface.cpp
@@ -23,10 +23,13 @@
 
 #include "A2dpAudioInterface.h"
 #include "audio/liba2dp.h"
-
+#include <hardware_legacy/power.h>
 
 namespace android {
 
+static const char *sA2dpWakeLock = "A2dpOutputStream";
+#define MAX_WRITE_RETRIES  5
+
 // ----------------------------------------------------------------------------
 
 //AudioHardwareInterface* A2dpAudioInterface::createA2dpInterface()
@@ -257,52 +260,74 @@
     if (pRate) *pRate = lRate;
 
     mDevice = device;
+    mBufferDurationUs = ((bufferSize() * 1000 )/ frameSize() / sampleRate()) * 1000;
     return NO_ERROR;
 }
 
 A2dpAudioInterface::A2dpAudioStreamOut::~A2dpAudioStreamOut()
 {
     LOGV("A2dpAudioStreamOut destructor");
-    standby();
     close();
     LOGV("A2dpAudioStreamOut destructor returning from close()");
 }
 
 ssize_t A2dpAudioInterface::A2dpAudioStreamOut::write(const void* buffer, size_t bytes)
 {
-    Mutex::Autolock lock(mLock);
-
-    size_t remaining = bytes;
     status_t status = -1;
+    {
+        Mutex::Autolock lock(mLock);
 
-    if (!mBluetoothEnabled || mClosing || mSuspended) {
-        LOGV("A2dpAudioStreamOut::write(), but bluetooth disabled \
-               mBluetoothEnabled %d, mClosing %d, mSuspended %d",
-                mBluetoothEnabled, mClosing, mSuspended);
-        goto Error;
-    }
+        size_t remaining = bytes;
 
-    status = init();
-    if (status < 0)
-        goto Error;
-
-    while (remaining > 0) {
-        status = a2dp_write(mData, buffer, remaining);
-        if (status <= 0) {
-            LOGE("a2dp_write failed err: %d\n", status);
+        if (!mBluetoothEnabled || mClosing || mSuspended) {
+            LOGV("A2dpAudioStreamOut::write(), but bluetooth disabled \
+                   mBluetoothEnabled %d, mClosing %d, mSuspended %d",
+                    mBluetoothEnabled, mClosing, mSuspended);
             goto Error;
         }
-        remaining -= status;
-        buffer = ((char *)buffer) + status;
+
+        if (mStandby) {
+            acquire_wake_lock (PARTIAL_WAKE_LOCK, sA2dpWakeLock);
+            mStandby = false;
+            mLastWriteTime = systemTime();
+        }
+
+        status = init();
+        if (status < 0)
+            goto Error;
+
+        int retries = MAX_WRITE_RETRIES;
+        while (remaining > 0 && retries) {
+            status = a2dp_write(mData, buffer, remaining);
+            if (status < 0) {
+                LOGE("a2dp_write failed err: %d\n", status);
+                goto Error;
+            }
+            if (status == 0) {
+                retries--;
+            }
+            remaining -= status;
+            buffer = (char *)buffer + status;
+        }
+
+        // if A2DP sink runs abnormally fast, sleep a little so that audioflinger mixer thread
+        // does not spin and starve other threads.
+        // NOTE: It is likely that the A2DP headset is being disconnected
+        nsecs_t now = systemTime();
+        if ((uint32_t)ns2us(now - mLastWriteTime) < (mBufferDurationUs >> 2)) {
+            LOGV("A2DP sink runs too fast");
+            usleep(mBufferDurationUs - (uint32_t)ns2us(now - mLastWriteTime));
+        }
+        mLastWriteTime = now;
+        return bytes;
+
     }
-
-    mStandby = false;
-
-    return bytes;
-
 Error:
+
+    standby();
+
     // Simulate audio output timing in case of error
-    usleep(((bytes * 1000 )/ frameSize() / sampleRate()) * 1000);
+    usleep(mBufferDurationUs);
 
     return status;
 }
@@ -324,19 +349,22 @@
 
 status_t A2dpAudioInterface::A2dpAudioStreamOut::standby()
 {
-    int result = 0;
-
-    if (mClosing) {
-        LOGV("Ignore standby, closing");
-        return result;
-    }
-
     Mutex::Autolock lock(mLock);
+    return standby_l();
+}
+
+status_t A2dpAudioInterface::A2dpAudioStreamOut::standby_l()
+{
+    int result = NO_ERROR;
 
     if (!mStandby) {
-        result = a2dp_stop(mData);
-        if (result == 0)
-            mStandby = true;
+        LOGV_IF(mClosing || !mBluetoothEnabled, "Standby skip stop: closing %d enabled %d",
+                mClosing, mBluetoothEnabled);
+        if (!mClosing && mBluetoothEnabled) {
+            result = a2dp_stop(mData);
+        }
+        release_wake_lock(sA2dpWakeLock);
+        mStandby = true;
     }
 
     return result;
@@ -362,6 +390,9 @@
     key = String8("closing");
     if (param.get(key, value) == NO_ERROR) {
         mClosing = (value == "true");
+        if (mClosing) {
+            standby();
+        }
         param.remove(key);
     }
     key = AudioParameter::keyRouting;
@@ -444,6 +475,7 @@
 
 status_t A2dpAudioInterface::A2dpAudioStreamOut::close_l()
 {
+    standby_l();
     if (mData) {
         LOGV("A2dpAudioStreamOut::close_l() calling a2dp_cleanup(mData)");
         a2dp_cleanup(mData);
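
The throttle added to write() hinges on mBufferDurationUs, computed in the first hunk above as the buffer length in frames divided by the sample rate, scaled to microseconds. A worked example of the same integer arithmetic with made-up (but typical) stream parameters:

#include <stdint.h>
#include <stdio.h>

int main() {
    const uint32_t bufferSize = 4096;    // bytes per write, hypothetical
    const uint32_t frameSize  = 4;       // 16-bit stereo
    const uint32_t sampleRate = 44100;   // Hz

    // Same order of operations as the patch: milliseconds first, then scaled to microseconds.
    uint32_t durationUs = ((bufferSize * 1000) / frameSize / sampleRate) * 1000;
    printf("buffer covers ~%u us of audio\n", durationUs);   // 23000 us for these numbers

    // write() sleeps when a buffer is consumed in under a quarter of its duration.
    uint32_t threshold = durationUs >> 2;
    printf("throttle threshold: %u us\n", threshold);        // 5750 us
    return 0;
}

A write that completes in well under the buffer duration means the sink is draining data faster than real time, which the comment in the patch attributes to a disconnecting headset, so the thread sleeps out the remainder instead of spinning.
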
diff --git a/services/audioflinger/A2dpAudioInterface.h b/services/audioflinger/A2dpAudioInterface.h
index 48154f9..dbe2c6a 100644
--- a/services/audioflinger/A2dpAudioInterface.h
+++ b/services/audioflinger/A2dpAudioInterface.h
@@ -103,6 +103,7 @@
                 status_t    setAddress(const char* address);
                 status_t    setBluetoothEnabled(bool enabled);
                 status_t    setSuspended(bool onOff);
+                status_t    standby_l();
 
     private:
                 int         mFd;
@@ -116,6 +117,8 @@
                 uint32_t    mDevice;
                 bool        mClosing;
                 bool        mSuspended;
+                nsecs_t     mLastWriteTime;
+                uint32_t    mBufferDurationUs;
     };
 
     friend class A2dpAudioStreamOut;
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index 22ecc54..69a4adc 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -120,12 +120,4 @@
     endif
 endif
 
-ifeq ($(BOARD_USE_LVMX),true)
-    LOCAL_CFLAGS += -DLVMX
-    LOCAL_C_INCLUDES += vendor/nxp
-    LOCAL_STATIC_LIBRARIES += liblifevibes
-    LOCAL_SHARED_LIBRARIES += liblvmxservice
-#    LOCAL_SHARED_LIBRARIES += liblvmxipc
-endif
-
 include $(BUILD_SHARED_LIBRARY)
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 5935bf9..2b08ab5 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -24,6 +24,7 @@
 #include <sys/time.h>
 #include <sys/resource.h>
 
+#include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
 #include <binder/Parcel.h>
@@ -35,6 +36,7 @@
 
 #include <media/AudioTrack.h>
 #include <media/AudioRecord.h>
+#include <media/IMediaPlayerService.h>
 
 #include <private/media/AudioTrackShared.h>
 #include <private/media/AudioEffectShared.h>
@@ -47,10 +49,6 @@
 #include "A2dpAudioInterface.h"
 #endif
 
-#ifdef LVMX
-#include "lifevibes.h"
-#endif
-
 #include <media/EffectsFactoryApi.h>
 #include <media/EffectVisualizerApi.h>
 
@@ -125,31 +123,43 @@
 #endif
 }
 
+// Report audio output (amplifier) usage for battery tracking
+static void addBatteryData(uint32_t params) {
+    sp<IBinder> binder =
+        defaultServiceManager()->getService(String16("media.player"));
+    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+    if (service.get() == NULL) {
+        LOGW("Cannot connect to the MediaPlayerService for battery tracking");
+        return;
+    }
+
+    service->addBatteryData(params);
+}
+
 // ----------------------------------------------------------------------------
 
 AudioFlinger::AudioFlinger()
     : BnAudioFlinger(),
         mAudioHardware(0), mMasterVolume(1.0f), mMasterMute(false), mNextUniqueId(1)
 {
+    Mutex::Autolock _l(mLock);
+
     mHardwareStatus = AUDIO_HW_IDLE;
 
     mAudioHardware = AudioHardwareInterface::create();
 
     mHardwareStatus = AUDIO_HW_INIT;
     if (mAudioHardware->initCheck() == NO_ERROR) {
-        // open 16-bit output stream for s/w mixer
+        AutoMutex lock(mHardwareLock);
         mMode = AudioSystem::MODE_NORMAL;
-        setMode(mMode);
-
-        setMasterVolume(1.0f);
-        setMasterMute(false);
+        mHardwareStatus = AUDIO_HW_SET_MODE;
+        mAudioHardware->setMode(mMode);
+        mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME;
+        mAudioHardware->setMasterVolume(1.0f);
+        mHardwareStatus = AUDIO_HW_IDLE;
     } else {
         LOGE("Couldn't even initialize the stubbed audio hardware!");
     }
-#ifdef LVMX
-    LifeVibes::init();
-    mLifeVibesClientPid = -1;
-#endif
 }
 
 AudioFlinger::~AudioFlinger()
@@ -343,7 +353,7 @@
             lSessionId = *sessionId;
         } else {
             // if no audio session id is provided, create one here
-            lSessionId = nextUniqueId();
+            lSessionId = nextUniqueId_l();
             if (sessionId != NULL) {
                 *sessionId = lSessionId;
             }
@@ -440,13 +450,16 @@
     }
 
     // when hw supports master volume, don't scale in sw mixer
-    AutoMutex lock(mHardwareLock);
-    mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME;
-    if (mAudioHardware->setMasterVolume(value) == NO_ERROR) {
-        value = 1.0f;
+    { // scope for the lock
+        AutoMutex lock(mHardwareLock);
+        mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME;
+        if (mAudioHardware->setMasterVolume(value) == NO_ERROR) {
+            value = 1.0f;
+        }
+        mHardwareStatus = AUDIO_HW_IDLE;
     }
-    mHardwareStatus = AUDIO_HW_IDLE;
 
+    Mutex::Autolock _l(mLock);
     mMasterVolume = value;
     for (uint32_t i = 0; i < mPlaybackThreads.size(); i++)
        mPlaybackThreads.valueAt(i)->setMasterVolume(value);
@@ -479,9 +492,6 @@
         mMode = mode;
         for (uint32_t i = 0; i < mPlaybackThreads.size(); i++)
            mPlaybackThreads.valueAt(i)->setMode(mode);
-#ifdef LVMX
-        LifeVibes::setMode(mode);
-#endif
     }
 
     return ret;
@@ -517,6 +527,7 @@
         return PERMISSION_DENIED;
     }
 
+    Mutex::Autolock _l(mLock);
     mMasterMute = muted;
     for (uint32_t i = 0; i < mPlaybackThreads.size(); i++)
        mPlaybackThreads.valueAt(i)->setMasterMute(muted);
@@ -579,6 +590,7 @@
         return BAD_VALUE;
     }
 
+    AutoMutex lock(mLock);
     mStreamTypes[stream].mute = muted;
     for (uint32_t i = 0; i < mPlaybackThreads.size(); i++)
        mPlaybackThreads.valueAt(i)->setStreamMute(stream, muted);
@@ -616,17 +628,6 @@
     return mStreamTypes[stream].mute;
 }
 
-bool AudioFlinger::isStreamActive(int stream) const
-{
-    Mutex::Autolock _l(mLock);
-    for (uint32_t i = 0; i < mPlaybackThreads.size(); i++) {
-        if (mPlaybackThreads.valueAt(i)->isStreamActive(stream)) {
-            return true;
-        }
-    }
-    return false;
-}
-
 status_t AudioFlinger::setParameters(int ioHandle, const String8& keyValuePairs)
 {
     status_t result;
@@ -638,39 +639,11 @@
         return PERMISSION_DENIED;
     }
 
-#ifdef LVMX
-    AudioParameter param = AudioParameter(keyValuePairs);
-    LifeVibes::setParameters(ioHandle,keyValuePairs);
-    String8 key = String8(AudioParameter::keyRouting);
-    int device;
-    if (NO_ERROR != param.getInt(key, device)) {
-        device = -1;
-    }
-
-    key = String8(LifevibesTag);
-    String8 value;
-    int musicEnabled = -1;
-    if (NO_ERROR == param.get(key, value)) {
-        if (value == LifevibesEnable) {
-            mLifeVibesClientPid = IPCThreadState::self()->getCallingPid();
-            musicEnabled = 1;
-        } else if (value == LifevibesDisable) {
-            mLifeVibesClientPid = -1;
-            musicEnabled = 0;
-        }
-    }
-#endif
-
     // ioHandle == 0 means the parameters are global to the audio hardware interface
     if (ioHandle == 0) {
         AutoMutex lock(mHardwareLock);
         mHardwareStatus = AUDIO_SET_PARAMETER;
         result = mAudioHardware->setParameters(keyValuePairs);
-#ifdef LVMX
-        if (musicEnabled != -1) {
-            LifeVibes::enableMusic((bool) musicEnabled);
-        }
-#endif
         mHardwareStatus = AUDIO_HW_IDLE;
         return result;
     }
@@ -687,11 +660,6 @@
     }
     if (thread != NULL) {
         result = thread->setParameters(keyValuePairs);
-#ifdef LVMX
-        if ((NO_ERROR == result) && (device != -1)) {
-            LifeVibes::setDevice(LifeVibes::threadIdToAudioOutputType(thread->id()), device);
-        }
-#endif
         return result;
     }
     return BAD_VALUE;
@@ -805,13 +773,6 @@
     if (index >= 0) {
         sp <NotificationClient> client = mNotificationClients.valueFor(pid);
         LOGV("removeNotificationClient() %p, pid %d", client.get(), pid);
-#ifdef LVMX
-        if (pid == mLifeVibesClientPid) {
-            LOGV("Disabling lifevibes");
-            LifeVibes::enableMusic(false);
-            mLifeVibesClientPid = -1;
-        }
-#endif
         mNotificationClients.removeItem(pid);
     }
 }
@@ -1217,24 +1178,12 @@
 
 status_t AudioFlinger::PlaybackThread::setMasterVolume(float value)
 {
-#ifdef LVMX
-    int audioOutputType = LifeVibes::getMixerType(mId, mType);
-    if (LifeVibes::audioOutputTypeIsLifeVibes(audioOutputType)) {
-        LifeVibes::setMasterVolume(audioOutputType, value);
-    }
-#endif
     mMasterVolume = value;
     return NO_ERROR;
 }
 
 status_t AudioFlinger::PlaybackThread::setMasterMute(bool muted)
 {
-#ifdef LVMX
-    int audioOutputType = LifeVibes::getMixerType(mId, mType);
-    if (LifeVibes::audioOutputTypeIsLifeVibes(audioOutputType)) {
-        LifeVibes::setMasterMute(audioOutputType, muted);
-    }
-#endif
     mMasterMute = muted;
     return NO_ERROR;
 }
@@ -1251,24 +1200,12 @@
 
 status_t AudioFlinger::PlaybackThread::setStreamVolume(int stream, float value)
 {
-#ifdef LVMX
-    int audioOutputType = LifeVibes::getMixerType(mId, mType);
-    if (LifeVibes::audioOutputTypeIsLifeVibes(audioOutputType)) {
-        LifeVibes::setStreamVolume(audioOutputType, stream, value);
-    }
-#endif
     mStreamTypes[stream].volume = value;
     return NO_ERROR;
 }
 
 status_t AudioFlinger::PlaybackThread::setStreamMute(int stream, bool muted)
 {
-#ifdef LVMX
-    int audioOutputType = LifeVibes::getMixerType(mId, mType);
-    if (LifeVibes::audioOutputTypeIsLifeVibes(audioOutputType)) {
-        LifeVibes::setStreamMute(audioOutputType, stream, muted);
-    }
-#endif
     mStreamTypes[stream].mute = muted;
     return NO_ERROR;
 }
@@ -1283,20 +1220,6 @@
     return mStreamTypes[stream].mute;
 }
 
-bool AudioFlinger::PlaybackThread::isStreamActive(int stream) const
-{
-    Mutex::Autolock _l(mLock);
-    size_t count = mActiveTracks.size();
-    for (size_t i = 0 ; i < count ; ++i) {
-        sp<Track> t = mActiveTracks[i].promote();
-        if (t == 0) continue;
-        Track* const track = t.get();
-        if (t->type() == stream)
-            return true;
-    }
-    return false;
-}
-
 // addTrack_l() must be called with ThreadBase::mLock held
 status_t AudioFlinger::PlaybackThread::addTrack_l(const sp<Track>& track)
 {
@@ -1610,12 +1533,6 @@
              }
              // enable changes in effect chain
              unlockEffectChains(effectChains);
-#ifdef LVMX
-            int audioOutputType = LifeVibes::getMixerType(mId, mType);
-            if (LifeVibes::audioOutputTypeIsLifeVibes(audioOutputType)) {
-               LifeVibes::process(audioOutputType, mMixBuffer, mixBufferSize);
-            }
-#endif
             mLastWriteTime = systemTime();
             mInWrite = true;
             mBytesWritten += mixBufferSize;
@@ -1678,24 +1595,6 @@
     if (masterMute) {
         masterVolume = 0;
     }
-#ifdef LVMX
-    bool tracksConnectedChanged = false;
-    bool stateChanged = false;
-
-    int audioOutputType = LifeVibes::getMixerType(mId, mType);
-    if (LifeVibes::audioOutputTypeIsLifeVibes(audioOutputType))
-    {
-        int activeTypes = 0;
-        for (size_t i=0 ; i<count ; i++) {
-            sp<Track> t = activeTracks[i].promote();
-            if (t == 0) continue;
-            Track* const track = t.get();
-            int iTracktype=track->type();
-            activeTypes |= 1<<track->type();
-        }
-        LifeVibes::computeVolumes(audioOutputType, activeTypes, tracksConnectedChanged, stateChanged, masterVolume, masterMute);
-    }
-#endif
     // Delegate master volume control to effect in output mix effect chain if needed
     sp<EffectChain> chain = getEffectChain_l(AudioSystem::SESSION_OUTPUT_MIX);
     if (chain != 0) {
@@ -1745,6 +1644,7 @@
                     track->mState = TrackBase::ACTIVE;
                     param = AudioMixer::RAMP_VOLUME;
                 }
+                mAudioMixer->setParameter(AudioMixer::RESAMPLE, AudioMixer::RESET, NULL);
             } else if (cblk->server != 0) {
                 // If the track is stopped before the first frame was mixed,
                 // do not apply ramp
@@ -1763,17 +1663,6 @@
 
                 // read original volumes with volume control
                 float typeVolume = mStreamTypes[track->type()].volume;
-#ifdef LVMX
-                bool streamMute=false;
-                // read the volume from the LivesVibes audio engine.
-                if (LifeVibes::audioOutputTypeIsLifeVibes(audioOutputType))
-                {
-                    LifeVibes::getStreamVolumes(audioOutputType, track->type(), &typeVolume, &streamMute);
-                    if (streamMute) {
-                        typeVolume = 0;
-                    }
-                }
-#endif
                 float v = masterVolume * typeVolume;
                 vl = (uint32_t)(v * cblk->volume[0]) << 12;
                 vr = (uint32_t)(v * cblk->volume[1]) << 12;
@@ -1806,14 +1695,6 @@
             if (va > MAX_GAIN_INT) va = MAX_GAIN_INT;
             aux = int16_t(va);
 
-#ifdef LVMX
-            if ( tracksConnectedChanged || stateChanged )
-            {
-                 // only do the ramp when the volume is changed by the user / application
-                 param = AudioMixer::VOLUME;
-            }
-#endif
-
             // XXX: these things DON'T need to be done each time
             mAudioMixer->setBufferProvider(track);
             mAudioMixer->enable(AudioMixer::MIXING);
@@ -1967,6 +1848,27 @@
             }
         }
         if (param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) {
+            // when changing the audio output device, call addBatteryData to notify
+            // the change
+            if (mDevice != value) {
+                uint32_t params = 0;
+                // check whether speaker is on
+                if (value & AudioSystem::DEVICE_OUT_SPEAKER) {
+                    params |= IMediaPlayerService::kBatteryDataSpeakerOn;
+                }
+
+                int deviceWithoutSpeaker
+                    = AudioSystem::DEVICE_OUT_ALL & ~AudioSystem::DEVICE_OUT_SPEAKER;
+                // check if any other device (except speaker) is on
+                if (value & deviceWithoutSpeaker) {
+                    params |= IMediaPlayerService::kBatteryDataOtherAudioDeviceOn;
+                }
+
+                if (params != 0) {
+                    addBatteryData(params);
+                }
+            }
+
             // forward device change to effects that have requested to be
             // aware of attached audio device.
             mDevice = (uint32_t)value;
@@ -2965,6 +2867,9 @@
                     AudioSystem::stopOutput(thread->id(),
                                             (AudioSystem::stream_type)mStreamType,
                                             mSessionId);
+
+                    // to track the speaker usage
+                    addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop);
                 }
                 AudioSystem::releaseOutput(thread->id());
             }
@@ -3075,6 +2980,11 @@
                                               (AudioSystem::stream_type)mStreamType,
                                               mSessionId);
             thread->mLock.lock();
+
+            // to track the speaker usage
+            if (status == NO_ERROR) {
+                addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStart);
+            }
         }
         if (status == NO_ERROR) {
             PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
@@ -3110,6 +3020,9 @@
                                     (AudioSystem::stream_type)mStreamType,
                                     mSessionId);
             thread->mLock.lock();
+
+            // to track the speaker usage
+            addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop);
         }
     }
 }
@@ -3129,6 +3042,9 @@
                                         (AudioSystem::stream_type)mStreamType,
                                         mSessionId);
                 thread->mLock.lock();
+
+                // to track the speaker usage
+                addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop);
             }
         }
     }
@@ -3699,7 +3615,7 @@
         if (sessionId != NULL && *sessionId != AudioSystem::SESSION_OUTPUT_MIX) {
             lSessionId = *sessionId;
         } else {
-            lSessionId = nextUniqueId();
+            lSessionId = nextUniqueId_l();
             if (sessionId != NULL) {
                 *sessionId = lSessionId;
             }
@@ -3990,9 +3906,12 @@
             mActiveTrack.clear();
             return status;
         }
-        mActiveTrack->mState = TrackBase::RESUMING;
         mRsmpInIndex = mFrameCount;
         mBytesRead = 0;
+        if (mResampler != NULL) {
+            mResampler->reset();
+        }
+        mActiveTrack->mState = TrackBase::RESUMING;
         // signal thread to start
         LOGV("Signal record thread");
         mWaitWorkCV.signal();
@@ -4300,7 +4219,7 @@
 
     mHardwareStatus = AUDIO_HW_IDLE;
     if (output != 0) {
-        int id = nextUniqueId();
+        int id = nextUniqueId_l();
         if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) ||
             (format != AudioSystem::PCM_16_BIT) ||
             (channels != AudioSystem::CHANNEL_OUT_STEREO)) {
@@ -4309,18 +4228,6 @@
         } else {
             thread = new MixerThread(this, output, id, *pDevices);
             LOGV("openOutput() created mixer output: ID %d thread %p", id, thread);
-
-#ifdef LVMX
-            unsigned bitsPerSample =
-                (format == AudioSystem::PCM_16_BIT) ? 16 :
-                    ((format == AudioSystem::PCM_8_BIT) ? 8 : 0);
-            unsigned channelCount = (channels == AudioSystem::CHANNEL_OUT_STEREO) ? 2 : 1;
-            int audioOutputType = LifeVibes::threadIdToAudioOutputType(thread->id());
-
-            LifeVibes::init_aot(audioOutputType, samplingRate, bitsPerSample, channelCount);
-            LifeVibes::setDevice(audioOutputType, *pDevices);
-#endif
-
         }
         mPlaybackThreads.add(id, thread);
 
@@ -4348,7 +4255,7 @@
         return 0;
     }
 
-    int id = nextUniqueId();
+    int id = nextUniqueId_l();
     DuplicatingThread *thread = new DuplicatingThread(this, thread1, id);
     thread->addOutputTrack(thread2);
     mPlaybackThreads.add(id, thread);
@@ -4473,7 +4380,7 @@
     }
 
     if (input != 0) {
-        int id = nextUniqueId();
+        int id = nextUniqueId_l();
          // Start record thread
         thread = new RecordThread(this, input, reqSamplingRate, reqChannels, id);
         mRecordThreads.add(id, thread);
@@ -4543,7 +4450,8 @@
 
 int AudioFlinger::newAudioSessionId()
 {
-    return nextUniqueId();
+    AutoMutex _l(mLock);
+    return nextUniqueId_l();
 }
 
 // checkPlaybackThread_l() must be called with AudioFlinger::mLock held
@@ -4578,9 +4486,10 @@
     return thread;
 }
 
-int AudioFlinger::nextUniqueId()
+// nextUniqueId_l() must be called with AudioFlinger::mLock held
+int AudioFlinger::nextUniqueId_l()
 {
-    return android_atomic_inc(&mNextUniqueId);
+    return mNextUniqueId++;
 }
 
 // ----------------------------------------------------------------------------
@@ -4967,7 +4876,7 @@
         LOGV("createEffect_l() got effect %p on chain %p", effect == 0 ? 0 : effect.get(), chain.get());
 
         if (effect == 0) {
-            int id = mAudioFlinger->nextUniqueId();
+            int id = mAudioFlinger->nextUniqueId_l();
             // Check CPU and memory usage
             lStatus = AudioSystem::registerEffect(desc, mId, chain->strategy(), sessionId, id);
             if (lStatus != NO_ERROR) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 5917632..ec3d202 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -107,8 +107,6 @@
     virtual     status_t    setMicMute(bool state);
     virtual     bool        getMicMute() const;
 
-    virtual     bool        isStreamActive(int stream) const;
-
     virtual     status_t    setParameters(int ioHandle, const String8& keyValuePairs);
     virtual     String8     getParameters(int ioHandle, const String8& keys);
 
@@ -579,8 +577,6 @@
         virtual     float       streamVolume(int stream) const;
         virtual     bool        streamMute(int stream) const;
 
-                    bool        isStreamActive(int stream) const;
-
                     sp<Track>   createTrack_l(
                                     const sp<AudioFlinger::Client>& client,
                                     int streamType,
@@ -785,7 +781,7 @@
               float streamVolumeInternal(int stream) const { return mStreamTypes[stream].volume; }
               void audioConfigChanged_l(int event, int ioHandle, void *param2);
 
-              int  nextUniqueId();
+              int  nextUniqueId_l();
               status_t moveEffectChain_l(int session,
                                      AudioFlinger::PlaybackThread *srcThread,
                                      AudioFlinger::PlaybackThread *dstThread,
@@ -1185,9 +1181,6 @@
 
                 DefaultKeyedVector< pid_t, sp<NotificationClient> >    mNotificationClients;
                 volatile int32_t                    mNextUniqueId;
-#ifdef LVMX
-                int mLifeVibesClientPid;
-#endif
                 uint32_t mMode;
 
 };
diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp
index 433f1f7..50dcda7 100644
--- a/services/audioflinger/AudioMixer.cpp
+++ b/services/audioflinger/AudioMixer.cpp
@@ -220,6 +220,12 @@
                 return NO_ERROR;
             }
         }
+        if (name == RESET) {
+            track_t& track = mState.tracks[ mActiveTrack ];
+            track.resetResampler();
+            invalidateState(1<<mActiveTrack);
+            return NO_ERROR;
+        }
         break;
     case RAMP_VOLUME:
     case VOLUME:
@@ -289,6 +295,13 @@
     return resampler != 0;
 }
 
+void AudioMixer::track_t::resetResampler()
+{
+    if (resampler != 0) {
+        resampler->reset();
+    }
+}
+
 inline
 void AudioMixer::track_t::adjustVolumeRamp(bool aux)
 {
diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h
index aee3e17..88408a7 100644
--- a/services/audioflinger/AudioMixer.h
+++ b/services/audioflinger/AudioMixer.h
@@ -67,6 +67,7 @@
         AUX_BUFFER      = 0x4003,
         // for TARGET RESAMPLE
         SAMPLE_RATE     = 0x4100,
+        RESET           = 0x4101,
         // for TARGET VOLUME (8 channels max)
         VOLUME0         = 0x4200,
         VOLUME1         = 0x4201,
@@ -163,6 +164,7 @@
 
         bool        setResampler(uint32_t sampleRate, uint32_t devSampleRate);
         bool        doesResample() const;
+        void        resetResampler();
         void        adjustVolumeRamp(bool aux);
     };
 
diff --git a/services/audioflinger/AudioPolicyManagerBase.cpp b/services/audioflinger/AudioPolicyManagerBase.cpp
index 4612af1..74be4e0 100644
--- a/services/audioflinger/AudioPolicyManagerBase.cpp
+++ b/services/audioflinger/AudioPolicyManagerBase.cpp
@@ -19,6 +19,7 @@
 #include <utils/Log.h>
 #include <hardware_legacy/AudioPolicyManagerBase.h>
 #include <media/mediarecorder.h>
+#include <math.h>
 
 namespace android {
 
@@ -121,12 +122,12 @@
         // request routing change if necessary
         uint32_t newDevice = getNewDevice(mHardwareOutput, false);
 #ifdef WITH_A2DP
+        checkA2dpSuspend();
         checkOutputForAllStrategies();
         // A2DP outputs must be closed after checkOutputForAllStrategies() is executed
         if (state == AudioSystem::DEVICE_STATE_UNAVAILABLE && AudioSystem::isA2dpDevice(device)) {
             closeA2dpOutputs();
         }
-        checkA2dpSuspend();
 #endif
         updateDeviceForStrategy();
         setOutputDevice(mHardwareOutput, newDevice);
@@ -268,8 +269,8 @@
     // check for device and output changes triggered by new phone state
     newDevice = getNewDevice(mHardwareOutput, false);
 #ifdef WITH_A2DP
-    checkOutputForAllStrategies();
     checkA2dpSuspend();
+    checkOutputForAllStrategies();
 #endif
     updateDeviceForStrategy();
 
@@ -312,8 +313,7 @@
 
     // Flag that ringtone volume must be limited to music volume until we exit MODE_RINGTONE
     if (state == AudioSystem::MODE_RINGTONE &&
-        (hwOutputDesc->mRefCount[AudioSystem::MUSIC] ||
-        (systemTime() - mMusicStopTime) < seconds(SONIFICATION_HEADSET_MUSIC_DELAY))) {
+        isStreamActive(AudioSystem::MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY)) {
         mLimitRingtoneVolume = true;
     } else {
         mLimitRingtoneVolume = false;
@@ -343,7 +343,9 @@
         break;
     case AudioSystem::FOR_MEDIA:
         if (config != AudioSystem::FORCE_HEADPHONES && config != AudioSystem::FORCE_BT_A2DP &&
-            config != AudioSystem::FORCE_WIRED_ACCESSORY && config != AudioSystem::FORCE_NONE) {
+            config != AudioSystem::FORCE_WIRED_ACCESSORY &&
+            config != AudioSystem::FORCE_ANALOG_DOCK &&
+            config != AudioSystem::FORCE_DIGITAL_DOCK && config != AudioSystem::FORCE_NONE) {
             LOGW("setForceUse() invalid config %d for FOR_MEDIA", config);
             return;
         }
@@ -359,7 +361,10 @@
         break;
     case AudioSystem::FOR_DOCK:
         if (config != AudioSystem::FORCE_NONE && config != AudioSystem::FORCE_BT_CAR_DOCK &&
-            config != AudioSystem::FORCE_BT_DESK_DOCK && config != AudioSystem::FORCE_WIRED_ACCESSORY) {
+            config != AudioSystem::FORCE_BT_DESK_DOCK &&
+            config != AudioSystem::FORCE_WIRED_ACCESSORY &&
+            config != AudioSystem::FORCE_ANALOG_DOCK &&
+            config != AudioSystem::FORCE_DIGITAL_DOCK) {
             LOGW("setForceUse() invalid config %d for FOR_DOCK", config);
         }
         forceVolumeReeval = true;
@@ -373,8 +378,8 @@
     // check for device and output changes triggered by new phone state
     uint32_t newDevice = getNewDevice(mHardwareOutput, false);
 #ifdef WITH_A2DP
-    checkOutputForAllStrategies();
     checkA2dpSuspend();
+    checkOutputForAllStrategies();
 #endif
     updateDeviceForStrategy();
     setOutputDevice(mHardwareOutput, newDevice);
@@ -473,6 +478,7 @@
         outputDesc->mLatency = 0;
         outputDesc->mFlags = (AudioSystem::output_flags)(flags | AudioSystem::OUTPUT_FLAG_DIRECT);
         outputDesc->mRefCount[stream] = 0;
+        outputDesc->mStopTime[stream] = 0;
         output = mpClientInterface->openOutput(&outputDesc->mDevice,
                                         &outputDesc->mSamplingRate,
                                         &outputDesc->mFormat,
@@ -601,12 +607,10 @@
     if (outputDesc->mRefCount[stream] > 0) {
         // decrement usage count of this stream on the output
         outputDesc->changeRefCount(stream, -1);
-        // store time at which the last music track was stopped - see computeVolume()
-        if (stream == AudioSystem::MUSIC) {
-            mMusicStopTime = systemTime();
-        }
+        // store time at which the stream was stopped - see isStreamActive()
+        outputDesc->mStopTime[stream] = systemTime();
 
-        setOutputDevice(output, getNewDevice(output));
+        setOutputDevice(output, getNewDevice(output), false, outputDesc->mLatency*2);
 
 #ifdef WITH_A2DP
         if (mA2dpOutput != 0 && !a2dpUsedForSonification() &&
@@ -914,6 +918,19 @@
     return NO_ERROR;
 }
 
+bool AudioPolicyManagerBase::isStreamActive(int stream, uint32_t inPastMs) const
+{
+    nsecs_t sysTime = systemTime();
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        if (mOutputs.valueAt(i)->mRefCount[stream] != 0 ||
+            ns2ms(sysTime - mOutputs.valueAt(i)->mStopTime[stream]) < inPastMs) {
+            return true;
+        }
+    }
+    return false;
+}
+
+
 status_t AudioPolicyManagerBase::dump(int fd)
 {
     const size_t SIZE = 256;
@@ -1004,7 +1021,7 @@
     Thread(false),
 #endif //AUDIO_POLICY_TEST
     mPhoneState(AudioSystem::MODE_NORMAL), mRingerMode(0),
-    mMusicStopTime(0), mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f),
+    mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f),
     mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0),
     mA2dpSuspended(false)
 {
@@ -1014,6 +1031,8 @@
         mForceUse[i] = AudioSystem::FORCE_NONE;
     }
 
+    initializeVolumeCurves();
+
     // devices available by default are speaker, ear piece and microphone
     mAvailableOutputDevices = AudioSystem::DEVICE_OUT_EARPIECE |
                         AudioSystem::DEVICE_OUT_SPEAKER;
@@ -1047,25 +1066,27 @@
 
     updateDeviceForStrategy();
 #ifdef AUDIO_POLICY_TEST
-    AudioParameter outputCmd = AudioParameter();
-    outputCmd.addInt(String8("set_id"), 0);
-    mpClientInterface->setParameters(mHardwareOutput, outputCmd.toString());
+    if (mHardwareOutput != 0) {
+        AudioParameter outputCmd = AudioParameter();
+        outputCmd.addInt(String8("set_id"), 0);
+        mpClientInterface->setParameters(mHardwareOutput, outputCmd.toString());
 
-    mTestDevice = AudioSystem::DEVICE_OUT_SPEAKER;
-    mTestSamplingRate = 44100;
-    mTestFormat = AudioSystem::PCM_16_BIT;
-    mTestChannels =  AudioSystem::CHANNEL_OUT_STEREO;
-    mTestLatencyMs = 0;
-    mCurOutput = 0;
-    mDirectOutput = false;
-    for (int i = 0; i < NUM_TEST_OUTPUTS; i++) {
-        mTestOutputs[i] = 0;
+        mTestDevice = AudioSystem::DEVICE_OUT_SPEAKER;
+        mTestSamplingRate = 44100;
+        mTestFormat = AudioSystem::PCM_16_BIT;
+        mTestChannels =  AudioSystem::CHANNEL_OUT_STEREO;
+        mTestLatencyMs = 0;
+        mCurOutput = 0;
+        mDirectOutput = false;
+        for (int i = 0; i < NUM_TEST_OUTPUTS; i++) {
+            mTestOutputs[i] = 0;
+        }
+
+        const size_t SIZE = 256;
+        char buffer[SIZE];
+        snprintf(buffer, SIZE, "AudioPolicyManagerTest");
+        run(buffer, ANDROID_PRIORITY_AUDIO);
     }
-
-    const size_t SIZE = 256;
-    char buffer[SIZE];
-    snprintf(buffer, SIZE, "AudioPolicyManagerTest");
-    run(buffer, ANDROID_PRIORITY_AUDIO);
 #endif //AUDIO_POLICY_TEST
 }
 
@@ -1086,6 +1107,11 @@
    mInputs.clear();
 }
 
+status_t AudioPolicyManagerBase::initCheck()
+{
+    return (mHardwareOutput == 0) ? NO_INIT : NO_ERROR;
+}
+
 #ifdef AUDIO_POLICY_TEST
 bool AudioPolicyManagerBase::threadLoop()
 {
@@ -1347,6 +1373,7 @@
 
 void AudioPolicyManagerBase::closeA2dpOutputs()
 {
+
     LOGV("setDeviceConnectionState() closing A2DP and duplicated output!");
 
     if (mDuplicatedOutput != 0) {
@@ -1516,6 +1543,20 @@
     return (uint32_t)getStrategy(stream);
 }
 
+uint32_t AudioPolicyManagerBase::getDevicesForStream(AudioSystem::stream_type stream) {
+    uint32_t devices;
+    // By checking the range of stream before calling getStrategy, we avoid
+    // getStrategy's behavior for invalid streams.  getStrategy would do a LOGE
+    // and then return STRATEGY_MEDIA, but we want to return the empty set.
+    if (stream < (AudioSystem::stream_type) 0 || stream >= AudioSystem::NUM_STREAM_TYPES) {
+        devices = 0;
+    } else {
+        AudioPolicyManagerBase::routing_strategy strategy = getStrategy(stream);
+        devices = getDeviceForStrategy(strategy, true);
+    }
+    return devices;
+}
+
 AudioPolicyManagerBase::routing_strategy AudioPolicyManagerBase::getStrategy(
         AudioSystem::stream_type stream) {
     // stream to strategy mapping
@@ -1583,13 +1624,19 @@
             if (device) break;
 #ifdef WITH_A2DP
             // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to A2DP
-            if (!isInCall()) {
+            if (!isInCall() && !mA2dpSuspended) {
                 device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP;
                 if (device) break;
                 device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES;
                 if (device) break;
             }
 #endif
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL;
+            if (device) break;
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET;
+            if (device) break;
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET;
+            if (device) break;
             device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_EARPIECE;
             if (device == 0) {
                 LOGE("getDeviceForStrategy() earpiece device not found");
@@ -1597,18 +1644,20 @@
             break;
 
         case AudioSystem::FORCE_SPEAKER:
-            if (!isInCall() || strategy != STRATEGY_DTMF) {
-                device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
-                if (device) break;
-            }
 #ifdef WITH_A2DP
             // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to
             // A2DP speaker when forcing to speaker output
-            if (!isInCall()) {
+            if (!isInCall() && !mA2dpSuspended) {
                 device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER;
                 if (device) break;
             }
 #endif
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL;
+            if (device) break;
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET;
+            if (device) break;
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET;
+            if (device) break;
             device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_SPEAKER;
             if (device == 0) {
                 LOGE("getDeviceForStrategy() speaker device not found");
@@ -1633,18 +1682,13 @@
         // FALL THROUGH
 
     case STRATEGY_MEDIA: {
-        uint32_t device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL;
-        if (device2 == 0) {
-            device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADPHONE;
-        }
+        uint32_t device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADPHONE;
         if (device2 == 0) {
             device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADSET;
         }
 #ifdef WITH_A2DP
-        if (mA2dpOutput != 0) {
-            if (strategy == STRATEGY_SONIFICATION && !a2dpUsedForSonification()) {
-                break;
-            }
+        if ((mA2dpOutput != 0) && !mA2dpSuspended &&
+                (strategy != STRATEGY_SONIFICATION || a2dpUsedForSonification())) {
             if (device2 == 0) {
                 device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP;
             }
@@ -1657,6 +1701,15 @@
         }
 #endif
         if (device2 == 0) {
+            device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL;
+        }
+        if (device2 == 0) {
+            device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET;
+        }
+        if (device2 == 0) {
+            device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET;
+        }
+        if (device2 == 0) {
             device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_SPEAKER;
         }
 
@@ -1783,6 +1836,70 @@
     return 0;
 }
 
+float AudioPolicyManagerBase::volIndexToAmpl(uint32_t device, const StreamDescriptor& streamDesc,
+        int indexInUi) {
+    // the volume index in the UI is relative to the min and max volume indices for this stream type
+    int nbSteps = 1 + streamDesc.mVolIndex[StreamDescriptor::VOLMAX] -
+            streamDesc.mVolIndex[StreamDescriptor::VOLMIN];
+    int volIdx = (nbSteps * (indexInUi - streamDesc.mIndexMin)) /
+            (streamDesc.mIndexMax - streamDesc.mIndexMin);
+
+    // find what part of the curve this volume index belongs to, or if it's out of bounds
+    int segment = 0;
+    if (volIdx < streamDesc.mVolIndex[StreamDescriptor::VOLMIN]) {         // out of bounds
+        return 0.0f;
+    } else if (volIdx < streamDesc.mVolIndex[StreamDescriptor::VOLKNEE1]) {
+        segment = 0;
+    } else if (volIdx < streamDesc.mVolIndex[StreamDescriptor::VOLKNEE2]) {
+        segment = 1;
+    } else if (volIdx <= streamDesc.mVolIndex[StreamDescriptor::VOLMAX]) {
+        segment = 2;
+    } else {                                                               // out of bounds
+        return 1.0f;
+    }
+
+    // linear interpolation in the attenuation table in dB
+    float decibels = streamDesc.mVolDbAtt[segment] +
+            ((float)(volIdx - streamDesc.mVolIndex[segment])) *
+                ( (streamDesc.mVolDbAtt[segment+1] - streamDesc.mVolDbAtt[segment]) /
+                    ((float)(streamDesc.mVolIndex[segment+1] - streamDesc.mVolIndex[segment])) );
+
+    float amplification = exp( decibels * 0.115129f); // exp( dB * ln(10) / 20 )
+
+    LOGV("VOLUME vol index=[%d %d %d], dB=[%.1f %.1f %.1f] ampl=%.5f",
+            streamDesc.mVolIndex[segment], volIdx, streamDesc.mVolIndex[segment+1],
+            streamDesc.mVolDbAtt[segment], decibels, streamDesc.mVolDbAtt[segment+1],
+            amplification);
+
+    return amplification;
+}
+
+void AudioPolicyManagerBase::initializeVolumeCurves() {
+    // initialize the volume curves to an attenuation ranging from -49.5 dB to 0 dB in 0.5 dB steps
+    for (int i=0 ; i< AudioSystem::NUM_STREAM_TYPES ; i++) {
+        mStreams[i].mVolIndex[StreamDescriptor::VOLMIN] = 1;
+        mStreams[i].mVolDbAtt[StreamDescriptor::VOLMIN] = -49.5f;
+        mStreams[i].mVolIndex[StreamDescriptor::VOLKNEE1] = 33;
+        mStreams[i].mVolDbAtt[StreamDescriptor::VOLKNEE1] = -33.5f;
+        mStreams[i].mVolIndex[StreamDescriptor::VOLKNEE2] = 66;
+        mStreams[i].mVolDbAtt[StreamDescriptor::VOLKNEE2] = -17.0f;
+        // here we use 100 steps to avoid rounding errors
+        // when computing the volume in volIndexToAmpl()
+        mStreams[i].mVolIndex[StreamDescriptor::VOLMAX] = 100;
+        mStreams[i].mVolDbAtt[StreamDescriptor::VOLMAX] = 0.0f;
+    }
+
+    // Modification for music: more attenuation for lower volumes, finer steps at high volumes
+    mStreams[AudioSystem::MUSIC].mVolIndex[StreamDescriptor::VOLMIN] = 1;
+    mStreams[AudioSystem::MUSIC].mVolDbAtt[StreamDescriptor::VOLMIN] = -58.0f;
+    mStreams[AudioSystem::MUSIC].mVolIndex[StreamDescriptor::VOLKNEE1] = 20;
+    mStreams[AudioSystem::MUSIC].mVolDbAtt[StreamDescriptor::VOLKNEE1] = -40.0f;
+    mStreams[AudioSystem::MUSIC].mVolIndex[StreamDescriptor::VOLKNEE2] = 60;
+    mStreams[AudioSystem::MUSIC].mVolDbAtt[StreamDescriptor::VOLKNEE2] = -17.0f;
+    mStreams[AudioSystem::MUSIC].mVolIndex[StreamDescriptor::VOLMAX] = 100;
+    mStreams[AudioSystem::MUSIC].mVolDbAtt[StreamDescriptor::VOLMAX] = 0.0f;
+}
+
 float AudioPolicyManagerBase::computeVolume(int stream, int index, audio_io_handle_t output, uint32_t device)
 {
     float volume = 1.0;
@@ -1793,8 +1910,7 @@
         device = outputDesc->device();
     }
 
-    int volInt = (100 * (index - streamDesc.mIndexMin)) / (streamDesc.mIndexMax - streamDesc.mIndexMin);
-    volume = AudioSystem::linearToLog(volInt);
+    volume = volIndexToAmpl(device, streamDesc, index);
 
     // if a headset is connected, apply the following rules to ring tones and notifications
     // to avoid sound level bursts in user's ears:
@@ -2007,6 +2123,7 @@
         mRefCount[i] = 0;
         mCurVolume[i] = -1.0;
         mMuteCount[i] = 0;
+        mStopTime[i] = 0;
     }
 }
 
@@ -2057,7 +2174,6 @@
     return refCount;
 }
 
-
 status_t AudioPolicyManagerBase::AudioOutputDescriptor::dump(int fd)
 {
     const size_t SIZE = 256;
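
The new volIndexToAmpl() replaces the old AudioSystem::linearToLog() mapping with a piecewise-linear interpolation in dB followed by a dB-to-linear conversion (0.115129 is ln(10)/20). A standalone sketch using the MUSIC knee values from initializeVolumeCurves(); the 0..15 UI index range in main() is an assumption for illustration only and is not taken from this patch:

    #include <math.h>    // expf()
    #include <stdio.h>

    // MUSIC curve from initializeVolumeCurves(): internal index vs. attenuation in dB
    static const int   kVolIndex[4] = { 1, 20, 60, 100 };
    static const float kVolDbAtt[4] = { -58.0f, -40.0f, -17.0f, 0.0f };

    // Map a UI volume index onto the 1..100 internal scale, interpolate linearly
    // in dB between the curve knees, then convert dB to a linear amplitude.
    static float volIndexToAmplMusic(int indexInUi, int indexMin, int indexMax) {
        int nbSteps = 1 + kVolIndex[3] - kVolIndex[0];                  // 100
        int volIdx  = (nbSteps * (indexInUi - indexMin)) / (indexMax - indexMin);

        if (volIdx < kVolIndex[0]) return 0.0f;                         // below the curve: mute
        if (volIdx > kVolIndex[3]) return 1.0f;                         // above the curve: unity gain
        int seg = (volIdx < kVolIndex[1]) ? 0 : (volIdx < kVolIndex[2]) ? 1 : 2;

        float dB = kVolDbAtt[seg] + (float)(volIdx - kVolIndex[seg]) *
                   ((kVolDbAtt[seg + 1] - kVolDbAtt[seg]) /
                    (float)(kVolIndex[seg + 1] - kVolIndex[seg]));
        return expf(dB * 0.115129f);                                    // exp(dB * ln(10) / 20)
    }

    int main() {
        // UI index 10 of an assumed 0..15 range -> internal index 66 ->
        // about -14.5 dB of attenuation -> roughly 0.19 linear gain.
        printf("%.3f\n", volIndexToAmplMusic(10, 0, 15));
        return 0;
    }

As the comment in initializeVolumeCurves() notes, this gives music more attenuation at low indices (down to -58 dB) and finer steps near the top of the scale than the generic curve.
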
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index f24e08e..b614c48 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -68,6 +68,8 @@
 {
     char value[PROPERTY_VALUE_MAX];
 
+    Mutex::Autolock _l(mLock);
+
     // start tone playback thread
     mTonePlaybackThread = new AudioCommandThread(String8(""));
     // start audio commands thread
@@ -88,9 +90,18 @@
     }
 #endif
 
-    // load properties
-    property_get("ro.camera.sound.forced", value, "0");
-    mpPolicyManager->setSystemProperty("ro.camera.sound.forced", value);
+    if ((mpPolicyManager != NULL) && (mpPolicyManager->initCheck() != NO_ERROR)) {
+        delete mpPolicyManager;
+        mpPolicyManager = NULL;
+    }
+
+    if (mpPolicyManager == NULL) {
+        LOGE("Could not create AudioPolicyManager");
+    } else {
+        // load properties
+        property_get("ro.camera.sound.forced", value, "0");
+        mpPolicyManager->setSystemProperty("ro.camera.sound.forced", value);
+    }
 }
 
 AudioPolicyService::~AudioPolicyService()
@@ -354,6 +365,14 @@
     return mpPolicyManager->getStrategyForStream(stream);
 }
 
+uint32_t AudioPolicyService::getDevicesForStream(AudioSystem::stream_type stream)
+{
+    if (mpPolicyManager == NULL) {
+        return 0;
+    }
+    return mpPolicyManager->getDevicesForStream(stream);
+}
+
 audio_io_handle_t AudioPolicyService::getOutputForEffect(effect_descriptor_t *desc)
 {
     if (mpPolicyManager == NULL) {
@@ -383,6 +402,15 @@
     return mpPolicyManager->unregisterEffect(id);
 }
 
+bool AudioPolicyService::isStreamActive(int stream, uint32_t inPastMs) const
+{
+    if (mpPolicyManager == NULL) {
+        return 0;
+    }
+    Mutex::Autolock _l(mLock);
+    return mpPolicyManager->isStreamActive(stream, inPastMs);
+}
+
 void AudioPolicyService::binderDied(const wp<IBinder>& who) {
     LOGW("binderDied() %p, tid %d, calling tid %d", who.unsafe_get(), gettid(),
             IPCThreadState::self()->getCallingPid());
@@ -468,13 +496,6 @@
 
 
 // ----------------------------------------------------------------------------
-void AudioPolicyService::instantiate() {
-    defaultServiceManager()->addService(
-            String16("media.audio_policy"), new AudioPolicyService());
-}
-
-
-// ----------------------------------------------------------------------------
 // AudioPolicyClientInterface implementation
 // ----------------------------------------------------------------------------
 
diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h
index 558f455..faad893 100644
--- a/services/audioflinger/AudioPolicyService.h
+++ b/services/audioflinger/AudioPolicyService.h
@@ -21,6 +21,7 @@
 #include <hardware_legacy/AudioPolicyInterface.h>
 #include <media/ToneGenerator.h>
 #include <utils/Vector.h>
+#include <binder/BinderService.h>
 
 namespace android {
 
@@ -28,12 +29,17 @@
 
 // ----------------------------------------------------------------------------
 
-class AudioPolicyService: public BnAudioPolicyService, public AudioPolicyClientInterface,
+class AudioPolicyService :
+    public BinderService<AudioPolicyService>,
+    public BnAudioPolicyService,
+    public AudioPolicyClientInterface,
     public IBinder::DeathRecipient
 {
+    friend class BinderService<AudioPolicyService>;
 
 public:
-    static  void        instantiate();
+    // for BinderService
+    static const char *getServiceName() { return "media.audio_policy"; }
 
     virtual status_t    dump(int fd, const Vector<String16>& args);
 
@@ -80,6 +86,7 @@
     virtual status_t getStreamVolumeIndex(AudioSystem::stream_type stream, int *index);
 
     virtual uint32_t getStrategyForStream(AudioSystem::stream_type stream);
+    virtual uint32_t getDevicesForStream(AudioSystem::stream_type stream);
 
     virtual audio_io_handle_t getOutputForEffect(effect_descriptor_t *desc);
     virtual status_t registerEffect(effect_descriptor_t *desc,
@@ -88,6 +95,7 @@
                                     int session,
                                     int id);
     virtual status_t unregisterEffect(int id);
+    virtual bool isStreamActive(int stream, uint32_t inPastMs = 0) const;
 
     virtual     status_t    onTransact(
                                 uint32_t code,
@@ -230,8 +238,8 @@
     status_t dumpPermissionDenial(int fd);
 
 
-    Mutex   mLock;      // prevents concurrent access to AudioPolicy manager functions changing device
-                        // connection stated our routing
+    mutable Mutex mLock;    // prevents concurrent access to AudioPolicy manager functions changing
+                            // device connection state or routing
     AudioPolicyInterface* mpPolicyManager;          // the platform specific policy manager
     sp <AudioCommandThread> mAudioCommandThread;    // audio commands thread
     sp <AudioCommandThread> mTonePlaybackThread;     // tone playback thread
@@ -240,11 +248,3 @@
 }; // namespace android
 
 #endif // ANDROID_AUDIOPOLICYSERVICE_H
-
-
-
-
-
-
-
-
diff --git a/services/audioflinger/AudioResampler.cpp b/services/audioflinger/AudioResampler.cpp
index 5dabacb..5c3b43f 100644
--- a/services/audioflinger/AudioResampler.cpp
+++ b/services/audioflinger/AudioResampler.cpp
@@ -148,6 +148,12 @@
     mVolume[1] = right;
 }
 
+void AudioResampler::reset() {
+    mInputIndex = 0;
+    mPhaseFraction = 0;
+    mBuffer.frameCount = 0;
+}
+
 // ----------------------------------------------------------------------------
 
 void AudioResamplerOrder1::resample(int32_t* out, size_t outFrameCount,
diff --git a/services/audioflinger/AudioResampler.h b/services/audioflinger/AudioResampler.h
index 2dfac76..9f06c1c 100644
--- a/services/audioflinger/AudioResampler.h
+++ b/services/audioflinger/AudioResampler.h
@@ -53,6 +53,8 @@
     virtual void resample(int32_t* out, size_t outFrameCount,
             AudioBufferProvider* provider) = 0;
 
+    virtual void reset();
+
 protected:
     // number of bits for phase fraction - 30 bits allows nearly 2x downsampling
     static const int kNumPhaseBits = 30;
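
Together, the AudioMixer and AudioResampler changes above let the mixer clear stale resampler state when a stopped track becomes active again, as the MixerThread::prepareTracks_l() change earlier in this patch does. An illustrative sketch of the call (the helper name is made up; error handling is omitted):

    #include "AudioMixer.h"   // services/audioflinger

    // Illustrative helper (not part of the patch): reset the resampler of the
    // mixer track currently selected as active.
    static void resetActiveTrackResampler(android::AudioMixer* mixer) {
        mixer->setParameter(android::AudioMixer::RESAMPLE,
                            android::AudioMixer::RESET,
                            NULL);
    }

Inside AudioMixer this maps to track_t::resetResampler(), which is a no-op for tracks that do not resample and otherwise calls AudioResampler::reset() to zero mInputIndex, mPhaseFraction and the pending input buffer.
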
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 87975af..b52fc69 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -49,7 +49,8 @@
     libcutils \
     libmedia \
     libcamera_client \
-    libsurfaceflinger_client
+    libsurfaceflinger_client \
+    libgui
 
 LOCAL_MODULE:= libcameraservice
 
diff --git a/services/camera/libcameraservice/CameraHardwareStub.cpp b/services/camera/libcameraservice/CameraHardwareStub.cpp
index b3e0ee6..07b5a37 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.cpp
+++ b/services/camera/libcameraservice/CameraHardwareStub.cpp
@@ -101,9 +101,9 @@
     mFakeCamera = 0; // paranoia
 }
 
-sp<IMemoryHeap> CameraHardwareStub::getPreviewHeap() const
+status_t CameraHardwareStub::setPreviewWindow(const sp<ANativeWindow>& buf)
 {
-    return mPreviewHeap;
+    return NO_ERROR;
 }
 
 sp<IMemoryHeap> CameraHardwareStub::getRawHeap() const
diff --git a/services/camera/libcameraservice/CameraHardwareStub.h b/services/camera/libcameraservice/CameraHardwareStub.h
index d3427ba..9b66a76 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.h
+++ b/services/camera/libcameraservice/CameraHardwareStub.h
@@ -29,7 +29,7 @@
 
 class CameraHardwareStub : public CameraHardwareInterface {
 public:
-    virtual sp<IMemoryHeap> getPreviewHeap() const;
+    virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf);
     virtual sp<IMemoryHeap> getRawHeap() const;
 
     virtual void        setCallbacks(notify_callback notify_cb,
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index a64ddcf..a09e16b 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -26,11 +26,12 @@
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <cutils/atomic.h>
+#include <cutils/properties.h>
+#include <gui/SurfaceTextureClient.h>
 #include <hardware/hardware.h>
 #include <media/AudioSystem.h>
 #include <media/mediaplayer.h>
 #include <surfaceflinger/ISurface.h>
-#include <ui/Overlay.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
@@ -305,9 +306,9 @@
     mCameraId = cameraId;
     mCameraFacing = cameraFacing;
     mClientPid = clientPid;
-    mUseOverlay = mHardware->useOverlay();
     mMsgEnabled = 0;
-
+    mSurface = 0;
+    mPreviewWindow = 0;
     mHardware->setCallbacks(notifyCallback,
                             dataCallback,
                             dataCallbackTimestamp,
@@ -317,42 +318,21 @@
     enableMsgType(CAMERA_MSG_ERROR |
                   CAMERA_MSG_ZOOM |
                   CAMERA_MSG_FOCUS);
-    mOverlayW = 0;
-    mOverlayH = 0;
 
     // Callback is disabled by default
     mPreviewCallbackFlag = FRAME_CALLBACK_FLAG_NOOP;
     mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
-    mOrientationChanged = false;
+    mPlayShutterSound = true;
     cameraService->setCameraBusy(cameraId);
     cameraService->loadSound();
     LOG1("Client::Client X (pid %d)", callingPid);
 }
 
-static void *unregister_surface(void *arg) {
-    ISurface *surface = (ISurface *)arg;
-    surface->unregisterBuffers();
-    IPCThreadState::self()->flushCommands();
-    return NULL;
-}
-
 // tear down the client
 CameraService::Client::~Client() {
     int callingPid = getCallingPid();
     LOG1("Client::~Client E (pid %d, this %p)", callingPid, this);
 
-    if (mSurface != 0 && !mUseOverlay) {
-        pthread_t thr;
-        // We unregister the buffers in a different thread because binder does
-        // not let us make sychronous transactions in a binder destructor (that
-        // is, upon our reaching a refcount of zero.)
-        pthread_create(&thr,
-                       NULL,  // attr
-                       unregister_surface,
-                       mSurface.get());
-        pthread_join(thr, NULL);
-    }
-
     // set mClientPid to let disconnet() tear down the hardware
     mClientPid = callingPid;
     disconnect();
@@ -466,9 +446,11 @@
     mHardware->cancelPicture();
     // Release the hardware resources.
     mHardware->release();
-    // Release the held overlay resources.
-    if (mUseOverlay) {
-        mOverlayRef = 0;
+
+    // Release the held ANativeWindow resources.
+    if (mPreviewWindow != 0) {
+        mPreviewWindow = 0;
+        mHardware->setPreviewWindow(mPreviewWindow);
     }
     mHardware.clear();
 
@@ -480,8 +462,8 @@
 
 // ----------------------------------------------------------------------------
 
-// set the ISurface that the preview will use
-status_t CameraService::Client::setPreviewDisplay(const sp<ISurface>& surface) {
+// set the Surface that the preview will use
+status_t CameraService::Client::setPreviewDisplay(const sp<Surface>& surface) {
     LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid());
     Mutex::Autolock lock(mLock);
     status_t result = checkPidAndHardware();
@@ -491,100 +473,64 @@
 
     // return if no change in surface.
     // asBinder() is safe on NULL (returns NULL)
-    if (surface->asBinder() == mSurface->asBinder()) {
+    if (getISurface(surface)->asBinder() == mSurface) {
         return result;
     }
 
     if (mSurface != 0) {
         LOG1("clearing old preview surface %p", mSurface.get());
-        if (mUseOverlay) {
-            // Force the destruction of any previous overlay
-            sp<Overlay> dummy;
-            mHardware->setOverlay(dummy);
-            mOverlayRef = 0;
-        } else {
-            mSurface->unregisterBuffers();
-        }
     }
-    mSurface = surface;
-    mOverlayRef = 0;
-    // If preview has been already started, set overlay or register preview
+    mSurface = getISurface(surface)->asBinder();
+    mPreviewWindow = surface;
+
+    // If preview has already been started, register preview
     // buffers now.
     if (mHardware->previewEnabled()) {
-        if (mUseOverlay) {
-            result = setOverlay();
-        } else if (mSurface != 0) {
-            result = registerPreviewBuffers();
+        if (mPreviewWindow != 0) {
+            native_window_set_buffers_transform(mPreviewWindow.get(),
+                                                mOrientation);
+            result = mHardware->setPreviewWindow(mPreviewWindow);
         }
     }
 
     return result;
 }
 
-status_t CameraService::Client::registerPreviewBuffers() {
-    int w, h;
-    CameraParameters params(mHardware->getParameters());
-    params.getPreviewSize(&w, &h);
+// set the SurfaceTexture that the preview will use
+status_t CameraService::Client::setPreviewTexture(
+        const sp<ISurfaceTexture>& surfaceTexture) {
+    LOG1("setPreviewTexture(%p) (pid %d)", surfaceTexture.get(),
+            getCallingPid());
+    Mutex::Autolock lock(mLock);
+    status_t result = checkPidAndHardware();
+    if (result != NO_ERROR) return result;
 
-    // FIXME: don't use a hardcoded format here.
-    ISurface::BufferHeap buffers(w, h, w, h,
-                                 HAL_PIXEL_FORMAT_YCrCb_420_SP,
-                                 mOrientation,
-                                 0,
-                                 mHardware->getPreviewHeap());
-
-    status_t result = mSurface->registerBuffers(buffers);
-    if (result != NO_ERROR) {
-        LOGE("registerBuffers failed with status %d", result);
-    }
-    return result;
-}
-
-status_t CameraService::Client::setOverlay() {
-    int w, h;
-    CameraParameters params(mHardware->getParameters());
-    params.getPreviewSize(&w, &h);
-
-    if (w != mOverlayW || h != mOverlayH || mOrientationChanged) {
-        // Force the destruction of any previous overlay
-        sp<Overlay> dummy;
-        mHardware->setOverlay(dummy);
-        mOverlayRef = 0;
-        mOrientationChanged = false;
-    }
-
-    status_t result = NO_ERROR;
-    if (mSurface == 0) {
-        result = mHardware->setOverlay(NULL);
-    } else {
-        if (mOverlayRef == 0) {
-            // FIXME:
-            // Surfaceflinger may hold onto the previous overlay reference for some
-            // time after we try to destroy it. retry a few times. In the future, we
-            // should make the destroy call block, or possibly specify that we can
-            // wait in the createOverlay call if the previous overlay is in the
-            // process of being destroyed.
-            for (int retry = 0; retry < 50; ++retry) {
-                mOverlayRef = mSurface->createOverlay(w, h, OVERLAY_FORMAT_DEFAULT,
-                                                      mOrientation);
-                if (mOverlayRef != 0) break;
-                LOGW("Overlay create failed - retrying");
-                usleep(20000);
-            }
-            if (mOverlayRef == 0) {
-                LOGE("Overlay Creation Failed!");
-                return -EINVAL;
-            }
-            result = mHardware->setOverlay(new Overlay(mOverlayRef));
-        }
-    }
-    if (result != NO_ERROR) {
-        LOGE("mHardware->setOverlay() failed with status %d\n", result);
+    // return if no change in surface.
+    // asBinder() is safe on NULL (returns NULL)
+    if (surfaceTexture->asBinder() == mSurface) {
         return result;
     }
 
-    mOverlayW = w;
-    mOverlayH = h;
+    if (mSurface != 0) {
+        LOG1("clearing old preview surface %p", mSurface.get());
+    }
+    mSurface = surfaceTexture->asBinder();
+    if (surfaceTexture != 0) {
+        mPreviewWindow = new SurfaceTextureClient(surfaceTexture);
+    } else {
+        mPreviewWindow = 0;
+    }
+
+    // If preview has already been started, register preview
+    // buffers now.
+    if (mHardware->previewEnabled()) {
+        // XXX: What if the new preview window is 0?
+        if (mPreviewWindow != 0) {
+            native_window_set_buffers_transform(mPreviewWindow.get(),
+                                                mOrientation);
+            result = mHardware->setPreviewWindow(mPreviewWindow);
+        }
+    }
 
     return result;
 }
@@ -597,16 +543,10 @@
     if (checkPidAndHardware() != NO_ERROR) return;
 
     mPreviewCallbackFlag = callback_flag;
-
-    // If we don't use overlay, we always need the preview frame for display.
-    // If we do use overlay, we only need the preview frame if the user
-    // wants the data.
-    if (mUseOverlay) {
-        if(mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
-            enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        } else {
-            disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        }
+    if (mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+        enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
+    } else {
+        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
     }
 }
 
@@ -631,14 +571,14 @@
 
     switch(mode) {
         case CAMERA_PREVIEW_MODE:
-            if (mSurface == 0) {
+            if (mSurface == 0 && mPreviewWindow == 0) {
                 LOG1("mSurface is not set yet.");
                 // still able to start preview in this case.
             }
             return startPreviewMode();
         case CAMERA_RECORDING_MODE:
-            if (mSurface == 0) {
-                LOGE("mSurface must be set before startRecordingMode.");
+            if (mSurface == 0 && mPreviewWindow == 0) {
+                LOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
                 return INVALID_OPERATION;
             }
             return startRecordingMode();
@@ -656,25 +596,13 @@
         return NO_ERROR;
     }
 
-    if (mUseOverlay) {
-        // If preview display has been set, set overlay now.
-        if (mSurface != 0) {
-            result = setOverlay();
-        }
-        if (result != NO_ERROR) return result;
-        result = mHardware->startPreview();
-    } else {
-        enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        result = mHardware->startPreview();
-        if (result != NO_ERROR) return result;
-        // If preview display has been set, register preview buffers now.
-        if (mSurface != 0) {
-           // Unregister here because the surface may be previously registered
-           // with the raw (snapshot) heap.
-           mSurface->unregisterBuffers();
-           result = registerPreviewBuffers();
-        }
+    if (mPreviewWindow != 0) {
+        native_window_set_buffers_transform(mPreviewWindow.get(),
+                mOrientation);
     }
+    mHardware->setPreviewWindow(mPreviewWindow);
+    result = mHardware->startPreview();
+
     return result;
 }
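startPreviewMode() no longer registers buffers with an ISurface: it applies the display orientation to the ANativeWindow, hands the window to the HAL, and then starts preview. The hunk above ignores the return values of native_window_set_buffers_transform() and setPreviewWindow(); a sketch of the same sequence with the errors propagated (parameter names are assumptions, mirroring the members used above):

    // Sketch only: same ordering as startPreviewMode() above, but with return codes checked.
    status_t startPreviewChecked(const sp<CameraHardwareInterface>& hw,
                                 const sp<ANativeWindow>& window, int orientation) {
        status_t err = NO_ERROR;
        if (window != 0) {
            err = native_window_set_buffers_transform(window.get(), orientation);
            if (err != NO_ERROR) return err;
        }
        err = hw->setPreviewWindow(window);   // a NULL window is passed through as-is
        if (err != NO_ERROR) return err;
        return hw->startPreview();
    }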
 
@@ -711,13 +639,10 @@
     Mutex::Autolock lock(mLock);
     if (checkPidAndHardware() != NO_ERROR) return;
 
+
     disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
     mHardware->stopPreview();
 
-    if (mSurface != 0 && !mUseOverlay) {
-        mSurface->unregisterBuffers();
-    }
-
     mPreviewBuffer.clear();
 }
 
@@ -741,6 +666,30 @@
     mHardware->releaseRecordingFrame(mem);
 }
 
+int32_t CameraService::Client::getNumberOfVideoBuffers() const {
+    LOG1("getNumberOfVideoBuffers");
+    Mutex::Autolock lock(mLock);
+    if (checkPidAndHardware() != NO_ERROR) return 0;
+    return mHardware->getNumberOfVideoBuffers();
+}
+
+sp<IMemory> CameraService::Client::getVideoBuffer(int32_t index) const {
+    LOG1("getVideoBuffer: %d", index);
+    Mutex::Autolock lock(mLock);
+    if (checkPidAndHardware() != NO_ERROR) return 0;
+    return mHardware->getVideoBuffer(index);
+}
+
+status_t CameraService::Client::storeMetaDataInBuffers(bool enabled)
+{
+    LOG1("storeMetaDataInBuffers: %s", enabled? "true": "false");
+    Mutex::Autolock lock(mLock);
+    if (checkPidAndHardware() != NO_ERROR) {
+        return UNKNOWN_ERROR;
+    }
+    return mHardware->storeMetaDataInBuffers(enabled);
+}
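The three new Client methods are thin forwarders to the HAL behind the usual pid/hardware check; storeMetaDataInBuffers() is the hook a recorder uses to request metadata (rather than full frame data) in the video buffers. A hypothetical recorder-side usage sketch, assuming a connected ICamera handle obtained elsewhere:

    // Hypothetical caller: prefer metadata mode for recording, fall back to real
    // frame data if the HAL rejects it.
    status_t setupRecordingBuffers(const sp<ICamera>& camera) {
        status_t err = camera->storeMetaDataInBuffers(true);
        if (err != NO_ERROR) {
            LOGW("storeMetaDataInBuffers(true) failed (%d), using real frame data", err);
            err = camera->storeMetaDataInBuffers(false);
        }
        return err;
    }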
+
 bool CameraService::Client::previewEnabled() {
     LOG1("previewEnabled (pid %d)", getCallingPid());
 
@@ -778,17 +727,30 @@
 }
 
 // take a picture - image is returned in callback
-status_t CameraService::Client::takePicture() {
-    LOG1("takePicture (pid %d)", getCallingPid());
+status_t CameraService::Client::takePicture(int msgType) {
+    LOG1("takePicture (pid %d): 0x%x", getCallingPid(), msgType);
 
     Mutex::Autolock lock(mLock);
     status_t result = checkPidAndHardware();
     if (result != NO_ERROR) return result;
 
-    enableMsgType(CAMERA_MSG_SHUTTER |
-                  CAMERA_MSG_POSTVIEW_FRAME |
-                  CAMERA_MSG_RAW_IMAGE |
-                  CAMERA_MSG_COMPRESSED_IMAGE);
+    if ((msgType & CAMERA_MSG_RAW_IMAGE) &&
+        (msgType & CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
+        LOGE("CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY"
+                " cannot be both enabled");
+        return BAD_VALUE;
+    }
+
+    // We only accept picture related message types
+    // and ignore other types of messages for takePicture().
+    int picMsgType = msgType
+                        & (CAMERA_MSG_SHUTTER |
+                           CAMERA_MSG_POSTVIEW_FRAME |
+                           CAMERA_MSG_RAW_IMAGE |
+                           CAMERA_MSG_RAW_IMAGE_NOTIFY |
+                           CAMERA_MSG_COMPRESSED_IMAGE);
+
+    enableMsgType(picMsgType);
 
     return mHardware->takePicture();
 }
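takePicture() now takes the set of messages the caller wants: CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY are rejected if requested together, and anything outside the picture-related set is masked off before the messages are enabled. A hypothetical caller, assuming a connected ICamera handle named camera:

    // Hypothetical caller: ask for shutter and JPEG callbacks only. Requesting both
    // CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY would return BAD_VALUE.
    int msgType = CAMERA_MSG_SHUTTER | CAMERA_MSG_COMPRESSED_IMAGE;
    status_t err = camera->takePicture(msgType);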
@@ -815,6 +777,35 @@
     return params;
 }
 
+// enable shutter sound
+status_t CameraService::Client::enableShutterSound(bool enable) {
+    LOG1("enableShutterSound (pid %d)", getCallingPid());
+
+    status_t result = checkPidAndHardware();
+    if (result != NO_ERROR) return result;
+
+    if (enable) {
+        mPlayShutterSound = true;
+        return OK;
+    }
+
+    // Disabling shutter sound may not be allowed. In that case only
+    // allow the mediaserver process to disable the sound.
+    char value[PROPERTY_VALUE_MAX];
+    property_get("ro.camera.sound.forced", value, "0");
+    if (strcmp(value, "0") != 0) {
+        // Disabling shutter sound is not allowed. Deny if the current
+        // process is not mediaserver.
+        if (getCallingPid() != getpid()) {
+            LOGE("Failed to disable shutter sound. Permission denied (pid %d)", getCallingPid());
+            return PERMISSION_DENIED;
+        }
+    }
+
+    mPlayShutterSound = false;
+    return OK;
+}
+
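enableShutterSound(false) is gated by the ro.camera.sound.forced system property: when it is anything other than "0", only the mediaserver process itself (getCallingPid() == getpid()) may silence the shutter. The policy check in isolation, as a sketch assuming <cutils/properties.h>:

    // Sketch of the same property check: a non-"0" ro.camera.sound.forced means
    // applications are not allowed to disable the shutter sound.
    static bool shutterSoundForced() {
        char value[PROPERTY_VALUE_MAX];
        property_get("ro.camera.sound.forced", value, "0");
        return strcmp(value, "0") != 0;
    }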
 status_t CameraService::Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
     LOG1("sendCommand (pid %d)", getCallingPid());
     int orientation;
@@ -833,9 +824,22 @@
 
         if (mOrientation != orientation) {
             mOrientation = orientation;
-            if (mOverlayRef != 0) mOrientationChanged = true;
         }
         return OK;
+    } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) {
+        switch (arg1) {
+            case 0:
+                enableShutterSound(false);
+                break;
+            case 1:
+                enableShutterSound(true);
+                break;
+            default:
+                return BAD_VALUE;
+        }
+        return OK;
+    } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
+        mCameraService->playSound(SOUND_RECORDING);
     }
 
     return mHardware->sendCommand(cmd, arg1, arg2);
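sendCommand() gains two service-handled commands: CAMERA_CMD_ENABLE_SHUTTER_SOUND maps arg1 (0 or 1) onto enableShutterSound() and returns OK, while CAMERA_CMD_PLAY_RECORDING_SOUND plays the recording sound and, per the hunk above, still falls through to the HAL's sendCommand(). A hypothetical client call, assuming a connected ICamera handle named camera:

    // Hypothetical caller: disable the shutter sound through the command channel.
    // arg1 must be 0 or 1; anything else is rejected with BAD_VALUE.
    status_t err = camera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);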
@@ -996,11 +1000,8 @@
 // "size" is the width and height of yuv picture for registerBuffer.
 // If it is NULL, use the picture size from parameters.
 void CameraService::Client::handleShutter(image_rect_type *size) {
-    mCameraService->playSound(SOUND_SHUTTER);
-
-    // Screen goes black after the buffer is unregistered.
-    if (mSurface != 0 && !mUseOverlay) {
-        mSurface->unregisterBuffers();
+    if (mPlayShutterSound) {
+        mCameraService->playSound(SOUND_SHUTTER);
     }
 
     sp<ICameraClient> c = mCameraClient;
@@ -1011,29 +1012,6 @@
     }
     disableMsgType(CAMERA_MSG_SHUTTER);
 
-    // It takes some time before yuvPicture callback to be called.
-    // Register the buffer for raw image here to reduce latency.
-    if (mSurface != 0 && !mUseOverlay) {
-        int w, h;
-        CameraParameters params(mHardware->getParameters());
-        if (size == NULL) {
-            params.getPictureSize(&w, &h);
-        } else {
-            w = size->width;
-            h = size->height;
-            w &= ~1;
-            h &= ~1;
-            LOG1("Snapshot image width=%d, height=%d", w, h);
-        }
-        // FIXME: don't use hardcoded format constants here
-        ISurface::BufferHeap buffers(w, h, w, h,
-            HAL_PIXEL_FORMAT_YCrCb_420_SP, mOrientation, 0,
-            mHardware->getRawHeap());
-
-        mSurface->registerBuffers(buffers);
-        IPCThreadState::self()->flushCommands();
-    }
-
     mLock.unlock();
 }
 
@@ -1043,12 +1021,6 @@
     size_t size;
     sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
 
-    if (!mUseOverlay) {
-        if (mSurface != 0) {
-            mSurface->postBuffer(offset);
-        }
-    }
-
     // local copy of the callback flags
     int flags = mPreviewCallbackFlag;
 
@@ -1069,9 +1041,7 @@
         mPreviewCallbackFlag &= ~(FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
                                   FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
                                   FRAME_CALLBACK_FLAG_ENABLE_MASK);
-        if (mUseOverlay) {
-            disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        }
+        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
     }
 
     if (c != 0) {
@@ -1108,11 +1078,6 @@
     size_t size;
     sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
 
-    // Put the YUV version of the snapshot in the preview display.
-    if (mSurface != 0 && !mUseOverlay) {
-        mSurface->postBuffer(offset);
-    }
-
     sp<ICameraClient> c = mCameraClient;
     mLock.unlock();
     if (c != 0) {
@@ -1292,4 +1257,12 @@
     return NO_ERROR;
 }
 
+sp<ISurface> CameraService::getISurface(const sp<Surface>& surface) {
+    if (surface != 0) {
+        return surface->getISurface();
+    } else {
+        return sp<ISurface>(0);
+    }
+}
+
 }; // namespace android
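getISurface() is a small static helper so that the nested Client class can extract the ISurface from a Surface without Surface having to befriend Client (see the header change below). An illustrative call site (hypothetical, not shown in the hunks above):

    // Hypothetical use inside CameraService::Client: NULL-safe by construction.
    sp<ISurface> isurface = CameraService::getISurface(surface);
    if (isurface == 0) {
        LOGD("no ISurface available for this Surface");
    }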
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index f09773d..1c43b00 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -79,6 +79,12 @@
     sp<MediaPlayer>     mSoundPlayer[NUM_SOUNDS];
     int                 mSoundRef;  // reference count (release all MediaPlayer when 0)
 
+    // Used by Client objects to extract the ISurface from a Surface object.
+    // This is used because making Client a friend class of Surface would
+    // require including this header in Surface.h since Client is a nested
+    // class.
+    static sp<ISurface> getISurface(const sp<Surface>& surface);
+
     class Client : public BnCamera
     {
     public:
@@ -87,18 +93,22 @@
         virtual status_t        connect(const sp<ICameraClient>& client);
         virtual status_t        lock();
         virtual status_t        unlock();
-        virtual status_t        setPreviewDisplay(const sp<ISurface>& surface);
+        virtual status_t        setPreviewDisplay(const sp<Surface>& surface);
+        virtual status_t        setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture);
         virtual void            setPreviewCallbackFlag(int flag);
         virtual status_t        startPreview();
         virtual void            stopPreview();
         virtual bool            previewEnabled();
+        virtual int32_t         getNumberOfVideoBuffers() const;
+        virtual sp<IMemory>     getVideoBuffer(int32_t index) const;
+        virtual status_t        storeMetaDataInBuffers(bool enabled);
         virtual status_t        startRecording();
         virtual void            stopRecording();
         virtual bool            recordingEnabled();
         virtual void            releaseRecordingFrame(const sp<IMemory>& mem);
         virtual status_t        autoFocus();
         virtual status_t        cancelAutoFocus();
-        virtual status_t        takePicture();
+        virtual status_t        takePicture(int msgType);
         virtual status_t        setParameters(const String8& params);
         virtual String8         getParameters() const;
         virtual status_t        sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
@@ -121,7 +131,6 @@
 
         // these are internal functions used to set up preview buffers
         status_t                registerPreviewBuffers();
-        status_t                setOverlay();
 
         // camera operation mode
         enum camera_mode {
@@ -133,6 +142,9 @@
         status_t                startPreviewMode();
         status_t                startRecordingMode();
 
+        // internal function used by sendCommand to enable/disable shutter sound.
+        status_t                enableShutterSound(bool enable);
+
         // these are static callback functions
         static void             notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
         static void             dataCallback(int32_t msgType, const sp<IMemory>& dataPtr, void* user);
@@ -163,18 +175,15 @@
         int                             mCameraFacing;   // immutable after constructor
         pid_t                           mClientPid;
         sp<CameraHardwareInterface>     mHardware;       // cleared after disconnect()
-        bool                            mUseOverlay;     // immutable after constructor
-        sp<OverlayRef>                  mOverlayRef;
-        int                             mOverlayW;
-        int                             mOverlayH;
         int                             mPreviewCallbackFlag;
         int                             mOrientation;     // Current display orientation
-        // True if display orientation has been changed. This is only used in overlay.
-        int                             mOrientationChanged;
+        bool                            mPlayShutterSound;
 
         // Ensures atomicity among the public methods
         mutable Mutex                   mLock;
-        sp<ISurface>                    mSurface;
+        // Binder identity of the Surface or SurfaceTexture currently set as
+        // the preview target.
+        sp<IBinder>                     mSurface;
+        sp<ANativeWindow>               mPreviewWindow;
 
         // If the user want us to return a copy of the preview frame (instead
         // of the original one), we allocate mPreviewBuffer and reuse it if possible.