am 8ec8a534: am 1e0b0808: Merge "Restore stagefright command line tool." into klp-dev

* commit '8ec8a5346831efa699d31274363da9cf516981db':
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index fd78572..22199fa 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -39,6 +39,9 @@
 {
 }
 
+CameraTraits<Camera>::TCamConnectService CameraTraits<Camera>::fnConnectService =
+        &ICameraService::connect;
+
 // construct a camera client from an existing camera remote
 sp<Camera> Camera::create(const sp<ICamera>& camera)
 {
@@ -97,13 +100,13 @@
 }
 
 // pass the buffered IGraphicBufferProducer to the camera service
-status_t Camera::setPreviewTexture(const sp<IGraphicBufferProducer>& bufferProducer)
+status_t Camera::setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)
 {
-    ALOGV("setPreviewTexture(%p)", bufferProducer.get());
+    ALOGV("setPreviewTarget(%p)", bufferProducer.get());
     sp <ICamera> c = mCamera;
     if (c == 0) return NO_INIT;
     ALOGD_IF(bufferProducer == 0, "app passed NULL surface");
-    return c->setPreviewTexture(bufferProducer);
+    return c->setPreviewTarget(bufferProducer);
 }
 
 // start preview mode
@@ -124,7 +127,7 @@
     return c->storeMetaDataInBuffers(enabled);
 }
 
-// start recording mode, must call setPreviewDisplay first
+// start recording mode, must call setPreviewTarget first
 status_t Camera::startRecording()
 {
     ALOGV("startRecording");
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index c25c5fd..55376b0 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -92,20 +92,25 @@
 
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
-                                         const String16& clientPackageName,
+                                               const String16& clientPackageName,
                                                int clientUid)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
     sp<TCamCallbacks> cl = c;
+    status_t status = NO_ERROR;
     const sp<ICameraService>& cs = getCameraService();
+
     if (cs != 0) {
-        c->mCamera = cs->connect(cl, cameraId, clientPackageName, clientUid);
+        TCamConnectService fnConnectService = TCamTraits::fnConnectService;
+        status = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
+                                             /*out*/ c->mCamera);
     }
-    if (c->mCamera != 0) {
+    if (status == OK && c->mCamera != 0) {
         c->mCamera->asBinder()->linkToDeath(c);
         c->mStatus = NO_ERROR;
     } else {
+        ALOGW("An error occurred while connecting to camera: %d", cameraId);
         c.clear();
     }
     return c;
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index f447c5b..7765914 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -133,11 +133,19 @@
 }
 
 status_t CameraMetadata::append(const CameraMetadata &other) {
+    return append(other.mBuffer);
+}
+
+status_t CameraMetadata::append(const camera_metadata_t* other) {
     if (mLocked) {
         ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
         return INVALID_OPERATION;
     }
-    return append_camera_metadata(mBuffer, other.mBuffer);
+    size_t extraEntries = get_camera_metadata_entry_count(other);
+    size_t extraData = get_camera_metadata_data_count(other);
+    resizeIfNeeded(extraEntries, extraData);
+
+    return append_camera_metadata(mBuffer, other);
 }
 
 size_t CameraMetadata::entryCount() const {
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index 12356f0..8c6e1f7 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -29,7 +29,7 @@
 
 enum {
     DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
-    SET_PREVIEW_TEXTURE,
+    SET_PREVIEW_TARGET,
     SET_PREVIEW_CALLBACK_FLAG,
     SET_PREVIEW_CALLBACK_TARGET,
     START_PREVIEW,
@@ -70,14 +70,14 @@
     }
 
     // pass the buffered IGraphicBufferProducer to the camera service
-    status_t setPreviewTexture(const sp<IGraphicBufferProducer>& bufferProducer)
+    status_t setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)
     {
-        ALOGV("setPreviewTexture");
+        ALOGV("setPreviewTarget");
         Parcel data, reply;
         data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
         sp<IBinder> b(bufferProducer->asBinder());
         data.writeStrongBinder(b);
-        remote()->transact(SET_PREVIEW_TEXTURE, data, &reply);
+        remote()->transact(SET_PREVIEW_TARGET, data, &reply);
         return reply.readInt32();
     }
 
@@ -104,7 +104,7 @@
         return reply.readInt32();
     }
 
-    // start preview mode, must call setPreviewDisplay first
+    // start preview mode, must call setPreviewTarget first
     status_t startPreview()
     {
         ALOGV("startPreview");
@@ -114,7 +114,7 @@
         return reply.readInt32();
     }
 
-    // start recording mode, must call setPreviewDisplay first
+    // start recording mode, must call setPreviewTarget first
     status_t startRecording()
     {
         ALOGV("startRecording");
@@ -285,12 +285,12 @@
             reply->writeNoException();
             return NO_ERROR;
         } break;
-        case SET_PREVIEW_TEXTURE: {
-            ALOGV("SET_PREVIEW_TEXTURE");
+        case SET_PREVIEW_TARGET: {
+            ALOGV("SET_PREVIEW_TARGET");
             CHECK_INTERFACE(ICamera, data, reply);
             sp<IGraphicBufferProducer> st =
                 interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
-            reply->writeInt32(setPreviewTexture(st));
+            reply->writeInt32(setPreviewTarget(st));
             return NO_ERROR;
         } break;
         case SET_PREVIEW_CALLBACK_FLAG: {
diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp
index 876a2df..5fc89fb 100644
--- a/camera/ICameraService.cpp
+++ b/camera/ICameraService.cpp
@@ -33,6 +33,7 @@
 #include <camera/ICameraClient.h>
 #include <camera/camera2/ICameraDeviceUser.h>
 #include <camera/camera2/ICameraDeviceCallbacks.h>
+#include <camera/CameraMetadata.h>
 
 namespace android {
 
@@ -119,9 +120,34 @@
         return result;
     }
 
+    // get camera characteristics (static metadata)
+    virtual status_t getCameraCharacteristics(int cameraId,
+                                              CameraMetadata* cameraInfo) {
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
+        data.writeInt32(cameraId);
+        remote()->transact(BnCameraService::GET_CAMERA_CHARACTERISTICS, data, &reply);
+
+        if (readExceptionCode(reply)) return -EPROTO;
+        status_t result = reply.readInt32();
+
+        CameraMetadata out;
+        if (reply.readInt32() != 0) {
+            out.readFromParcel(&reply);
+        }
+
+        if (cameraInfo != NULL) {
+            cameraInfo->swap(out);
+        }
+
+        return result;
+    }
+
     // connect to camera service (android.hardware.Camera)
-    virtual sp<ICamera> connect(const sp<ICameraClient>& cameraClient, int cameraId,
-                                const String16 &clientPackageName, int clientUid)
+    virtual status_t connect(const sp<ICameraClient>& cameraClient, int cameraId,
+                             const String16 &clientPackageName, int clientUid,
+                             /*out*/
+                             sp<ICamera>& device)
     {
         Parcel data, reply;
         data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
@@ -131,13 +157,19 @@
         data.writeInt32(clientUid);
         remote()->transact(BnCameraService::CONNECT, data, &reply);
 
-        if (readExceptionCode(reply)) return NULL;
-        return interface_cast<ICamera>(reply.readStrongBinder());
+        if (readExceptionCode(reply)) return -EPROTO;
+        status_t status = reply.readInt32();
+        if (reply.readInt32() != 0) {
+            device = interface_cast<ICamera>(reply.readStrongBinder());
+        }
+        return status;
     }
 
     // connect to camera service (pro client)
-    virtual sp<IProCameraUser> connect(const sp<IProCameraCallbacks>& cameraCb, int cameraId,
-                                       const String16 &clientPackageName, int clientUid)
+    virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb, int cameraId,
+                                const String16 &clientPackageName, int clientUid,
+                                /*out*/
+                                sp<IProCameraUser>& device)
     {
         Parcel data, reply;
         data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
@@ -147,16 +179,22 @@
         data.writeInt32(clientUid);
         remote()->transact(BnCameraService::CONNECT_PRO, data, &reply);
 
-        if (readExceptionCode(reply)) return NULL;
-        return interface_cast<IProCameraUser>(reply.readStrongBinder());
+        if (readExceptionCode(reply)) return -EPROTO;
+        status_t status = reply.readInt32();
+        if (reply.readInt32() != 0) {
+            device = interface_cast<IProCameraUser>(reply.readStrongBinder());
+        }
+        return status;
     }
 
     // connect to camera service (android.hardware.camera2.CameraDevice)
-    virtual sp<ICameraDeviceUser> connect(
+    virtual status_t connectDevice(
             const sp<ICameraDeviceCallbacks>& cameraCb,
             int cameraId,
             const String16& clientPackageName,
-            int clientUid)
+            int clientUid,
+            /*out*/
+            sp<ICameraDeviceUser>& device)
     {
         Parcel data, reply;
         data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
@@ -166,8 +204,12 @@
         data.writeInt32(clientUid);
         remote()->transact(BnCameraService::CONNECT_DEVICE, data, &reply);
 
-        if (readExceptionCode(reply)) return NULL;
-        return interface_cast<ICameraDeviceUser>(reply.readStrongBinder());
+        if (readExceptionCode(reply)) return -EPROTO;
+        status_t status = reply.readInt32();
+        if (reply.readInt32() != 0) {
+            device = interface_cast<ICameraDeviceUser>(reply.readStrongBinder());
+        }
+        return status;
     }
 
     virtual status_t addListener(const sp<ICameraServiceListener>& listener)
@@ -221,6 +263,18 @@
             reply->writeInt32(cameraInfo.orientation);
             return NO_ERROR;
         } break;
+        case GET_CAMERA_CHARACTERISTICS: {
+            CHECK_INTERFACE(ICameraService, data, reply);
+            CameraMetadata info;
+            status_t result = getCameraCharacteristics(data.readInt32(), &info);
+            reply->writeNoException();
+            reply->writeInt32(result);
+
+            // out-variables are after exception and return value
+            reply->writeInt32(1); // means the parcelable is included
+            info.writeToParcel(reply);
+            return NO_ERROR;
+        } break;
         case CONNECT: {
             CHECK_INTERFACE(ICameraService, data, reply);
             sp<ICameraClient> cameraClient =
@@ -228,10 +282,17 @@
             int32_t cameraId = data.readInt32();
             const String16 clientName = data.readString16();
             int32_t clientUid = data.readInt32();
-            sp<ICamera> camera = connect(cameraClient, cameraId,
-                    clientName, clientUid);
+            sp<ICamera> camera;
+            status_t status = connect(cameraClient, cameraId,
+                    clientName, clientUid, /*out*/ camera);
             reply->writeNoException();
-            reply->writeStrongBinder(camera->asBinder());
+            reply->writeInt32(status);
+            if (camera != NULL) {
+                reply->writeInt32(1);
+                reply->writeStrongBinder(camera->asBinder());
+            } else {
+                reply->writeInt32(0);
+            }
             return NO_ERROR;
         } break;
         case CONNECT_PRO: {
@@ -241,10 +302,17 @@
             int32_t cameraId = data.readInt32();
             const String16 clientName = data.readString16();
             int32_t clientUid = data.readInt32();
-            sp<IProCameraUser> camera = connect(cameraClient, cameraId,
-                                                clientName, clientUid);
+            sp<IProCameraUser> camera;
+            status_t status = connectPro(cameraClient, cameraId,
+                    clientName, clientUid, /*out*/ camera);
             reply->writeNoException();
-            reply->writeStrongBinder(camera->asBinder());
+            reply->writeInt32(status);
+            if (camera != NULL) {
+                reply->writeInt32(1);
+                reply->writeStrongBinder(camera->asBinder());
+            } else {
+                reply->writeInt32(0);
+            }
             return NO_ERROR;
         } break;
         case CONNECT_DEVICE: {
@@ -254,10 +322,17 @@
             int32_t cameraId = data.readInt32();
             const String16 clientName = data.readString16();
             int32_t clientUid = data.readInt32();
-            sp<ICameraDeviceUser> camera = connect(cameraClient, cameraId,
-                                                clientName, clientUid);
+            sp<ICameraDeviceUser> camera;
+            status_t status = connectDevice(cameraClient, cameraId,
+                    clientName, clientUid, /*out*/ camera);
             reply->writeNoException();
-            reply->writeStrongBinder(camera->asBinder());
+            reply->writeInt32(status);
+            if (camera != NULL) {
+                reply->writeInt32(1);
+                reply->writeStrongBinder(camera->asBinder());
+            } else {
+                reply->writeInt32(0);
+            }
             return NO_ERROR;
         } break;
         case ADD_LISTENER: {
diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp
index 0fdb85a..bd3d420 100644
--- a/camera/IProCameraCallbacks.cpp
+++ b/camera/IProCameraCallbacks.cpp
@@ -67,11 +67,11 @@
                            IBinder::FLAG_ONEWAY);
     }
 
-    void onResultReceived(int32_t frameId, camera_metadata* result) {
+    void onResultReceived(int32_t requestId, camera_metadata* result) {
         ALOGV("onResultReceived");
         Parcel data, reply;
         data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor());
-        data.writeInt32(frameId);
+        data.writeInt32(requestId);
         CameraMetadata::writeToParcel(data, result);
         remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY);
     }
@@ -107,10 +107,10 @@
         case RESULT_RECEIVED: {
             ALOGV("RESULT_RECEIVED");
             CHECK_INTERFACE(IProCameraCallbacks, data, reply);
-            int32_t frameId = data.readInt32();
+            int32_t requestId = data.readInt32();
             camera_metadata_t *result = NULL;
             CameraMetadata::readFromParcel(data, &result);
-            onResultReceived(frameId, result);
+            onResultReceived(requestId, result);
             return NO_ERROR;
             break;
         }
diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp
index f6c9ca1..ba5a48c 100644
--- a/camera/ProCamera.cpp
+++ b/camera/ProCamera.cpp
@@ -26,7 +26,6 @@
 #include <binder/IMemory.h>
 
 #include <camera/ProCamera.h>
-#include <camera/ICameraService.h>
 #include <camera/IProCameraUser.h>
 #include <camera/IProCameraCallbacks.h>
 
@@ -47,6 +46,9 @@
 {
 }
 
+CameraTraits<ProCamera>::TCamConnectService CameraTraits<ProCamera>::fnConnectService =
+        &ICameraService::connectPro;
+
 ProCamera::~ProCamera()
 {
 
@@ -88,8 +90,8 @@
     }
 }
 
-void ProCamera::onResultReceived(int32_t frameId, camera_metadata* result) {
-    ALOGV("%s: frameId = %d, result = %p", __FUNCTION__, frameId, result);
+void ProCamera::onResultReceived(int32_t requestId, camera_metadata* result) {
+    ALOGV("%s: requestId = %d, result = %p", __FUNCTION__, requestId, result);
 
     sp<ProCameraListener> listener;
     {
@@ -110,7 +112,7 @@
     result = tmp.release();
 
     if (listener != NULL) {
-        listener->onResultReceived(frameId, result);
+        listener->onResultReceived(requestId, result);
     } else {
         free_camera_metadata(result);
     }
diff --git a/camera/camera2/ICameraDeviceCallbacks.cpp b/camera/camera2/ICameraDeviceCallbacks.cpp
index 3cec1f4..613358a 100644
--- a/camera/camera2/ICameraDeviceCallbacks.cpp
+++ b/camera/camera2/ICameraDeviceCallbacks.cpp
@@ -32,7 +32,9 @@
 namespace android {
 
 enum {
-    NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION,
+    CAMERA_ERROR = IBinder::FIRST_CALL_TRANSACTION,
+    CAMERA_IDLE,
+    CAPTURE_STARTED,
     RESULT_RECEIVED,
 };
 
@@ -44,19 +46,37 @@
     {
     }
 
-    // generic callback from camera service to app
-    void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
+    void onDeviceError(CameraErrorCode errorCode)
     {
-        ALOGV("notifyCallback");
+        ALOGV("onDeviceError");
         Parcel data, reply;
         data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor());
-        data.writeInt32(msgType);
-        data.writeInt32(ext1);
-        data.writeInt32(ext2);
-        remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY);
+        data.writeInt32(static_cast<int32_t>(errorCode));
+        remote()->transact(CAMERA_ERROR, data, &reply, IBinder::FLAG_ONEWAY);
         data.writeNoException();
     }
 
+    void onDeviceIdle()
+    {
+        ALOGV("onDeviceIdle");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor());
+        remote()->transact(CAMERA_IDLE, data, &reply, IBinder::FLAG_ONEWAY);
+        data.writeNoException();
+    }
+
+    void onCaptureStarted(int32_t requestId, int64_t timestamp)
+    {
+        ALOGV("onCaptureStarted");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor());
+        data.writeInt32(requestId);
+        data.writeInt64(timestamp);
+        remote()->transact(CAPTURE_STARTED, data, &reply, IBinder::FLAG_ONEWAY);
+        data.writeNoException();
+    }
+
+
     void onResultReceived(int32_t requestId, const CameraMetadata& result) {
         ALOGV("onResultReceived");
         Parcel data, reply;
@@ -79,18 +99,33 @@
 {
     ALOGV("onTransact - code = %d", code);
     switch(code) {
-        case NOTIFY_CALLBACK: {
-            ALOGV("NOTIFY_CALLBACK");
+        case CAMERA_ERROR: {
+            ALOGV("onDeviceError");
             CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
-            int32_t msgType = data.readInt32();
-            int32_t ext1 = data.readInt32();
-            int32_t ext2 = data.readInt32();
-            notifyCallback(msgType, ext1, ext2);
+            CameraErrorCode errorCode =
+                    static_cast<CameraErrorCode>(data.readInt32());
+            onDeviceError(errorCode);
+            data.readExceptionCode();
+            return NO_ERROR;
+        } break;
+        case CAMERA_IDLE: {
+            ALOGV("onDeviceIdle");
+            CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
+            onDeviceIdle();
+            data.readExceptionCode();
+            return NO_ERROR;
+        } break;
+        case CAPTURE_STARTED: {
+            ALOGV("onCaptureStarted");
+            CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
+            int32_t requestId = data.readInt32();
+            int64_t timestamp = data.readInt64();
+            onCaptureStarted(requestId, timestamp);
             data.readExceptionCode();
             return NO_ERROR;
         } break;
         case RESULT_RECEIVED: {
-            ALOGV("RESULT_RECEIVED");
+            ALOGV("onResultReceived");
             CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
             int32_t requestId = data.readInt32();
             CameraMetadata result;
@@ -102,8 +137,7 @@
             onResultReceived(requestId, result);
             data.readExceptionCode();
             return NO_ERROR;
-            break;
-        }
+        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp
index 923f487..1e5822f 100644
--- a/camera/camera2/ICameraDeviceUser.cpp
+++ b/camera/camera2/ICameraDeviceUser.cpp
@@ -41,6 +41,20 @@
     CREATE_DEFAULT_REQUEST,
     GET_CAMERA_INFO,
     WAIT_UNTIL_IDLE,
+    FLUSH
+};
+
+namespace {
+    // Read empty strings without printing a false error message.
+    String16 readMaybeEmptyString16(const Parcel& parcel) {
+        size_t len;
+        const char16_t* str = parcel.readString16Inplace(&len);
+        if (str != NULL) {
+            return String16(str, len);
+        } else {
+            return String16();
+        }
+    }
 };
 
 class BpCameraDeviceUser : public BpInterface<ICameraDeviceUser>
@@ -183,6 +197,16 @@
         return reply.readInt32();
     }
 
+    virtual status_t flush()
+    {
+        ALOGV("flush");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+        remote()->transact(FLUSH, data, &reply);
+        reply.readExceptionCode();
+        return reply.readInt32();
+    }
+
 private:
 
 
@@ -250,7 +274,7 @@
 
             sp<IGraphicBufferProducer> bp;
             if (data.readInt32() != 0) {
-                String16 name = data.readString16();
+                String16 name = readMaybeEmptyString16(data);
                 bp = interface_cast<IGraphicBufferProducer>(
                         data.readStrongBinder());
 
@@ -312,6 +336,12 @@
             reply->writeInt32(waitUntilIdle());
             return NO_ERROR;
         } break;
+        case FLUSH: {
+            CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+            reply->writeNoException();
+            reply->writeInt32(flush());
+            return NO_ERROR;
+        }
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp
index f203949..1f5867a 100644
--- a/camera/tests/ProCameraTests.cpp
+++ b/camera/tests/ProCameraTests.cpp
@@ -271,7 +271,6 @@
             CpuConsumer::LockedBuffer buf;
             status_t ret;
 
-            EXPECT_OK(ret);
             if (OK == (ret = consumer->lockNextBuffer(&buf))) {
 
                 dout << "Frame received on streamId = " << streamId <<
@@ -285,9 +284,9 @@
         }
     }
 
-    virtual void onResultReceived(int32_t frameId,
+    virtual void onResultReceived(int32_t requestId,
                                   camera_metadata* request) {
-        dout << "Result received frameId = " << frameId
+        dout << "Result received requestId = " << requestId
              << ", requestPtr = " << (void*)request << std::endl;
         QueueEvent(RESULT_RECEIVED);
         free_camera_metadata(request);
@@ -482,7 +481,7 @@
      * Creating a streaming request for these output streams from a template,
      *  and submit it
      */
-    void createSubmitRequestForStreams(uint8_t* streamIds, size_t count, int requestCount=-1) {
+    void createSubmitRequestForStreams(int32_t* streamIds, size_t count, int requestCount=-1) {
 
         ASSERT_NE((void*)NULL, streamIds);
         ASSERT_LT(0u, count);
@@ -629,7 +628,7 @@
 
             EXPECT_OK(mCamera->exclusiveTryLock());
 
-            uint8_t streams[] = { depthStreamId };
+            int32_t streams[] = { depthStreamId };
             ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(
                                                  streams,
                                                  /*count*/1));
@@ -706,7 +705,7 @@
     // set the output streams to just this stream ID
 
     // wow what a verbose API.
-    uint8_t allStreams[] = { streamId, depthStreamId };
+    int32_t allStreams[] = { streamId, depthStreamId };
     // IMPORTANT. bad things will happen if its not a uint8.
     size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]);
     camera_metadata_entry_t entry;
@@ -735,7 +734,7 @@
 
     free_camera_metadata(request);
 
-    for (int i = 0; i < streamCount; ++i) {
+    for (size_t i = 0; i < streamCount; ++i) {
         EXPECT_OK(mCamera->deleteStream(allStreams[i]));
     }
     EXPECT_OK(mCamera->exclusiveUnlock());
@@ -777,7 +776,7 @@
 
     // set the output streams to just this stream ID
 
-    uint8_t allStreams[] = { streamId };
+    int32_t allStreams[] = { streamId };
     camera_metadata_entry_t entry;
     uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
     int find = find_camera_metadata_entry(request, tag, &entry);
@@ -848,7 +847,7 @@
     // set the output streams to just this stream ID
 
     // wow what a verbose API.
-    uint8_t allStreams[] = { streamId, depthStreamId };
+    int32_t allStreams[] = { streamId, depthStreamId };
     size_t streamCount = 2;
     camera_metadata_entry_t entry;
     uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
@@ -923,7 +922,7 @@
 
     // set the output streams to just this stream ID
 
-    uint8_t allStreams[] = { streamId };
+    int32_t allStreams[] = { streamId };
     size_t streamCount = 1;
     camera_metadata_entry_t entry;
     uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
@@ -974,7 +973,7 @@
 
     EXPECT_OK(mCamera->exclusiveTryLock());
 
-    uint8_t streams[] = { streamId };
+    int32_t streams[] = { streamId };
     ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1));
 
     // Consume a couple of results
@@ -1002,7 +1001,7 @@
 
     EXPECT_OK(mCamera->exclusiveTryLock());
 
-    uint8_t streams[] = { streamId };
+    int32_t streams[] = { streamId };
     ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
                                             /*requests*/TEST_CPU_FRAME_COUNT));
 
@@ -1049,7 +1048,7 @@
 
     EXPECT_OK(mCamera->exclusiveTryLock());
 
-    uint8_t streams[] = { streamId, depthStreamId };
+    int32_t streams[] = { streamId, depthStreamId };
     ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/2,
                                                     /*requests*/REQUEST_COUNT));
 
@@ -1128,7 +1127,7 @@
 
     EXPECT_OK(mCamera->exclusiveTryLock());
 
-    uint8_t streams[] = { streamId };
+    int32_t streams[] = { streamId };
     ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
                                                      /*requests*/NUM_REQUESTS));
 
@@ -1172,7 +1171,6 @@
     }
 
     const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT;
-    const int CONSECUTIVE_FAILS_ASSUME_TIME_OUT = 5;
 
     int streamId = -1;
     sp<CpuConsumer> consumer;
@@ -1183,7 +1181,7 @@
 
     EXPECT_OK(mCamera->exclusiveTryLock());
 
-    uint8_t streams[] = { streamId };
+    int32_t streams[] = { streamId };
     ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
                                                      /*requests*/NUM_REQUESTS));
 
@@ -1278,4 +1276,3 @@
 }
 }
 }
-
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 923f781..49999b5 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -21,6 +21,7 @@
 #include <binder/IPCThreadState.h>
 #include <utils/Errors.h>
 #include <utils/Thread.h>
+#include <utils/Timers.h>
 
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
@@ -35,13 +36,23 @@
 #include <media/stagefright/MediaMuxer.h>
 #include <media/ICrypto.h>
 
+#include <stdlib.h>
+#include <unistd.h>
+#include <string.h>
 #include <stdio.h>
 #include <fcntl.h>
 #include <signal.h>
 #include <getopt.h>
+#include <sys/wait.h>
 
 using namespace android;
 
+static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
+static const uint32_t kMaxBitRate = 100 * 1000000;  // 100Mbps
+static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
+static const uint32_t kFallbackWidth = 1280;        // 720p
+static const uint32_t kFallbackHeight = 720;
+
 // Command-line parameters.
 static bool gVerbose = false;               // chatty on stdout
 static bool gRotate = false;                // rotate 90 degrees
@@ -49,6 +60,7 @@
 static uint32_t gVideoWidth = 0;            // default width+height
 static uint32_t gVideoHeight = 0;
 static uint32_t gBitRate = 4000000;         // 4Mbps
+static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
 
 // Set by signal handler to stop recording.
 static bool gStopRequested;
@@ -57,8 +69,6 @@
 static struct sigaction gOrigSigactionINT;
 static struct sigaction gOrigSigactionHUP;
 
-static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
-static const uint32_t kMaxBitRate = 100 * 1000000;  // 100Mbps
 
 /*
  * Catch keyboard interrupt signals.  On receipt, the "stop requested"
@@ -70,9 +80,8 @@
     gStopRequested = true;
     switch (signum) {
     case SIGINT:
-        sigaction(SIGINT, &gOrigSigactionINT, NULL);
-        break;
     case SIGHUP:
+        sigaction(SIGINT, &gOrigSigactionINT, NULL);
         sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
         break;
     default:
@@ -138,15 +147,21 @@
     format->setFloat("frame-rate", displayFps);
     format->setInt32("i-frame-interval", 10);
 
-    /// MediaCodec
     sp<ALooper> looper = new ALooper;
     looper->setName("screenrecord_looper");
     looper->start();
     ALOGV("Creating codec");
     sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", true);
+    if (codec == NULL) {
+        fprintf(stderr, "ERROR: unable to create video/avc codec instance\n");
+        return UNKNOWN_ERROR;
+    }
     err = codec->configure(format, NULL, NULL,
             MediaCodec::CONFIGURE_FLAG_ENCODE);
     if (err != NO_ERROR) {
+        codec->release();
+        codec.clear();
+
         fprintf(stderr, "ERROR: unable to configure codec (err=%d)\n", err);
         return err;
     }
@@ -155,6 +170,9 @@
     sp<IGraphicBufferProducer> bufferProducer;
     err = codec->createInputSurface(&bufferProducer);
     if (err != NO_ERROR) {
+        codec->release();
+        codec.clear();
+
         fprintf(stderr,
             "ERROR: unable to create encoder input surface (err=%d)\n", err);
         return err;
@@ -163,6 +181,9 @@
     ALOGV("Starting codec");
     err = codec->start();
     if (err != NO_ERROR) {
+        codec->release();
+        codec.clear();
+
         fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
         return err;
     }
@@ -275,7 +296,8 @@
     status_t err;
     ssize_t trackIdx = -1;
     uint32_t debugNumFrames = 0;
-    time_t debugStartWhen = time(NULL);
+    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
+    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
 
     Vector<sp<ABuffer> > buffers;
     err = encoder->getOutputBuffers(&buffers);
@@ -292,6 +314,14 @@
         size_t bufIndex, offset, size;
         int64_t ptsUsec;
         uint32_t flags;
+
+        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
+            if (gVerbose) {
+                printf("Time limit reached\n");
+            }
+            break;
+        }
+
         ALOGV("Calling dequeueOutputBuffer");
         err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                 &flags, kTimeout);
@@ -341,7 +371,6 @@
             }
             break;
         case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
-            // not expected with infinite timeout
             ALOGV("Got -EAGAIN, looping");
             break;
         case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED
@@ -366,18 +395,24 @@
             if (err != NO_ERROR) {
                 fprintf(stderr,
                         "Unable to get new output buffers (err=%d)\n", err);
+                return err;
             }
             break;
+        case INVALID_OPERATION:
+            fprintf(stderr, "Request for encoder buffer failed\n");
+            return err;
         default:
-            ALOGW("Got weird result %d from dequeueOutputBuffer", err);
+            fprintf(stderr,
+                    "Got weird result %d from dequeueOutputBuffer\n", err);
             return err;
         }
     }
 
     ALOGV("Encoder stopping (req=%d)", gStopRequested);
     if (gVerbose) {
-        printf("Encoder stopping; recorded %u frames in %ld seconds\n",
-                debugNumFrames, time(NULL) - debugStartWhen);
+        printf("Encoder stopping; recorded %u frames in %lld seconds\n",
+                debugNumFrames,
+                nanoseconds_to_seconds(systemTime(CLOCK_MONOTONIC) - startWhenNsec));
     }
     return NO_ERROR;
 }
@@ -427,13 +462,18 @@
     sp<MediaCodec> encoder;
     sp<IGraphicBufferProducer> bufferProducer;
     err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
+
     if (err != NO_ERROR && !gSizeSpecified) {
-        ALOGV("Retrying with 720p");
-        if (gVideoWidth != 1280 && gVideoHeight != 720) {
-            fprintf(stderr, "WARNING: failed at %dx%d, retrying at 720p\n",
-                    gVideoWidth, gVideoHeight);
-            gVideoWidth = 1280;
-            gVideoHeight = 720;
+        // fallback is defined for landscape; swap if we're in portrait
+        bool needSwap = gVideoWidth < gVideoHeight;
+        uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
+        uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
+        if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
+            ALOGV("Retrying with 720p");
+            fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
+                    gVideoWidth, gVideoHeight, newWidth, newHeight);
+            gVideoWidth = newWidth;
+            gVideoHeight = newHeight;
             err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
         }
     }
@@ -444,7 +484,12 @@
     // Configure virtual display.
     sp<IBinder> dpy;
     err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
-    if (err != NO_ERROR) return err;
+    if (err != NO_ERROR) {
+        encoder->release();
+        encoder.clear();
+
+        return err;
+    }
 
     // Configure, but do not start, muxer.
     sp<MediaMuxer> muxer = new MediaMuxer(fileName,
@@ -455,23 +500,20 @@
 
     // Main encoder loop.
     err = runEncoder(encoder, muxer);
-    if (err != NO_ERROR) return err;
+    if (err != NO_ERROR) {
+        encoder->release();
+        encoder.clear();
+
+        return err;
+    }
 
     if (gVerbose) {
         printf("Stopping encoder and muxer\n");
     }
 
-    // Shut everything down.
-    //
-    // The virtual display will continue to produce frames until "dpy"
-    // goes out of scope (and something causes the Binder traffic to transmit;
-    // can be forced with IPCThreadState::self()->flushCommands()).  This
-    // could cause SurfaceFlinger to get stuck trying to feed us, so we want
-    // to set a NULL Surface to make the virtual display "dormant".
+    // Shut everything down, starting with the producer side.
     bufferProducer = NULL;
-    SurfaceComposerClient::openGlobalTransaction();
-    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
-    SurfaceComposerClient::closeGlobalTransaction();
+    SurfaceComposerClient::destroyDisplay(dpy);
 
     encoder->stop();
     muxer->stop();
@@ -481,6 +523,67 @@
 }
 
 /*
+ * Sends a broadcast to the media scanner to tell it about the new video.
+ *
+ * This is optional, but nice to have.
+ */
+static status_t notifyMediaScanner(const char* fileName) {
+    pid_t pid = fork();
+    if (pid < 0) {
+        int err = errno;
+        ALOGW("fork() failed: %s", strerror(err));
+        return -err;
+    } else if (pid > 0) {
+        // parent; wait for the child, mostly to make the verbose-mode output
+        // look right, but also to check for and log failures
+        int status;
+        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
+        if (actualPid != pid) {
+            ALOGW("waitpid() returned %d (errno=%d)", actualPid, errno);
+        } else if (status != 0) {
+            ALOGW("'am broadcast' exited with status=%d", status);
+        } else {
+            ALOGV("'am broadcast' exited successfully");
+        }
+    } else {
+        const char* kCommand = "/system/bin/am";
+
+        // child; we're single-threaded, so okay to alloc
+        String8 fileUrl("file://");
+        fileUrl.append(fileName);
+        const char* const argv[] = {
+                kCommand,
+                "broadcast",
+                "-a",
+                "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
+                "-d",
+                fileUrl.string(),
+                NULL
+        };
+        if (gVerbose) {
+            printf("Executing:");
+            for (int i = 0; argv[i] != NULL; i++) {
+                printf(" %s", argv[i]);
+            }
+            putchar('\n');
+        } else {
+            // non-verbose, suppress 'am' output
+            ALOGV("closing stdout/stderr in child");
+            int fd = open("/dev/null", O_WRONLY);
+            if (fd >= 0) {
+                dup2(fd, STDOUT_FILENO);
+                dup2(fd, STDERR_FILENO);
+                close(fd);
+            }
+        }
+        execv(kCommand, const_cast<char* const*>(argv));
+        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
+        exit(1);
+    }
+    return NO_ERROR;
+}
+
+/*
  * Parses a string of the form "1280x720".
  *
  * Returns true on success.
@@ -518,10 +621,13 @@
         "\n"
         "Options:\n"
         "--size WIDTHxHEIGHT\n"
-        "    Set the video size, e.g. \"1280x720\".  For best results, use\n"
-        "    a size supported by the AVC encoder.\n"
+        "    Set the video size, e.g. \"1280x720\".  Default is the device's main\n"
+        "    display resolution (if supported), 1280x720 if not.  For best results,\n"
+        "    use a size supported by the AVC encoder.\n"
         "--bit-rate RATE\n"
-        "    Set the video bit rate, in megabits per second.  Default 4Mbps.\n"
+        "    Set the video bit rate, in megabits per second.  Default %dMbps.\n"
+        "--time-limit TIME\n"
+        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
         "--rotate\n"
         "    Rotate the output 90 degrees.\n"
         "--verbose\n"
@@ -529,8 +635,9 @@
         "--help\n"
         "    Show this message.\n"
         "\n"
-        "Recording continues until Ctrl-C is hit.\n"
-        "\n"
+        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
+        "\n",
+        gBitRate / 1000000, gTimeLimitSec
         );
 }
 
@@ -543,6 +650,7 @@
         { "verbose",    no_argument,        NULL, 'v' },
         { "size",       required_argument,  NULL, 's' },
         { "bit-rate",   required_argument,  NULL, 'b' },
+        { "time-limit", required_argument,  NULL, 't' },
         { "rotate",     no_argument,        NULL, 'r' },
         { NULL,         0,                  NULL, 0 }
     };
@@ -584,6 +692,15 @@
                 return 2;
             }
             break;
+        case 't':
+            gTimeLimitSec = atoi(optarg);
+            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
+                fprintf(stderr,
+                        "Time limit %ds outside acceptable range [1,%d]\n",
+                        gTimeLimitSec, kMaxTimeLimitSec);
+                return 2;
+            }
+            break;
         case 'r':
             gRotate = true;
             break;
@@ -613,6 +730,10 @@
     close(fd);
 
     status_t err = recordScreen(fileName);
+    if (err == NO_ERROR) {
+        // Try to notify the media scanner.  Not fatal if this fails.
+        notifyMediaScanner(fileName);
+    }
     ALOGD(err == NO_ERROR ? "success" : "failed");
     return (int) err;
 }
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 797e0b6..030bf1b 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -51,6 +51,7 @@
 
 #include <fcntl.h>
 
+#include <gui/GLConsumer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 
diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
index 06fc29d..f2cadf7 100644
--- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
@@ -48,6 +48,14 @@
         return (!memcmp(uuid, mock_uuid, sizeof(uuid)));
     }
 
+    bool MockDrmFactory::isContentTypeSupported(const String8 &mimeType)
+    {
+        if (mimeType != "video/mp4") {
+            return false;
+        }
+        return true;
+    }
+
     status_t MockDrmFactory::createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin)
     {
         *plugin = new MockDrmPlugin();
diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
index ca9eac7..2297f9b 100644
--- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
+++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
@@ -32,6 +32,7 @@
         virtual ~MockDrmFactory() {}
 
         bool isCryptoSchemeSupported(const uint8_t uuid[16]);
+        bool isContentTypeSupported(const String8 &mimeType);
         status_t createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin);
     };
 
diff --git a/include/camera/Camera.h b/include/camera/Camera.h
index c34b3ea..79682b8 100644
--- a/include/camera/Camera.h
+++ b/include/camera/Camera.h
@@ -51,8 +51,14 @@
     typedef CameraListener        TCamListener;
     typedef ICamera               TCamUser;
     typedef ICameraClient         TCamCallbacks;
+    typedef status_t (ICameraService::*TCamConnectService)(const sp<ICameraClient>&,
+                                                           int, const String16&, int,
+                                                           /*out*/
+                                                           sp<ICamera>&);
+    static TCamConnectService     fnConnectService;
 };
 
+
 class Camera :
     public CameraBase<Camera>,
     public BnCameraClient
@@ -75,9 +81,9 @@
             status_t    unlock();
 
             // pass the buffered IGraphicBufferProducer to the camera service
-            status_t    setPreviewTexture(const sp<IGraphicBufferProducer>& bufferProducer);
+            status_t    setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer);
 
-            // start preview mode, must call setPreviewDisplay first
+            // start preview mode, must call setPreviewTarget first
             status_t    startPreview();
 
             // stop preview mode
@@ -86,7 +92,7 @@
             // get preview state
             bool        previewEnabled();
 
-            // start recording mode, must call setPreviewDisplay first
+            // start recording mode, must call setPreviewTarget first
             status_t    startRecording();
 
             // stop recording mode
diff --git a/include/camera/CameraBase.h b/include/camera/CameraBase.h
index 9b08c0f..1b93157 100644
--- a/include/camera/CameraBase.h
+++ b/include/camera/CameraBase.h
@@ -54,9 +54,10 @@
 class CameraBase : public IBinder::DeathRecipient
 {
 public:
-    typedef typename TCamTraits::TCamListener    TCamListener;
-    typedef typename TCamTraits::TCamUser        TCamUser;
-    typedef typename TCamTraits::TCamCallbacks   TCamCallbacks;
+    typedef typename TCamTraits::TCamListener       TCamListener;
+    typedef typename TCamTraits::TCamUser           TCamUser;
+    typedef typename TCamTraits::TCamCallbacks      TCamCallbacks;
+    typedef typename TCamTraits::TCamConnectService TCamConnectService;
 
     static sp<TCam>      connect(int cameraId,
                                  const String16& clientPackageName,
diff --git a/include/camera/CameraMetadata.h b/include/camera/CameraMetadata.h
index fe2bd19..1254d3c 100644
--- a/include/camera/CameraMetadata.h
+++ b/include/camera/CameraMetadata.h
@@ -99,6 +99,11 @@
     status_t append(const CameraMetadata &other);
 
     /**
+     * Append metadata from a raw camera_metadata buffer
+     */
+    status_t append(const camera_metadata* other);
+
+    /**
      * Number of metadata entries.
      */
     size_t entryCount() const;
diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h
index f3a186e..b025735 100644
--- a/include/camera/ICamera.h
+++ b/include/camera/ICamera.h
@@ -50,7 +50,7 @@
     virtual status_t        unlock() = 0;
 
     // pass the buffered IGraphicBufferProducer to the camera service
-    virtual status_t        setPreviewTexture(
+    virtual status_t        setPreviewTarget(
             const sp<IGraphicBufferProducer>& bufferProducer) = 0;
 
     // set the preview callback flag to affect how the received frames from
@@ -64,7 +64,7 @@
     virtual status_t        setPreviewCallbackTarget(
             const sp<IGraphicBufferProducer>& callbackProducer) = 0;
 
-    // start preview mode, must call setPreviewDisplay first
+    // start preview mode, must call setPreviewTarget first
     virtual status_t        startPreview() = 0;
 
     // stop preview mode
diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h
index fa715b7..f342122 100644
--- a/include/camera/ICameraService.h
+++ b/include/camera/ICameraService.h
@@ -30,6 +30,7 @@
 class ICameraServiceListener;
 class ICameraDeviceUser;
 class ICameraDeviceCallbacks;
+class CameraMetadata;
 
 class ICameraService : public IInterface
 {
@@ -45,6 +46,7 @@
         CONNECT_DEVICE,
         ADD_LISTENER,
         REMOVE_LISTENER,
+        GET_CAMERA_CHARACTERISTICS,
     };
 
     enum {
@@ -58,6 +60,9 @@
     virtual status_t getCameraInfo(int cameraId,
                                           struct CameraInfo* cameraInfo) = 0;
 
+    virtual status_t getCameraCharacteristics(int cameraId,
+                                              CameraMetadata* cameraInfo) = 0;
+
     // Returns 'OK' if operation succeeded
     // - Errors: ALREADY_EXISTS if the listener was already added
     virtual status_t addListener(const sp<ICameraServiceListener>& listener)
@@ -71,21 +76,27 @@
      * clientUid == USE_CALLING_UID, then the calling UID is used instead. Only
      * trusted callers can set a clientUid other than USE_CALLING_UID.
      */
-    virtual sp<ICamera> connect(const sp<ICameraClient>& cameraClient,
+    virtual status_t connect(const sp<ICameraClient>& cameraClient,
             int cameraId,
             const String16& clientPackageName,
-            int clientUid) = 0;
+            int clientUid,
+            /*out*/
+            sp<ICamera>& device) = 0;
 
-    virtual sp<IProCameraUser> connect(const sp<IProCameraCallbacks>& cameraCb,
+    virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb,
             int cameraId,
             const String16& clientPackageName,
-            int clientUid) = 0;
+            int clientUid,
+            /*out*/
+            sp<IProCameraUser>& device) = 0;
 
-    virtual sp<ICameraDeviceUser> connect(
+    virtual status_t connectDevice(
             const sp<ICameraDeviceCallbacks>& cameraCb,
             int cameraId,
             const String16& clientPackageName,
-            int clientUid) = 0;
+            int clientUid,
+            /*out*/
+            sp<ICameraDeviceUser>& device) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h
index c774698..e8abb89 100644
--- a/include/camera/IProCameraCallbacks.h
+++ b/include/camera/IProCameraCallbacks.h
@@ -51,7 +51,7 @@
     /** Missing by design: implementation is client-side in ProCamera.cpp **/
     // virtual void onBufferReceived(int streamId,
     //                               const CpuConsumer::LockedBufer& buf);
-    virtual void            onResultReceived(int32_t frameId,
+    virtual void            onResultReceived(int32_t requestId,
                                              camera_metadata* result) = 0;
 };
 
diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h
index 3d1652f..83a3028 100644
--- a/include/camera/ProCamera.h
+++ b/include/camera/ProCamera.h
@@ -25,6 +25,7 @@
 #include <camera/IProCameraUser.h>
 #include <camera/Camera.h>
 #include <camera/CameraMetadata.h>
+#include <camera/ICameraService.h>
 #include <gui/CpuConsumer.h>
 
 #include <gui/Surface.h>
@@ -87,8 +88,14 @@
     typedef ProCameraListener     TCamListener;
     typedef IProCameraUser        TCamUser;
     typedef IProCameraCallbacks   TCamCallbacks;
+    typedef status_t (ICameraService::*TCamConnectService)(const sp<IProCameraCallbacks>&,
+                                                           int, const String16&, int,
+                                                           /*out*/
+                                                           sp<IProCameraUser>&);
+    static TCamConnectService     fnConnectService;
 };
 
+
 class ProCamera :
     public CameraBase<ProCamera>,
     public BnProCameraCallbacks
@@ -245,7 +252,7 @@
     virtual void        onLockStatusChanged(
                                 IProCameraCallbacks::LockStatus newLockStatus);
 
-    virtual void        onResultReceived(int32_t frameId,
+    virtual void        onResultReceived(int32_t requestId,
                                          camera_metadata* result);
 private:
     ProCamera(int cameraId);
diff --git a/include/camera/camera2/ICameraDeviceCallbacks.h b/include/camera/camera2/ICameraDeviceCallbacks.h
index 041fa65..8dac4f2 100644
--- a/include/camera/camera2/ICameraDeviceCallbacks.h
+++ b/include/camera/camera2/ICameraDeviceCallbacks.h
@@ -35,13 +35,27 @@
 public:
     DECLARE_META_INTERFACE(CameraDeviceCallbacks);
 
-    // One way
-    virtual void            notifyCallback(int32_t msgType,
-                                           int32_t ext1,
-                                           int32_t ext2) = 0;
+    /**
+     * Error codes for CAMERA_MSG_ERROR
+     */
+    enum CameraErrorCode {
+        ERROR_CAMERA_DISCONNECTED = 0,
+        ERROR_CAMERA_DEVICE = 1,
+        ERROR_CAMERA_SERVICE = 2
+    };
 
     // One way
-    virtual void            onResultReceived(int32_t frameId,
+    virtual void            onDeviceError(CameraErrorCode errorCode) = 0;
+
+    // One way
+    virtual void            onDeviceIdle() = 0;
+
+    // One way
+    virtual void            onCaptureStarted(int32_t requestId,
+                                             int64_t timestamp) = 0;
+
+    // One way
+    virtual void            onResultReceived(int32_t requestId,
                                              const CameraMetadata& result) = 0;
 };
 
diff --git a/include/camera/camera2/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h
index 45988d0..f71f302 100644
--- a/include/camera/camera2/ICameraDeviceUser.h
+++ b/include/camera/camera2/ICameraDeviceUser.h
@@ -63,6 +63,9 @@
 
     // Wait until all the submitted requests have finished processing
     virtual status_t        waitUntilIdle() =  0;
+
+    // Flush all pending and in-progress work as quickly as possible.
+    virtual status_t        flush() = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 62f0c64..052064d 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -398,18 +398,20 @@
 
                 void        pause();    // suspend thread from execution at next loop boundary
                 void        resume();   // allow thread to execute, if not requested to exit
-                void        pauseConditional();
-                                        // like pause(), but only if prior resume() wasn't latched
 
     private:
+                void        pauseInternal(nsecs_t ns = 0LL);
+                                        // like pause(), but only used internally within thread
+
         friend class AudioRecord;
         virtual bool        threadLoop();
         AudioRecord&        mReceiver;
         virtual ~AudioRecordThread();
         Mutex               mMyLock;    // Thread::mLock is private
         Condition           mMyCond;    // Thread::mThreadExitedCondition is private
-        bool                mPaused;    // whether thread is currently paused
-        bool                mResumeLatch;   // whether next pauseConditional() will be a nop
+        bool                mPaused;    // whether thread is requested to pause at next loop entry
+        bool                mPausedInt; // whether thread internally requests pause
+        nsecs_t             mPausedNs;  // if mPausedInt then associated timeout, otherwise ignored
     };
 
             // body of AudioRecordThread::threadLoop()
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 006af08..225ef76 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -252,6 +252,9 @@
     // bit rate, duration, video and streaming or offload property is enabled
     static bool isOffloadSupported(const audio_offload_info_t& info);
 
+    // check presence of audio flinger service.
+    // returns NO_ERROR if binding to service succeeds, DEAD_OBJECT otherwise
+    static status_t checkAudioFlinger();
     // ----------------------------------------------------------------------------
 
 private:
diff --git a/include/media/AudioTimestamp.h b/include/media/AudioTimestamp.h
new file mode 100644
index 0000000..c29c7e5
--- /dev/null
+++ b/include/media/AudioTimestamp.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_TIMESTAMP_H
+#define ANDROID_AUDIO_TIMESTAMP_H
+
+#include <time.h>
+
+class AudioTimestamp {
+public:
+    AudioTimestamp() : mPosition(0) {
+        mTime.tv_sec = 0;
+        mTime.tv_nsec = 0;
+    }
+    // FIXME change type to match android.media.AudioTrack
+    uint32_t        mPosition; // a frame position in AudioTrack::getPosition() units
+    struct timespec mTime;     // corresponding CLOCK_MONOTONIC when frame is expected to present
+};
+
+#endif  // ANDROID_AUDIO_TIMESTAMP_H
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index ae92cdd..f379ee5 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -19,6 +19,7 @@
 
 #include <cutils/sched_policy.h>
 #include <media/AudioSystem.h>
+#include <media/AudioTimestamp.h>
 #include <media/IAudioTrack.h>
 #include <utils/threads.h>
 
@@ -62,6 +63,9 @@
                                     // voluntary invalidation by mediaserver, or mediaserver crash.
         EVENT_STREAM_END = 7,       // Sent after all the buffers queued in AF and HW are played
                                     // back (after stop is called)
+        EVENT_NEW_TIMESTAMP = 8,    // Delivered periodically and when there's a significant change
+                                    // in the mapping from frame position to presentation time.
+                                    // See AudioTimestamp for the information included with event.
     };
 
     /* Client should declare Buffer on the stack and pass address to obtainBuffer()
@@ -107,6 +111,8 @@
      *          - EVENT_NEW_POS: pointer to const uint32_t containing the new position in frames.
      *          - EVENT_BUFFER_END: unused.
      *          - EVENT_NEW_IAUDIOTRACK: unused.
+     *          - EVENT_STREAM_END: unused.
+     *          - EVENT_NEW_TIMESTAMP: pointer to const AudioTimestamp.
      */
 
     typedef void (*callback_t)(int event, void* user, void *info);
@@ -181,7 +187,8 @@
                                     int notificationFrames = 0,
                                     int sessionId        = 0,
                                     transfer_type transferType = TRANSFER_DEFAULT,
-                                    const audio_offload_info_t *offloadInfo = NULL);
+                                    const audio_offload_info_t *offloadInfo = NULL,
+                                    int uid = -1);
 
     /* Creates an audio track and registers it with AudioFlinger.
      * With this constructor, the track is configured for static buffer mode.
@@ -205,7 +212,8 @@
                                     int notificationFrames = 0,
                                     int sessionId       = 0,
                                     transfer_type transferType = TRANSFER_DEFAULT,
-                                    const audio_offload_info_t *offloadInfo = NULL);
+                                    const audio_offload_info_t *offloadInfo = NULL,
+                                    int uid = -1);
 
     /* Terminates the AudioTrack and unregisters it from AudioFlinger.
      * Also destroys all resources associated with the AudioTrack.
@@ -221,6 +229,7 @@
      *  - INVALID_OPERATION: AudioTrack is already initialized
      *  - BAD_VALUE: invalid parameter (channelMask, format, sampleRate...)
      *  - NO_INIT: audio server or audio hardware not initialized
+     * If status is not equal to NO_ERROR, don't call any other APIs on this AudioTrack.
      * If sharedBuffer is non-0, the frameCount parameter is ignored and
      * replaced by the shared buffer's total allocated size in frame units.
      *
@@ -241,9 +250,10 @@
                             bool threadCanCallJava = false,
                             int sessionId       = 0,
                             transfer_type transferType = TRANSFER_DEFAULT,
-                            const audio_offload_info_t *offloadInfo = NULL);
+                            const audio_offload_info_t *offloadInfo = NULL,
+                            int uid = -1);
 
-    /* Result of constructing the AudioTrack. This must be checked
+    /* Result of constructing the AudioTrack. This must be checked for successful initialization
      * before using any AudioTrack API (except for set()), because using
      * an uninitialized AudioTrack produces undefined results.
      * See set() method above for possible return codes.
@@ -564,6 +574,16 @@
     /* Get parameters */
             String8     getParameters(const String8& keys);
 
+    /* Poll for a timestamp on demand.
+     * Use if EVENT_NEW_TIMESTAMP is not delivered often enough for your needs,
+     * or if you need to get the most recent timestamp outside of the event callback handler.
+     * Caution: calling this method too often may be inefficient;
+     * if you need a high resolution mapping between frame position and presentation time,
+     * consider implementing that at application level, based on the low resolution timestamps.
+     * Returns NO_ERROR if timestamp is valid.
+     */
+            status_t    getTimestamp(AudioTimestamp& timestamp);
+
 protected:
     /* copying audio tracks is not allowed */
                         AudioTrack(const AudioTrack& other);
@@ -581,18 +601,21 @@
 
                 void        pause();    // suspend thread from execution at next loop boundary
                 void        resume();   // allow thread to execute, if not requested to exit
-                void        pauseConditional();
-                                        // like pause(), but only if prior resume() wasn't latched
 
     private:
+                void        pauseInternal(nsecs_t ns = 0LL);
+                                        // like pause(), but only used internally within thread
+
         friend class AudioTrack;
         virtual bool        threadLoop();
         AudioTrack&         mReceiver;
         virtual ~AudioTrackThread();
         Mutex               mMyLock;    // Thread::mLock is private
         Condition           mMyCond;    // Thread::mThreadExitedCondition is private
-        bool                mPaused;    // whether thread is currently paused
-        bool                mResumeLatch;   // whether next pauseConditional() will be a nop
+        bool                mPaused;    // whether thread is requested to pause at next loop entry
+        bool                mPausedInt; // whether thread internally requests pause
+        nsecs_t             mPausedNs;  // if mPausedInt then associated timeout, otherwise ignored
+        bool                mIgnoreNextPausedInt;   // whether to ignore next mPausedInt request
     };
 
             // body of AudioTrackThread::threadLoop()
@@ -630,7 +653,7 @@
             bool     isOffloaded() const
                 { return (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0; }
 
-    // may be changed if IAudioTrack is re-created
+    // Next 3 fields may be changed if IAudioTrack is re-created, but always != 0
     sp<IAudioTrack>         mAudioTrack;
     sp<IMemory>             mCblkMemory;
     audio_track_cblk_t*     mCblk;                  // re-load after mLock.unlock()
@@ -731,6 +754,7 @@
     sp<DeathNotifier>       mDeathNotifier;
     uint32_t                mSequence;              // incremented for each new IAudioTrack attempt
     audio_io_handle_t       mOutput;                // cached output io handle
+    int                     mClientUid;
 };
 
 class TimedAudioTrack : public AudioTrack
diff --git a/include/media/EffectsFactoryApi.h b/include/media/EffectsFactoryApi.h
index b1ed7b0..b1143b9 100644
--- a/include/media/EffectsFactoryApi.h
+++ b/include/media/EffectsFactoryApi.h
@@ -171,6 +171,30 @@
 ////////////////////////////////////////////////////////////////////////////////
 int EffectIsNullUuid(const effect_uuid_t *pEffectUuid);
 
+////////////////////////////////////////////////////////////////////////////////
+//
+//    Function:       EffectGetSubEffects
+//
+//    Description:    Returns the descriptors of the sub effects of the effect
+//                    whose uuid is pointed to by first argument.
+//
+//    Input:
+//          pEffectUuid:    pointer to the effect uuid.
+//          size:           size of the buffer pointed by pDescriptor.
+//
+//    Input/Output:
+//          pDescriptor:    address where to return the sub effect descriptors.
+//
+//    Output:
+//        returned value:    0          successful operation.
+//                          -ENODEV     factory failed to initialize
+//                          -EINVAL     invalid pEffectUuid or pDescriptor
+//                          -ENOENT     no effect with this uuid found
+//        *pDescriptor:     updated with the sub effect descriptors.
+//
+////////////////////////////////////////////////////////////////////////////////
+int EffectGetSubEffects(const effect_uuid_t *pEffectUuid, effect_descriptor_t *pDescriptors, size_t size);
+
 #if __cplusplus
 }  // extern "C"
 #endif
diff --git a/include/media/ExtendedAudioBufferProvider.h b/include/media/ExtendedAudioBufferProvider.h
index 00c4444..2539ed3 100644
--- a/include/media/ExtendedAudioBufferProvider.h
+++ b/include/media/ExtendedAudioBufferProvider.h
@@ -18,12 +18,20 @@
 #define ANDROID_EXTENDED_AUDIO_BUFFER_PROVIDER_H
 
 #include <media/AudioBufferProvider.h>
+#include <media/AudioTimestamp.h>
 
 namespace android {
 
 class ExtendedAudioBufferProvider : public AudioBufferProvider {
 public:
     virtual size_t  framesReady() const = 0;  // see description at AudioFlinger.h
+
+    // Return the total number of frames that have been obtained and released
+    virtual size_t  framesReleased() const { return 0; }
+
+    // Invoked by buffer consumer when a new timestamp is available.
+    // Default implementation ignores the timestamp.
+    virtual void    onTimestamp(const AudioTimestamp& timestamp) { }
 };
 
 }   // namespace android
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 49f921b..899d79f 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -53,6 +53,9 @@
     };
     typedef uint32_t track_flags_t;
 
+    // invariant on exit for all APIs that return an sp<>:
+    //   (return value != 0) == (*status == NO_ERROR)
+
     /* create an audio track and registers it with AudioFlinger.
      * return null if the track cannot be created.
      */
@@ -71,6 +74,7 @@
                                 // output: server's description of IAudioTrack for display in logs.
                                 // Don't attempt to parse, as the format could change.
                                 String8& name,
+                                int clientUid,
                                 status_t *status) = 0;
 
     virtual sp<IAudioRecord> openRecord(
diff --git a/include/media/IAudioTrack.h b/include/media/IAudioTrack.h
index 1014403..5c8a484 100644
--- a/include/media/IAudioTrack.h
+++ b/include/media/IAudioTrack.h
@@ -26,6 +26,7 @@
 #include <binder/IMemory.h>
 #include <utils/LinearTransform.h>
 #include <utils/String8.h>
+#include <media/AudioTimestamp.h>
 
 namespace android {
 
@@ -86,6 +87,12 @@
 
     /* Send parameters to the audio hardware */
     virtual status_t    setParameters(const String8& keyValuePairs) = 0;
+
+    /* Return NO_ERROR if timestamp is valid */
+    virtual status_t    getTimestamp(AudioTimestamp& timestamp) = 0;
+
+    /* Signal the playback thread for a change in control block */
+    virtual void        signal() = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/include/media/IDrm.h b/include/media/IDrm.h
index d630c40..5ef26af 100644
--- a/include/media/IDrm.h
+++ b/include/media/IDrm.h
@@ -32,7 +32,7 @@
 
     virtual status_t initCheck() const = 0;
 
-    virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) = 0;
+    virtual bool isCryptoSchemeSupported(const uint8_t uuid[16], const String8 &mimeType) = 0;
 
     virtual status_t createPlugin(const uint8_t uuid[16]) = 0;
 
diff --git a/include/media/IHDCP.h b/include/media/IHDCP.h
index 54fefa3..352561e 100644
--- a/include/media/IHDCP.h
+++ b/include/media/IHDCP.h
@@ -46,6 +46,17 @@
     // Request to shutdown the active HDCP session.
     virtual status_t shutdownAsync() = 0;
 
+    // Returns the capability bitmask of this HDCP session.
+    // Possible return values (please refer to HDCAPAPI.h):
+    //   HDCP_CAPS_ENCRYPT: mandatory, meaning the HDCP module can encrypt
+    //   from an input byte-array buffer to an output byte-array buffer
+    //   HDCP_CAPS_ENCRYPT_NATIVE: the HDCP module supports encryption from
+    //   a native buffer to an output byte-array buffer. The format of the
+    //   input native buffer is specific to vendor's encoder implementation.
+    //   It is the same format as that used by the encoder when
+    //   "storeMetaDataInBuffers" extension is enabled on its output port.
+    virtual uint32_t getCaps() = 0;
+
     // ENCRYPTION only:
     // Encrypt data according to the HDCP spec. "size" bytes of data are
     // available at "inData" (virtual address), "size" may not be a multiple
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index fef7af2..2998b37 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -49,8 +49,12 @@
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
     virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId = 0) = 0;
 
-    virtual sp<IMemory>         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0;
-    virtual sp<IMemory>         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0;
+    virtual status_t         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                                    audio_format_t* pFormat,
+                                    const sp<IMemoryHeap>& heap, size_t *pSize) = 0;
+    virtual status_t         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate,
+                                    int* pNumChannels, audio_format_t* pFormat,
+                                    const sp<IMemoryHeap>& heap, size_t *pSize) = 0;
     virtual sp<IOMX>            getOMX() = 0;
     virtual sp<ICrypto>         makeCrypto() = 0;
     virtual sp<IDrm>            makeDrm() = 0;
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 6d116f0..9c8451c 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -83,6 +83,10 @@
     virtual status_t storeMetaDataInBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
 
+    virtual status_t prepareForAdaptivePlayback(
+            node_id node, OMX_U32 portIndex, OMX_BOOL enable,
+            OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) = 0;
+
     virtual status_t enableGraphicBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
 
@@ -137,6 +141,7 @@
 
     enum InternalOptionType {
         INTERNAL_OPTION_SUSPEND,  // data is a bool
+        INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,  // data is an int64_t
     };
     virtual status_t setInternalOption(
             node_id node,
diff --git a/include/media/IRemoteDisplayClient.h b/include/media/IRemoteDisplayClient.h
index 7b0fa9e..0e6d55d 100644
--- a/include/media/IRemoteDisplayClient.h
+++ b/include/media/IRemoteDisplayClient.h
@@ -49,7 +49,7 @@
     // Provides a surface texture that the client should use to stream buffers to
     // the remote display.
     virtual void onDisplayConnected(const sp<IGraphicBufferProducer>& bufferProducer,
-            uint32_t width, uint32_t height, uint32_t flags) = 0; // one-way
+            uint32_t width, uint32_t height, uint32_t flags, uint32_t session) = 0; // one-way
 
     // Indicates that the remote display has been disconnected normally.
     // This method should only be called once the client has called 'dispose()'
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 3b151ef..cc244f0 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -99,6 +99,7 @@
         virtual status_t    getPosition(uint32_t *position) const = 0;
         virtual status_t    getFramesWritten(uint32_t *frameswritten) const = 0;
         virtual int         getSessionId() const = 0;
+        virtual audio_stream_type_t getAudioStreamType() const = 0;
 
         // If no callback is specified, use the "write" API below to submit
         // audio data.
diff --git a/include/media/SoundPool.h b/include/media/SoundPool.h
index 9e5654f..2dd78cc 100644
--- a/include/media/SoundPool.h
+++ b/include/media/SoundPool.h
@@ -22,6 +22,8 @@
 #include <utils/Vector.h>
 #include <utils/KeyedVector.h>
 #include <media/AudioTrack.h>
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryBase.h>
 
 namespace android {
 
@@ -85,6 +87,7 @@
     int64_t             mLength;
     char*               mUrl;
     sp<IMemory>         mData;
+    sp<MemoryHeapBase>  mHeap;
 };
 
 // stores pending events for stolen channels
diff --git a/include/media/Visualizer.h b/include/media/Visualizer.h
index e429263..6167dd6 100644
--- a/include/media/Visualizer.h
+++ b/include/media/Visualizer.h
@@ -114,6 +114,14 @@
     status_t setScalingMode(uint32_t mode);
     uint32_t getScalingMode() { return mScalingMode; }
 
+    // set which measurements are done on the audio buffers processed by the effect.
+    // valid measurements (mask): MEASUREMENT_MODE_PEAK_RMS
+    status_t setMeasurementMode(uint32_t mode);
+    uint32_t getMeasurementMode() { return mMeasurementMode; }
+
+    // return a set of int32_t measurements
+    status_t getIntMeasurements(uint32_t type, uint32_t number, int32_t *measurements);
+
     // return a capture in PCM 8 bit unsigned format. The size of the capture is equal to
     // getCaptureSize()
     status_t getWaveForm(uint8_t *waveform);
@@ -156,6 +164,7 @@
     uint32_t mCaptureSize;
     uint32_t mSampleRate;
     uint32_t mScalingMode;
+    uint32_t mMeasurementMode;
     capture_cbk_t mCaptureCallBack;
     void *mCaptureCbkUser;
     sp<CaptureThread> mCaptureThread;
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 14381c7..4c05fc3 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -42,9 +42,14 @@
     MEDIA_BUFFERING_UPDATE  = 3,
     MEDIA_SEEK_COMPLETE     = 4,
     MEDIA_SET_VIDEO_SIZE    = 5,
+    MEDIA_STARTED           = 6,
+    MEDIA_PAUSED            = 7,
+    MEDIA_STOPPED           = 8,
+    MEDIA_SKIPPED           = 9,
     MEDIA_TIMED_TEXT        = 99,
     MEDIA_ERROR             = 100,
     MEDIA_INFO              = 200,
+    MEDIA_SUBTITLE_DATA     = 201,
 };
 
 // Generic error codes for the media player framework.  Errors are fatal, the
@@ -173,6 +178,7 @@
     MEDIA_TRACK_TYPE_VIDEO = 1,
     MEDIA_TRACK_TYPE_AUDIO = 2,
     MEDIA_TRACK_TYPE_TIMEDTEXT = 3,
+    MEDIA_TRACK_TYPE_SUBTITLE = 4,
 };
 
 // ----------------------------------------------------------------------------
@@ -218,8 +224,12 @@
             bool            isLooping();
             status_t        setVolume(float leftVolume, float rightVolume);
             void            notify(int msg, int ext1, int ext2, const Parcel *obj = NULL);
-    static  sp<IMemory>     decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat);
-    static  sp<IMemory>     decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat);
+    static  status_t        decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                                   audio_format_t* pFormat,
+                                   const sp<IMemoryHeap>& heap, size_t *pSize);
+    static  status_t        decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate,
+                                   int* pNumChannels, audio_format_t* pFormat,
+                                   const sp<IMemoryHeap>& heap, size_t *pSize);
             status_t        invoke(const Parcel& request, Parcel *reply);
             status_t        setMetadataFilter(const Parcel& filter);
             status_t        getMetadata(bool update_only, bool apply_filter, Parcel *metadata);
diff --git a/include/media/nbaio/AudioStreamOutSink.h b/include/media/nbaio/AudioStreamOutSink.h
index 5976b18..7948d40 100644
--- a/include/media/nbaio/AudioStreamOutSink.h
+++ b/include/media/nbaio/AudioStreamOutSink.h
@@ -52,6 +52,8 @@
     // implementation of GNWT (if any)
     virtual status_t getNextWriteTimestamp(int64_t *timestamp);
 
+    virtual status_t getTimestamp(AudioTimestamp& timestamp);
+
     // NBAIO_Sink end
 
 #if 0   // until necessary
diff --git a/include/media/nbaio/MonoPipe.h b/include/media/nbaio/MonoPipe.h
index 5fcfe9e..d3802fe 100644
--- a/include/media/nbaio/MonoPipe.h
+++ b/include/media/nbaio/MonoPipe.h
@@ -20,9 +20,12 @@
 #include <time.h>
 #include <utils/LinearTransform.h>
 #include "NBAIO.h"
+#include <media/SingleStateQueue.h>
 
 namespace android {
 
+typedef SingleStateQueue<AudioTimestamp> AudioTimestampSingleStateQueue;
+
 // MonoPipe is similar to Pipe except:
 //  - supports only a single reader, called MonoPipeReader
 //  - write() cannot overrun; instead it will return a short actual count if insufficient space
@@ -88,6 +91,9 @@
             // Return true if the write side of a pipe is currently shutdown.
             bool    isShutdown();
 
+            // Return NO_ERROR if there is a timestamp available
+            status_t getTimestamp(AudioTimestamp& timestamp);
+
 private:
     // A pair of methods and a helper variable which allows the reader and the
     // writer to update and observe the values of mFront and mNextRdPTS in an
@@ -127,6 +133,10 @@
     LinearTransform mSamplesToLocalTime;
 
     bool            mIsShutdown;    // whether shutdown(true) was called, no barriers are needed
+
+    AudioTimestampSingleStateQueue::Shared      mTimestampShared;
+    AudioTimestampSingleStateQueue::Mutator     mTimestampMutator;
+    AudioTimestampSingleStateQueue::Observer    mTimestampObserver;
 };
 
 }   // namespace android
diff --git a/include/media/nbaio/MonoPipeReader.h b/include/media/nbaio/MonoPipeReader.h
index 0e1c992..78fe867 100644
--- a/include/media/nbaio/MonoPipeReader.h
+++ b/include/media/nbaio/MonoPipeReader.h
@@ -49,6 +49,8 @@
 
     virtual ssize_t read(void *buffer, size_t count, int64_t readPTS);
 
+    virtual void    onTimestamp(const AudioTimestamp& timestamp);
+
     // NBAIO_Source end
 
 #if 0   // until necessary
diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h
index f5d6eb5..1da0c73 100644
--- a/include/media/nbaio/NBAIO.h
+++ b/include/media/nbaio/NBAIO.h
@@ -28,6 +28,7 @@
 #include <stdlib.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
+#include <media/AudioTimestamp.h>
 
 namespace android {
 
@@ -213,6 +214,11 @@
     //  <other> Something unexpected happened internally.  Check the logs and start debugging.
     virtual status_t getNextWriteTimestamp(int64_t *ts) { return INVALID_OPERATION; }
 
+    // Returns NO_ERROR if a timestamp is available.  The timestamp includes the total number
+    // of frames presented to an external observer, together with the value of CLOCK_MONOTONIC
+    // as of this presentation count.
+    virtual status_t getTimestamp(AudioTimestamp& timestamp) { return INVALID_OPERATION; }
+
 protected:
     NBAIO_Sink(NBAIO_Format format = Format_Invalid) : NBAIO_Port(format), mFramesWritten(0) { }
     virtual ~NBAIO_Sink() { }
@@ -300,6 +306,10 @@
     virtual ssize_t readVia(readVia_t via, size_t total, void *user,
                             int64_t readPTS, size_t block = 0);
 
+    // Invoked asynchronously by corresponding sink when a new timestamp is available.
+    // Default implementation ignores the timestamp.
+    virtual void    onTimestamp(const AudioTimestamp& timestamp) { }
+
 protected:
     NBAIO_Source(NBAIO_Format format = Format_Invalid) : NBAIO_Port(format), mFramesRead(0) { }
     virtual ~NBAIO_Source() { }
diff --git a/include/media/nbaio/SourceAudioBufferProvider.h b/include/media/nbaio/SourceAudioBufferProvider.h
index c08331b..cdfb6fe 100644
--- a/include/media/nbaio/SourceAudioBufferProvider.h
+++ b/include/media/nbaio/SourceAudioBufferProvider.h
@@ -36,6 +36,8 @@
 
     // ExtendedAudioBufferProvider interface
     virtual size_t   framesReady() const;
+    virtual size_t   framesReleased() const;
+    virtual void     onTimestamp(const AudioTimestamp& timestamp);
 
 private:
     const sp<NBAIO_Source> mSource;     // the wrapped source
@@ -45,6 +47,7 @@
     size_t              mOffset;    // frame offset within mAllocated of valid data
     size_t              mRemaining; // frame count within mAllocated of valid data
     size_t              mGetCount;  // buffer.frameCount of the most recent getNextBuffer
+    uint32_t            mFramesReleased;    // counter of the total number of frames released
 };
 
 }   // namespace android
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index 6bf83dd..7395055 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -116,6 +116,7 @@
         kWhatStart                   = 'star',
         kWhatRequestIDRFrame         = 'ridr',
         kWhatSetParameters           = 'setP',
+        kWhatSubmitOutputMetaDataBufferIfEOS = 'subm',
     };
 
     enum {
@@ -124,7 +125,8 @@
     };
 
     enum {
-        kFlagIsSecure   = 1,
+        kFlagIsSecure                                 = 1,
+        kFlagPushBlankBuffersToNativeWindowOnShutdown = 2,
     };
 
     struct BufferInfo {
@@ -199,6 +201,8 @@
     bool mStoreMetaDataInOutputBuffers;
     int32_t mMetaDataBuffersToSubmit;
 
+    int64_t mRepeatFrameDelayUs;
+
     status_t setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode);
     status_t allocateBuffersOnPort(OMX_U32 portIndex);
     status_t freeBuffersOnPort(OMX_U32 portIndex);
@@ -209,6 +213,7 @@
             OMX_U32 *nMinUndequeuedBuffers);
     status_t allocateOutputMetaDataBuffers();
     status_t submitOutputMetaDataBuffer();
+    void signalSubmitOutputMetaDataBufferIfEOS_workaround();
     status_t allocateOutputBuffersFromNativeWindow();
     status_t cancelBufferToNativeWindow(BufferInfo *info);
     status_t freeOutputBuffersNotOwnedByComponent();
@@ -261,6 +266,7 @@
     status_t setupMPEG4EncoderParameters(const sp<AMessage> &msg);
     status_t setupH263EncoderParameters(const sp<AMessage> &msg);
     status_t setupAVCEncoderParameters(const sp<AMessage> &msg);
+    status_t setupVPXEncoderParameters(const sp<AMessage> &msg);
 
     status_t verifySupportForProfileAndLevel(int32_t profile, int32_t level);
 
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
index 6b7a63c..34213be 100644
--- a/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -41,7 +41,8 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenTimeLapseFrameCaptureUs);
+        int64_t timeBetweenTimeLapseFrameCaptureUs,
+        bool storeMetaDataInVideoBuffers = true);
 
     virtual ~CameraSourceTimeLapse();
 
@@ -116,7 +117,8 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenTimeLapseFrameCaptureUs);
+        int64_t timeBetweenTimeLapseFrameCaptureUs,
+        bool storeMetaDataInVideoBuffers = true);
 
     // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
     // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h
index 742bc0e..157b1aa 100644
--- a/include/media/stagefright/DataSource.h
+++ b/include/media/stagefright/DataSource.h
@@ -80,7 +80,6 @@
             const sp<DataSource> &source, String8 *mimeType,
             float *confidence, sp<AMessage> *meta);
 
-    static void RegisterSniffer(SnifferFunc func);
     static void RegisterDefaultSniffers();
 
     // for DRM
@@ -101,6 +100,9 @@
 private:
     static Mutex gSnifferMutex;
     static List<SnifferFunc> gSniffers;
+    static bool gSniffersRegistered;
+
+    static void RegisterSniffer_l(SnifferFunc func);
 
     DataSource(const DataSource &);
     DataSource &operator=(const DataSource &);
diff --git a/include/media/stagefright/MediaCodecList.h b/include/media/stagefright/MediaCodecList.h
index dfb845b..590623b 100644
--- a/include/media/stagefright/MediaCodecList.h
+++ b/include/media/stagefright/MediaCodecList.h
@@ -50,7 +50,8 @@
     status_t getCodecCapabilities(
             size_t index, const char *type,
             Vector<ProfileLevel> *profileLevels,
-            Vector<uint32_t> *colorFormats) const;
+            Vector<uint32_t> *colorFormats,
+            uint32_t *flags) const;
 
 private:
     enum Section {
diff --git a/include/media/stagefright/MediaMuxer.h b/include/media/stagefright/MediaMuxer.h
index c1fdbad..ff6a66e 100644
--- a/include/media/stagefright/MediaMuxer.h
+++ b/include/media/stagefright/MediaMuxer.h
@@ -79,6 +79,16 @@
     status_t setOrientationHint(int degrees);
 
     /**
+     * Set the location.
+     * @param latitude The latitude in degree x 1000. Its value must be in the range
+     * [-900000, 900000].
+     * @param longitude The longitude in degree x 1000. Its value must be in the range
+     * [-1800000, 1800000].
+     * @return OK if no error.
+     */
+    status_t setLocation(int latitude, int longitude);
+
+    /**
      * Stop muxing.
      * This method is a blocking call. Depending on how
      * much data is bufferred internally, the time needed for stopping
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index 583c3b3..daaf20f 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -361,9 +361,14 @@
 };
 
 struct CodecCapabilities {
+    enum {
+        kFlagSupportsAdaptivePlayback = 1 << 0,
+    };
+
     String8 mComponentName;
     Vector<CodecProfileLevel> mProfileLevels;
     Vector<OMX_U32> mColorFormats;
+    uint32_t mFlags;
 };
 
 // Return a vector of componentNames with supported profile/level pairs
diff --git a/include/media/stagefright/Utils.h b/include/media/stagefright/Utils.h
index c24f612..bbad271 100644
--- a/include/media/stagefright/Utils.h
+++ b/include/media/stagefright/Utils.h
@@ -57,7 +57,8 @@
 status_t sendMetaDataToHal(sp<MediaPlayerBase::AudioSink>& sink, const sp<MetaData>& meta);
 
 // Check whether the stream defined by meta can be offloaded to hardware
-bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo, bool isStreaming);
+bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo,
+                      bool isStreaming, audio_stream_type_t streamType);
 
 }  // namespace android
 
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index 1379379..7fd9379 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -256,6 +256,8 @@
         return mEpoch;
     }
 
+    size_t      getFramesFilled();
+
 private:
     size_t      mEpoch;
 };
@@ -358,6 +360,7 @@
     //      which must be > 0.
     //  buffer->mNonContig is unused.
     //  buffer->mRaw is unused.
+    //  ackFlush is true iff being called from Track::start to acknowledge a pending flush.
     // On exit:
     //  buffer->mFrameCount has the actual number of contiguous available frames,
     //      which is always 0 when the return status != NO_ERROR.
@@ -368,7 +371,7 @@
     //  NO_ERROR    Success, buffer->mFrameCount > 0.
     //  WOULD_BLOCK No frames are available.
     //  NO_INIT     Shared memory is corrupt.
-    virtual status_t    obtainBuffer(Buffer* buffer);
+    virtual status_t    obtainBuffer(Buffer* buffer, bool ackFlush = false);
 
     // Release (some of) the frames last obtained.
     // On entry, buffer->mFrameCount should have the number of frames to release,
@@ -383,8 +386,6 @@
 protected:
     size_t      mAvailToClient; // estimated frames available to client prior to releaseBuffer()
     int32_t     mFlush;         // our copy of cblk->u.mStreaming.mFlush, for streaming output only
-private:
-    bool        mDeferWake;     // whether another releaseBuffer() is expected soon
 };
 
 // Proxy used by AudioFlinger for servicing AudioTrack
@@ -422,6 +423,9 @@
     // Return the total number of frames which AudioFlinger desired but were unavailable,
     // and thus which resulted in an underrun.
     virtual uint32_t    getUnderrunFrames() const { return mCblk->u.mStreaming.mUnderrunFrames; }
+
+    // Return the total number of frames that AudioFlinger has obtained and released
+    virtual size_t      framesReleased() const { return mCblk->mServer; }
 };
 
 class StaticAudioTrackServerProxy : public AudioTrackServerProxy {
@@ -434,7 +438,7 @@
 public:
     virtual size_t      framesReady();
     virtual void        framesReadyIsCalledByMultipleThreads();
-    virtual status_t    obtainBuffer(Buffer* buffer);
+    virtual status_t    obtainBuffer(Buffer* buffer, bool ackFlush);
     virtual void        releaseBuffer(Buffer* buffer);
     virtual void        tallyUnderrunFrames(uint32_t frameCount);
     virtual uint32_t    getUnderrunFrames() const { return 0; }
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h
index ab6d731..5862c08 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.h
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.h
@@ -62,6 +62,7 @@
         virtual void            pause();
         virtual void            close();
         void setAudioStreamType(audio_stream_type_t streamType) { mStreamType = streamType; }
+        virtual audio_stream_type_t getAudioStreamType() const { return mStreamType; }
                 void            setVolume(float left, float right);
         virtual status_t        dump(int fd,const Vector<String16>& args) const;
 
diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf
index 93f27cb..c3c4b67 100644
--- a/media/libeffects/data/audio_effects.conf
+++ b/media/libeffects/data/audio_effects.conf
@@ -6,6 +6,23 @@
 #        }
 #    }
 libraries {
+# This is a proxy library that will be an abstraction for
+# the HW and SW effects
+
+  #proxy {
+    #path /system/lib/soundfx/libeffectproxy.so
+  #}
+
+# This is the SW implementation library of the effect
+  #libSW {
+    #path /system/lib/soundfx/libswwrapper.so
+  #}
+
+# This is the HW implementation library for the effect
+  #libHW {
+    #path /system/lib/soundfx/libhwwrapper.so
+  #}
+
   bundle {
     path /system/lib/soundfx/libbundlewrapper.so
   }
@@ -18,6 +35,9 @@
   downmix {
     path /system/lib/soundfx/libdownmix.so
   }
+  loudness_enhancer {
+    path /system/lib/soundfx/libldnhncr.so
+  }
 }
 
 # Default pre-processing library. Add to audio_effect.conf "libraries" section if
@@ -43,6 +63,28 @@
 #    }
 
 effects {
+
+# additions for the proxy implementation
+# Proxy implementation
+  #effectname {
+    #library proxy
+    #uuid  xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+
+    # SW implemetation of the effect. Added as a node under the proxy to
+    # indicate this as a sub effect.
+      #libsw {
+         #library libSW
+         #uuid  yyyyyyyy-yyyy-yyyy-yyyy-yyyyyyyyyyyy
+      #} End of SW effect
+
+    # HW implementation of the effect. Added as a node under the proxy to
+    # indicate this as a sub effect.
+      #libhw {
+         #library libHW
+         #uuid  zzzzzzzz-zzzz-zzzz-zzzz-zzzzzzzzzzzz
+      #}End of HW effect
+  #} End of effect proxy
+
   bassboost {
     library bundle
     uuid 8631f300-72e2-11df-b57e-0002a5d5c51b
@@ -83,6 +125,10 @@
     library downmix
     uuid 93f04452-e4fe-41cc-91f9-e475b6d1d69f
   }
+  loudness_enhancer {
+    library loudness_enhancer
+    uuid fa415329-2034-4bea-b5dc-5b381c8d1e2c
+  }
 }
 
 # Default pre-processing effects. Add to audio_effect.conf "effects" section if
diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c
index f158929..f8d6041 100644
--- a/media/libeffects/factory/EffectsFactory.c
+++ b/media/libeffects/factory/EffectsFactory.c
@@ -28,6 +28,9 @@
 
 static list_elem_t *gEffectList; // list of effect_entry_t: all currently created effects
 static list_elem_t *gLibraryList; // list of lib_entry_t: all currently loaded libraries
+// list of effect_descriptor and list of sub effects : all currently loaded
+// It does not contain effects without sub effects.
+static list_sub_elem_t *gSubEffectList;
 static pthread_mutex_t gLibLock = PTHREAD_MUTEX_INITIALIZER; // controls access to gLibraryList
 static uint32_t gNumEffects;         // total number number of effects
 static list_elem_t *gCurLib;    // current library in enumeration process
@@ -50,6 +53,8 @@
 static int loadLibrary(cnode *root, const char *name);
 static int loadEffects(cnode *root);
 static int loadEffect(cnode *node);
+// To get and add the effect pointed by the passed node to the gSubEffectList
+static int addSubEffect(cnode *root);
 static lib_entry_t *getLibrary(const char *path);
 static void resetEffectEnumeration();
 static uint32_t updateNumEffects();
@@ -57,6 +62,10 @@
                const effect_uuid_t *uuid,
                lib_entry_t **lib,
                effect_descriptor_t **desc);
+// To search a subeffect in the gSubEffectList
+int findSubEffect(const effect_uuid_t *uuid,
+               lib_entry_t **lib,
+               effect_descriptor_t **desc);
 static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len);
 static int stringToUuid(const char *str, effect_uuid_t *uuid);
 static int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen);
@@ -287,7 +296,12 @@
 
     ret = findEffect(NULL, uuid, &l, &d);
     if (ret < 0){
-        goto exit;
+        // Sub effects are not associated with the library->effects,
+        // so, findEffect will fail. Search for the effect in gSubEffectList.
+        ret = findSubEffect(uuid, &l, &d);
+        if (ret < 0 ) {
+            goto exit;
+        }
     }
 
     // create effect in library
@@ -354,21 +368,27 @@
     }
     if (e1 == NULL) {
         ret = -ENOENT;
+        pthread_mutex_unlock(&gLibLock);
         goto exit;
     }
 
     // release effect in library
     if (fx->lib == NULL) {
         ALOGW("EffectRelease() fx %p library already unloaded", handle);
+        pthread_mutex_unlock(&gLibLock);
     } else {
         pthread_mutex_lock(&fx->lib->lock);
+        // Releasing the gLibLock here as the list access is over as the
+        // effect is removed from the list.
+        // If the gLibLock is not released, we will have a deadlock situation
+        // since we call the sub effect release inside the EffectRelease of Proxy
+        pthread_mutex_unlock(&gLibLock);
         fx->lib->desc->release_effect(fx->subItfe);
         pthread_mutex_unlock(&fx->lib->lock);
     }
     free(fx);
 
 exit:
-    pthread_mutex_unlock(&gLibLock);
     return ret;
 }
 
@@ -380,6 +400,49 @@
     return 1;
 }
 
+// Function to get the sub effect descriptors of the effect whose uuid
+// is pointed by the first argument. It searches the gSubEffectList for the
+// matching uuid and then copies the corresponding sub effect descriptors
+// to the inout param
+int EffectGetSubEffects(const effect_uuid_t *uuid,
+                        effect_descriptor_t *pDescriptors, size_t size)
+{
+   ALOGV("EffectGetSubEffects() UUID: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X"
+          "%02X\n",uuid->timeLow, uuid->timeMid, uuid->timeHiAndVersion,
+          uuid->clockSeq, uuid->node[0], uuid->node[1],uuid->node[2],
+          uuid->node[3],uuid->node[4],uuid->node[5]);
+
+   // Check if the size of the desc buffer is large enough for 2 subeffects
+   if ((uuid == NULL) || (pDescriptors == NULL) ||
+       (size < 2*sizeof(effect_descriptor_t))) {
+       ALOGW("NULL pointer or insufficient memory. Cannot query subeffects");
+       return -EINVAL;
+   }
+   int ret = init();
+   if (ret < 0)
+      return ret;
+   list_sub_elem_t *e = gSubEffectList;
+   sub_effect_entry_t *subeffect;
+   effect_descriptor_t *d;
+   int count = 0;
+   while (e != NULL) {
+       d = (effect_descriptor_t*)e->object;
+       if (memcmp(uuid, &d->uuid, sizeof(effect_uuid_t)) == 0) {
+           ALOGV("EffectGetSubEffects: effect found in the list");
+           list_elem_t *subefx = e->sub_elem;
+           while (subefx != NULL) {
+               subeffect = (sub_effect_entry_t*)subefx->object;
+               d = (effect_descriptor_t*)(subeffect->object);
+               pDescriptors[count++] = *d;
+               subefx = subefx->next;
+           }
+           ALOGV("EffectGetSubEffects end - copied the sub effect descriptors");
+           return count;
+       }
+       e = e->next;
+   }
+   return -ENOENT;
+}
 /////////////////////////////////////////////////
 //      Local functions
 /////////////////////////////////////////////////
@@ -503,6 +566,65 @@
     return -EINVAL;
 }
 
+// This will find the library and UUID tags of the sub effect pointed by the
+// node, gets the effect descriptor and lib_entry_t, and adds the sub effect
+// (a sub_effect_entry_t) to the gSubEffectList
+int addSubEffect(cnode *root)
+{
+    ALOGV("addSubEffect");
+    cnode *node;
+    effect_uuid_t uuid;
+    effect_descriptor_t *d;
+    lib_entry_t *l;
+    list_elem_t *e;
+    node = config_find(root, LIBRARY_TAG);
+    if (node == NULL) {
+        return -EINVAL;
+    }
+    l = getLibrary(node->value);
+    if (l == NULL) {
+        ALOGW("addSubEffect() could not get library %s", node->value);
+        return -EINVAL;
+    }
+    node = config_find(root, UUID_TAG);
+    if (node == NULL) {
+        return -EINVAL;
+    }
+    if (stringToUuid(node->value, &uuid) != 0) {
+        ALOGW("addSubEffect() invalid uuid %s", node->value);
+        return -EINVAL;
+    }
+    d = malloc(sizeof(effect_descriptor_t));
+    if (l->desc->get_descriptor(&uuid, d) != 0) {
+        char s[40];
+        uuidToString(&uuid, s, 40);
+        ALOGW("Error querying effect %s on lib %s", s, l->name);
+        free(d);
+        return -EINVAL;
+    }
+#if (LOG_NDEBUG==0)
+    char s[256];
+    dumpEffectDescriptor(d, s, 256);
+    ALOGV("addSubEffect() read descriptor %p:%s",d, s);
+#endif
+    if (EFFECT_API_VERSION_MAJOR(d->apiVersion) !=
+            EFFECT_API_VERSION_MAJOR(EFFECT_CONTROL_API_VERSION)) {
+        ALOGW("Bad API version %08x on lib %s", d->apiVersion, l->name);
+        free(d);
+        return -EINVAL;
+    }
+    sub_effect_entry_t *sub_effect = malloc(sizeof(sub_effect_entry_t));
+    sub_effect->object = d;
+    // lib_entry_t is stored since the sub effects are not linked to the library
+    sub_effect->lib = l;
+    e = malloc(sizeof(list_elem_t));
+    e->object = sub_effect;
+    e->next = gSubEffectList->sub_elem;
+    gSubEffectList->sub_elem = e;
+    ALOGV("addSubEffect end");
+    return 0;
+}
+
 int loadEffects(cnode *root)
 {
     cnode *node;
@@ -571,9 +693,101 @@
     e->next = l->effects;
     l->effects = e;
 
+    // After the UUID node in the config_tree, if node->next is valid,
+    // that would be sub effect node.
+    // Find the sub effects and add them to the gSubEffectList
+    node = node->next;
+    int count = 2;
+    bool hwSubefx = false, swSubefx = false;
+    list_sub_elem_t *sube = NULL;
+    if (node != NULL) {
+        ALOGV("Adding the effect to gEffectSubList as there are sub effects");
+        sube = malloc(sizeof(list_sub_elem_t));
+        sube->object = d;
+        sube->sub_elem = NULL;
+        sube->next = gSubEffectList;
+        gSubEffectList = sube;
+    }
+    while (node != NULL && count) {
+       if (addSubEffect(node)) {
+           ALOGW("loadEffect() could not add subEffect %s", node->value);
+           // Change the gSubEffectList to point to older list;
+           gSubEffectList = sube->next;
+           free(sube->sub_elem);// Free an already added sub effect
+           sube->sub_elem = NULL;
+           free(sube);
+           return -ENOENT;
+       }
+       sub_effect_entry_t *subEntry = (sub_effect_entry_t*)gSubEffectList->sub_elem->object;
+       effect_descriptor_t *subEffectDesc = (effect_descriptor_t*)(subEntry->object);
+       // Since we return a dummy descriptor for the proxy during
+       // get_descriptor call, we replace it with the corresponding
+       // sw effect descriptor, but with Proxy UUID
+       // check for Sw desc
+        if (!((subEffectDesc->flags & EFFECT_FLAG_HW_ACC_MASK) ==
+                                           EFFECT_FLAG_HW_ACC_TUNNEL)) {
+             swSubefx = true;
+             *d = *subEffectDesc;
+             d->uuid = uuid;
+             ALOGV("loadEffect() Changed the Proxy desc");
+       } else
+           hwSubefx = true;
+       count--;
+       node = node->next;
+    }
+    // 1 HW and 1 SW sub effect found. Set the offload flag in the Proxy desc
+    if (hwSubefx && swSubefx) {
+        d->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED;
+    }
     return 0;
 }
 
+// Searches the sub effect matching to the specified uuid
+// in the gSubEffectList. It gets the lib_entry_t for
+// the matched sub_effect. Used in EffectCreate of sub effects
+int findSubEffect(const effect_uuid_t *uuid,
+               lib_entry_t **lib,
+               effect_descriptor_t **desc)
+{
+    list_sub_elem_t *e = gSubEffectList;
+    list_elem_t *subefx;
+    sub_effect_entry_t *effect;
+    lib_entry_t *l = NULL;
+    effect_descriptor_t *d = NULL;
+    int found = 0;
+    int ret = 0;
+
+    if (uuid == NULL)
+        return -EINVAL;
+
+    while (e != NULL && !found) {
+        subefx = (list_elem_t*)(e->sub_elem);
+        while (subefx != NULL) {
+            effect = (sub_effect_entry_t*)subefx->object;
+            l = (lib_entry_t *)effect->lib;
+            d = (effect_descriptor_t *)effect->object;
+            if (memcmp(&d->uuid, uuid, sizeof(effect_uuid_t)) == 0) {
+                ALOGV("uuid matched");
+                found = 1;
+                break;
+            }
+            subefx = subefx->next;
+        }
+        e = e->next;
+    }
+    if (!found) {
+        ALOGV("findSubEffect() effect not found");
+        ret = -ENOENT;
+    } else {
+        ALOGV("findSubEffect() found effect: %s in lib %s", d->name, l->name);
+        *lib = l;
+        if (desc != NULL) {
+            *desc = d;
+        }
+    }
+    return ret;
+}
+
 lib_entry_t *getLibrary(const char *name)
 {
     list_elem_t *e;
diff --git a/media/libeffects/factory/EffectsFactory.h b/media/libeffects/factory/EffectsFactory.h
index c1d4319..147ff18 100644
--- a/media/libeffects/factory/EffectsFactory.h
+++ b/media/libeffects/factory/EffectsFactory.h
@@ -32,6 +32,15 @@
     struct list_elem_s *next;
 } list_elem_t;
 
+// Structure used for storing effects with their sub effects.
+// Used in creating gSubEffectList. Here,
+// object holds the effect desc and the list sub_elem holds the sub effects
+typedef struct list_sub_elem_s {
+    void *object;
+    list_elem_t *sub_elem;
+    struct list_sub_elem_s *next;
+} list_sub_elem_t;
+
 typedef struct lib_entry_s {
     audio_effect_library_t *desc;
     char *name;
@@ -47,6 +56,16 @@
     lib_entry_t *lib;
 } effect_entry_t;
 
+// Structure used to store the lib entry
+// and the descriptor of the sub effects.
+// The library entry is to be stored in case of
+// sub effects as the sub effects are not linked
+// to the library list - gLibraryList.
+typedef struct sub_effect_entry_s {
+    lib_entry_t *lib;
+    void *object;
+} sub_effect_entry_t;
+
 #if __cplusplus
 }  // extern "C"
 #endif
diff --git a/media/libeffects/loudness/Android.mk b/media/libeffects/loudness/Android.mk
new file mode 100644
index 0000000..dcb7b27
--- /dev/null
+++ b/media/libeffects/loudness/Android.mk
@@ -0,0 +1,27 @@
+LOCAL_PATH:= $(call my-dir)
+
+# LoudnessEnhancer library
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+	EffectLoudnessEnhancer.cpp \
+	dsp/core/dynamic_range_compression.cpp
+
+LOCAL_CFLAGS+= -O2 -fvisibility=hidden
+
+LOCAL_SHARED_LIBRARIES := \
+	libcutils \
+	liblog \
+	libstlport
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx
+LOCAL_MODULE:= libldnhncr
+
+LOCAL_C_INCLUDES := \
+	$(call include-path-for, audio-effects) \
+	bionic \
+	bionic/libstdc++/include \
+	external/stlport/stlport
+
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
new file mode 100644
index 0000000..91ed677
--- /dev/null
+++ b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
@@ -0,0 +1,466 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectLE"
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+#include <new>
+#include <time.h>
+#include <math.h>
+#include <audio_effects/effect_loudnessenhancer.h>
+#include "dsp/core/dynamic_range_compression.h"
+
+extern "C" {
+
+// effect_handle_t interface implementation for LE effect
+extern const struct effect_interface_s gLEInterface;
+
+// AOSP Loudness Enhancer UUID: fa415329-2034-4bea-b5dc-5b381c8d1e2c
+const effect_descriptor_t gLEDescriptor = {
+        {0xfe3199be, 0xaed0, 0x413f, 0x87bb, {0x11, 0x26, 0x0e, 0xb6, 0x3c, 0xf1}}, // type
+        {0xfa415329, 0x2034, 0x4bea, 0xb5dc, {0x5b, 0x38, 0x1c, 0x8d, 0x1e, 0x2c}}, // uuid
+        EFFECT_CONTROL_API_VERSION,
+        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST),
+        0, // TODO
+        1,
+        "Loudness Enhancer",
+        "The Android Open Source Project",
+};
+
+enum le_state_e {
+    LOUDNESS_ENHANCER_STATE_UNINITIALIZED,
+    LOUDNESS_ENHANCER_STATE_INITIALIZED,
+    LOUDNESS_ENHANCER_STATE_ACTIVE,
+};
+
+struct LoudnessEnhancerContext {
+    const struct effect_interface_s *mItfe;
+    effect_config_t mConfig;
+    uint8_t mState;
+    int32_t mTargetGainmB;// target gain in mB
+    // in this implementation, there is no coupling between the compression on the left and right
+    // channels
+    le_fx::AdaptiveDynamicRangeCompression* mCompressor;
+};
+
+//
+//--- Local functions (not directly used by effect interface)
+//
+
+void LE_reset(LoudnessEnhancerContext *pContext)
+{
+    ALOGV("  > LE_reset(%p)", pContext);
+
+    if (pContext->mCompressor != NULL) {
+        float targetAmp = pow(10, pContext->mTargetGainmB/2000.0f); // mB to linear amplification
+        ALOGV("LE_reset(): Target gain=%dmB <=> factor=%.2fX", pContext->mTargetGainmB, targetAmp);
+        pContext->mCompressor->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate);
+    } else {
+        ALOGE("LE_reset(%p): null compressors, can't apply target gain", pContext);
+    }
+}
+
+static inline int16_t clamp16(int32_t sample)
+{
+    if ((sample>>15) ^ (sample>>31))
+        sample = 0x7FFF ^ (sample>>31);
+    return sample;
+}
+
+//----------------------------------------------------------------------------
+// LE_setConfig()
+//----------------------------------------------------------------------------
+// Purpose: Set input and output audio configuration.
+//
+// Inputs:
+//  pContext:   effect engine context
+//  pConfig:    pointer to effect_config_t structure holding input and output
+//      configuration parameters
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+
+int LE_setConfig(LoudnessEnhancerContext *pContext, effect_config_t *pConfig)
+{
+    ALOGV("LE_setConfig(%p)", pContext);
+
+    if (pConfig->inputCfg.samplingRate != pConfig->outputCfg.samplingRate) return -EINVAL;
+    if (pConfig->inputCfg.channels != pConfig->outputCfg.channels) return -EINVAL;
+    if (pConfig->inputCfg.format != pConfig->outputCfg.format) return -EINVAL;
+    if (pConfig->inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO) return -EINVAL;
+    if (pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_WRITE &&
+            pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_ACCUMULATE) return -EINVAL;
+    if (pConfig->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) return -EINVAL;
+
+    pContext->mConfig = *pConfig;
+
+    LE_reset(pContext);
+
+    return 0;
+}
+
+
+//----------------------------------------------------------------------------
+// LE_getConfig()
+//----------------------------------------------------------------------------
+// Purpose: Get input and output audio configuration.
+//
+// Inputs:
+//  pContext:   effect engine context
+//  pConfig:    pointer to effect_config_t structure holding input and output
+//      configuration parameters
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+
+void LE_getConfig(LoudnessEnhancerContext *pContext, effect_config_t *pConfig)
+{
+    *pConfig = pContext->mConfig;
+}
+
+
+//----------------------------------------------------------------------------
+// LE_init()
+//----------------------------------------------------------------------------
+// Purpose: Initialize engine with default configuration.
+//
+// Inputs:
+//  pContext:   effect engine context
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+
+int LE_init(LoudnessEnhancerContext *pContext)
+{
+    ALOGV("LE_init(%p)", pContext);
+
+    pContext->mConfig.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    pContext->mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->mConfig.inputCfg.samplingRate = 44100;
+    pContext->mConfig.inputCfg.bufferProvider.getBuffer = NULL;
+    pContext->mConfig.inputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->mConfig.inputCfg.bufferProvider.cookie = NULL;
+    pContext->mConfig.inputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->mConfig.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    pContext->mConfig.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->mConfig.outputCfg.samplingRate = 44100;
+    pContext->mConfig.outputCfg.bufferProvider.getBuffer = NULL;
+    pContext->mConfig.outputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->mConfig.outputCfg.bufferProvider.cookie = NULL;
+    pContext->mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+    pContext->mTargetGainmB = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
+    float targetAmp = pow(10, pContext->mTargetGainmB/2000.0f); // mB to linear amplification
+    ALOGV("LE_init(): Target gain=%dmB <=> factor=%.2fX", pContext->mTargetGainmB, targetAmp);
+
+    if (pContext->mCompressor == NULL) {
+        pContext->mCompressor = new le_fx::AdaptiveDynamicRangeCompression();
+        pContext->mCompressor->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate);
+    }
+
+    LE_setConfig(pContext, &pContext->mConfig);
+
+    return 0;
+}
+
+//
+//--- Effect Library Interface Implementation
+//
+
+int LELib_Create(const effect_uuid_t *uuid,
+                         int32_t sessionId,
+                         int32_t ioId,
+                         effect_handle_t *pHandle) {
+    ALOGV("LELib_Create()");
+    int ret;
+    int i;
+
+    if (pHandle == NULL || uuid == NULL) {
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gLEDescriptor.uuid, sizeof(effect_uuid_t)) != 0) {
+        return -EINVAL;
+    }
+
+    LoudnessEnhancerContext *pContext = new LoudnessEnhancerContext;
+
+    pContext->mItfe = &gLEInterface;
+    pContext->mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+
+    pContext->mCompressor = NULL;
+    ret = LE_init(pContext);
+    if (ret < 0) {
+        ALOGW("LELib_Create() init failed");
+        delete pContext;
+        return ret;
+    }
+
+    *pHandle = (effect_handle_t)pContext;
+
+    pContext->mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
+
+    ALOGV("  LELib_Create context is %p", pContext);
+
+    return 0;
+
+}
+
+int LELib_Release(effect_handle_t handle) {
+    LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)handle;
+
+    ALOGV("LELib_Release %p", handle);
+    if (pContext == NULL) {
+        return -EINVAL;
+    }
+    pContext->mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+    if (pContext->mCompressor != NULL) {
+        delete pContext->mCompressor;
+        pContext->mCompressor = NULL;
+    }
+    delete pContext;
+
+    return 0;
+}
+
+int LELib_GetDescriptor(const effect_uuid_t *uuid,
+                                effect_descriptor_t *pDescriptor) {
+
+    if (pDescriptor == NULL || uuid == NULL){
+        ALOGV("LELib_GetDescriptor() called with NULL pointer");
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gLEDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
+        *pDescriptor = gLEDescriptor;
+        return 0;
+    }
+
+    return  -EINVAL;
+} /* end LELib_GetDescriptor */
+
+//
+//--- Effect Control Interface Implementation
+//
+int LE_process(
+        effect_handle_t self, audio_buffer_t *inBuffer, audio_buffer_t *outBuffer)
+{
+    LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)self;
+
+    if (pContext == NULL) {
+        return -EINVAL;
+    }
+
+    if (inBuffer == NULL || inBuffer->raw == NULL ||
+        outBuffer == NULL || outBuffer->raw == NULL ||
+        inBuffer->frameCount != outBuffer->frameCount ||
+        inBuffer->frameCount == 0) {
+        return -EINVAL;
+    }
+
+    //ALOGV("LE about to process %d samples", inBuffer->frameCount);
+    uint16_t inIdx;
+    float inputAmp = pow(10, pContext->mTargetGainmB/2000.0f);
+    float leftSample, rightSample;
+    for (inIdx = 0 ; inIdx < inBuffer->frameCount ; inIdx++) {
+        // makeup gain is applied on the input of the compressor
+        leftSample  = inputAmp * (float)inBuffer->s16[2*inIdx];
+        rightSample = inputAmp * (float)inBuffer->s16[2*inIdx +1];
+        pContext->mCompressor->Compress(&leftSample, &rightSample);
+        inBuffer->s16[2*inIdx]    = (int16_t) leftSample;
+        inBuffer->s16[2*inIdx +1] = (int16_t) rightSample;
+    }
+
+    if (inBuffer->raw != outBuffer->raw) {
+        if (pContext->mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (size_t i = 0; i < outBuffer->frameCount*2; i++) {
+                outBuffer->s16[i] = clamp16(outBuffer->s16[i] + inBuffer->s16[i]);
+            }
+        } else {
+            memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount * 2 * sizeof(int16_t));
+        }
+    }
+    if (pContext->mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
+        return -ENODATA;
+    }
+    return 0;
+}
+
+int LE_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+        void *pCmdData, uint32_t *replySize, void *pReplyData) {
+
+    LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)self;
+    int retsize;
+
+    if (pContext == NULL || pContext->mState == LOUDNESS_ENHANCER_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+//    ALOGV("LE_command command %d cmdSize %d",cmdCode, cmdSize);
+    switch (cmdCode) {
+    case EFFECT_CMD_INIT:
+        if (pReplyData == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        *(int *) pReplyData = LE_init(pContext);
+        break;
+    case EFFECT_CMD_SET_CONFIG:
+        if (pCmdData == NULL || cmdSize != sizeof(effect_config_t)
+                || pReplyData == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        *(int *) pReplyData = LE_setConfig(pContext,
+                (effect_config_t *) pCmdData);
+        break;
+    case EFFECT_CMD_GET_CONFIG:
+        if (pReplyData == NULL ||
+            *replySize != sizeof(effect_config_t)) {
+            return -EINVAL;
+        }
+        LE_getConfig(pContext, (effect_config_t *)pReplyData);
+        break;
+    case EFFECT_CMD_RESET:
+        LE_reset(pContext);
+        break;
+    case EFFECT_CMD_ENABLE:
+        if (pReplyData == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        if (pContext->mState != LOUDNESS_ENHANCER_STATE_INITIALIZED) {
+            return -ENOSYS;
+        }
+        pContext->mState = LOUDNESS_ENHANCER_STATE_ACTIVE;
+        ALOGV("EFFECT_CMD_ENABLE() OK");
+        *(int *)pReplyData = 0;
+        break;
+    case EFFECT_CMD_DISABLE:
+        if (pReplyData == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        if (pContext->mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
+            return -ENOSYS;
+        }
+        pContext->mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
+        ALOGV("EFFECT_CMD_DISABLE() OK");
+        *(int *)pReplyData = 0;
+        break;
+    case EFFECT_CMD_GET_PARAM: {
+        if (pCmdData == NULL ||
+            cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t)) ||
+            pReplyData == NULL ||
+            *replySize < (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t))) {
+            return -EINVAL;
+        }
+        memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + sizeof(uint32_t));
+        effect_param_t *p = (effect_param_t *)pReplyData;
+        p->status = 0;
+        *replySize = sizeof(effect_param_t) + sizeof(uint32_t);
+        if (p->psize != sizeof(uint32_t)) {
+            p->status = -EINVAL;
+            break;
+        }
+        switch (*(uint32_t *)p->data) {
+        case LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB:
+            ALOGV("get target gain(mB) = %d", pContext->mTargetGainmB);
+            *((int32_t *)p->data + 1) = pContext->mTargetGainmB;
+            p->vsize = sizeof(int32_t);
+            *replySize += sizeof(int32_t);
+            break;
+        default:
+            p->status = -EINVAL;
+        }
+        } break;
+    case EFFECT_CMD_SET_PARAM: {
+        if (pCmdData == NULL ||
+            cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t)) ||
+            pReplyData == NULL || *replySize != sizeof(int32_t)) {
+            return -EINVAL;
+        }
+        *(int32_t *)pReplyData = 0;
+        effect_param_t *p = (effect_param_t *)pCmdData;
+        if (p->psize != sizeof(uint32_t) || p->vsize != sizeof(uint32_t)) {
+            *(int32_t *)pReplyData = -EINVAL;
+            break;
+        }
+        switch (*(uint32_t *)p->data) {
+        case LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB:
+            pContext->mTargetGainmB = *((int32_t *)p->data + 1);
+            ALOGV("set target gain(mB) = %d", pContext->mTargetGainmB);
+            LE_reset(pContext); // apply parameter update
+            break;
+        default:
+            *(int32_t *)pReplyData = -EINVAL;
+        }
+        } break;
+    case EFFECT_CMD_SET_DEVICE:
+    case EFFECT_CMD_SET_VOLUME:
+    case EFFECT_CMD_SET_AUDIO_MODE:
+        break;
+
+    default:
+        ALOGW("LE_command invalid command %d",cmdCode);
+        return -EINVAL;
+    }
+
+    return 0;
+}
+
+/* Effect Control Interface Implementation: get_descriptor */
+int LE_getDescriptor(effect_handle_t   self,
+                                    effect_descriptor_t *pDescriptor)
+{
+    LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *) self;
+
+    if (pContext == NULL || pDescriptor == NULL) {
+        ALOGV("LE_getDescriptor() invalid param");
+        return -EINVAL;
+    }
+
+    *pDescriptor = gLEDescriptor;
+
+    return 0;
+}   /* end LE_getDescriptor */
+
+// effect_handle_t interface implementation for DRC effect
+const struct effect_interface_s gLEInterface = {
+        LE_process,
+        LE_command,
+        LE_getDescriptor,
+        NULL,
+};
+
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+    tag : AUDIO_EFFECT_LIBRARY_TAG,
+    version : EFFECT_LIBRARY_API_VERSION,
+    name : "Loudness Enhancer Library",
+    implementor : "The Android Open Source Project",
+    create_effect : LELib_Create,
+    release_effect : LELib_Release,
+    get_descriptor : LELib_GetDescriptor,
+};
+
+}; // extern "C"
+
diff --git a/media/libeffects/loudness/MODULE_LICENSE_APACHE2 b/media/libeffects/loudness/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/libeffects/loudness/MODULE_LICENSE_APACHE2
diff --git a/media/libeffects/loudness/NOTICE b/media/libeffects/loudness/NOTICE
new file mode 100644
index 0000000..ad6ed94
--- /dev/null
+++ b/media/libeffects/loudness/NOTICE
@@ -0,0 +1,190 @@
+
+   Copyright (c) 2013, The Android Open Source Project
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
diff --git a/media/libeffects/loudness/common/core/basic_types.h b/media/libeffects/loudness/common/core/basic_types.h
new file mode 100644
index 0000000..593e914
--- /dev/null
+++ b/media/libeffects/loudness/common/core/basic_types.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_
+#define LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <string>
+using ::std::string;
+using ::std::basic_string;
+#include <vector>
+using ::std::vector;
+
+#include "common/core/os.h"
+
+// -----------------------------------------------------------------------------
+// Definitions of common basic types:
+// -----------------------------------------------------------------------------
+
+#if !defined(G_COMPILE) && !defined(BASE_INTEGRAL_TYPES_H_)
+
+namespace le_fx {
+
+typedef signed char         schar;
+typedef signed char         int8;
+typedef short               int16;
+typedef int                 int32;
+typedef long long           int64;
+
+typedef unsigned char       uint8;
+typedef unsigned short      uint16;
+typedef unsigned int        uint32;
+typedef unsigned long long  uint64;
+
+}  // namespace le_fx
+
+#endif
+
+namespace le_fx {
+
+struct FloatArray {
+  int length;
+  float *data;
+
+  FloatArray(void) {
+    data = NULL;
+    length = 0;
+  }
+};
+
+struct Int16Array {
+  int length;
+  int16 *data;
+
+  Int16Array(void) {
+    data = NULL;
+    length = 0;
+  }
+};
+
+struct Int32Array {
+  int length;
+  int32 *data;
+
+  Int32Array(void) {
+    data = NULL;
+    length = 0;
+  }
+};
+
+struct Int8Array {
+  int length;
+  uint8 *data;
+
+  Int8Array(void) {
+    data = NULL;
+    length = 0;
+  }
+};
+
+//
+// Simple wrapper for waveform data:
+//
+class WaveData : public vector<int16> {
+ public:
+  WaveData();
+  ~WaveData();
+
+  void Set(int number_samples, int sampling_rate, int16 *data);
+  int sample_rate(void) const;
+  void set_sample_rate(int sample_rate);
+  bool Equals(const WaveData &wave_data, int threshold = 0) const;
+
+ private:
+  int sample_rate_;
+};
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_
diff --git a/media/libeffects/loudness/common/core/byte_swapper.h b/media/libeffects/loudness/common/core/byte_swapper.h
new file mode 100644
index 0000000..8f0caf3
--- /dev/null
+++ b/media/libeffects/loudness/common/core/byte_swapper.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_
+#define LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_
+
+#include <stdio.h>
+#include <string.h>
+
+#include "common/core/basic_types.h"
+#include "common/core/os.h"
+
+namespace le_fx {
+
+namespace arch {
+
+inline bool IsLittleEndian(void) {
+  int16 word = 1;
+  char *cp = reinterpret_cast<char *>(&word);
+  return cp[0] != 0;
+}
+
+inline bool IsBigEndian(void) {
+  return !IsLittleEndian();
+}
+
+template <typename T, unsigned int kValSize>
+struct ByteSwapper {
+  static T Swap(const T &val) {
+    T new_val = val;
+    char *first = reinterpret_cast<char *>(&new_val), *last = first + kValSize - 1, x;
+    for (; first < last; ++first, --last) {
+      x = *last;
+      *last = *first;
+      *first = x;
+    }
+    return new_val;
+  }
+};
+
+template <typename T>
+struct ByteSwapper<T, 1> {
+  static T Swap(const T &val) {
+    return val;
+  }
+};
+
+template <typename T>
+struct ByteSwapper<T, 2> {
+  static T Swap(const T &val) {
+    T new_val;
+    const char *o = (const char *)&val;
+    char *p = reinterpret_cast<char *>(&new_val);
+    p[0] = o[1];
+    p[1] = o[0];
+    return new_val;
+  }
+};
+
+template <typename T>
+struct ByteSwapper<T, 4> {
+  static T Swap(const T &val) {
+    T new_val;
+    const char *o = (const char *)&val;
+    char *p = reinterpret_cast<char *>(&new_val);
+    p[0] = o[3];
+    p[1] = o[2];
+    p[2] = o[1];
+    p[3] = o[0];
+    return new_val;
+  }
+};
+
+template <typename T>
+struct ByteSwapper<T, 8> {
+  static T Swap(const T &val) {
+    T new_val = val;
+    const char *o = (const char *)&val;
+    char *p = reinterpret_cast<char *>(&new_val);
+    p[0] = o[7];
+    p[1] = o[6];
+    p[2] = o[5];
+    p[3] = o[4];
+    p[4] = o[3];
+    p[5] = o[2];
+    p[6] = o[1];
+    p[7] = o[0];
+    return new_val;
+  }
+};
+
+template <typename T>
+T SwapBytes(const T &val, bool force_swap) {
+  if (force_swap) {
+#if !defined(LE_FX_NEED_BYTESWAP)
+    return ByteSwapper<T, sizeof(T)>::Swap(val);
+#else
+    return val;
+#endif  // !LE_FX_NEED_BYTESWAP
+  } else {
+#if !defined(LE_FX_NEED_BYTESWAP)
+    return val;
+#else
+    return ByteSwapper<T, sizeof(T)>::Swap(val);
+#endif  // !LE_FX_NEED_BYTESWAP
+  }
+}
+
+template <typename T>
+const T *SwapBytes(const T *vals, unsigned int num_items, bool force_swap) {
+  if (force_swap) {
+#if !defined(LE_FX_NEED_BYTESWAP)
+    T *writeable_vals = const_cast<T *>(vals);
+    for (unsigned int i = 0; i < num_items; i++) {
+      writeable_vals[i] = ByteSwapper<T, sizeof(T)>::Swap(vals[i]);
+    }
+    return writeable_vals;
+#else
+    return vals;
+#endif  // !LE_FX_NEED_BYTESWAP
+  } else {
+#if !defined(LE_FX_NEED_BYTESWAP)
+    return vals;
+#else
+    T *writeable_vals = const_cast<T *>(vals);
+    for (unsigned int i = 0; i < num_items; i++) {
+      writeable_vals[i] = ByteSwapper<T, sizeof(T)>::Swap(vals[i]);
+    }
+    return writeable_vals;
+#endif  // !LE_FX_NEED_BYTESWAP
+  }
+}
+
+}  // namespace arch
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_
diff --git a/media/libeffects/loudness/common/core/math.h b/media/libeffects/loudness/common/core/math.h
new file mode 100644
index 0000000..3f302cc
--- /dev/null
+++ b/media/libeffects/loudness/common/core/math.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_MATH_H_
+#define LE_FX_ENGINE_COMMON_CORE_MATH_H_
+
+#include <math.h>
+#include <algorithm>
+using ::std::min;
+using ::std::max;
+using ::std::fill;
+using ::std::fill_n;using ::std::lower_bound;
+#include <cmath>
+#include <math.h>
+//using ::std::fpclassify;
+
+#include "common/core/os.h"
+#include "common/core/types.h"
+
+namespace le_fx {
+namespace math {
+
+// A fast approximation to log2(.)
+inline float fast_log2(float val) {
+  int* const exp_ptr = reinterpret_cast <int *> (&val);
+  int x = *exp_ptr;
+  const int log_2 = ((x >> 23) & 255) - 128;
+  x &= ~(255 << 23);
+  x += 127 << 23;
+  *exp_ptr = x;
+  val = ((-1.0f / 3) * val + 2) * val - 2.0f / 3;
+  return static_cast<float>(val + log_2);
+}
+
+// A fast approximation to log(.)
+inline float fast_log(float val) {
+  return fast_log2(val) *
+      0.693147180559945286226763982995180413126945495605468750f;
+}
+
+// An approximation of the exp(.) function using a 5-th order Taylor expansion.
+// It's pretty accurate between +-0.1 and accurate to 10e-3 between +-1
+template <typename T>
+inline T ExpApproximationViaTaylorExpansionOrder5(T x) {
+  const T x2 = x * x;
+  const T x3 = x2 * x;
+  const T x4 = x2 * x2;
+  const T x5 = x3 * x2;
+  return 1.0f + x + 0.5f * x2 +
+      0.16666666666666665741480812812369549646973609924316406250f * x3 +
+      0.0416666666666666643537020320309238741174340248107910156250f * x4 +
+      0.008333333333333333217685101601546193705871701240539550781250f * x5;
+}
+
+}  // namespace math
+}  // namespace le_fx
+
+// Math functions missing in Android NDK:
+#if defined(LE_FX_OS_ANDROID)
+
+namespace std {
+
+//
+// Round to the nearest integer: We need this implementation
+// since std::round is missing on android.
+//
+template <typename T>
+inline T round(const T &x) {
+  return static_cast<T>(std::floor(static_cast<double>(x) + 0.5));
+}
+
+}  // namespace std
+
+#endif  // LE_FX_OS_ANDROID
+
+#endif  // LE_FX_ENGINE_COMMON_CORE_MATH_H_
diff --git a/media/libeffects/loudness/common/core/os.h b/media/libeffects/loudness/common/core/os.h
new file mode 100644
index 0000000..4a8ce82
--- /dev/null
+++ b/media/libeffects/loudness/common/core/os.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_OS_H_
+#define LE_FX_ENGINE_COMMON_CORE_OS_H_
+
+// -----------------------------------------------------------------------------
+// OS Identification:
+// -----------------------------------------------------------------------------
+
+#define LE_FX_OS_UNIX
+#if defined(__ANDROID__)
+#    define LE_FX_OS_ANDROID
+#endif  // Android
+
+#endif // LE_FX_ENGINE_COMMON_CORE_OS_H_
diff --git a/media/libeffects/loudness/common/core/types.h b/media/libeffects/loudness/common/core/types.h
new file mode 100644
index 0000000..d1b6c6a
--- /dev/null
+++ b/media/libeffects/loudness/common/core/types.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_COMMON_CORE_TYPES_H_
+#define LE_FX_ENGINE_COMMON_CORE_TYPES_H_
+
+#include "common/core/os.h"
+
+#include "common/core/basic_types.h"
+
+#ifndef LE_FX_DISALLOW_COPY_AND_ASSIGN
+#define LE_FX_DISALLOW_COPY_AND_ASSIGN(TypeName) \
+  TypeName(const TypeName&); \
+  void operator=(const TypeName&)
+#endif  // LE_FX_DISALLOW_COPY_AND_ASSIGN
+
+
+#endif  // LE_FX_ENGINE_COMMON_CORE_TYPES_H_
diff --git a/media/libeffects/loudness/dsp/core/basic-inl.h b/media/libeffects/loudness/dsp/core/basic-inl.h
new file mode 100644
index 0000000..3f77147
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/basic-inl.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_
+#define LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_
+
+#include <math.h>
+
+namespace le_fx {
+
+namespace sigmod {
+
+template <typename T>
+int SearchIndex(const T x_data[],
+                T x,
+                int start_index,
+                int end_index) {
+  int start = start_index;
+  int end = end_index;
+  while (end > start + 1) {
+    int i = (end + start) / 2;
+    if (x_data[i] > x) {
+      end = i;
+    } else {
+      start = i;
+    }
+  }
+  return start;
+}
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_
diff --git a/media/libeffects/loudness/dsp/core/basic.h b/media/libeffects/loudness/dsp/core/basic.h
new file mode 100644
index 0000000..27e0a8d
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/basic.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_BASIC_H_
+#define LE_FX_ENGINE_DSP_CORE_BASIC_H_
+
+#include <limits.h>
+#include "common/core/math.h"
+#include "common/core/types.h"
+
+namespace le_fx {
+
+namespace sigmod {
+
+// Searches for the interval that contains <x> using a divide-and-conquer
+// algorithm.
+// X[]: a vector of sorted values (X[i+1] > X[i])
+// x:   a value
+// StartIndex: the minimum searched index
+// EndIndex: the maximum searched index
+// returns: the index <i> that satisfies: X[i] <= x <= X[i+1] &&
+//          StartIndex <= i <= (EndIndex-1)
+template <typename T>
+int SearchIndex(const T x_data[],
+                T x,
+                int start_index,
+                int end_index);
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#include "dsp/core/basic-inl.h"
+
+#endif  // LE_FX_ENGINE_DSP_CORE_BASIC_H_
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h
new file mode 100644
index 0000000..da75ceb
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_
+#define LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_
+
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+
+
+namespace le_fx {
+
+
+inline void AdaptiveDynamicRangeCompression::set_knee_threshold(float decibel) {
+  // Converts from decibels to the natural-log domain (ln(10)/20 scale, offset by ln(32767))
+  knee_threshold_in_decibel_ = decibel;
+  knee_threshold_ = 0.1151292546497023061569109358970308676362037658691406250f *
+      decibel + 10.39717719035538401328722102334722876548767089843750f;
+}
+
+
+inline void AdaptiveDynamicRangeCompression::set_knee_threshold_via_target_gain(
+    float target_gain) {
+  const float decibel = target_gain_to_knee_threshold_.Interpolate(
+        target_gain);
+  ALOGV("set_knee_threshold_via_target_gain: decibel =%.3fdB", decibel);
+  set_knee_threshold(decibel);
+}
+
+}  // namespace le_fx
+
+
+#endif  // LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
new file mode 100644
index 0000000..7bd068e
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cmath>
+
+#include "common/core/math.h"
+#include "common/core/types.h"
+#include "dsp/core/basic.h"
+#include "dsp/core/interpolation.h"
+#include "dsp/core/dynamic_range_compression.h"
+
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+
+
+namespace le_fx {
+
+// Definitions for static const class members declared in
+// dynamic_range_compression.h.
+const float AdaptiveDynamicRangeCompression::kMinAbsValue = 0.000001f;
+const float AdaptiveDynamicRangeCompression::kMinLogAbsValue =
+    0.032766999999999997517097227728299912996590137481689453125f;
+const float AdaptiveDynamicRangeCompression::kFixedPointLimit = 32767.0f;
+const float AdaptiveDynamicRangeCompression::kInverseFixedPointLimit =
+    1.0f / AdaptiveDynamicRangeCompression::kFixedPointLimit;
+const float AdaptiveDynamicRangeCompression::kDefaultKneeThresholdInDecibel =
+    -8.0f;
+const float AdaptiveDynamicRangeCompression::kCompressionRatio = 7.0f;
+const float AdaptiveDynamicRangeCompression::kTauAttack = 0.001f;
+const float AdaptiveDynamicRangeCompression::kTauRelease = 0.015f;
+
+AdaptiveDynamicRangeCompression::AdaptiveDynamicRangeCompression() {
+  static const float kTargetGain[] = {
+      1.0f, 2.0f, 3.0f, 4.0f, 5.0f };
+  static const float kKneeThreshold[] = {
+      -8.0f, -8.0f, -8.5f, -9.0f, -10.0f };
+  target_gain_to_knee_threshold_.Initialize(
+      &kTargetGain[0], &kKneeThreshold[0],
+      sizeof(kTargetGain) / sizeof(kTargetGain[0]));
+}
+
+bool AdaptiveDynamicRangeCompression::Initialize(
+        float target_gain, float sampling_rate) {
+  set_knee_threshold_via_target_gain(target_gain);
+  sampling_rate_ = sampling_rate;
+  state_ = 0.0f;
+  compressor_gain_ = 1.0f;
+  if (kTauAttack > 0.0f) {
+    const float taufs = kTauAttack * sampling_rate_;
+    alpha_attack_ = std::exp(-1.0f / taufs);
+  } else {
+    alpha_attack_ = 0.0f;
+  }
+  if (kTauRelease > 0.0f) {
+    const float taufs = kTauRelease * sampling_rate_;
+    alpha_release_ = std::exp(-1.0f / taufs);
+  } else {
+    alpha_release_ = 0.0f;
+  }
+  // Feed-forward topology
+  slope_ = 1.0f / kCompressionRatio - 1.0f;
+  return true;
+}
+
+float AdaptiveDynamicRangeCompression::Compress(float x) {
+  const float max_abs_x = std::max(std::fabs(x), kMinLogAbsValue);
+  const float max_abs_x_dB = math::fast_log(max_abs_x);
+  // Subtract Threshold from log-encoded input to get the amount of overshoot
+  const float overshoot = max_abs_x_dB - knee_threshold_;
+  // Hard half-wave rectifier
+  const float rect = std::max(overshoot, 0.0f);
+  // Multiply rectified overshoot with slope
+  const float cv = rect * slope_;
+  const float prev_state = state_;
+  if (cv <= state_) {
+    state_ = alpha_attack_ * state_ + (1.0f - alpha_attack_) * cv;
+  } else {
+    state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv;
+  }
+  compressor_gain_ *=
+      math::ExpApproximationViaTaylorExpansionOrder5(state_ - prev_state);
+  x *= compressor_gain_;
+  if (x > kFixedPointLimit) {
+    return kFixedPointLimit;
+  }
+  if (x < -kFixedPointLimit) {
+    return -kFixedPointLimit;
+  }
+  return x;
+}
+
+void AdaptiveDynamicRangeCompression::Compress(float *x1, float *x2) {
+  // Taking the maximum amplitude of both channels
+  const float max_abs_x = std::max(std::fabs(*x1),
+    std::max(std::fabs(*x2), kMinLogAbsValue));
+  const float max_abs_x_dB = math::fast_log(max_abs_x);
+  // Subtract Threshold from log-encoded input to get the amount of overshoot
+  const float overshoot = max_abs_x_dB - knee_threshold_;
+  // Hard half-wave rectifier
+  const float rect = std::max(overshoot, 0.0f);
+  // Multiply rectified overshoot with slope
+  const float cv = rect * slope_;
+  const float prev_state = state_;
+  if (cv <= state_) {
+    state_ = alpha_attack_ * state_ + (1.0f - alpha_attack_) * cv;
+  } else {
+    state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv;
+  }
+  compressor_gain_ *=
+      math::ExpApproximationViaTaylorExpansionOrder5(state_ - prev_state);
+  *x1 *= compressor_gain_;
+  if (*x1 > kFixedPointLimit) {
+    *x1 = kFixedPointLimit;
+  }
+  if (*x1 < -kFixedPointLimit) {
+    *x1 = -kFixedPointLimit;
+  }
+  *x2 *= compressor_gain_;
+  if (*x2 > kFixedPointLimit) {
+    *x2 = kFixedPointLimit;
+  }
+  if (*x2 < -kFixedPointLimit) {
+    *x2 = -kFixedPointLimit;
+  }
+}
+
+}  // namespace le_fx
+
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h
new file mode 100644
index 0000000..2821a78
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_
+#define LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_
+
+#include "common/core/types.h"
+#include "common/core/math.h"
+#include "dsp/core/basic.h"
+#include "dsp/core/interpolation.h"
+
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+
+
+namespace le_fx {
+
+// An adaptive dynamic range compression algorithm. The gain adaptation is made
+// at the logarithmic domain and it is based on a Branching-Smooth compensated
+// digital peak detector with different time constants for attack and release.
+class AdaptiveDynamicRangeCompression {
+ public:
+    AdaptiveDynamicRangeCompression();
+
+    // Initializes the compressor using prior information. It assumes that the
+    // input signal is speech from high-quality recordings that is scaled and then
+    // fed to the compressor. The compressor is tuned according to the target gain
+    // that is expected to be applied.
+    //
+    // Target gain receives values between 0.0 and 10.0. The knee threshold is
+    // reduced as the target gain increases in order to fit the increased range of
+    // values.
+    //
+    // Values between 1.0 and 2.0 will only mildly affect your signal. Higher
+    // values will reduce the dynamic range of the signal to the benefit of
+    // increased loudness.
+    //
+    // If nothing is known regarding the input, a `target_gain` of 1.0f is a
+    // relatively safe choice for many signals.
+    bool Initialize(float target_gain, float sampling_rate);
+
+  // A fast version of the algorithm that uses approximate computations for the
+  // log(.) and exp(.).
+  float Compress(float x);
+
+  // Stereo channel version of the compressor
+  void Compress(float *x1, float *x2);
+
+  // This version is slower than Compress(.) but faster than CompressSlow(.)
+  float CompressNormalSpeed(float x);
+
+  // A slow version of the algorithm that is easier for further development,
+  // tuning and debugging
+  float CompressSlow(float x);
+
+  // Sets knee threshold (in decibel).
+  void set_knee_threshold(float decibel);
+
+  // Sets knee threshold via the target gain using an experimentally derived
+  // relationship.
+  void set_knee_threshold_via_target_gain(float target_gain);
+
+ private:
+  // The minimum accepted absolute input value and its natural logarithm. This
+  // is to prevent numerical issues when the input is close to zero
+  static const float kMinAbsValue;
+  static const float kMinLogAbsValue;
+  // Fixed-point arithmetic limits
+  static const float kFixedPointLimit;
+  static const float kInverseFixedPointLimit;
+  // The default knee threshold in decibel. The knee threshold defines when the
+  // compressor is actually starting to compress the value of the input samples
+  static const float kDefaultKneeThresholdInDecibel;
+  // The compression ratio is the reciprocal of the slope of the line segment
+  // above the threshold (in the log-domain). The ratio controls the
+  // effectiveness of the compression.
+  static const float kCompressionRatio;
+  // The attack time of the envelope detector
+  static const float kTauAttack;
+  // The release time of the envelope detector
+  static const float kTauRelease;
+
+  float sampling_rate_;
+  // the internal state of the envelope detector
+  float state_;
+  // the latest gain factor that was applied to the input signal
+  float compressor_gain_;
+  // attack constant for exponential damping
+  float alpha_attack_;
+  // release constant for exponential damping
+  float alpha_release_;
+  float slope_;
+  // The knee threshold
+  float knee_threshold_;
+  float knee_threshold_in_decibel_;
+  // This interpolator provides the function that relates target gain to knee
+  // threshold.
+  sigmod::InterpolatorLinear<float> target_gain_to_knee_threshold_;
+
+  LE_FX_DISALLOW_COPY_AND_ASSIGN(AdaptiveDynamicRangeCompression);
+};
+
+}  // namespace le_fx
+
+#include "dsp/core/dynamic_range_compression-inl.h"
+
+#endif  // LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_
diff --git a/media/libeffects/loudness/dsp/core/interpolation.h b/media/libeffects/loudness/dsp/core/interpolation.h
new file mode 100644
index 0000000..23c287c
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/interpolation.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_
+#define LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_
+
+#include "common/core/math.h"
+#include "dsp/core/interpolator_base.h"
+#include "dsp/core/interpolator_linear.h"
+
+#endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_
+
diff --git a/media/libeffects/loudness/dsp/core/interpolator_base-inl.h b/media/libeffects/loudness/dsp/core/interpolator_base-inl.h
new file mode 100644
index 0000000..bd08b65
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/interpolator_base-inl.h
@@ -0,0 +1,180 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_
+#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_
+
+#include "dsp/core/basic.h"
+
+//#define LOG_NDEBUG 0
+#include <cutils/log.h>
+
+
+namespace le_fx {
+
+namespace sigmod {
+
+template <typename T, class Algorithm>
+InterpolatorBase<T, Algorithm>::InterpolatorBase() {
+  status_ = false;
+  cached_index_ = 0;
+  x_data_ = NULL;
+  y_data_ = NULL;
+  data_length_ = 0;
+  own_x_data_ = false;
+  x_start_offset_ = 0.0;
+  last_element_index_ = -1;
+  x_inverse_sampling_interval_ = 0.0;
+  state_ = NULL;
+}
+
+template <typename T, class Algorithm>
+InterpolatorBase<T, Algorithm>::~InterpolatorBase() {
+  delete [] state_;
+  if (own_x_data_) {
+    delete [] x_data_;
+  }
+}
+
+template <typename T, class Algorithm>
+bool InterpolatorBase<T, Algorithm>::Initialize(const vector<T> &x_data,
+                                                const vector<T> &y_data) {
+#ifndef NDEBUG
+  if (x_data.size() != y_data.size()) {
+    LoggerError("InterpolatorBase::Initialize: xData size (%d) != yData size"
+                  " (%d)", x_data.size(), y_data.size());
+  }
+#endif
+  return Initialize(&x_data[0], &y_data[0], x_data.size());
+}
+
+template <typename T, class Algorithm>
+bool InterpolatorBase<T, Algorithm>::Initialize(double x_start_offset,
+                                                double x_sampling_interval,
+                                                const vector<T> &y_data) {
+  return Initialize(x_start_offset,
+                    x_sampling_interval,
+                    &y_data[0],
+                    y_data.size());
+}
+
+template <typename T, class Algorithm>
+bool InterpolatorBase<T, Algorithm>::Initialize(double x_start_offset,
+                                                double x_sampling_interval,
+                                                const T *y_data,
+                                                int data_length) {
+  // Constructs and populate x-axis data: `x_data_`
+  T *x_data_tmp = new T[data_length];
+  float time_offset = x_start_offset;
+  for (int n = 0; n < data_length; n++) {
+    x_data_tmp[n] = time_offset;
+    time_offset += x_sampling_interval;
+  }
+  Initialize(x_data_tmp, y_data, data_length);
+  // Sets-up the regularly sampled interpolation mode
+  x_start_offset_ = x_start_offset;
+  x_inverse_sampling_interval_ = 1.0 / x_sampling_interval;
+  own_x_data_ = true;
+  return status_;
+}
+
+
+template <typename T, class Algorithm>
+bool InterpolatorBase<T, Algorithm>::Initialize(
+    const T *x_data, const T *y_data, int data_length) {
+  // Default settings
+  cached_index_ = 0;
+  data_length_ = 0;
+  x_start_offset_ = 0;
+  x_inverse_sampling_interval_ = 0;
+  delete [] state_; state_ = NULL;  // release state from any prior Initialize
+  if (own_x_data_) { delete [] x_data_; }  // avoid leaking a previously owned x-axis
+  own_x_data_ = false;  // input data is externally owned
+  x_data_ = x_data;
+  y_data_ = y_data;
+  data_length_ = data_length;
+  last_element_index_ = data_length - 1;
+  // Check input data sanity
+  for (int n = 0; n < last_element_index_; ++n) {
+    if (x_data_[n + 1] <= x_data_[n]) {
+      ALOGE("InterpolatorBase::Initialize: xData are not ordered or "
+              "contain equal values (X[%d] <= X[%d]) (%.5e <= %.5e)",
+              n + 1, n, x_data_[n + 1], x_data_[n]);
+      status_ = false;
+      return false;
+    }
+  }
+  // Pre-compute internal state by calling the corresponding function of the
+  // derived class.
+  status_ = static_cast<Algorithm*>(this)->SetInternalState();
+  return status_;
+}
+
+template <typename T, class Algorithm>
+T InterpolatorBase<T, Algorithm>::Interpolate(T x) {
+#ifndef NDEBUG
+  if (cached_index_ < 0 || cached_index_ > data_length_ - 2) {
+    LoggerError("InterpolatorBase:Interpolate: CachedIndex_ out of bounds "
+                  "[0, %d, %d]", cached_index_, data_length_ - 2);
+  }
+#endif
+  // Search for the containing interval
+  if (x <= x_data_[cached_index_]) {
+    if (cached_index_ <= 0) {
+      cached_index_ = 0;
+      return y_data_[0];
+    }
+    if (x >= x_data_[cached_index_ - 1]) {
+      cached_index_--;  // Fast descending
+    } else {
+      if (x <= x_data_[0]) {
+        cached_index_ = 0;
+        return y_data_[0];
+      }
+      cached_index_ = SearchIndex(x_data_, x, 0, cached_index_);
+    }
+  } else {
+    if (cached_index_ >= last_element_index_) {
+      cached_index_ = last_element_index_;
+      return y_data_[last_element_index_];
+    }
+    if (x > x_data_[cached_index_ + 1]) {
+      if (cached_index_ + 2 > last_element_index_) {
+        cached_index_ = last_element_index_ - 1;
+        return y_data_[last_element_index_];
+      }
+      if (x <= x_data_[cached_index_ + 2]) {
+        cached_index_++;  // Fast ascending
+      } else {
+        if (x >= x_data_[last_element_index_]) {
+          cached_index_ = last_element_index_ - 1;
+          return y_data_[last_element_index_];
+        }
+        cached_index_ = SearchIndex(
+            x_data_, x, cached_index_, last_element_index_);
+      }
+    }
+  }
+  // Compute interpolated value by calling the corresponding function of the
+  // derived class.
+  return static_cast<Algorithm*>(this)->MethodSpecificInterpolation(x);
+}
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_
diff --git a/media/libeffects/loudness/dsp/core/interpolator_base.h b/media/libeffects/loudness/dsp/core/interpolator_base.h
new file mode 100644
index 0000000..0cd1a35
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/interpolator_base.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_
+#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_
+
+#include "common/core/types.h"
+
+namespace le_fx {
+
+namespace sigmod {
+
+// Interpolation base-class that provides the interface, while it is the derived
+// class that provides the specific interpolation algorithm. The following list
+// of interpolation algorithms are currently present:
+//
+// InterpolationSine<T>: weighted interpolation between y_data[n] and
+//                       y_data[n+1] using a sin(.) weighting factor from
+//                       0 to pi/4.
+// InterpolationLinear<T>: linear interpolation
+// InterpolationSplines<T>: spline-based interpolation
+//
+// Example (using derived spline-based interpolation class):
+//  InterpolatorSplines<float> interp(x_data, y_data, data_length);
+//  for (int n = 0; n < data_length; n++) Y[n] = interp.Interpolate(X[n]);
+//
+template <typename T, class Algorithm>
+class InterpolatorBase {
+ public:
+  InterpolatorBase();
+  ~InterpolatorBase();
+
+  // Generic random-access interpolation with arbitrary spaced x-axis samples.
+  // Below X[0], the interpolator returns Y[0]. Above X[data_length-1], it
+  // returns Y[data_length-1].
+  T Interpolate(T x);
+
+  bool get_status() const {
+    return status_;
+  }
+
+  // Initializes internal buffers.
+  //  x_data: [(data_length)x1] x-axis coordinates (searching axis)
+  //  y_data: [(data_length)x1] y-axis coordinates (interpolation axis)
+  //  data_length: number of points
+  // returns `true` if everything is ok, `false`, otherwise
+  bool Initialize(const T *x_data, const T *y_data, int data_length);
+
+  // Initializes internal buffers.
+  //  x_data: x-axis coordinates (searching axis)
+  //  y_data: y-axis coordinates (interpolating axis)
+  // returns `true` if everything is ok, `false`, otherwise
+  bool Initialize(const vector<T> &x_data, const vector<T> &y_data);
+
+  // Initialization for regularly sampled sequences, where:
+  //  x_data[i] = x_start_offset + i * x_sampling_interval
+  bool Initialize(double x_start_offset,
+                  double x_sampling_interval,
+                  const vector<T> &y_data);
+
+  // Initialization for regularly sampled sequences, where:
+  //  x_data[i] = x_start_offset + i * x_sampling_interval
+  bool Initialize(double x_start_offset,
+                  double x_sampling_interval,
+                  const T *y_data,
+                  int data_length);
+
+ protected:
+  // Is set to false if something goes wrong, and to true if everything is ok.
+  bool status_;
+
+  // The start-index of the previously searched interval
+  int cached_index_;
+
+  // Data points
+  const T *x_data_;  // Externally or internally owned, depending on own_x_data_
+  const T *y_data_;  // Externally owned (always)
+  int data_length_;
+  // Index of the last element `data_length_ - 1` kept here for optimization
+  int last_element_index_;
+  bool own_x_data_;
+  // For regularly-sampled sequences, keep only the boundaries and the intervals
+  T x_start_offset_;
+  float x_inverse_sampling_interval_;
+
+  // Algorithm state (internally owned)
+  double *state_;
+
+ private:
+  LE_FX_DISALLOW_COPY_AND_ASSIGN(InterpolatorBase);
+};
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#include "dsp/core/interpolator_base-inl.h"
+
+#endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_
diff --git a/media/libeffects/loudness/dsp/core/interpolator_linear.h b/media/libeffects/loudness/dsp/core/interpolator_linear.h
new file mode 100644
index 0000000..434698a
--- /dev/null
+++ b/media/libeffects/loudness/dsp/core/interpolator_linear.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_LINEAR_H_
+#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_LINEAR_H_
+
+#include <math.h>
+#include "dsp/core/interpolator_base.h"
+
+namespace le_fx {
+
+namespace sigmod {
+
+// Linear interpolation class.
+//
+// The main functionality of this class is provided by its base-class, so
+// please refer to: InterpolatorBase
+//
+// Example:
+//  InterpolatorLinear<float> interp(x_data, y_data, data_length);
+//  for (int n = 0; n < data_length; n++) Y[n] = interp.Interpolate(X[n]);
+//
+template <typename T>
+class InterpolatorLinear: public InterpolatorBase<T, InterpolatorLinear<T> > {
+ public:
+  InterpolatorLinear() { }
+  ~InterpolatorLinear() { }
+
+ protected:
+  // Provides the main implementation of the linear interpolation algorithm.
+  // Assumes that: X[cached_index_] < x < X[cached_index_ + 1]
+  T MethodSpecificInterpolation(T x);
+
+  // Pre-compute internal state_ parameters.
+  bool SetInternalState();
+
+ private:
+  friend class InterpolatorBase<T, InterpolatorLinear<T> >;
+  typedef InterpolatorBase<T, InterpolatorLinear<T> > BaseClass;
+  using BaseClass::status_;
+  using BaseClass::cached_index_;
+  using BaseClass::x_data_;
+  using BaseClass::y_data_;
+  using BaseClass::data_length_;
+  using BaseClass::state_;
+
+  LE_FX_DISALLOW_COPY_AND_ASSIGN(InterpolatorLinear<T>);
+};
+
+template <typename T>
+inline T InterpolatorLinear<T>::MethodSpecificInterpolation(T x) {
+  T dX = x_data_[cached_index_ + 1] - x_data_[cached_index_];
+  T dY = y_data_[cached_index_ + 1] - y_data_[cached_index_];
+  T dx = x - x_data_[cached_index_];
+  return y_data_[cached_index_] + (dY * dx) / dX;
+}
+
+template <typename T>
+bool InterpolatorLinear<T>::SetInternalState() {
+  state_ = NULL;
+  return true;
+}
+
+}  // namespace sigmod
+
+}  // namespace le_fx
+
+#endif  // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_LINEAR_H_
diff --git a/media/libeffects/proxy/Android.mk b/media/libeffects/proxy/Android.mk
new file mode 100644
index 0000000..01b3be1
--- /dev/null
+++ b/media/libeffects/proxy/Android.mk
@@ -0,0 +1,34 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libeffectproxy
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx
+LOCAL_MODULE_TAGS := optional
+
+
+LOCAL_SRC_FILES := \
+        EffectProxy.cpp
+
+LOCAL_CFLAGS+= -fvisibility=hidden
+
+LOCAL_SHARED_LIBRARIES := liblog libcutils libutils libdl libeffects
+
+LOCAL_C_INCLUDES := \
+        system/media/audio_effects/include \
+        bionic/libc/include
+
+include $(BUILD_SHARED_LIBRARY)
+
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
new file mode 100644
index 0000000..dd4ad08
--- /dev/null
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -0,0 +1,339 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectProxy"
+//#define LOG_NDEBUG 0
+
+#include <cutils/log.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+#include <new>
+#include <EffectProxy.h>
+#include <utils/threads.h>
+#include <media/EffectsFactoryApi.h>
+
+namespace android {
+// This is a dummy proxy descriptor just to return to Factory during the initial
+// GetDescriptor call. Later in the factory, it is replaced with the
+// SW sub effect descriptor
+// proxy UUID af8da7e0-2ca1-11e3-b71d-0002a5d5c51b
+const effect_descriptor_t gProxyDescriptor = {
+        EFFECT_UUID_INITIALIZER, // type
+        {0xaf8da7e0, 0x2ca1, 0x11e3, 0xb71d, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, // uuid
+        EFFECT_CONTROL_API_VERSION, //version of effect control API
+        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST |
+         EFFECT_FLAG_VOLUME_CTRL), // effect capability flags
+        0, // CPU load
+        1, // Data memory
+        "Proxy", //effect name
+        "AOSP", //implementor name
+};
+
+
+static const effect_descriptor_t *const gDescriptors[] =
+{
+    &gProxyDescriptor,
+};
+
+
+int EffectProxyCreate(const effect_uuid_t *uuid,
+                            int32_t             sessionId,
+                            int32_t             ioId,
+                           effect_handle_t  *pHandle) {
+
+    effect_descriptor_t* desc;
+    EffectContext* pContext;
+    if (pHandle == NULL || uuid == NULL) {
+        ALOGE("EffectProxyCreate() called with NULL pointer");
+        return -EINVAL;
+    }
+    ALOGV("EffectProxyCreate start..");
+    pContext = new EffectContext;
+    pContext->sessionId = sessionId;
+    pContext->ioId = ioId;
+    pContext->uuid = *uuid;
+    pContext->common_itfe = &gEffectInterface;
+
+    // The sub effects will be created in effect_command when the first command
+    // for the effect is received
+    pContext->eHandle[SUB_FX_HOST] = pContext->eHandle[SUB_FX_OFFLOAD] = NULL;
+
+    // Get the HW and SW sub effect descriptors from the effects factory
+    desc = new effect_descriptor_t[SUB_FX_COUNT];
+    pContext->desc = new effect_descriptor_t[SUB_FX_COUNT];
+    int retValue = EffectGetSubEffects(uuid, desc,
+                                sizeof(effect_descriptor_t) * SUB_FX_COUNT);
+    // EffectGetSubEffects returns the number of sub-effects copied.
+    if (retValue != SUB_FX_COUNT) {
+       ALOGE("EffectCreate() could not get the sub effects");
+       delete[] desc;
+       delete[] pContext->desc;
+       return -EINVAL;
+    }
+    // Check which is the HW descriptor and copy the descriptors
+    // to the Context desc array
+    // Also check if there is only one HW and one SW descriptor.
+    // HW descriptor alone has the HW_TUNNEL flag.
+    if ((desc[0].flags & EFFECT_FLAG_HW_ACC_TUNNEL) &&
+       !(desc[1].flags & EFFECT_FLAG_HW_ACC_TUNNEL)) {
+        pContext->desc[SUB_FX_OFFLOAD] = desc[0];
+        pContext->desc[SUB_FX_HOST] = desc[1];
+    }
+    else if ((desc[1].flags & EFFECT_FLAG_HW_ACC_TUNNEL) &&
+             !(desc[0].flags & EFFECT_FLAG_HW_ACC_TUNNEL)) {
+        pContext->desc[SUB_FX_HOST] = desc[0];
+        pContext->desc[SUB_FX_OFFLOAD] = desc[1];
+    }
+    delete[] desc;
+#if (LOG_NDEBUG == 0)
+    effect_uuid_t uuid_print = pContext->desc[SUB_FX_HOST].uuid;
+    ALOGV("EffectCreate() UUID of HOST: %08X-%04X-%04X-%04X-%02X%02X%02X%02X"
+        "%02X%02X\n",uuid_print.timeLow, uuid_print.timeMid,
+        uuid_print.timeHiAndVersion, uuid_print.clockSeq, uuid_print.node[0],
+        uuid_print.node[1], uuid_print.node[2], uuid_print.node[3],
+        uuid_print.node[4], uuid_print.node[5]);
+    ALOGV("EffectCreate() UUID of OFFLOAD: %08X-%04X-%04X-%04X-%02X%02X%02X%02X"
+        "%02X%02X\n", uuid_print.timeLow, uuid_print.timeMid,
+        uuid_print.timeHiAndVersion, uuid_print.clockSeq, uuid_print.node[0],
+        uuid_print.node[1], uuid_print.node[2], uuid_print.node[3],
+        uuid_print.node[4], uuid_print.node[5]);
+#endif
+
+    pContext->replySize = PROXY_REPLY_SIZE_DEFAULT;
+    pContext->replyData = (char *)malloc(PROXY_REPLY_SIZE_DEFAULT);
+
+    *pHandle = (effect_handle_t)pContext;
+    ALOGV("EffectCreate end");
+    return 0;
+} //end EffectProxyCreate
+
+int EffectProxyRelease(effect_handle_t handle) {
+    EffectContext * pContext = (EffectContext *)handle;
+    if (pContext == NULL) {
+        ALOGV("ERROR : EffectRelease called with NULL pointer");
+        return -EINVAL;
+    }
+    ALOGV("EffectRelease");
+    delete[] pContext->desc;
+    free(pContext->replyData);
+
+    if (pContext->eHandle[SUB_FX_HOST])
+       EffectRelease(pContext->eHandle[SUB_FX_HOST]);
+    if (pContext->eHandle[SUB_FX_OFFLOAD])
+       EffectRelease(pContext->eHandle[SUB_FX_OFFLOAD]);
+    delete pContext;
+    pContext = NULL;
+    return 0;
+} /*end EffectProxyRelease */
+
+int EffectProxyGetDescriptor(const effect_uuid_t *uuid,
+                                   effect_descriptor_t *pDescriptor) {
+    const effect_descriptor_t *desc = NULL;
+
+    if (pDescriptor == NULL || uuid == NULL) {
+        ALOGV("EffectGetDescriptor() called with NULL pointer");
+        return -EINVAL;
+    }
+    desc = &gProxyDescriptor;
+    *pDescriptor = *desc;
+    return 0;
+} /* end EffectProxyGetDescriptor */
+
+/* Effect Control Interface Implementation: Process */
+int Effect_process(effect_handle_t     self,
+                              audio_buffer_t         *inBuffer,
+                              audio_buffer_t         *outBuffer) {
+
+    EffectContext *pContext = (EffectContext *) self;
+    int ret = 0;
+    if (pContext != NULL) {
+        int index = pContext->index;
+        // If the index refers to the HW sub effect, do nothing and return.
+        if (index == SUB_FX_HOST) {
+            ret = (*pContext->eHandle[index])->process(pContext->eHandle[index],
+                                                       inBuffer, outBuffer);
+        }
+    }
+    return ret;
+}   /* end Effect_process */
+
+/* Effect Control Interface Implementation: Command */
+int Effect_command(effect_handle_t  self,
+                              uint32_t            cmdCode,
+                              uint32_t            cmdSize,
+                              void                *pCmdData,
+                              uint32_t            *replySize,
+                              void                *pReplyData) {
+
+    EffectContext *pContext = (EffectContext *) self;
+    int status = 0;
+    if (pContext == NULL) {
+        ALOGV("Effect_command() Proxy context is NULL");
+        return -EINVAL;
+    }
+    if (pContext->eHandle[SUB_FX_HOST] == NULL) {
+        ALOGV("Effect_command() Calling HOST EffectCreate");
+        status = EffectCreate(&pContext->desc[SUB_FX_HOST].uuid,
+                              pContext->sessionId, pContext->ioId,
+                              &(pContext->eHandle[SUB_FX_HOST]));
+        if (status != NO_ERROR || (pContext->eHandle[SUB_FX_HOST] == NULL)) {
+            ALOGV("Effect_command() Error creating SW sub effect");
+            return status;
+        }
+    }
+    if (pContext->eHandle[SUB_FX_OFFLOAD] == NULL) {
+        ALOGV("Effect_command() Calling OFFLOAD EffectCreate");
+        status = EffectCreate(&pContext->desc[SUB_FX_OFFLOAD].uuid,
+                              pContext->sessionId, pContext->ioId,
+                              &(pContext->eHandle[SUB_FX_OFFLOAD]));
+        if (status != NO_ERROR || (pContext->eHandle[SUB_FX_OFFLOAD] == NULL)) {
+            ALOGV("Effect_command() Error creating HW effect");
+            // Do not return error here as SW effect is created
+            // Return error if the CMD_OFFLOAD sends the index as OFFLOAD
+        }
+        pContext->index = SUB_FX_HOST;
+    }
+    // EFFECT_CMD_OFFLOAD used to (1) send whether the thread is offload or not
+    // (2) Send the ioHandle of the effectThread when the effect
+    // is moved from one type of thread to another.
+    // pCmdData points to a memory holding effect_offload_param_t structure
+    if (cmdCode == EFFECT_CMD_OFFLOAD) {
+        ALOGV("Effect_command() cmdCode = EFFECT_CMD_OFFLOAD");
+        if (cmdSize == 0 || pCmdData == NULL) {
+            ALOGV("effectsOffload: Effect_command: CMD_OFFLOAD has no data");
+            *(int*)pReplyData = FAILED_TRANSACTION;
+            return FAILED_TRANSACTION;
+        }
+        effect_offload_param_t* offloadParam = (effect_offload_param_t*)pCmdData;
+        // Assign the effect context index based on isOffload field of the structure
+        pContext->index = offloadParam->isOffload ? SUB_FX_OFFLOAD : SUB_FX_HOST;
+        // if the index is HW and the HW effect is unavailable, return error
+        // and reset the index to SW
+        if (pContext->eHandle[pContext->index] == NULL) {
+            ALOGV("Effect_command()CMD_OFFLOAD sub effect unavailable");
+            *(int*)pReplyData = FAILED_TRANSACTION;
+            return FAILED_TRANSACTION;
+        }
+        pContext->ioId = offloadParam->ioHandle;
+        ALOGV("Effect_command()CMD_OFFLOAD index:%d io %d", pContext->index, pContext->ioId);
+        // Update the DSP wrapper with the new ioHandle.
+        // Pass the OFFLOAD command to the wrapper.
+        // The DSP wrapper needs to handle this CMD
+        if (pContext->eHandle[SUB_FX_OFFLOAD])
+            status = (*pContext->eHandle[SUB_FX_OFFLOAD])->command(
+                             pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize,
+                             pCmdData, replySize, pReplyData);
+        return status;
+    }
+
+    int index = pContext->index;
+    if (index != SUB_FX_HOST && index != SUB_FX_OFFLOAD) {
+        ALOGV("Effect_command: effect index is neither offload nor host");
+        return -EINVAL;
+    }
+
+    // Getter commands are only sent to the active sub effect.
+    int *subStatus[SUB_FX_COUNT];
+    uint32_t *subReplySize[SUB_FX_COUNT];
+    void *subReplyData[SUB_FX_COUNT];
+    uint32_t tmpSize;
+    int tmpStatus;
+
+    // grow temp reply buffer if needed
+    if (replySize != NULL) {
+        tmpSize = pContext->replySize;
+        while (tmpSize < *replySize && tmpSize < PROXY_REPLY_SIZE_MAX) {
+            tmpSize *= 2;
+        }
+        if (tmpSize > pContext->replySize) {
+            ALOGV("Effect_command grow reply buf to %d", tmpSize);
+            pContext->replyData = (char *)realloc(pContext->replyData, tmpSize);
+            pContext->replySize = tmpSize;
+        }
+        if (tmpSize > *replySize) {
+            tmpSize = *replySize;
+        }
+    } else {
+        tmpSize = 0;
+    }
+    // tmpSize is now the actual reply size for the non active sub effect
+
+    // Send command to sub effects. The command is sent to all sub effects so that their internal
+    // state is kept in sync.
+    // Only the reply from the active sub effect is returned to the caller. The reply from the
+    // other sub effect is lost in pContext->replyData
+    for (int i = 0; i < SUB_FX_COUNT; i++) {
+        if (pContext->eHandle[i] == NULL) {
+            continue;
+        }
+        if (i == index) {
+            subStatus[i] = &status;
+            subReplySize[i] = replySize;
+            subReplyData[i] = pReplyData;
+        } else {
+            subStatus[i] = &tmpStatus;
+            subReplySize[i] = replySize == NULL ? NULL : &tmpSize;
+            subReplyData[i] = pReplyData == NULL ? NULL : pContext->replyData;
+        }
+        *subStatus[i] = (*pContext->eHandle[i])->command(
+                             pContext->eHandle[i], cmdCode, cmdSize,
+                             pCmdData, subReplySize[i], subReplyData[i]);
+    }
+
+    return status;
+}    /* end Effect_command */
+
+
+/* Effect Control Interface Implementation: get_descriptor */
+int Effect_getDescriptor(effect_handle_t   self,
+                         effect_descriptor_t *pDescriptor) {
+
+    EffectContext * pContext = (EffectContext *) self;
+    const effect_descriptor_t *desc;
+
+    ALOGV("Effect_getDescriptor");
+    if (pContext == NULL || pDescriptor == NULL) {
+        ALOGV("Effect_getDescriptor() invalid param");
+        return -EINVAL;
+    }
+    if (pContext->desc == NULL) {
+        ALOGV("Effect_getDescriptor() could not get descriptor");
+        return -EINVAL;
+    }
+    desc = &pContext->desc[SUB_FX_HOST];
+    *pDescriptor = *desc;
+    pDescriptor->uuid = pContext->uuid; // Replace the uuid with the Proxy UUID
+    // Also set/clear the EFFECT_FLAG_OFFLOAD_SUPPORTED flag based on the sub effects availability
+    if (pContext->eHandle[SUB_FX_OFFLOAD] != NULL)
+        pDescriptor->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED;
+    else
+        pDescriptor->flags &= ~EFFECT_FLAG_OFFLOAD_SUPPORTED;
+    return 0;
+} /* end Effect_getDescriptor */
+
+} // namespace android
+
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+    tag : AUDIO_EFFECT_LIBRARY_TAG,
+    version : EFFECT_LIBRARY_API_VERSION,
+    name : "Effect Proxy",
+    implementor : "AOSP",
+    create_effect : android::EffectProxyCreate,
+    release_effect : android::EffectProxyRelease,
+    get_descriptor : android::EffectProxyGetDescriptor,
+};
diff --git a/media/libeffects/proxy/EffectProxy.h b/media/libeffects/proxy/EffectProxy.h
new file mode 100644
index 0000000..acbe17e
--- /dev/null
+++ b/media/libeffects/proxy/EffectProxy.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <hardware/audio.h>
+#include <hardware/audio_effect.h>
+namespace android {
+enum {
+    SUB_FX_HOST,       // Index of HOST in the descriptor and handle arrays
+                       // of the Proxy context
+    SUB_FX_OFFLOAD,    // Index of OFFLOAD in the descriptor and handle arrays
+                       // of the Proxy context
+    SUB_FX_COUNT       // The number of sub effects for a Proxy (1 HW, 1 SW)
+};
+#if __cplusplus
+extern "C" {
+#endif
+
+int EffectProxyCreate(const effect_uuid_t *uuid,
+                          int32_t             sessionId,
+                          int32_t             ioId,
+                          effect_handle_t  *pHandle);
+int EffectProxyRelease(effect_handle_t handle);
+int EffectProxyGetDescriptor(const effect_uuid_t *uuid,
+                                 effect_descriptor_t *pDescriptor);
+/* Effect Control Interface Implementation: Process */
+int Effect_process(effect_handle_t     self,
+                            audio_buffer_t         *inBuffer,
+                            audio_buffer_t         *outBuffer);
+
+/* Effect Control Interface Implementation: Command */
+int Effect_command(effect_handle_t  self,
+                            uint32_t            cmdCode,
+                            uint32_t            cmdSize,
+                            void                *pCmdData,
+                            uint32_t            *replySize,
+                            void                *pReplyData);
+int Effect_getDescriptor(effect_handle_t   self,
+                       effect_descriptor_t *pDescriptor);
+
+const struct effect_interface_s gEffectInterface = {
+  Effect_process,
+  Effect_command,
+  Effect_getDescriptor,
+  NULL,
+};
+
+#define PROXY_REPLY_SIZE_MAX     (64 * 1024) // must be power of two
+#define PROXY_REPLY_SIZE_DEFAULT 32          // must be power of two
+
+struct EffectContext {
+  const struct effect_interface_s  *common_itfe; // Holds the itfe of the Proxy
+  effect_descriptor_t*  desc;                    // Points to the sub effect descriptors
+  effect_handle_t       eHandle[SUB_FX_COUNT];   // The effect handles of the sub effects
+  int                   index;       // The index that is currently active - HOST or OFFLOAD
+  int32_t               sessionId;   // The session ID in which the effect is created.
+                                     // Stored in context to pass on to sub effect creation
+  int32_t               ioId;        // The ioId in which the effect is created.
+                                     // Stored in context to pass on to sub effect creation
+  effect_uuid_t         uuid;        // UUID of the Proxy
+  char*                 replyData;   // temporary buffer for non active sub effect command reply
+  uint32_t              replySize;   // current size of temporary reply buffer
+};
+
+#if __cplusplus
+}  // extern "C"
+#endif
+} //namespace android
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index e7eccf1..dc403ab 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -22,6 +22,7 @@
 #include <string.h>
 #include <new>
 #include <time.h>
+#include <math.h>
 #include <audio_effects/effect_visualizer.h>
 
 
@@ -54,6 +55,18 @@
 
 #define CAPTURE_BUF_SIZE 65536 // "64k should be enough for everyone"
 
+#define DISCARD_MEASUREMENTS_TIME_MS 2000 // discard measurements older than this number of ms
+
+// maximum number of buffers for which we keep track of the measurements
+#define MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS 25 // note: buffer index is stored in uint8_t
+
+
+struct BufferStats {
+    bool mIsValid;
+    uint16_t mPeakU16; // the positive peak of the absolute value of the samples in a buffer
+    float mRmsSquared; // the average square of the samples in a buffer
+};
+
 struct VisualizerContext {
     const struct effect_interface_s *mItfe;
     effect_config_t mConfig;
@@ -61,15 +74,38 @@
     uint32_t mCaptureSize;
     uint32_t mScalingMode;
     uint8_t mState;
-    uint8_t mLastCaptureIdx;
+    uint32_t mLastCaptureIdx;
     uint32_t mLatency;
     struct timespec mBufferUpdateTime;
     uint8_t mCaptureBuf[CAPTURE_BUF_SIZE];
+    // for measurements
+    uint8_t mChannelCount; // to avoid recomputing it every time a buffer is processed
+    uint32_t mMeasurementMode;
+    uint8_t mMeasurementWindowSizeInBuffers;
+    uint8_t mMeasurementBufferIdx;
+    BufferStats mPastMeasurements[MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS];
 };
 
 //
 //--- Local functions
 //
+uint32_t Visualizer_getDeltaTimeMsFromUpdatedTime(VisualizerContext* pContext) {
+    uint32_t deltaMs = 0;
+    if (pContext->mBufferUpdateTime.tv_sec != 0) {
+        struct timespec ts;
+        if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
+            time_t secs = ts.tv_sec - pContext->mBufferUpdateTime.tv_sec;
+            long nsec = ts.tv_nsec - pContext->mBufferUpdateTime.tv_nsec;
+            if (nsec < 0) {
+                --secs;
+                nsec += 1000000000;
+            }
+            deltaMs = secs * 1000 + nsec / 1000000;
+        }
+    }
+    return deltaMs;
+}
+
 
 void Visualizer_reset(VisualizerContext *pContext)
 {
@@ -165,9 +201,21 @@
     pContext->mConfig.outputCfg.bufferProvider.cookie = NULL;
     pContext->mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
 
+    // visualization initialization
     pContext->mCaptureSize = VISUALIZER_CAPTURE_SIZE_MAX;
     pContext->mScalingMode = VISUALIZER_SCALING_MODE_NORMALIZED;
 
+    // measurement initialization
+    pContext->mChannelCount = popcount(pContext->mConfig.inputCfg.channels);
+    pContext->mMeasurementMode = MEASUREMENT_MODE_NONE;
+    pContext->mMeasurementWindowSizeInBuffers = MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS;
+    pContext->mMeasurementBufferIdx = 0;
+    for (uint32_t i=0 ; i<pContext->mMeasurementWindowSizeInBuffers ; i++) {
+        pContext->mPastMeasurements[i].mIsValid = false;
+        pContext->mPastMeasurements[i].mPeakU16 = 0;
+        pContext->mPastMeasurements[i].mRmsSquared = 0;
+    }
+
     Visualizer_setConfig(pContext, &pContext->mConfig);
 
     return 0;
@@ -270,6 +318,30 @@
         return -EINVAL;
     }
 
+    // perform measurements if needed
+    if (pContext->mMeasurementMode & MEASUREMENT_MODE_PEAK_RMS) {
+        // find the peak and RMS squared for the new buffer
+        uint32_t inIdx;
+        int16_t maxSample = 0;
+        float rmsSqAcc = 0;
+        for (inIdx = 0 ; inIdx < inBuffer->frameCount * pContext->mChannelCount ; inIdx++) {
+            if (inBuffer->s16[inIdx] > maxSample) {
+                maxSample = inBuffer->s16[inIdx];
+            } else if (-inBuffer->s16[inIdx] > maxSample) {
+                maxSample = -inBuffer->s16[inIdx];
+            }
+            rmsSqAcc += (inBuffer->s16[inIdx] * inBuffer->s16[inIdx]);
+        }
+        // store the measurement
+        pContext->mPastMeasurements[pContext->mMeasurementBufferIdx].mPeakU16 = (uint16_t)maxSample;
+        pContext->mPastMeasurements[pContext->mMeasurementBufferIdx].mRmsSquared =
+                rmsSqAcc / (inBuffer->frameCount * pContext->mChannelCount);
+        pContext->mPastMeasurements[pContext->mMeasurementBufferIdx].mIsValid = true;
+        if (++pContext->mMeasurementBufferIdx >= pContext->mMeasurementWindowSizeInBuffers) {
+            pContext->mMeasurementBufferIdx = 0;
+        }
+    }
+
     // all code below assumes stereo 16 bit PCM output and input
     int32_t shift;
 
@@ -423,6 +495,12 @@
             p->vsize = sizeof(uint32_t);
             *replySize += sizeof(uint32_t);
             break;
+        case VISUALIZER_PARAM_MEASUREMENT_MODE:
+            ALOGV("get mMeasurementMode = %d", pContext->mMeasurementMode);
+            *((uint32_t *)p->data + 1) = pContext->mMeasurementMode;
+            p->vsize = sizeof(uint32_t);
+            *replySize += sizeof(uint32_t);
+            break;
         default:
             p->status = -EINVAL;
         }
@@ -452,6 +530,10 @@
             pContext->mLatency = *((uint32_t *)p->data + 1);
             ALOGV("set mLatency = %d", pContext->mLatency);
             break;
+        case VISUALIZER_PARAM_MEASUREMENT_MODE:
+            pContext->mMeasurementMode = *((uint32_t *)p->data + 1);
+            ALOGV("set mMeasurementMode = %d", pContext->mMeasurementMode);
+            break;
         default:
             *(int32_t *)pReplyData = -EINVAL;
         }
@@ -470,24 +552,12 @@
         }
         if (pContext->mState == VISUALIZER_STATE_ACTIVE) {
             int32_t latencyMs = pContext->mLatency;
-            uint32_t deltaMs = 0;
-            if (pContext->mBufferUpdateTime.tv_sec != 0) {
-                struct timespec ts;
-                if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
-                    time_t secs = ts.tv_sec - pContext->mBufferUpdateTime.tv_sec;
-                    long nsec = ts.tv_nsec - pContext->mBufferUpdateTime.tv_nsec;
-                    if (nsec < 0) {
-                        --secs;
-                        nsec += 1000000000;
-                    }
-                    deltaMs = secs * 1000 + nsec / 1000000;
-                    latencyMs -= deltaMs;
-                    if (latencyMs < 0) {
-                        latencyMs = 0;
-                    }
-                }
+            const uint32_t deltaMs = Visualizer_getDeltaTimeMsFromUpdatedTime(pContext);
+            latencyMs -= deltaMs;
+            if (latencyMs < 0) {
+                latencyMs = 0;
             }
-            uint32_t deltaSmpl = pContext->mConfig.inputCfg.samplingRate * latencyMs / 1000;
+            const uint32_t deltaSmpl = pContext->mConfig.inputCfg.samplingRate * latencyMs / 1000;
 
             int32_t capturePoint = pContext->mCaptureIdx - pContext->mCaptureSize - deltaSmpl;
             int32_t captureSize = pContext->mCaptureSize;
@@ -499,7 +569,7 @@
                 memcpy(pReplyData,
                        pContext->mCaptureBuf + CAPTURE_BUF_SIZE + capturePoint,
                        size);
-                pReplyData += size;
+                pReplyData = (char *)pReplyData + size;
                 captureSize -= size;
                 capturePoint = 0;
             }
@@ -525,6 +595,54 @@
 
         break;
 
+    case VISUALIZER_CMD_MEASURE: {
+        uint16_t peakU16 = 0;
+        float sumRmsSquared = 0.0f;
+        uint8_t nbValidMeasurements = 0;
+        // reset measurements if last measurement was too long ago (which implies stored
+        // measurements aren't relevant anymore and shouldn't bias the new one)
+        const int32_t delayMs = Visualizer_getDeltaTimeMsFromUpdatedTime(pContext);
+        if (delayMs > DISCARD_MEASUREMENTS_TIME_MS) {
+            ALOGV("Discarding measurements, last measurement is %dms old", delayMs);
+            for (uint32_t i=0 ; i<pContext->mMeasurementWindowSizeInBuffers ; i++) {
+                pContext->mPastMeasurements[i].mIsValid = false;
+                pContext->mPastMeasurements[i].mPeakU16 = 0;
+                pContext->mPastMeasurements[i].mRmsSquared = 0;
+            }
+            pContext->mMeasurementBufferIdx = 0;
+        } else {
+            // only use actual measurements, otherwise the first RMS measure happening before
+            // MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS buffers have been played will always be
+            // artificially low
+            for (uint32_t i=0 ; i < pContext->mMeasurementWindowSizeInBuffers ; i++) {
+                if (pContext->mPastMeasurements[i].mIsValid) {
+                    if (pContext->mPastMeasurements[i].mPeakU16 > peakU16) {
+                        peakU16 = pContext->mPastMeasurements[i].mPeakU16;
+                    }
+                    sumRmsSquared += pContext->mPastMeasurements[i].mRmsSquared;
+                    nbValidMeasurements++;
+                }
+            }
+        }
+        float rms = nbValidMeasurements == 0 ? 0.0f : sqrtf(sumRmsSquared / nbValidMeasurements);
+        int32_t* pIntReplyData = (int32_t*)pReplyData;
+        // convert from I16 sample values to mB and write results
+        if (rms < 0.000016f) {
+            pIntReplyData[MEASUREMENT_IDX_RMS] = -9600; //-96dB
+        } else {
+            pIntReplyData[MEASUREMENT_IDX_RMS] = (int32_t) (2000 * log10(rms / 32767.0f));
+        }
+        if (peakU16 == 0) {
+            pIntReplyData[MEASUREMENT_IDX_PEAK] = -9600; //-96dB
+        } else {
+            pIntReplyData[MEASUREMENT_IDX_PEAK] = (int32_t) (2000 * log10(peakU16 / 32767.0f));
+        }
+        ALOGV("VISUALIZER_CMD_MEASURE peak=%d (%dmB), rms=%.1f (%dmB)",
+                peakU16, pIntReplyData[MEASUREMENT_IDX_PEAK],
+                rms, pIntReplyData[MEASUREMENT_IDX_RMS]);
+        }
+        break;
+
     default:
         ALOGW("Visualizer_command invalid command %d",cmdCode);
         return -EINVAL;
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 96755bb..56e7787 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -62,6 +62,7 @@
 LOCAL_CFLAGS += -DANDROID_SMP=$(if $(findstring true,$(TARGET_CPU_SMP)),1,0)
 LOCAL_SRC_FILES += SingleStateQueue.cpp
 LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"'
+# Consider a separate library for SingleStateQueueInstantiations.
 
 LOCAL_SHARED_LIBRARIES := \
 	libui liblog libcutils libutils libbinder libsonivox libicuuc libexpat \
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index e934a3e..666fafa 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -105,6 +105,7 @@
         // Otherwise the callback thread will never exit.
         stop();
         if (mAudioRecordThread != 0) {
+            mProxy->interrupt();
             mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
             mAudioRecordThread->requestExitAndWait();
             mAudioRecordThread.clear();
@@ -473,7 +474,7 @@
     ALOGE_IF(originalSessionId != 0 && mSessionId != originalSessionId,
             "session ID changed from %d to %d", originalSessionId, mSessionId);
 
-    if (record == 0) {
+    if (record == 0 || status != NO_ERROR) {
         ALOGE("AudioFlinger could not create record track, status: %d", status);
         AudioSystem::releaseInput(input);
         return status;
@@ -483,6 +484,11 @@
         ALOGE("Could not get control block");
         return NO_INIT;
     }
+    void *iMemPointer = iMem->pointer();
+    if (iMemPointer == NULL) {
+        ALOGE("Could not get control block pointer");
+        return NO_INIT;
+    }
     if (mAudioRecord != 0) {
         mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this);
         mDeathNotifier.clear();
@@ -490,7 +496,7 @@
     mInput = input;
     mAudioRecord = record;
     mCblkMemory = iMem;
-    audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMem->pointer());
+    audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
     mCblk = cblk;
     // FIXME missing fast track frameCount logic
     mAwaitBoost = false;
@@ -960,7 +966,7 @@
 // =========================================================================
 
 AudioRecord::AudioRecordThread::AudioRecordThread(AudioRecord& receiver, bool bCanCallJava)
-    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false)
+    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL)
 {
 }
 
@@ -977,25 +983,32 @@
             // caller will check for exitPending()
             return true;
         }
+        if (mPausedInt) {
+            if (mPausedNs > 0) {
+                (void) mMyCond.waitRelative(mMyLock, mPausedNs);
+            } else {
+                mMyCond.wait(mMyLock);
+            }
+            mPausedInt = false;
+            return true;
+        }
     }
     nsecs_t ns =  mReceiver.processAudioBuffer(this);
     switch (ns) {
     case 0:
         return true;
-    case NS_WHENEVER:
-        sleep(1);
-        return true;
     case NS_INACTIVE:
-        pauseConditional();
+        pauseInternal();
         return true;
     case NS_NEVER:
         return false;
+    case NS_WHENEVER:
+        // FIXME increase poll interval, or make event-driven
+        ns = 1000000000LL;
+        // fall through
     default:
         LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
-        struct timespec req;
-        req.tv_sec = ns / 1000000000LL;
-        req.tv_nsec = ns % 1000000000LL;
-        nanosleep(&req, NULL /*rem*/);
+        pauseInternal(ns);
         return true;
     }
 }
@@ -1004,38 +1017,37 @@
 {
     // must be in this order to avoid a race condition
     Thread::requestExit();
-    resume();
+    AutoMutex _l(mMyLock);
+    if (mPaused || mPausedInt) {
+        mPaused = false;
+        mPausedInt = false;
+        mMyCond.signal();
+    }
 }
 
 void AudioRecord::AudioRecordThread::pause()
 {
     AutoMutex _l(mMyLock);
     mPaused = true;
-    mResumeLatch = false;
-}
-
-void AudioRecord::AudioRecordThread::pauseConditional()
-{
-    AutoMutex _l(mMyLock);
-    if (mResumeLatch) {
-        mResumeLatch = false;
-    } else {
-        mPaused = true;
-    }
 }
 
 void AudioRecord::AudioRecordThread::resume()
 {
     AutoMutex _l(mMyLock);
-    if (mPaused) {
+    if (mPaused || mPausedInt) {
         mPaused = false;
-        mResumeLatch = false;
+        mPausedInt = false;
         mMyCond.signal();
-    } else {
-        mResumeLatch = true;
     }
 }
 
+void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns)
+{
+    AutoMutex _l(mMyLock);
+    mPausedInt = true;
+    mPausedNs = ns;
+}
+
 // -------------------------------------------------------------------------
 
 }; // namespace android
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index a571fe4..8033c2c 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -76,6 +76,14 @@
     return gAudioFlinger;
 }
 
+/* static */ status_t AudioSystem::checkAudioFlinger()
+{
+    if (defaultServiceManager()->checkService(String16("media.audio_flinger")) != 0) {
+        return NO_ERROR;
+    }
+    return DEAD_OBJECT;
+}
+
 status_t AudioSystem::muteMicrophone(bool state) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index dd0ec73..b8a89a0 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -101,7 +101,8 @@
         int notificationFrames,
         int sessionId,
         transfer_type transferType,
-        const audio_offload_info_t *offloadInfo)
+        const audio_offload_info_t *offloadInfo,
+        int uid)
     : mStatus(NO_INIT),
       mIsTimed(false),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -109,7 +110,8 @@
 {
     mStatus = set(streamType, sampleRate, format, channelMask,
             frameCount, flags, cbf, user, notificationFrames,
-            0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo);
+            0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType,
+            offloadInfo, uid);
 }
 
 AudioTrack::AudioTrack(
@@ -124,7 +126,8 @@
         int notificationFrames,
         int sessionId,
         transfer_type transferType,
-        const audio_offload_info_t *offloadInfo)
+        const audio_offload_info_t *offloadInfo,
+        int uid)
     : mStatus(NO_INIT),
       mIsTimed(false),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -132,7 +135,7 @@
 {
     mStatus = set(streamType, sampleRate, format, channelMask,
             0 /*frameCount*/, flags, cbf, user, notificationFrames,
-            sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo);
+            sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo, uid);
 }
 
 AudioTrack::~AudioTrack()
@@ -148,10 +151,8 @@
             mAudioTrackThread->requestExitAndWait();
             mAudioTrackThread.clear();
         }
-        if (mAudioTrack != 0) {
-            mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this);
-            mAudioTrack.clear();
-        }
+        mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this);
+        mAudioTrack.clear();
         IPCThreadState::self()->flushCommands();
         AudioSystem::releaseAudioSessionId(mSessionId);
     }
@@ -171,7 +172,8 @@
         bool threadCanCallJava,
         int sessionId,
         transfer_type transferType,
-        const audio_offload_info_t *offloadInfo)
+        const audio_offload_info_t *offloadInfo,
+        int uid)
 {
     switch (transferType) {
     case TRANSFER_DEFAULT:
@@ -222,6 +224,7 @@
 
     AutoMutex lock(mLock);
 
+    // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
         ALOGE("Track already in use");
         return INVALID_OPERATION;
@@ -314,6 +317,11 @@
     mNotificationFramesReq = notificationFrames;
     mNotificationFramesAct = 0;
     mSessionId = sessionId;
+    if (uid == -1 || (IPCThreadState::self()->getCallingPid() != getpid())) {
+        mClientUid = IPCThreadState::self()->getCallingUid();
+    } else {
+        mClientUid = uid;
+    }
     mAuxEffectId = 0;
     mFlags = flags;
     mCbf = cbf;
@@ -387,6 +395,9 @@
     if (previousState == STATE_STOPPED || previousState == STATE_FLUSHED) {
         // reset current position as seen by client to 0
         mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition());
+        // force refresh of remaining frames by processAudioBuffer() as last
+        // write before stop could be partial.
+        mRefreshRemaining = true;
     }
     mNewPosition = mProxy->getPosition() + mUpdatePeriod;
     int32_t flags = android_atomic_and(~CBLK_DISABLED, &mCblk->mFlags);
@@ -532,6 +543,9 @@
 
     mProxy->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000));
 
+    if (isOffloaded()) {
+        mAudioTrack->signal();
+    }
     return NO_ERROR;
 }
 
@@ -813,12 +827,29 @@
         return NO_INIT;
     }
 
+    // Not all of these values are needed under all conditions, but it is easier to get them all
+
     uint32_t afLatency;
-    if ((status = AudioSystem::getLatency(output, streamType, &afLatency)) != NO_ERROR) {
+    status = AudioSystem::getLatency(output, streamType, &afLatency);
+    if (status != NO_ERROR) {
         ALOGE("getLatency(%d) failed status %d", output, status);
         return NO_INIT;
     }
 
+    size_t afFrameCount;
+    status = AudioSystem::getFrameCount(output, streamType, &afFrameCount);
+    if (status != NO_ERROR) {
+        ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status);
+        return NO_INIT;
+    }
+
+    uint32_t afSampleRate;
+    status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate);
+    if (status != NO_ERROR) {
+        ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType, status);
+        return NO_INIT;
+    }
+
     // Client decides whether the track is TIMED (see below), but can only express a preference
     // for FAST.  Server will perform additional tests.
     if ((flags & AUDIO_OUTPUT_FLAG_FAST) && !(
@@ -834,6 +865,14 @@
     }
     ALOGV("createTrack_l() output %d afLatency %d", output, afLatency);
 
+    // The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where
+    //  n = 1   fast track; nBuffering is ignored
+    //  n = 2   normal track, no sample rate conversion
+    //  n = 3   normal track, with sample rate conversion
+    //          (pessimistic; some non-1:1 conversion ratios don't actually need triple-buffering)
+    //  n > 3   very high latency or very small notification interval; nBuffering is ignored
+    const uint32_t nBuffering = (sampleRate == afSampleRate) ? 2 : 3;
+
     mNotificationFramesAct = mNotificationFramesReq;
 
     if (!audio_is_linear_pcm(format)) {
@@ -842,13 +881,6 @@
             // Same comment as below about ignoring frameCount parameter for set()
             frameCount = sharedBuffer->size();
         } else if (frameCount == 0) {
-            size_t afFrameCount;
-            status = AudioSystem::getFrameCount(output, streamType, &afFrameCount);
-            if (status != NO_ERROR) {
-                ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType,
-                        status);
-                return NO_INIT;
-            }
             frameCount = afFrameCount;
         }
         if (mNotificationFramesAct != frameCount) {
@@ -878,26 +910,13 @@
     } else if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) {
 
         // FIXME move these calculations and associated checks to server
-        uint32_t afSampleRate;
-        status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate);
-        if (status != NO_ERROR) {
-            ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType,
-                    status);
-            return NO_INIT;
-        }
-        size_t afFrameCount;
-        status = AudioSystem::getFrameCount(output, streamType, &afFrameCount);
-        if (status != NO_ERROR) {
-            ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status);
-            return NO_INIT;
-        }
 
         // Ensure that buffer depth covers at least audio hardware latency
         uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate);
         ALOGV("afFrameCount=%d, minBufCount=%d, afSampleRate=%u, afLatency=%d",
                 afFrameCount, minBufCount, afSampleRate, afLatency);
-        if (minBufCount <= 2) {
-            minBufCount = sampleRate == afSampleRate ? 2 : 3;
+        if (minBufCount <= nBuffering) {
+            minBufCount = nBuffering;
         }
 
         size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate;
@@ -907,18 +926,16 @@
 
         if (frameCount == 0) {
             frameCount = minFrameCount;
-        }
-        // Make sure that application is notified with sufficient margin
-        // before underrun
-        if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) {
-            mNotificationFramesAct = frameCount/2;
-        }
-        if (frameCount < minFrameCount) {
+        } else if (frameCount < minFrameCount) {
             // not ALOGW because it happens all the time when playing key clicks over A2DP
             ALOGV("Minimum buffer size corrected from %d to %d",
                      frameCount, minFrameCount);
             frameCount = minFrameCount;
         }
+        // Make sure that application is notified with sufficient margin before underrun
+        if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) {
+            mNotificationFramesAct = frameCount/nBuffering;
+        }
 
     } else {
         // For fast tracks, the frame count calculations and checks are done by server
@@ -954,6 +971,7 @@
                                                       tid,
                                                       &mSessionId,
                                                       mName,
+                                                      mClientUid,
                                                       &status);
 
     if (track == 0) {
@@ -965,6 +983,7 @@
         ALOGE("Could not get control block");
         return NO_INIT;
     }
+    // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
         mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this);
         mDeathNotifier.clear();
@@ -998,8 +1017,8 @@
             flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
             mFlags = flags;
             if (sharedBuffer == 0) {
-                if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) {
-                    mNotificationFramesAct = frameCount/2;
+                if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) {
+                    mNotificationFramesAct = frameCount/nBuffering;
                 }
             }
         }
@@ -1547,7 +1566,7 @@
             return NS_NEVER;
         }
 
-        if (mRetryOnPartialBuffer) {
+        if (mRetryOnPartialBuffer && !isOffloaded()) {
             mRetryOnPartialBuffer = false;
             if (avail < mRemainingFrames) {
                 int64_t myns = ((mRemainingFrames - avail) * 1100000000LL) / sampleRate;
@@ -1653,7 +1672,9 @@
     // if the new IAudioTrack is created, createTrack_l() will modify the
     // following member variables: mAudioTrack, mCblkMemory and mCblk.
     // It will also delete the strong references on previous IAudioTrack and IMemory
-    size_t position = mProxy->getPosition();
+
+    // take the frames that will be lost by track recreation into account in saved position
+    size_t position = mProxy->getPosition() + mProxy->getFramesFilled();
     mNewPosition = position + mUpdatePeriod;
     size_t bufferPosition = mStaticProxy != NULL ? mStaticProxy->getBufferPosition() : 0;
     result = createTrack_l(mStreamType,
@@ -1705,11 +1726,24 @@
 status_t AudioTrack::setParameters(const String8& keyValuePairs)
 {
     AutoMutex lock(mLock);
-    if (mAudioTrack != 0) {
-        return mAudioTrack->setParameters(keyValuePairs);
-    } else {
-        return NO_INIT;
+    return mAudioTrack->setParameters(keyValuePairs);
+}
+
+status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)
+{
+    AutoMutex lock(mLock);
+    // FIXME not implemented for fast tracks; should use proxy and SSQ
+    if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
+        return INVALID_OPERATION;
     }
+    if (mState != STATE_ACTIVE && mState != STATE_PAUSED) {
+        return INVALID_OPERATION;
+    }
+    status_t status = mAudioTrack->getTimestamp(timestamp);
+    if (status == NO_ERROR) {
+        timestamp.mPosition += mProxy->getEpoch();
+    }
+    return status;
 }
 
 String8 AudioTrack::getParameters(const String8& keys)
@@ -1763,7 +1797,8 @@
 // =========================================================================
 
 AudioTrack::AudioTrackThread::AudioTrackThread(AudioTrack& receiver, bool bCanCallJava)
-    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false)
+    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL),
+      mIgnoreNextPausedInt(false)
 {
 }
 
@@ -1780,25 +1815,36 @@
             // caller will check for exitPending()
             return true;
         }
+        if (mIgnoreNextPausedInt) {
+            mIgnoreNextPausedInt = false;
+            mPausedInt = false;
+        }
+        if (mPausedInt) {
+            if (mPausedNs > 0) {
+                (void) mMyCond.waitRelative(mMyLock, mPausedNs);
+            } else {
+                mMyCond.wait(mMyLock);
+            }
+            mPausedInt = false;
+            return true;
+        }
     }
     nsecs_t ns = mReceiver.processAudioBuffer(this);
     switch (ns) {
     case 0:
         return true;
-    case NS_WHENEVER:
-        sleep(1);
-        return true;
     case NS_INACTIVE:
-        pauseConditional();
+        pauseInternal();
         return true;
     case NS_NEVER:
         return false;
+    case NS_WHENEVER:
+        // FIXME increase poll interval, or make event-driven
+        ns = 1000000000LL;
+        // fall through
     default:
         LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
-        struct timespec req;
-        req.tv_sec = ns / 1000000000LL;
-        req.tv_nsec = ns % 1000000000LL;
-        nanosleep(&req, NULL /*rem*/);
+        pauseInternal(ns);
         return true;
     }
 }
@@ -1814,29 +1860,24 @@
 {
     AutoMutex _l(mMyLock);
     mPaused = true;
-    mResumeLatch = false;
-}
-
-void AudioTrack::AudioTrackThread::pauseConditional()
-{
-    AutoMutex _l(mMyLock);
-    if (mResumeLatch) {
-        mResumeLatch = false;
-    } else {
-        mPaused = true;
-    }
 }
 
 void AudioTrack::AudioTrackThread::resume()
 {
     AutoMutex _l(mMyLock);
-    if (mPaused) {
+    mIgnoreNextPausedInt = true;
+    if (mPaused || mPausedInt) {
         mPaused = false;
-        mResumeLatch = false;
+        mPausedInt = false;
         mMyCond.signal();
-    } else {
-        mResumeLatch = true;
     }
 }
 
+void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns)
+{
+    AutoMutex _l(mMyLock);
+    mPausedInt = true;
+    mPausedNs = ns;
+}
+
 }; // namespace android
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index e7abb40..caa7900 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -316,6 +316,27 @@
             (mFrameCountP2 - 1);
 }
 
+size_t ClientProxy::getFramesFilled() {
+    audio_track_cblk_t* cblk = mCblk;
+    int32_t front;
+    int32_t rear;
+
+    if (mIsOut) {
+        front = android_atomic_acquire_load(&cblk->u.mStreaming.mFront);
+        rear = cblk->u.mStreaming.mRear;
+    } else {
+        rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear);
+        front = cblk->u.mStreaming.mFront;
+    }
+    ssize_t filled = rear - front;
+    // pipe should not be overfull
+    if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
+        ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled);
+        return 0;
+    }
+    return (size_t)filled;
+}
+
 // ---------------------------------------------------------------------------
 
 void AudioTrackClientProxy::flush()
@@ -481,11 +502,11 @@
 ServerProxy::ServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount,
         size_t frameSize, bool isOut, bool clientInServer)
     : Proxy(cblk, buffers, frameCount, frameSize, isOut, clientInServer),
-      mAvailToClient(0), mFlush(0), mDeferWake(false)
+      mAvailToClient(0), mFlush(0)
 {
 }
 
-status_t ServerProxy::obtainBuffer(Buffer* buffer)
+status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
 {
     LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0);
     if (mIsShutdown) {
@@ -558,10 +579,11 @@
     buffer->mRaw = part1 > 0 ?
             &((char *) mBuffers)[(mIsOut ? front : rear) * mFrameSize] : NULL;
     buffer->mNonContig = availToServer - part1;
-    mUnreleased = part1;
-    // optimization to avoid waking up the client too early
-    // FIXME need to test for recording
-    mDeferWake = part1 < ask && availToServer >= ask;
+    // After flush(), allow releaseBuffer() on a previously obtained buffer;
+    // see "Acknowledge any pending flush()" in audioflinger/Tracks.cpp.
+    if (!ackFlush) {
+        mUnreleased = part1;
+    }
     return part1 > 0 ? NO_ERROR : WOULD_BLOCK;
     }
 no_init:
@@ -607,7 +629,7 @@
         minimum = half;
     }
     // FIXME AudioRecord wakeup needs to be optimized; it currently wakes up client every time
-    if (!mIsOut || (!mDeferWake && mAvailToClient + stepCount >= minimum)) {
+    if (!mIsOut || (mAvailToClient + stepCount >= minimum)) {
         ALOGV("mAvailToClient=%u stepCount=%u minimum=%u", mAvailToClient, stepCount, minimum);
         int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         if (!(old & CBLK_FUTEX_WAKE)) {
@@ -743,7 +765,7 @@
     return (ssize_t) position;
 }
 
-status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer)
+status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
 {
     if (mIsShutdown) {
         buffer->mFrameCount = 0;
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index be818c6..acfaea0 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -96,6 +96,7 @@
                                 pid_t tid,
                                 int *sessionId,
                                 String8& name,
+                                int clientUid,
                                 status_t *status)
     {
         Parcel data, reply;
@@ -108,7 +109,12 @@
         data.writeInt32(frameCount);
         track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
         data.writeInt32(lFlags);
-        data.writeStrongBinder(sharedBuffer->asBinder());
+        if (sharedBuffer != 0) {
+            data.writeInt32(true);
+            data.writeStrongBinder(sharedBuffer->asBinder());
+        } else {
+            data.writeInt32(false);
+        }
         data.writeInt32((int32_t) output);
         data.writeInt32((int32_t) tid);
         int lSessionId = 0;
@@ -116,6 +122,7 @@
             lSessionId = *sessionId;
         }
         data.writeInt32(lSessionId);
+        data.writeInt32(clientUid);
         status_t lStatus = remote()->transact(CREATE_TRACK, data, &reply);
         if (lStatus != NO_ERROR) {
             ALOGE("createTrack error: %s", strerror(-lStatus));
@@ -179,6 +186,17 @@
             }
             lStatus = reply.readInt32();
             record = interface_cast<IAudioRecord>(reply.readStrongBinder());
+            if (lStatus == NO_ERROR) {
+                if (record == 0) {
+                    ALOGE("openRecord should have returned an IAudioRecord");
+                    lStatus = UNKNOWN_ERROR;
+                }
+            } else {
+                if (record != 0) {
+                    ALOGE("openRecord returned an IAudioRecord but with status %d", lStatus);
+                    record.clear();
+                }
+            }
         }
         if (status) {
             *status = lStatus;
@@ -738,15 +756,28 @@
             audio_channel_mask_t channelMask = data.readInt32();
             size_t frameCount = data.readInt32();
             track_flags_t flags = (track_flags_t) data.readInt32();
-            sp<IMemory> buffer = interface_cast<IMemory>(data.readStrongBinder());
+            bool haveSharedBuffer = data.readInt32() != 0;
+            sp<IMemory> buffer;
+            if (haveSharedBuffer) {
+                buffer = interface_cast<IMemory>(data.readStrongBinder());
+            }
             audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
             pid_t tid = (pid_t) data.readInt32();
             int sessionId = data.readInt32();
+            int clientUid = data.readInt32();
             String8 name;
             status_t status;
-            sp<IAudioTrack> track = createTrack(
-                    (audio_stream_type_t) streamType, sampleRate, format,
-                    channelMask, frameCount, &flags, buffer, output, tid, &sessionId, name, &status);
+            sp<IAudioTrack> track;
+            if ((haveSharedBuffer && (buffer == 0)) ||
+                    ((buffer != 0) && (buffer->pointer() == NULL))) {
+                ALOGW("CREATE_TRACK: cannot retrieve shared memory");
+                status = DEAD_OBJECT;
+            } else {
+                track = createTrack(
+                        (audio_stream_type_t) streamType, sampleRate, format,
+                        channelMask, frameCount, &flags, buffer, output, tid,
+                        &sessionId, name, clientUid, &status);
+            }
             reply->writeInt32(flags);
             reply->writeInt32(sessionId);
             reply->writeString8(name);
@@ -767,6 +798,7 @@
             status_t status;
             sp<IAudioRecord> record = openRecord(input,
                     sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status);
+            LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
             reply->writeInt32(flags);
             reply->writeInt32(sessionId);
             reply->writeInt32(status);
diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp
index a2b49a3..3cd9cfd 100644
--- a/media/libmedia/IAudioTrack.cpp
+++ b/media/libmedia/IAudioTrack.cpp
@@ -39,7 +39,9 @@
     ALLOCATE_TIMED_BUFFER,
     QUEUE_TIMED_BUFFER,
     SET_MEDIA_TIME_TRANSFORM,
-    SET_PARAMETERS
+    SET_PARAMETERS,
+    GET_TIMESTAMP,
+    SIGNAL,
 };
 
 class BpAudioTrack : public BpInterface<IAudioTrack>
@@ -166,6 +168,27 @@
         }
         return status;
     }
+
+    virtual status_t getTimestamp(AudioTimestamp& timestamp) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
+        status_t status = remote()->transact(GET_TIMESTAMP, data, &reply);
+        if (status == NO_ERROR) {
+            status = reply.readInt32();
+            if (status == NO_ERROR) {
+                timestamp.mPosition = reply.readInt32();
+                timestamp.mTime.tv_sec = reply.readInt32();
+                timestamp.mTime.tv_nsec = reply.readInt32();
+            }
+        }
+        return status;
+    }
+
+    virtual void signal() {
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
+        remote()->transact(SIGNAL, data, &reply);
+    }
 };
 
 IMPLEMENT_META_INTERFACE(AudioTrack, "android.media.IAudioTrack");
@@ -241,6 +264,23 @@
             reply->writeInt32(setParameters(keyValuePairs));
             return NO_ERROR;
         } break;
+        case GET_TIMESTAMP: {
+            CHECK_INTERFACE(IAudioTrack, data, reply);
+            AudioTimestamp timestamp;
+            status_t status = getTimestamp(timestamp);
+            reply->writeInt32(status);
+            if (status == NO_ERROR) {
+                reply->writeInt32(timestamp.mPosition);
+                reply->writeInt32(timestamp.mTime.tv_sec);
+                reply->writeInt32(timestamp.mTime.tv_nsec);
+            }
+            return NO_ERROR;
+        } break;
+        case SIGNAL: {
+            CHECK_INTERFACE(IAudioTrack, data, reply);
+            signal();
+            return NO_ERROR;
+        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp
index 902aeb2..f7a9a75 100644
--- a/media/libmedia/IDrm.cpp
+++ b/media/libmedia/IDrm.cpp
@@ -68,10 +68,11 @@
         return reply.readInt32();
     }
 
-    virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) {
+    virtual bool isCryptoSchemeSupported(const uint8_t uuid[16], const String8 &mimeType) {
         Parcel data, reply;
         data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
         data.write(uuid, 16);
+        data.writeString8(mimeType);
         remote()->transact(IS_CRYPTO_SUPPORTED, data, &reply);
 
         return reply.readInt32() != 0;
@@ -438,7 +439,9 @@
             CHECK_INTERFACE(IDrm, data, reply);
             uint8_t uuid[16];
             data.read(uuid, sizeof(uuid));
-            reply->writeInt32(isCryptoSchemeSupported(uuid));
+            String8 mimeType = data.readString8();
+            reply->writeInt32(isCryptoSchemeSupported(uuid, mimeType));
+
             return OK;
         }
 
diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp
index a46ff91..1cf987a 100644
--- a/media/libmedia/IHDCP.cpp
+++ b/media/libmedia/IHDCP.cpp
@@ -30,6 +30,7 @@
     HDCP_SET_OBSERVER,
     HDCP_INIT_ASYNC,
     HDCP_SHUTDOWN_ASYNC,
+    HDCP_GET_CAPS,
     HDCP_ENCRYPT,
     HDCP_ENCRYPT_NATIVE,
     HDCP_DECRYPT,
@@ -85,6 +86,13 @@
         return reply.readInt32();
     }
 
+    virtual uint32_t getCaps() {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+        remote()->transact(HDCP_GET_CAPS, data, &reply);
+        return reply.readInt32();
+    }
+
     virtual status_t encrypt(
             const void *inData, size_t size, uint32_t streamCTR,
             uint64_t *outInputCTR, void *outData) {
@@ -222,6 +230,14 @@
             return OK;
         }
 
+        case HDCP_GET_CAPS:
+        {
+            CHECK_INTERFACE(IHDCP, data, reply);
+
+            reply->writeInt32(getCaps());
+            return OK;
+        }
+
         case HDCP_ENCRYPT:
         {
             size_t size = data.readInt32();
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 74f574d..3c22b4c 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -86,30 +86,48 @@
         return interface_cast<IMediaRecorder>(reply.readStrongBinder());
     }
 
-    virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+    virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                               audio_format_t* pFormat,
+                               const sp<IMemoryHeap>& heap, size_t *pSize)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
         data.writeCString(url);
-        remote()->transact(DECODE_URL, data, &reply);
-        *pSampleRate = uint32_t(reply.readInt32());
-        *pNumChannels = reply.readInt32();
-        *pFormat = (audio_format_t) reply.readInt32();
-        return interface_cast<IMemory>(reply.readStrongBinder());
+        data.writeStrongBinder(heap->asBinder());
+        status_t status = remote()->transact(DECODE_URL, data, &reply);
+        if (status == NO_ERROR) {
+            status = (status_t)reply.readInt32();
+            if (status == NO_ERROR) {
+                *pSampleRate = uint32_t(reply.readInt32());
+                *pNumChannels = reply.readInt32();
+                *pFormat = (audio_format_t)reply.readInt32();
+                *pSize = (size_t)reply.readInt32();
+            }
+        }
+        return status;
     }
 
-    virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+    virtual status_t decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate,
+                               int* pNumChannels, audio_format_t* pFormat,
+                               const sp<IMemoryHeap>& heap, size_t *pSize)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
         data.writeFileDescriptor(fd);
         data.writeInt64(offset);
         data.writeInt64(length);
-        remote()->transact(DECODE_FD, data, &reply);
-        *pSampleRate = uint32_t(reply.readInt32());
-        *pNumChannels = reply.readInt32();
-        *pFormat = (audio_format_t) reply.readInt32();
-        return interface_cast<IMemory>(reply.readStrongBinder());
+        data.writeStrongBinder(heap->asBinder());
+        status_t status = remote()->transact(DECODE_FD, data, &reply);
+        if (status == NO_ERROR) {
+            status = (status_t)reply.readInt32();
+            if (status == NO_ERROR) {
+                *pSampleRate = uint32_t(reply.readInt32());
+                *pNumChannels = reply.readInt32();
+                *pFormat = (audio_format_t)reply.readInt32();
+                *pSize = (size_t)reply.readInt32();
+            }
+        }
+        return status;
     }
 
     virtual sp<IOMX> getOMX() {
@@ -205,14 +223,19 @@
         case DECODE_URL: {
             CHECK_INTERFACE(IMediaPlayerService, data, reply);
             const char* url = data.readCString();
+            sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
             uint32_t sampleRate;
             int numChannels;
             audio_format_t format;
-            sp<IMemory> player = decode(url, &sampleRate, &numChannels, &format);
-            reply->writeInt32(sampleRate);
-            reply->writeInt32(numChannels);
-            reply->writeInt32((int32_t) format);
-            reply->writeStrongBinder(player->asBinder());
+            size_t size;
+            status_t status = decode(url, &sampleRate, &numChannels, &format, heap, &size);
+            reply->writeInt32(status);
+            if (status == NO_ERROR) {
+                reply->writeInt32(sampleRate);
+                reply->writeInt32(numChannels);
+                reply->writeInt32((int32_t)format);
+                reply->writeInt32((int32_t)size);
+            }
             return NO_ERROR;
         } break;
         case DECODE_FD: {
@@ -220,14 +243,20 @@
             int fd = dup(data.readFileDescriptor());
             int64_t offset = data.readInt64();
             int64_t length = data.readInt64();
+            sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
             uint32_t sampleRate;
             int numChannels;
             audio_format_t format;
-            sp<IMemory> player = decode(fd, offset, length, &sampleRate, &numChannels, &format);
-            reply->writeInt32(sampleRate);
-            reply->writeInt32(numChannels);
-            reply->writeInt32((int32_t) format);
-            reply->writeStrongBinder(player->asBinder());
+            size_t size;
+            status_t status = decode(fd, offset, length, &sampleRate, &numChannels, &format,
+                                     heap, &size);
+            reply->writeInt32(status);
+            if (status == NO_ERROR) {
+                reply->writeInt32(sampleRate);
+                reply->writeInt32(numChannels);
+                reply->writeInt32((int32_t)format);
+                reply->writeInt32((int32_t)size);
+            }
             return NO_ERROR;
         } break;
         case CREATE_MEDIA_RECORDER: {
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index ef99f4f..71ce320 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -43,6 +43,7 @@
     CREATE_INPUT_SURFACE,
     SIGNAL_END_OF_INPUT_STREAM,
     STORE_META_DATA_IN_BUFFERS,
+    PREPARE_FOR_ADAPTIVE_PLAYBACK,
     ALLOC_BUFFER,
     ALLOC_BUFFER_WITH_BACKUP,
     FREE_BUFFER,
@@ -351,6 +352,22 @@
         return err;
     }
 
+    virtual status_t prepareForAdaptivePlayback(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable,
+            OMX_U32 max_width, OMX_U32 max_height) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeIntPtr((intptr_t)node);
+        data.writeInt32(port_index);
+        data.writeInt32((int32_t)enable);
+        data.writeInt32(max_width);
+        data.writeInt32(max_height);
+        remote()->transact(PREPARE_FOR_ADAPTIVE_PLAYBACK, data, &reply);
+
+        status_t err = reply.readInt32();
+        return err;
+    }
+
     virtual status_t allocateBuffer(
             node_id node, OMX_U32 port_index, size_t size,
             buffer_id *buffer, void **buffer_data) {
@@ -770,6 +787,23 @@
             return NO_ERROR;
         }
 
+        case PREPARE_FOR_ADAPTIVE_PLAYBACK:
+        {
+            CHECK_OMX_INTERFACE(IOMX, data, reply);
+
+            node_id node = (void*)data.readIntPtr();
+            OMX_U32 port_index = data.readInt32();
+            OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+            OMX_U32 max_width = data.readInt32();
+            OMX_U32 max_height = data.readInt32();
+
+            status_t err = prepareForAdaptivePlayback(
+                    node, port_index, enable, max_width, max_height);
+            reply->writeInt32(err);
+
+            return NO_ERROR;
+        }
+
         case ALLOC_BUFFER:
         {
             CHECK_OMX_INTERFACE(IOMX, data, reply);
diff --git a/media/libmedia/IRemoteDisplayClient.cpp b/media/libmedia/IRemoteDisplayClient.cpp
index 5c494b3..7190879 100644
--- a/media/libmedia/IRemoteDisplayClient.cpp
+++ b/media/libmedia/IRemoteDisplayClient.cpp
@@ -38,7 +38,7 @@
     }
 
     void onDisplayConnected(const sp<IGraphicBufferProducer>& bufferProducer,
-            uint32_t width, uint32_t height, uint32_t flags)
+            uint32_t width, uint32_t height, uint32_t flags, uint32_t session)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IRemoteDisplayClient::getInterfaceDescriptor());
@@ -46,6 +46,7 @@
         data.writeInt32(width);
         data.writeInt32(height);
         data.writeInt32(flags);
+        data.writeInt32(session);
         remote()->transact(ON_DISPLAY_CONNECTED, data, &reply, IBinder::FLAG_ONEWAY);
     }
 
@@ -80,7 +81,8 @@
             uint32_t width = data.readInt32();
             uint32_t height = data.readInt32();
             uint32_t flags = data.readInt32();
-            onDisplayConnected(surfaceTexture, width, height, flags);
+            uint32_t session = data.readInt32();
+            onDisplayConnected(surfaceTexture, width, height, flags, session);
             return NO_ERROR;
         }
         case ON_DISPLAY_DISCONNECTED: {
diff --git a/media/libmedia/SingleStateQueueInstantiations.cpp b/media/libmedia/SingleStateQueueInstantiations.cpp
index 2afebe9..0265c8c 100644
--- a/media/libmedia/SingleStateQueueInstantiations.cpp
+++ b/media/libmedia/SingleStateQueueInstantiations.cpp
@@ -16,11 +16,13 @@
 
 #include <media/SingleStateQueue.h>
 #include <private/media/StaticAudioTrackState.h>
+#include <media/AudioTimestamp.h>
 
 // FIXME hack for gcc
 
 namespace android {
 
 template class SingleStateQueue<StaticAudioTrackState>; // typedef StaticAudioTrackSingleStateQueue
+template class SingleStateQueue<AudioTimestamp>;        // typedef AudioTimestampSingleStateQueue
 
 }
diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp
index 7f10e05..22e9fad 100644
--- a/media/libmedia/SoundPool.cpp
+++ b/media/libmedia/SoundPool.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "SoundPool"
 #include <utils/Log.h>
 
-//#define USE_SHARED_MEM_BUFFER
+#define USE_SHARED_MEM_BUFFER
 
 #include <media/AudioTrack.h>
 #include <media/mediaplayer.h>
@@ -32,6 +32,8 @@
 uint32_t kMaxSampleRate = 48000;
 uint32_t kDefaultSampleRate = 44100;
 uint32_t kDefaultFrameCount = 1200;
+size_t kDefaultHeapSize = 1024 * 1024; // 1MB
+
 
 SoundPool::SoundPool(int maxChannels, audio_stream_type_t streamType, int srcQuality)
 {
@@ -464,7 +466,6 @@
 
 void Sample::init()
 {
-    mData = 0;
     mSize = 0;
     mRefCount = 0;
     mSampleID = 0;
@@ -482,7 +483,6 @@
         ALOGV("close(%d)", mFd);
         ::close(mFd);
     }
-    mData.clear();
     free(mUrl);
 }
 
@@ -491,44 +491,48 @@
     uint32_t sampleRate;
     int numChannels;
     audio_format_t format;
-    sp<IMemory> p;
+    status_t status;
+    mHeap = new MemoryHeapBase(kDefaultHeapSize);
+
     ALOGV("Start decode");
     if (mUrl) {
-        p = MediaPlayer::decode(mUrl, &sampleRate, &numChannels, &format);
+        status = MediaPlayer::decode(mUrl, &sampleRate, &numChannels, &format, mHeap, &mSize);
     } else {
-        p = MediaPlayer::decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format);
+        status = MediaPlayer::decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format,
+                                     mHeap, &mSize);
         ALOGV("close(%d)", mFd);
         ::close(mFd);
         mFd = -1;
     }
-    if (p == 0) {
+    if (status != NO_ERROR) {
         ALOGE("Unable to load sample: %s", mUrl);
-        return -1;
+        goto error;
     }
     ALOGV("pointer = %p, size = %u, sampleRate = %u, numChannels = %d",
-            p->pointer(), p->size(), sampleRate, numChannels);
+          mHeap->getBase(), mSize, sampleRate, numChannels);
 
     if (sampleRate > kMaxSampleRate) {
        ALOGE("Sample rate (%u) out of range", sampleRate);
-       return - 1;
+       status = BAD_VALUE;
+       goto error;
     }
 
     if ((numChannels < 1) || (numChannels > 2)) {
         ALOGE("Sample channel count (%d) out of range", numChannels);
-        return - 1;
+        status = BAD_VALUE;
+        goto error;
     }
 
-    //_dumpBuffer(p->pointer(), p->size());
-    uint8_t* q = static_cast<uint8_t*>(p->pointer()) + p->size() - 10;
-    //_dumpBuffer(q, 10, 10, false);
-
-    mData = p;
-    mSize = p->size();
+    mData = new MemoryBase(mHeap, 0, mSize);
     mSampleRate = sampleRate;
     mNumChannels = numChannels;
     mFormat = format;
     mState = READY;
-    return 0;
+    return NO_ERROR;
+
+error:
+    mHeap.clear();
+    return status;
 }
 
 
@@ -602,7 +606,7 @@
         // do not create a new audio track if current track is compatible with sample parameters
 #ifdef USE_SHARED_MEM_BUFFER
         newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
-                channels, sample->getIMemory(), AUDIO_OUTPUT_FLAG_NONE, callback, userData);
+                channels, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
 #else
         newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
                 channels, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
@@ -740,11 +744,16 @@
             b->size = count;
             //ALOGV("buffer=%p, [0]=%d", b->i16, b->i16[0]);
         }
-    } else if (event == AudioTrack::EVENT_UNDERRUN) {
-        ALOGV("process %p channel %d EVENT_UNDERRUN", this, mChannelID);
+    } else if (event == AudioTrack::EVENT_UNDERRUN || event == AudioTrack::EVENT_BUFFER_END ||
+            event == AudioTrack::EVENT_NEW_IAUDIOTRACK) {
+        ALOGV("process %p channel %d event %s",
+              this, mChannelID, (event == AudioTrack::EVENT_UNDERRUN) ? "UNDERRUN" :
+                      (event == AudioTrack::EVENT_BUFFER_END) ? "BUFFER_END" : "NEW_IAUDIOTRACK");
         mSoundPool->addToStopList(this);
     } else if (event == AudioTrack::EVENT_LOOP_END) {
-        ALOGV("End loop %p channel %d count %d", this, mChannelID, *(int *)info);
+        ALOGV("End loop %p channel %d", this, mChannelID);
+    } else {
+        ALOGW("SoundChannel::process unexpected event %d", event);
     }
 }
 
diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp
index e519f13..c146b8d 100644
--- a/media/libmedia/Visualizer.cpp
+++ b/media/libmedia/Visualizer.cpp
@@ -43,6 +43,7 @@
         mCaptureSize(CAPTURE_SIZE_DEF),
         mSampleRate(44100000),
         mScalingMode(VISUALIZER_SCALING_MODE_NORMALIZED),
+        mMeasurementMode(MEASUREMENT_MODE_NONE),
         mCaptureCallBack(NULL),
         mCaptureCbkUser(NULL)
 {
@@ -186,6 +187,73 @@
     return status;
 }
 
+status_t Visualizer::setMeasurementMode(uint32_t mode) {
+    if ((mode != MEASUREMENT_MODE_NONE)
+            //Note: needs to be handled as a mask when more measurement modes are added
+            && ((mode & MEASUREMENT_MODE_PEAK_RMS) != mode)) {
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock _l(mCaptureLock);
+
+    uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
+    effect_param_t *p = (effect_param_t *)buf32;
+
+    p->psize = sizeof(uint32_t);
+    p->vsize = sizeof(uint32_t);
+    *(int32_t *)p->data = VISUALIZER_PARAM_MEASUREMENT_MODE;
+    *((int32_t *)p->data + 1)= mode;
+    status_t status = setParameter(p);
+
+    ALOGV("setMeasurementMode mode %d  status %d p->status %d", mode, status, p->status);
+
+    if (status == NO_ERROR) {
+        status = p->status;
+        if (status == NO_ERROR) {
+            mMeasurementMode = mode;
+        }
+    }
+    return status;
+}
+
+status_t Visualizer::getIntMeasurements(uint32_t type, uint32_t number, int32_t *measurements) {
+    if (mMeasurementMode == MEASUREMENT_MODE_NONE) {
+        ALOGE("Cannot retrieve int measurements, no measurement mode set");
+        return INVALID_OPERATION;
+    }
+    if (!(mMeasurementMode & type)) {
+        // measurement type has not been set on this Visualizer
+        ALOGE("Cannot retrieve int measurements, requested measurement mode 0x%x not set(0x%x)",
+                type, mMeasurementMode);
+        return INVALID_OPERATION;
+    }
+    // only peak+RMS measurement supported
+    if ((type != MEASUREMENT_MODE_PEAK_RMS)
+            // for peak+RMS measurement, the results are 2 int32_t values
+            || (number != 2)) {
+        ALOGE("Cannot retrieve int measurements, MEASUREMENT_MODE_PEAK_RMS returns 2 ints, not %d",
+                        number);
+        return BAD_VALUE;
+    }
+
+    status_t status = NO_ERROR;
+    if (mEnabled) {
+        uint32_t replySize = number * sizeof(int32_t);
+        status = command(VISUALIZER_CMD_MEASURE,
+                sizeof(uint32_t)  /*cmdSize*/,
+                &type /*cmdData*/,
+                &replySize, measurements);
+        ALOGV("getMeasurements() command returned %d", status);
+        if ((status == NO_ERROR) && (replySize == 0)) {
+            status = NOT_ENOUGH_DATA;
+        }
+    } else {
+        ALOGV("getMeasurements() disabled");
+        return INVALID_OPERATION;
+    }
+    return status;
+}
+
 status_t Visualizer::getWaveForm(uint8_t *waveform)
 {
     if (waveform == NULL) {
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 056cc0a..0f6d897 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -756,6 +756,9 @@
     case MEDIA_TIMED_TEXT:
         ALOGV("Received timed text message");
         break;
+    case MEDIA_SUBTITLE_DATA:
+        ALOGV("Received subtitle data message");
+        break;
     default:
         ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
         break;
@@ -773,17 +776,20 @@
     }
 }
 
-/*static*/ sp<IMemory> MediaPlayer::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+/*static*/ status_t MediaPlayer::decode(const char* url, uint32_t *pSampleRate,
+                                           int* pNumChannels, audio_format_t* pFormat,
+                                           const sp<IMemoryHeap>& heap, size_t *pSize)
 {
     ALOGV("decode(%s)", url);
-    sp<IMemory> p;
+    status_t status;
     const sp<IMediaPlayerService>& service = getMediaPlayerService();
     if (service != 0) {
-        p = service->decode(url, pSampleRate, pNumChannels, pFormat);
+        status = service->decode(url, pSampleRate, pNumChannels, pFormat, heap, pSize);
     } else {
         ALOGE("Unable to locate media service");
+        status = DEAD_OBJECT;
     }
-    return p;
+    return status;
 
 }
 
@@ -793,17 +799,22 @@
     notify(MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED, 0);
 }
 
-/*static*/ sp<IMemory> MediaPlayer::decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+/*static*/ status_t MediaPlayer::decode(int fd, int64_t offset, int64_t length,
+                                        uint32_t *pSampleRate, int* pNumChannels,
+                                        audio_format_t* pFormat,
+                                        const sp<IMemoryHeap>& heap, size_t *pSize)
 {
     ALOGV("decode(%d, %lld, %lld)", fd, offset, length);
-    sp<IMemory> p;
+    status_t status;
     const sp<IMediaPlayerService>& service = getMediaPlayerService();
     if (service != 0) {
-        p = service->decode(fd, offset, length, pSampleRate, pNumChannels, pFormat);
+        status = service->decode(fd, offset, length, pSampleRate,
+                                 pNumChannels, pFormat, heap, pSize);
     } else {
         ALOGE("Unable to locate media service");
+        status = DEAD_OBJECT;
     }
-    return p;
+    return status;
 
 }
 
diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp
index f00f488..eebcb79 100644
--- a/media/libmediaplayerservice/Drm.cpp
+++ b/media/libmediaplayerservice/Drm.cpp
@@ -211,15 +211,22 @@
     return true;
 }
 
-bool Drm::isCryptoSchemeSupported(const uint8_t uuid[16]) {
+bool Drm::isCryptoSchemeSupported(const uint8_t uuid[16], const String8 &mimeType) {
+
     Mutex::Autolock autoLock(mLock);
 
-    if (mFactory && mFactory->isCryptoSchemeSupported(uuid)) {
-        return true;
+    if (!mFactory || !mFactory->isCryptoSchemeSupported(uuid)) {
+        findFactoryForScheme(uuid);
+        if (mInitCheck != OK) {
+            return false;
+        }
     }
 
-    findFactoryForScheme(uuid);
-    return (mInitCheck == OK);
+    if (mimeType != "") {
+        return mFactory->isContentTypeSupported(mimeType);
+    }
+
+    return true;
 }
 
 status_t Drm::createPlugin(const uint8_t uuid[16]) {
diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h
index 3f460f1..119fd50 100644
--- a/media/libmediaplayerservice/Drm.h
+++ b/media/libmediaplayerservice/Drm.h
@@ -37,7 +37,7 @@
 
     virtual status_t initCheck() const;
 
-    virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]);
+    virtual bool isCryptoSchemeSupported(const uint8_t uuid[16], const String8 &mimeType);
 
     virtual status_t createPlugin(const uint8_t uuid[16]);
 
diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp
index 8a3188c..c2ac1a3 100644
--- a/media/libmediaplayerservice/HDCP.cpp
+++ b/media/libmediaplayerservice/HDCP.cpp
@@ -100,6 +100,20 @@
     return mHDCPModule->shutdownAsync();
 }
 
+uint32_t HDCP::getCaps() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mHDCPModule == NULL) {
+        return NO_INIT;
+    }
+
+    // TO-DO:
+    // Only support HDCP_CAPS_ENCRYPT (byte-array to byte-array) for now.
+    // use mHDCPModule->getCaps() when the HDCP libraries get updated.
+    //return mHDCPModule->getCaps();
+    return HDCPModule::HDCP_CAPS_ENCRYPT;
+}
+
 status_t HDCP::encrypt(
         const void *inData, size_t size, uint32_t streamCTR,
         uint64_t *outInputCTR, void *outData) {
diff --git a/media/libmediaplayerservice/HDCP.h b/media/libmediaplayerservice/HDCP.h
index c60c2e0..26ddc86 100644
--- a/media/libmediaplayerservice/HDCP.h
+++ b/media/libmediaplayerservice/HDCP.h
@@ -30,6 +30,7 @@
     virtual status_t setObserver(const sp<IHDCPObserver> &observer);
     virtual status_t initAsync(const char *host, unsigned port);
     virtual status_t shutdownAsync();
+    virtual uint32_t getCaps();
 
     virtual status_t encrypt(
             const void *inData, size_t size, uint32_t streamCTR,
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 8833bd7..cd052e6 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -319,8 +319,8 @@
 
     result.append(" AudioCache\n");
     if (mHeap != 0) {
-        snprintf(buffer, 255, "  heap base(%p), size(%d), flags(%d), device(%s)\n",
-                mHeap->getBase(), mHeap->getSize(), mHeap->getFlags(), mHeap->getDevice());
+        snprintf(buffer, 255, "  heap base(%p), size(%d), flags(%d)\n",
+                mHeap->getBase(), mHeap->getSize(), mHeap->getFlags());
         result.append(buffer);
     }
     snprintf(buffer, 255, "  msec per frame(%f), channel count(%d), format(%d), frame count(%zd)\n",
@@ -590,7 +590,7 @@
     }
 
     if (!p->hardwareOutput()) {
-        mAudioOutput = new AudioOutput(mAudioSessionId);
+        mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid());
         static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
     }
 
@@ -744,7 +744,7 @@
 
     sp<ANativeWindow> anw;
     if (bufferProducer != NULL) {
-        anw = new Surface(bufferProducer);
+        anw = new Surface(bufferProducer, true /* controlledByApp */);
         status_t err = native_window_api_connect(anw.get(),
                 NATIVE_WINDOW_API_MEDIA);
 
@@ -1176,13 +1176,13 @@
 }
 #endif
 
-static size_t kDefaultHeapSize = 1024 * 1024; // 1MB
-
-sp<IMemory> MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+status_t MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                                       audio_format_t* pFormat,
+                                       const sp<IMemoryHeap>& heap, size_t *pSize)
 {
     ALOGV("decode(%s)", url);
-    sp<MemoryBase> mem;
     sp<MediaPlayerBase> player;
+    status_t status = BAD_VALUE;
 
     // Protect our precious, precious DRMd ringtones by only allowing
     // decoding of http, but not filesystem paths or content Uris.
@@ -1190,7 +1190,7 @@
     // filedescriptor for them and use that.
     if (url != NULL && strncmp(url, "http://", 7) != 0) {
         ALOGD("Can't decode %s by path, use filedescriptor instead", url);
-        return mem;
+        return BAD_VALUE;
     }
 
     player_type playerType =
@@ -1198,7 +1198,7 @@
     ALOGV("player type = %d", playerType);
 
     // create the right type of player
-    sp<AudioCache> cache = new AudioCache(url);
+    sp<AudioCache> cache = new AudioCache(heap);
     player = MediaPlayerFactory::createPlayer(playerType, cache.get(), cache->notify);
     if (player == NULL) goto Exit;
     if (player->hardwareOutput()) goto Exit;
@@ -1224,22 +1224,27 @@
         goto Exit;
     }
 
-    mem = new MemoryBase(cache->getHeap(), 0, cache->size());
+    *pSize = cache->size();
     *pSampleRate = cache->sampleRate();
     *pNumChannels = cache->channelCount();
     *pFormat = cache->format();
-    ALOGV("return memory @ %p, sampleRate=%u, channelCount = %d, format = %d", mem->pointer(), *pSampleRate, *pNumChannels, *pFormat);
+    ALOGV("return size %d sampleRate=%u, channelCount = %d, format = %d",
+          *pSize, *pSampleRate, *pNumChannels, *pFormat);
+    status = NO_ERROR;
 
 Exit:
     if (player != 0) player->reset();
-    return mem;
+    return status;
 }
 
-sp<IMemory> MediaPlayerService::decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat)
+status_t MediaPlayerService::decode(int fd, int64_t offset, int64_t length,
+                                       uint32_t *pSampleRate, int* pNumChannels,
+                                       audio_format_t* pFormat,
+                                       const sp<IMemoryHeap>& heap, size_t *pSize)
 {
     ALOGV("decode(%d, %lld, %lld)", fd, offset, length);
-    sp<MemoryBase> mem;
     sp<MediaPlayerBase> player;
+    status_t status = BAD_VALUE;
 
     player_type playerType = MediaPlayerFactory::getPlayerType(NULL /* client */,
                                                                fd,
@@ -1248,7 +1253,7 @@
     ALOGV("player type = %d", playerType);
 
     // create the right type of player
-    sp<AudioCache> cache = new AudioCache("decode_fd");
+    sp<AudioCache> cache = new AudioCache(heap);
     player = MediaPlayerFactory::createPlayer(playerType, cache.get(), cache->notify);
     if (player == NULL) goto Exit;
     if (player->hardwareOutput()) goto Exit;
@@ -1274,27 +1279,30 @@
         goto Exit;
     }
 
-    mem = new MemoryBase(cache->getHeap(), 0, cache->size());
+    *pSize = cache->size();
     *pSampleRate = cache->sampleRate();
     *pNumChannels = cache->channelCount();
     *pFormat = cache->format();
-    ALOGV("return memory @ %p, sampleRate=%u, channelCount = %d, format = %d", mem->pointer(), *pSampleRate, *pNumChannels, *pFormat);
+    ALOGV("return size %d, sampleRate=%u, channelCount = %d, format = %d",
+          *pSize, *pSampleRate, *pNumChannels, *pFormat);
+    status = NO_ERROR;
 
 Exit:
     if (player != 0) player->reset();
     ::close(fd);
-    return mem;
+    return status;
 }
 
 
 #undef LOG_TAG
 #define LOG_TAG "AudioSink"
-MediaPlayerService::AudioOutput::AudioOutput(int sessionId)
+MediaPlayerService::AudioOutput::AudioOutput(int sessionId, int uid)
     : mCallback(NULL),
       mCallbackCookie(NULL),
       mCallbackData(NULL),
       mBytesWritten(0),
       mSessionId(sessionId),
+      mUid(uid),
       mFlags(AUDIO_OUTPUT_FLAG_NONE) {
     ALOGV("AudioOutput(%d)", sessionId);
     mStreamType = AUDIO_STREAM_MUSIC;
@@ -1542,7 +1550,8 @@
                     0,  // notification frames
                     mSessionId,
                     AudioTrack::TRANSFER_CALLBACK,
-                    offloadInfo);
+                    offloadInfo,
+                    mUid);
         } else {
             t = new AudioTrack(
                     mStreamType,
@@ -1551,10 +1560,13 @@
                     channelMask,
                     frameCount,
                     flags,
-                    NULL,
-                    NULL,
-                    0,
-                    mSessionId);
+                    NULL, // callback
+                    NULL, // user data
+                    0, // notification frames
+                    mSessionId,
+                    AudioTrack::TRANSFER_DEFAULT,
+                    NULL, // offload info
+                    mUid);
         }
 
         if ((t == 0) || (t->initCheck() != NO_ERROR)) {
@@ -1803,12 +1815,10 @@
 
 #undef LOG_TAG
 #define LOG_TAG "AudioCache"
-MediaPlayerService::AudioCache::AudioCache(const char* name) :
-    mChannelCount(0), mFrameCount(1024), mSampleRate(0), mSize(0),
-    mError(NO_ERROR), mCommandComplete(false)
+MediaPlayerService::AudioCache::AudioCache(const sp<IMemoryHeap>& heap) :
+    mHeap(heap), mChannelCount(0), mFrameCount(1024), mSampleRate(0), mSize(0),
+    mError(NO_ERROR),  mCommandComplete(false)
 {
-    // create ashmem heap
-    mHeap = new MemoryHeapBase(kDefaultHeapSize, 0, name);
 }
 
 uint32_t MediaPlayerService::AudioCache::latency () const
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 7d27944..a486cb5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -72,7 +72,7 @@
         class CallbackData;
 
      public:
-                                AudioOutput(int sessionId);
+                                AudioOutput(int sessionId, int uid);
         virtual                 ~AudioOutput();
 
         virtual bool            ready() const { return mTrack != 0; }
@@ -100,7 +100,10 @@
         virtual void            flush();
         virtual void            pause();
         virtual void            close();
-                void            setAudioStreamType(audio_stream_type_t streamType) { mStreamType = streamType; }
+                void            setAudioStreamType(audio_stream_type_t streamType) {
+                                                                        mStreamType = streamType; }
+        virtual audio_stream_type_t getAudioStreamType() const { return mStreamType; }
+
                 void            setVolume(float left, float right);
         virtual status_t        setPlaybackRatePermille(int32_t ratePermille);
                 status_t        setAuxEffectSendLevel(float level);
@@ -135,6 +138,7 @@
         uint32_t                mSampleRateHz; // sample rate of the content, as set in open()
         float                   mMsecsPerFrame;
         int                     mSessionId;
+        int                     mUid;
         float                   mSendLevel;
         int                     mAuxEffectId;
         static bool             mIsOnEmulator;
@@ -177,7 +181,7 @@
     class AudioCache : public MediaPlayerBase::AudioSink
     {
     public:
-                                AudioCache(const char* name);
+                                AudioCache(const sp<IMemoryHeap>& heap);
         virtual                 ~AudioCache() {}
 
         virtual bool            ready() const { return (mChannelCount > 0) && (mHeap->getHeapID() > 0); }
@@ -206,6 +210,9 @@
         virtual void            pause() {}
         virtual void            close() {}
                 void            setAudioStreamType(audio_stream_type_t streamType) {}
+                // stream type is not used for AudioCache
+        virtual audio_stream_type_t getAudioStreamType() const { return AUDIO_STREAM_DEFAULT; }
+
                 void            setVolume(float left, float right) {}
         virtual status_t        setPlaybackRatePermille(int32_t ratePermille) { return INVALID_OPERATION; }
                 uint32_t        sampleRate() const { return mSampleRate; }
@@ -224,7 +231,7 @@
 
         Mutex               mLock;
         Condition           mSignal;
-        sp<MemoryHeapBase>  mHeap;
+        sp<IMemoryHeap>     mHeap;
         float               mMsecsPerFrame;
         uint16_t            mChannelCount;
         audio_format_t      mFormat;
@@ -247,8 +254,13 @@
 
     virtual sp<IMediaPlayer>    create(const sp<IMediaPlayerClient>& client, int audioSessionId);
 
-    virtual sp<IMemory>         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat);
-    virtual sp<IMemory>         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat);
+    virtual status_t            decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
+                                       audio_format_t* pFormat,
+                                       const sp<IMemoryHeap>& heap, size_t *pSize);
+    virtual status_t            decode(int fd, int64_t offset, int64_t length,
+                                       uint32_t *pSampleRate, int* pNumChannels,
+                                       audio_format_t* pFormat,
+                                       const sp<IMemoryHeap>& heap, size_t *pSize);
     virtual sp<IOMX>            getOMX();
     virtual sp<ICrypto>         makeCrypto();
     virtual sp<IDrm>            makeDrm();
diff --git a/media/libmediaplayerservice/MidiFile.cpp b/media/libmediaplayerservice/MidiFile.cpp
index 270b872..0a6aa90 100644
--- a/media/libmediaplayerservice/MidiFile.cpp
+++ b/media/libmediaplayerservice/MidiFile.cpp
@@ -220,6 +220,9 @@
     }
 
     mRender = true;
+    if (mState == EAS_STATE_PLAY) {
+        sendEvent(MEDIA_STARTED);
+    }
 
     // wake up render thread
     ALOGV("  wakeup render thread");
@@ -242,6 +245,7 @@
         }
     }
     mPaused = false;
+    sendEvent(MEDIA_STOPPED);
     return NO_ERROR;
 }
 
@@ -279,6 +283,7 @@
         return ERROR_EAS_FAILURE;
     }
     mPaused = true;
+    sendEvent(MEDIA_PAUSED);
     return NO_ERROR;
 }
 
@@ -382,6 +387,7 @@
 status_t MidiFile::reset_nosync()
 {
     ALOGV("MidiFile::reset_nosync");
+    sendEvent(MEDIA_STOPPED);
     // close file
     if (mEasHandle) {
         EAS_CloseFile(mEasData, mEasHandle);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 095d5ca..f9d9020 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -70,8 +70,9 @@
       mOutputFd(-1),
       mAudioSource(AUDIO_SOURCE_CNT),
       mVideoSource(VIDEO_SOURCE_LIST_END),
-      mStarted(false), mSurfaceMediaSource(NULL),
-      mCaptureTimeLapse(false) {
+      mCaptureTimeLapse(false),
+      mStarted(false),
+      mSurfaceMediaSource(NULL) {
 
     ALOGV("Constructor");
     reset();
@@ -1089,7 +1090,22 @@
     }
 }
 
-status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
+status_t StagefrightRecorder::checkVideoEncoderCapabilities(
+        bool *supportsCameraSourceMetaDataMode) {
+    /* hardware codecs must support camera source meta data mode */
+    Vector<CodecCapabilities> codecs;
+    OMXClient client;
+    CHECK_EQ(client.connect(), (status_t)OK);
+    QueryCodecs(
+            client.interface(),
+            (mVideoEncoder == VIDEO_ENCODER_H263 ? MEDIA_MIMETYPE_VIDEO_H263 :
+             mVideoEncoder == VIDEO_ENCODER_MPEG_4_SP ? MEDIA_MIMETYPE_VIDEO_MPEG4 :
+             mVideoEncoder == VIDEO_ENCODER_H264 ? MEDIA_MIMETYPE_VIDEO_AVC : ""),
+            false /* decoder */, true /* hwCodec */, &codecs);
+    *supportsCameraSourceMetaDataMode = codecs.size() > 0;
+    ALOGV("encoder %s camera source meta-data mode",
+            *supportsCameraSourceMetaDataMode ? "supports" : "DOES NOT SUPPORT");
+
     if (!mCaptureTimeLapse) {
         // Dont clip for time lapse capture as encoder will have enough
         // time to encode because of slow capture rate of time lapse.
@@ -1307,7 +1323,9 @@
 status_t StagefrightRecorder::setupCameraSource(
         sp<CameraSource> *cameraSource) {
     status_t err = OK;
-    if ((err = checkVideoEncoderCapabilities()) != OK) {
+    bool encoderSupportsCameraSourceMetaDataMode;
+    if ((err = checkVideoEncoderCapabilities(
+                &encoderSupportsCameraSourceMetaDataMode)) != OK) {
         return err;
     }
     Size videoSize;
@@ -1323,13 +1341,14 @@
         mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
                 mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,
                 videoSize, mFrameRate, mPreviewSurface,
-                mTimeBetweenTimeLapseFrameCaptureUs);
+                mTimeBetweenTimeLapseFrameCaptureUs,
+                encoderSupportsCameraSourceMetaDataMode);
         *cameraSource = mCameraSourceTimeLapse;
     } else {
         *cameraSource = CameraSource::CreateFromCamera(
                 mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,
                 videoSize, mFrameRate,
-                mPreviewSurface, true /*storeMetaDataInVideoBuffers*/);
+                mPreviewSurface, encoderSupportsCameraSourceMetaDataMode);
     }
     mCamera.clear();
     mCameraProxy.clear();
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index c864207..31f09e0 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -139,7 +139,8 @@
     status_t startRTPRecording();
     status_t startMPEG2TSRecording();
     sp<MediaSource> createAudioSource();
-    status_t checkVideoEncoderCapabilities();
+    status_t checkVideoEncoderCapabilities(
+            bool *supportsCameraSourceMetaDataMode);
     status_t checkAudioEncoderCapabilities();
     // Generic MediaSource set-up. Returns the appropriate
     // source (CameraSource or SurfaceMediaSource)
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index c8901ce..f1782cc 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -43,7 +43,8 @@
       mUID(uid),
       mFlags(0),
       mFinalResult(OK),
-      mOffset(0) {
+      mOffset(0),
+      mFetchSubtitleDataGeneration(0) {
     if (headers) {
         mExtraHeaders = *headers;
 
@@ -120,6 +121,28 @@
     return mLiveSession->getDuration(durationUs);
 }
 
+status_t NuPlayer::HTTPLiveSource::getTrackInfo(Parcel *reply) const {
+    return mLiveSession->getTrackInfo(reply);
+}
+
+status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select) {
+    status_t err = mLiveSession->selectTrack(trackIndex, select);
+
+    if (err == OK) {
+        mFetchSubtitleDataGeneration++;
+        if (select) {
+            sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id());
+            msg->setInt32("generation", mFetchSubtitleDataGeneration);
+            msg->post();
+        }
+    }
+
+    // LiveSession::selectTrack returns BAD_VALUE when selecting the currently
+    // selected track, or unselecting a non-selected track. In this case it's a
+    // no-op so we return OK.
+    return (err == OK || err == BAD_VALUE) ? OK : err;
+}
+
 status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) {
     return mLiveSession->seekTo(seekTimeUs);
 }
@@ -132,6 +155,39 @@
             break;
         }
 
+        case kWhatFetchSubtitleData:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mFetchSubtitleDataGeneration) {
+                // stale
+                break;
+            }
+
+            sp<ABuffer> buffer;
+            if (mLiveSession->dequeueAccessUnit(
+                    LiveSession::STREAMTYPE_SUBTITLES, &buffer) == OK) {
+                sp<AMessage> notify = dupNotify();
+                notify->setInt32("what", kWhatSubtitleData);
+                notify->setBuffer("buffer", buffer);
+                notify->post();
+
+                int64_t timeUs, baseUs, durationUs, delayUs;
+                CHECK(buffer->meta()->findInt64("baseUs", &baseUs));
+                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+                CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
+                delayUs = baseUs + timeUs - ALooper::GetNowUs();
+
+                msg->post(delayUs > 0ll ? delayUs : 0ll);
+            } else {
+                // try again in 1 second
+                msg->post(1000000ll);
+            }
+
+            break;
+        }
+
         default:
             Source::onMessageReceived(msg);
             break;
@@ -145,7 +201,16 @@
     switch (what) {
         case LiveSession::kWhatPrepared:
         {
-            notifyVideoSizeChanged(0, 0);
+            // notify the current size here if we have it, otherwise report an initial size of (0,0)
+            sp<AMessage> format = getFormat(false /* audio */);
+            int32_t width;
+            int32_t height;
+            if (format != NULL &&
+                    format->findInt32("width", &width) && format->findInt32("height", &height)) {
+                notifyVideoSizeChanged(width, height);
+            } else {
+                notifyVideoSizeChanged(0, 0);
+            }
 
             uint32_t flags = FLAG_CAN_PAUSE;
             if (mLiveSession->isSeekable()) {
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index aa9434b..bcc3f8b 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -41,6 +41,8 @@
 
     virtual status_t feedMoreTSData();
     virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t getTrackInfo(Parcel *reply) const;
+    virtual status_t selectTrack(size_t trackIndex, bool select);
     virtual status_t seekTo(int64_t seekTimeUs);
 
 protected:
@@ -56,6 +58,7 @@
 
     enum {
         kWhatSessionNotify,
+        kWhatFetchSubtitleData,
     };
 
     AString mURL;
@@ -67,6 +70,7 @@
     off64_t mOffset;
     sp<ALooper> mLiveLooper;
     sp<LiveSession> mLiveSession;
+    int32_t mFetchSubtitleDataGeneration;
 
     void onSessionNotify(const sp<AMessage> &msg);
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 7e81035..750287f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -340,6 +340,46 @@
             break;
         }
 
+        case kWhatGetTrackInfo:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            status_t err = INVALID_OPERATION;
+            if (mSource != NULL) {
+                Parcel* reply;
+                CHECK(msg->findPointer("reply", (void**)&reply));
+                err = mSource->getTrackInfo(reply);
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatSelectTrack:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            status_t err = INVALID_OPERATION;
+            if (mSource != NULL) {
+                size_t trackIndex;
+                int32_t select;
+                CHECK(msg->findSize("trackIndex", &trackIndex));
+                CHECK(msg->findInt32("select", &select));
+                err = mSource->selectTrack(trackIndex, select);
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+
+            response->postReply(replyID);
+            break;
+        }
+
         case kWhatPollDuration:
         {
             int32_t generation;
@@ -731,6 +771,9 @@
                 ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
             } else if (what == Renderer::kWhatVideoRenderingStart) {
                 notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0);
+            } else if (what == Renderer::kWhatMediaRenderingStart) {
+                ALOGV("media rendering started");
+                notifyListener(MEDIA_STARTED, 0, 0);
             }
             break;
         }
@@ -1042,7 +1085,7 @@
     mRenderer->queueBuffer(audio, buffer, reply);
 }
 
-void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
+void NuPlayer::notifyListener(int msg, int ext1, int ext2, const Parcel *in) {
     if (mDriver == NULL) {
         return;
     }
@@ -1053,7 +1096,7 @@
         return;
     }
 
-    driver->notifyListener(msg, ext1, ext2);
+    driver->notifyListener(msg, ext1, ext2, in);
 }
 
 void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
@@ -1129,6 +1172,26 @@
     return OK;
 }
 
+status_t NuPlayer::getTrackInfo(Parcel* reply) const {
+    sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, id());
+    msg->setPointer("reply", reply);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    return err;
+}
+
+status_t NuPlayer::selectTrack(size_t trackIndex, bool select) {
+    sp<AMessage> msg = new AMessage(kWhatSelectTrack, id());
+    msg->setSize("trackIndex", trackIndex);
+    msg->setInt32("select", select);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+
+    return err;
+}
+
 void NuPlayer::schedulePollDuration() {
     sp<AMessage> msg = new AMessage(kWhatPollDuration, id());
     msg->setInt32("generation", mPollDurationGeneration);
@@ -1333,6 +1396,11 @@
             uint32_t flags;
             CHECK(msg->findInt32("flags", (int32_t *)&flags));
 
+            sp<NuPlayerDriver> driver = mDriver.promote();
+            if (driver != NULL) {
+                driver->notifyFlagsChanged(flags);
+            }
+
             if ((mSourceFlags & Source::FLAG_DYNAMIC_DURATION)
                     && (!(flags & Source::FLAG_DYNAMIC_DURATION))) {
                 cancelPollDuration();
@@ -1368,6 +1436,29 @@
             break;
         }
 
+        case Source::kWhatSubtitleData:
+        {
+            sp<ABuffer> buffer;
+            CHECK(msg->findBuffer("buffer", &buffer));
+
+            int32_t trackIndex;
+            int64_t timeUs, durationUs;
+            CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex));
+            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+            CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
+
+            Parcel in;
+            in.writeInt32(trackIndex);
+            in.writeInt64(timeUs);
+            in.writeInt64(durationUs);
+            in.writeInt32(buffer->size());
+            in.writeInt32(buffer->size());
+            in.write(buffer->data(), buffer->size());
+
+            notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in);
+            break;
+        }
+
         case Source::kWhatQueueDecoderShutdown:
         {
             int32_t audio, video;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 8b6c8c1..13350f3 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -60,6 +60,8 @@
     void seekToAsync(int64_t seekTimeUs);
 
     status_t setVideoScalingMode(int32_t mode);
+    status_t getTrackInfo(Parcel* reply) const;
+    status_t selectTrack(size_t trackIndex, bool select);
 
 protected:
     virtual ~NuPlayer();
@@ -101,6 +103,8 @@
         kWhatResume                     = 'rsme',
         kWhatPollDuration               = 'polD',
         kWhatSourceNotify               = 'srcN',
+        kWhatGetTrackInfo               = 'gTrI',
+        kWhatSelectTrack                = 'selT',
     };
 
     wp<NuPlayerDriver> mDriver;
@@ -157,7 +161,7 @@
     status_t feedDecoderInputData(bool audio, const sp<AMessage> &msg);
     void renderBuffer(bool audio, const sp<AMessage> &msg);
 
-    void notifyListener(int msg, int ext1, int ext2);
+    void notifyListener(int msg, int ext1, int ext2, const Parcel *in = NULL);
 
     void finishFlushIfPossible();
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 68b9623..47834fd 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -255,6 +255,7 @@
             return OK;
 
         case STATE_RUNNING:
+            notifyListener(MEDIA_PAUSED);
             mPlayer->pause();
             break;
 
@@ -287,6 +288,8 @@
         case STATE_PAUSED:
         {
             mAtEOS = false;
+            // seeks can take a while, so we are essentially paused
+            notifyListener(MEDIA_PAUSED);
             mPlayer->seekToAsync(seekTimeUs);
             break;
         }
@@ -345,6 +348,8 @@
             break;
     }
 
+    notifyListener(MEDIA_STOPPED);
+
     mState = STATE_RESET_IN_PROGRESS;
     mPlayer->resetAsync();
 
@@ -387,6 +392,23 @@
             return mPlayer->setVideoScalingMode(mode);
         }
 
+        case INVOKE_ID_GET_TRACK_INFO:
+        {
+            return mPlayer->getTrackInfo(reply);
+        }
+
+        case INVOKE_ID_SELECT_TRACK:
+        {
+            int trackIndex = request.readInt32();
+            return mPlayer->selectTrack(trackIndex, true /* select */);
+        }
+
+        case INVOKE_ID_UNSELECT_TRACK:
+        {
+            int trackIndex = request.readInt32();
+            return mPlayer->selectTrack(trackIndex, false /* select */);
+        }
+
         default:
         {
             return INVALID_OPERATION;
@@ -490,12 +512,13 @@
     return OK;
 }
 
-void NuPlayerDriver::notifyListener(int msg, int ext1, int ext2) {
+void NuPlayerDriver::notifyListener(
+        int msg, int ext1, int ext2, const Parcel *in) {
     if (msg == MEDIA_PLAYBACK_COMPLETE || msg == MEDIA_ERROR) {
         mAtEOS = true;
     }
 
-    sendEvent(msg, ext1, ext2);
+    sendEvent(msg, ext1, ext2, in);
 }
 
 void NuPlayerDriver::notifySetDataSourceCompleted(status_t err) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 5df0cfb..99f72a6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -69,7 +69,7 @@
     void notifyPosition(int64_t positionUs);
     void notifySeekComplete();
     void notifyFrameStats(int64_t numFramesTotal, int64_t numFramesDropped);
-    void notifyListener(int msg, int ext1 = 0, int ext2 = 0);
+    void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
     void notifyFlagsChanged(uint32_t flags);
 
 protected:
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index b543d9d..3b2784b 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -50,6 +50,8 @@
       mSyncQueues(false),
       mPaused(false),
       mVideoRenderingStarted(false),
+      mVideoRenderingStartGeneration(0),
+      mAudioRenderingStartGeneration(0),
       mLastPositionUpdateUs(-1ll),
       mVideoLateByUs(0ll) {
 }
@@ -220,6 +222,23 @@
     (new AMessage(kWhatAudioSinkChanged, id()))->post();
 }
 
+void NuPlayer::Renderer::prepareForMediaRenderingStart() {
+    mAudioRenderingStartGeneration = mAudioQueueGeneration;
+    mVideoRenderingStartGeneration = mVideoQueueGeneration;
+}
+
+void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
+    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
+        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
+        mVideoRenderingStartGeneration = -1;
+        mAudioRenderingStartGeneration = -1;
+
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatMediaRenderingStart);
+        notify->post();
+    }
+}
+
 bool NuPlayer::Renderer::onDrainAudioQueue() {
     uint32_t numFramesPlayed;
     if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
@@ -299,6 +318,8 @@
         numBytesAvailableToWrite -= copy;
         size_t copiedFrames = copy / mAudioSink->frameSize();
         mNumFramesWritten += copiedFrames;
+
+        notifyIfMediaRenderingStarted();
     }
 
     notifyPosition();
@@ -405,6 +426,8 @@
         notifyVideoRenderingStart();
     }
 
+    notifyIfMediaRenderingStarted();
+
     notifyPosition();
 }
 
@@ -552,6 +575,7 @@
     // is flushed.
     syncQueuesDone();
 
+    ALOGV("flushing %s", audio ? "audio" : "video");
     if (audio) {
         flushQueue(&mAudioQueue);
 
@@ -560,6 +584,8 @@
 
         mDrainAudioQueuePending = false;
         ++mAudioQueueGeneration;
+
+        prepareForMediaRenderingStart();
     } else {
         flushQueue(&mVideoQueue);
 
@@ -568,6 +594,8 @@
 
         mDrainVideoQueuePending = false;
         ++mVideoQueueGeneration;
+
+        prepareForMediaRenderingStart();
     }
 
     notifyFlushComplete(audio);
@@ -658,6 +686,8 @@
     mDrainVideoQueuePending = false;
     ++mVideoQueueGeneration;
 
+    prepareForMediaRenderingStart();
+
     if (mHasAudio) {
         mAudioSink->pause();
     }
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index c9796e2..94a05ea 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -53,6 +53,7 @@
         kWhatFlushComplete       = 'fluC',
         kWhatPosition            = 'posi',
         kWhatVideoRenderingStart = 'vdrd',
+        kWhatMediaRenderingStart = 'mdrd',
     };
 
 protected:
@@ -106,6 +107,8 @@
 
     bool mPaused;
     bool mVideoRenderingStarted;
+    int32_t mVideoRenderingStartGeneration;
+    int32_t mAudioRenderingStartGeneration;
 
     int64_t mLastPositionUpdateUs;
     int64_t mVideoLateByUs;
@@ -116,6 +119,9 @@
     void onDrainVideoQueue();
     void postDrainVideoQueue();
 
+    void prepareForMediaRenderingStart();
+    void notifyIfMediaRenderingStarted();
+
     void onQueueBuffer(const sp<AMessage> &msg);
     void onQueueEOS(const sp<AMessage> &msg);
     void onFlush(const sp<AMessage> &msg);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 81ffd21..e50533a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -42,6 +42,7 @@
         kWhatVideoSizeChanged,
         kWhatBufferingStart,
         kWhatBufferingEnd,
+        kWhatSubtitleData,
         kWhatQueueDecoderShutdown,
     };
 
@@ -71,6 +72,14 @@
         return INVALID_OPERATION;
     }
 
+    virtual status_t getTrackInfo(Parcel* reply) const {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t selectTrack(size_t trackIndex, bool select) {
+        return INVALID_OPERATION;
+    }
+
     virtual status_t seekTo(int64_t seekTimeUs) {
         return INVALID_OPERATION;
     }
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 3385a19..18cf6d1 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -358,11 +358,10 @@
             uint32_t flags = 0;
 
             if (mHandler->isSeekable()) {
-                flags = FLAG_CAN_PAUSE | FLAG_CAN_SEEK;
-
-                // Seeking 10secs forward or backward is a very expensive
-                // operation for rtsp, so let's not enable that.
-                // The user can always use the seek bar.
+                flags = FLAG_CAN_PAUSE
+                        | FLAG_CAN_SEEK
+                        | FLAG_CAN_SEEK_BACKWARD
+                        | FLAG_CAN_SEEK_FORWARD;
             }
 
             notifyFlagsChanged(flags);
diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk
index 5d00d15..69c75b8 100644
--- a/media/libnbaio/Android.mk
+++ b/media/libnbaio/Android.mk
@@ -31,6 +31,9 @@
     libcommon_time_client \
     libcutils \
     libutils \
-    liblog
+    liblog \
+    libmedia
+# This dependency on libmedia is for SingleStateQueueInstantiations.
+# Consider a separate library for SingleStateQueueInstantiations.
 
 include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index 6f525e5..e4341d7 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -79,4 +79,19 @@
     return mStream->get_next_write_timestamp(mStream, timestamp);
 }
 
+status_t AudioStreamOutSink::getTimestamp(AudioTimestamp& timestamp)
+{
+    if (mStream->get_presentation_position == NULL) {
+        return INVALID_OPERATION;
+    }
+    // FIXME position64 won't be needed after AudioTimestamp.mPosition is changed to uint64_t
+    uint64_t position64;
+    int ok = mStream->get_presentation_position(mStream, &position64, &timestamp.mTime);
+    if (ok != 0) {
+        return INVALID_OPERATION;
+    }
+    timestamp.mPosition = position64;
+    return OK;
+}
+
 }   // namespace android
diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp
index e8d3d9b..de0ad28 100644
--- a/media/libnbaio/MonoPipe.cpp
+++ b/media/libnbaio/MonoPipe.cpp
@@ -42,7 +42,10 @@
         // mWriteTs
         mSetpoint((reqFrames * 11) / 16),
         mWriteCanBlock(writeCanBlock),
-        mIsShutdown(false)
+        mIsShutdown(false),
+        // mTimestampShared
+        mTimestampMutator(&mTimestampShared),
+        mTimestampObserver(&mTimestampShared)
 {
     CCHelper tmpHelper;
     status_t res;
@@ -310,4 +313,12 @@
     return mIsShutdown;
 }
 
+status_t MonoPipe::getTimestamp(AudioTimestamp& timestamp)
+{
+    if (mTimestampObserver.poll(timestamp)) {
+        return OK;
+    }
+    return INVALID_OPERATION;
+}
+
 }   // namespace android
diff --git a/media/libnbaio/MonoPipeReader.cpp b/media/libnbaio/MonoPipeReader.cpp
index 394f6ac..851341a 100644
--- a/media/libnbaio/MonoPipeReader.cpp
+++ b/media/libnbaio/MonoPipeReader.cpp
@@ -86,4 +86,9 @@
     return red;
 }
 
+void MonoPipeReader::onTimestamp(const AudioTimestamp& timestamp)
+{
+    mPipe->mTimestampMutator.push(timestamp);
+}
+
 }   // namespace android
diff --git a/media/libnbaio/SourceAudioBufferProvider.cpp b/media/libnbaio/SourceAudioBufferProvider.cpp
index d11a86c..062fa0f 100644
--- a/media/libnbaio/SourceAudioBufferProvider.cpp
+++ b/media/libnbaio/SourceAudioBufferProvider.cpp
@@ -25,7 +25,7 @@
 SourceAudioBufferProvider::SourceAudioBufferProvider(const sp<NBAIO_Source>& source) :
     mSource(source),
     // mFrameBitShiftFormat below
-    mAllocated(NULL), mSize(0), mOffset(0), mRemaining(0), mGetCount(0)
+    mAllocated(NULL), mSize(0), mOffset(0), mRemaining(0), mGetCount(0), mFramesReleased(0)
 {
     ALOG_ASSERT(source != 0);
 
@@ -90,6 +90,7 @@
             (mOffset + mRemaining <= mSize));
     mOffset += buffer->frameCount;
     mRemaining -= buffer->frameCount;
+    mFramesReleased += buffer->frameCount;
     buffer->raw = NULL;
     buffer->frameCount = 0;
     mGetCount = 0;
@@ -101,4 +102,14 @@
     return avail < 0 ? 0 : (size_t) avail;
 }
 
+size_t SourceAudioBufferProvider::framesReleased() const
+{
+    return mFramesReleased;
+}
+
+void SourceAudioBufferProvider::onTimestamp(const AudioTimestamp& timestamp)
+{
+    mSource->onTimestamp(timestamp);
+}
+
 }   // namespace android
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 36549d1..5d5220f 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -369,7 +369,8 @@
       mChannelMask(0),
       mDequeueCounter(0),
       mStoreMetaDataInOutputBuffers(false),
-      mMetaDataBuffersToSubmit(0) {
+      mMetaDataBuffersToSubmit(0),
+      mRepeatFrameDelayUs(-1ll) {
     mUninitializedState = new UninitializedState(this);
     mLoadedState = new LoadedState(this);
     mLoadedToIdleState = new LoadedToIdleState(this);
@@ -451,6 +452,18 @@
     (new AMessage(kWhatRequestIDRFrame, id()))->post();
 }
 
+// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
+// Some codecs may return input buffers before having them processed.
+// This causes a halt if we already signaled an EOS on the input
+// port.  For now keep submitting an output buffer if there was an
+// EOS on the input port, but not yet on the output port.
+void ACodec::signalSubmitOutputMetaDataBufferIfEOS_workaround() {
+    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
+            mMetaDataBuffersToSubmit > 0) {
+        (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, id()))->post();
+    }
+}
+
 status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
 
@@ -1089,6 +1102,12 @@
         } else {
             mUseMetadataOnEncoderOutput = enable;
         }
+
+        if (!msg->findInt64(
+                    "repeat-previous-frame-after",
+                    &mRepeatFrameDelayUs)) {
+            mRepeatFrameDelayUs = -1ll;
+        }
     }
 
     // Always try to enable dynamic output buffers on native surface
@@ -1099,14 +1118,60 @@
     if (!encoder && video && haveNativeWindow) {
         err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_TRUE);
         if (err != OK) {
-            // allow failure
             ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
                   mComponentName.c_str(), err);
+
+            // if adaptive playback has been requested, try JB fallback
+            // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
+            // LARGE MEMORY REQUIREMENT
+
+            // we will not do adaptive playback on software accessed
+            // surfaces as they never had to respond to changes in the
+            // crop window, and we don't trust that they will be able to.
+            int usageBits = 0;
+            bool canDoAdaptivePlayback;
+
+            sp<NativeWindowWrapper> windowWrapper(
+                    static_cast<NativeWindowWrapper *>(obj.get()));
+            sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow();
+
+            if (nativeWindow->query(
+                    nativeWindow.get(),
+                    NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+                    &usageBits) != OK) {
+                canDoAdaptivePlayback = false;
+            } else {
+                canDoAdaptivePlayback =
+                    (usageBits &
+                            (GRALLOC_USAGE_SW_READ_MASK |
+                             GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
+            }
+
+            int32_t maxWidth = 0, maxHeight = 0;
+            if (canDoAdaptivePlayback &&
+                msg->findInt32("max-width", &maxWidth) &&
+                msg->findInt32("max-height", &maxHeight)) {
+                ALOGV("[%s] prepareForAdaptivePlayback(%ldx%ld)",
+                      mComponentName.c_str(), maxWidth, maxHeight);
+
+                err = mOMX->prepareForAdaptivePlayback(
+                        mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
+                ALOGW_IF(err != OK,
+                        "[%s] prepareForAdaptivePlayback failed w/ err %d",
+                        mComponentName.c_str(), err);
+            }
+            // allow failure
             err = OK;
         } else {
             ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str());
             mStoreMetaDataInOutputBuffers = true;
         }
+
+        int32_t push;
+        if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
+                && push != 0) {
+            mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
+        }
     }
 
     if (video) {
@@ -1856,6 +1921,11 @@
             err = setupAVCEncoderParameters(msg);
             break;
 
+        case OMX_VIDEO_CodingVP8:
+        case OMX_VIDEO_CodingVP9:
+            err = setupVPXEncoderParameters(msg);
+            break;
+
         default:
             break;
     }
@@ -2187,6 +2257,17 @@
     return configureBitrate(bitrate, bitrateMode);
 }
 
+status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) {
+    int32_t bitrate;
+    if (!msg->findInt32("bitrate", &bitrate)) {
+        return INVALID_OPERATION;
+    }
+
+    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+
+    return configureBitrate(bitrate, bitrateMode);
+}
+
 status_t ACodec::verifySupportForProfileAndLevel(
         int32_t profile, int32_t level) {
     OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
@@ -2353,6 +2434,10 @@
 
     while (countBuffersOwnedByNativeWindow() > (size_t)minUndequeuedBufs
             && dequeueBufferFromNativeWindow() != NULL) {
+        // these buffers will be submitted as regular buffers; account for this
+        if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) {
+            --mMetaDataBuffersToSubmit;
+        }
     }
 }
 
@@ -3009,16 +3094,22 @@
     sp<ABuffer> buffer;
     int32_t err = OK;
     bool eos = false;
+    PortMode mode = getPortMode(kPortIndexInput);
 
     if (!msg->findBuffer("buffer", &buffer)) {
+        /* these are unfilled buffers returned by client */
         CHECK(msg->findInt32("err", &err));
 
-        ALOGV("[%s] saw error %d instead of an input buffer",
-             mCodec->mComponentName.c_str(), err);
+        if (err == OK) {
+            /* buffers with no errors are returned on MediaCodec.flush */
+            mode = KEEP_BUFFERS;
+        } else {
+            ALOGV("[%s] saw error %d instead of an input buffer",
+                 mCodec->mComponentName.c_str(), err);
+            eos = true;
+        }
 
         buffer.clear();
-
-        eos = true;
     }
 
     int32_t tmp;
@@ -3032,8 +3123,6 @@
 
     info->mStatus = BufferInfo::OWNED_BY_US;
 
-    PortMode mode = getPortMode(kPortIndexInput);
-
     switch (mode) {
         case KEEP_BUFFERS:
         {
@@ -3577,6 +3666,7 @@
 
     if (componentName.endsWith(".secure")) {
         mCodec->mFlags |= kFlagIsSecure;
+        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
     }
 
     mCodec->mQuirks = quirks;
@@ -3611,6 +3701,7 @@
 
     mCodec->mDequeueCounter = 0;
     mCodec->mMetaDataBuffersToSubmit = 0;
+    mCodec->mRepeatFrameDelayUs = -1ll;
 
     if (mCodec->mShutdownInProgress) {
         bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
@@ -3742,6 +3833,23 @@
 
     err = mCodec->mOMX->createInputSurface(mCodec->mNode, kPortIndexInput,
             &bufferProducer);
+
+    if (err == OK && mCodec->mRepeatFrameDelayUs > 0ll) {
+        err = mCodec->mOMX->setInternalOption(
+                mCodec->mNode,
+                kPortIndexInput,
+                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
+                &mCodec->mRepeatFrameDelayUs,
+                sizeof(mCodec->mRepeatFrameDelayUs));
+
+        if (err != OK) {
+            ALOGE("[%s] Unable to configure option to repeat previous "
+                  "frames (err %d)",
+                  mCodec->mComponentName.c_str(),
+                  err);
+        }
+    }
+
     if (err == OK) {
         notify->setObject("input-surface",
                 new BufferProducerWrapper(bufferProducer));
@@ -3940,6 +4048,9 @@
                 break;
         }
     }
+
+    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
+    mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround();
 }
 
 void ACodec::ExecutingState::submitRegularOutputBuffers() {
@@ -3968,10 +4079,9 @@
 }
 
 void ACodec::ExecutingState::submitOutputBuffers() {
+    submitRegularOutputBuffers();
     if (mCodec->mStoreMetaDataInOutputBuffers) {
         submitOutputMetaBuffers();
-    } else {
-        submitRegularOutputBuffers();
     }
 }
 
@@ -4089,6 +4199,19 @@
             break;
         }
 
+        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
+        case kWhatSubmitOutputMetaDataBufferIfEOS:
+        {
+            if (mCodec->mPortEOS[kPortIndexInput] &&
+                    !mCodec->mPortEOS[kPortIndexOutput]) {
+                status_t err = mCodec->submitOutputMetaDataBuffer();
+                if (err == OK) {
+                    mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround();
+                }
+            }
+            return true;
+        }
+
         default:
             handled = BaseState::onMessageReceived(msg);
             break;
@@ -4099,7 +4222,7 @@
 
 status_t ACodec::setParameters(const sp<AMessage> &params) {
     int32_t videoBitrate;
-    if (params->findInt32("videoBitrate", &videoBitrate)) {
+    if (params->findInt32("video-bitrate", &videoBitrate)) {
         OMX_VIDEO_CONFIG_BITRATETYPE configParams;
         InitOMXParams(&configParams);
         configParams.nPortIndex = kPortIndexOutput;
@@ -4388,7 +4511,8 @@
         CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK);
         CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK);
 
-        if (mCodec->mFlags & kFlagIsSecure && mCodec->mNativeWindow != NULL) {
+        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
+                && mCodec->mNativeWindow != NULL) {
             // We push enough 1x1 blank buffers to ensure that one of
             // them has made it to the display.  This allows the OMX
             // component teardown to zero out any protected buffers
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 1f68b51..6a2a696 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -89,6 +89,7 @@
         libutils \
         libvorbisidec \
         libz \
+        libpowermanager
 
 LOCAL_STATIC_LIBRARIES := \
         libstagefright_color_conversion \
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index 2418aab..a8a8786 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -363,6 +363,7 @@
     mPositionTimeMediaUs = -1;
     mPositionTimeRealUs = -1;
     mSeeking = false;
+    mSeekTimeUs = 0;
     mReachedEOS = false;
     mFinalStatus = OK;
     mStarted = false;
@@ -602,15 +603,24 @@
 
             // need to adjust the mStartPosUs for offload decoding since parser
             // might not be able to get the exact seek time requested.
-            if (refreshSeekTime && useOffload()) {
-                if (postSeekComplete) {
-                    ALOGV("fillBuffer is going to post SEEK_COMPLETE");
-                    mObserver->postAudioSeekComplete();
-                    postSeekComplete = false;
-                }
+            if (refreshSeekTime) {
+                if (useOffload()) {
+                    if (postSeekComplete) {
+                        ALOGV("fillBuffer is going to post SEEK_COMPLETE");
+                        mObserver->postAudioSeekComplete();
+                        postSeekComplete = false;
+                    }
 
-                mStartPosUs = mPositionTimeMediaUs;
-                ALOGV("adjust seek time to: %.2f", mStartPosUs/ 1E6);
+                    mStartPosUs = mPositionTimeMediaUs;
+                    ALOGV("adjust seek time to: %.2f", mStartPosUs/ 1E6);
+                }
+                // clear seek time with mLock locked and once we have valid mPositionTimeMediaUs
+                // and mPositionTimeRealUs
+                // before clearing mSeekTimeUs check if a new seek request has been received while
+                // we were reading from the source with mLock released.
+                if (!mSeeking) {
+                    mSeekTimeUs = 0;
+                }
             }
 
             if (!useOffload()) {
@@ -680,6 +690,14 @@
 
 int64_t AudioPlayer::getRealTimeUs() {
     Mutex::Autolock autoLock(mLock);
+    if (useOffload()) {
+        if (mSeeking) {
+            return mSeekTimeUs;
+        }
+        mPositionTimeRealUs = getOutputPlayPositionUs_l();
+        return mPositionTimeRealUs;
+    }
+
     return getRealTimeUsLocked();
 }
 
@@ -733,17 +751,10 @@
         return mPositionTimeRealUs;
     }
 
+
     if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) {
-        if (mSeeking) {
-            return mSeekTimeUs;
-        }
-
-        return 0;
-    }
-
-    if (useOffload()) {
-        mPositionTimeRealUs = getOutputPlayPositionUs_l();
-        return mPositionTimeRealUs;
+        // mSeekTimeUs is either seek time while seeking or 0 if playback did not start.
+        return mSeekTimeUs;
     }
 
     int64_t realTimeOffset = getRealTimeUsLocked() - mPositionTimeRealUs;
@@ -758,8 +769,14 @@
         int64_t *realtime_us, int64_t *mediatime_us) {
     Mutex::Autolock autoLock(mLock);
 
-    *realtime_us = mPositionTimeRealUs;
-    *mediatime_us = mPositionTimeMediaUs;
+    if (useOffload()) {
+        mPositionTimeRealUs = getOutputPlayPositionUs_l();
+        *realtime_us = mPositionTimeRealUs;
+        *mediatime_us = mPositionTimeRealUs;
+    } else {
+        *realtime_us = mPositionTimeRealUs;
+        *mediatime_us = mPositionTimeMediaUs;
+    }
 
     return mPositionTimeRealUs != -1 && mPositionTimeMediaUs != -1;
 }
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index bdd842f..d7223d9 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -236,10 +236,10 @@
         memset((uint8_t *) buffer->data(), 0, buffer->range_length());
     } else if (elapsedTimeUs < kAutoRampStartUs + kAutoRampDurationUs) {
         int32_t autoRampDurationFrames =
-                    (kAutoRampDurationUs * mSampleRate + 500000LL) / 1000000LL;
+                    ((int64_t)kAutoRampDurationUs * mSampleRate + 500000LL) / 1000000LL; // cast to int64_t to avoid 32-bit overflow
 
         int32_t autoRampStartFrames =
-                    (kAutoRampStartUs * mSampleRate + 500000LL) / 1000000LL;
+                    ((int64_t)kAutoRampStartUs * mSampleRate + 500000LL) / 1000000LL; // cast to int64_t to avoid 32-bit overflow
 
         int32_t nFrames = mNumFramesReceived - autoRampStartFrames;
         rampVolume(nFrames, autoRampDurationFrames,
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 79f2c91..130207d 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -191,6 +191,8 @@
       mTimeSource(NULL),
       mVideoRenderingStarted(false),
       mVideoRendererIsPreview(false),
+      mMediaRenderingStartGeneration(0),
+      mStartGeneration(0),
       mAudioPlayer(NULL),
       mDisplayWidth(0),
       mDisplayHeight(0),
@@ -257,6 +259,7 @@
 
         mQueue.cancelEvent(mBufferingEvent->eventID());
         mBufferingEventPending = false;
+        mAudioTearDown = false;
     }
 }
 
@@ -491,6 +494,8 @@
     mDisplayWidth = 0;
     mDisplayHeight = 0;
 
+    notifyListener_l(MEDIA_STOPPED);
+
     if (mDecryptHandle != NULL) {
             mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                     Playback::STOP, 0);
@@ -601,6 +606,9 @@
 
     mWatchForAudioSeekComplete = false;
     mWatchForAudioEOS = false;
+
+    mMediaRenderingStartGeneration = 0;
+    mStartGeneration = 0;
 }
 
 void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) {
@@ -793,7 +801,9 @@
         }
     }
 
-    postBufferingEvent_l();
+    if (mFlags & (PLAYING | PREPARING | CACHE_UNDERRUN)) {
+        postBufferingEvent_l();
+    }
 }
 
 void AwesomePlayer::sendCacheStats() {
@@ -888,6 +898,8 @@
         return OK;
     }
 
+    mMediaRenderingStartGeneration = ++mStartGeneration;
+
     if (!(mFlags & PREPARED)) {
         status_t err = prepare_l();
 
@@ -923,6 +935,9 @@
 
             if ((err != OK) && mOffloadAudio) {
                 ALOGI("play_l() cannot create offload output, fallback to sw decode");
+                int64_t curTimeUs;
+                getPosition(&curTimeUs);
+
                 delete mAudioPlayer;
                 mAudioPlayer = NULL;
                 // if the player was started it will take care of stopping the source when destroyed
@@ -938,6 +953,10 @@
                     if (err != OK) {
                         mAudioSource.clear();
                     } else {
+                        mSeekNotificationSent = true;
+                        if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
+                            seekTo_l(curTimeUs);
+                        }
                         createAudioPlayer_l();
                         err = startAudioPlayer_l(false);
                     }
@@ -989,6 +1008,10 @@
     }
     addBatteryData(params);
 
+    if (isStreamingHTTP()) {
+        postBufferingEvent_l();
+    }
+
     return OK;
 }
 
@@ -1025,6 +1048,13 @@
     seekAudioIfNecessary_l();
 }
 
+void AwesomePlayer::notifyIfMediaStarted_l() {
+    if (mMediaRenderingStartGeneration == mStartGeneration) {
+        mMediaRenderingStartGeneration = -1;
+        notifyListener_l(MEDIA_STARTED);
+    }
+}
+
 status_t AwesomePlayer::startAudioPlayer_l(bool sendErrorNotification) {
     CHECK(!(mFlags & AUDIO_RUNNING));
     status_t err = OK;
@@ -1061,6 +1091,8 @@
 
             // We will have finished the seek while starting the audio player.
             postAudioSeekComplete();
+        } else {
+            notifyIfMediaStarted_l();
         }
     } else {
         err = mAudioPlayer->resume();
@@ -1170,8 +1202,7 @@
     setVideoScalingMode_l(mVideoScalingMode);
     if (USE_SURFACE_ALLOC
             && !strncmp(component, "OMX.", 4)
-            && strncmp(component, "OMX.google.", 11)
-            && strcmp(component, "OMX.Nvidia.mpeg2v.decode")) {
+            && strncmp(component, "OMX.google.", 11)) {
         // Hardware decoders avoid the CPU color conversion by decoding
         // directly to ANativeBuffers, so we must use a renderer that
         // just pushes those buffers to the ANativeWindow.
@@ -1198,9 +1229,18 @@
 
 status_t AwesomePlayer::pause_l(bool at_eos) {
     if (!(mFlags & PLAYING)) {
+        if (mAudioTearDown && mAudioTearDownWasPlaying) {
+            ALOGV("pause_l() during teardown and finishSetDataSource_l() mFlags %x" , mFlags);
+            mAudioTearDownWasPlaying = false;
+            notifyListener_l(MEDIA_PAUSED);
+            mMediaRenderingStartGeneration = ++mStartGeneration;
+        }
         return OK;
     }
 
+    notifyListener_l(MEDIA_PAUSED);
+    mMediaRenderingStartGeneration = ++mStartGeneration;
+
     cancelPlayerEvents(true /* keepNotifications */);
 
     if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
@@ -1389,6 +1429,11 @@
     mSeekTimeUs = timeUs;
     modifyFlags((AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS), CLEAR);
 
+    if (mFlags & PLAYING) {
+        notifyListener_l(MEDIA_PAUSED);
+        mMediaRenderingStartGeneration = ++mStartGeneration;
+    }
+
     seekAudioIfNecessary_l();
 
     if (mFlags & TEXTPLAYER_INITIALIZED) {
@@ -1454,7 +1499,13 @@
     // This doesn't guarantee that the hardware has a free stream
     // but it avoids us attempting to open (and re-open) an offload
     // stream to hardware that doesn't have the necessary codec
-    mOffloadAudio = canOffloadStream(meta, (mVideoSource != NULL), isStreamingHTTP());
+    audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+    if (mAudioSink != NULL) {
+        streamType = mAudioSink->getAudioStreamType();
+    }
+
+    mOffloadAudio = canOffloadStream(meta, (mVideoSource != NULL),
+                                     isStreamingHTTP(), streamType);
 
     if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
         ALOGV("createAudioPlayer: bypass OMX (raw)");
@@ -1632,6 +1683,16 @@
         return;
     }
 
+    // If we paused, then seeked, then resumed, it is possible that we have
+    // signaled SEEK_COMPLETE at a completely different media time than where
+    // we are now resuming.  Signal new position to media time provider.
+    // Cannot signal another SEEK_COMPLETE, as existing clients may not expect
+    // multiple SEEK_COMPLETE responses to a single seek() request.
+    if (mSeekNotificationSent && abs(mSeekTimeUs - videoTimeUs) > 10000) {
+        // notify if we are resuming more than 10ms away from desired seek time
+        notifyListener_l(MEDIA_SKIPPED);
+    }
+
     if (mAudioPlayer != NULL) {
         ALOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
 
@@ -1876,7 +1937,7 @@
                     ++mStats.mNumVideoFramesDropped;
                 }
 
-                postVideoEvent_l();
+                postVideoEvent_l(0);
                 return;
             }
         }
@@ -1903,6 +1964,9 @@
             notifyListener_l(MEDIA_INFO, MEDIA_INFO_RENDERING_START);
         }
 
+        if (mFlags & PLAYING) {
+            notifyIfMediaStarted_l();
+        }
     }
 
     mVideoBuffer->release();
@@ -1913,6 +1977,41 @@
         return;
     }
 
+    /* get next frame time */
+    if (wasSeeking == NO_SEEK) {
+        MediaSource::ReadOptions options;
+        for (;;) {
+            status_t err = mVideoSource->read(&mVideoBuffer, &options);
+            if (err != OK) {
+                // deal with any errors next time
+                CHECK(mVideoBuffer == NULL);
+                postVideoEvent_l(0);
+                return;
+            }
+
+            if (mVideoBuffer->range_length() != 0) {
+                break;
+            }
+
+            // Some decoders, notably the PV AVC software decoder
+            // return spurious empty buffers that we just want to ignore.
+
+            mVideoBuffer->release();
+            mVideoBuffer = NULL;
+        }
+
+        {
+            Mutex::Autolock autoLock(mStatsLock);
+            ++mStats.mNumVideoFramesDecoded;
+        }
+
+        int64_t nextTimeUs;
+        CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &nextTimeUs));
+        int64_t delayUs = nextTimeUs - ts->getRealTimeUs() + mTimeSourceDeltaUs;
+        postVideoEvent_l(delayUs > 10000 ? 10000 : delayUs < 0 ? 0 : delayUs);
+        return;
+    }
+
     postVideoEvent_l();
 }
 
@@ -1998,6 +2097,8 @@
         }
 
         mSeeking = NO_SEEK;
+
+        notifyIfMediaStarted_l();
     }
 
     status_t finalStatus;
@@ -2279,6 +2380,7 @@
     modifyFlags((PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED), CLEAR);
     mAsyncPrepareEvent = NULL;
     mPreparedCondition.broadcast();
+    mAudioTearDown = false;
 }
 
 // static
@@ -2352,6 +2454,20 @@
     modifyFlags(PREPARED, SET);
     mAsyncPrepareEvent = NULL;
     mPreparedCondition.broadcast();
+
+    if (mAudioTearDown) {
+        if (mPrepareResult == OK) {
+            if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
+                seekTo_l(mAudioTearDownPosition);
+            }
+
+            if (mAudioTearDownWasPlaying) {
+                modifyFlags(CACHE_UNDERRUN, CLEAR);
+                play_l();
+            }
+        }
+        mAudioTearDown = false;
+    }
 }
 
 uint32_t AwesomePlayer::flags() const {
@@ -2769,7 +2885,7 @@
     ALOGV("onAudioTearDownEvent");
 
     // stream info is cleared by reset_l() so copy what we need
-    const bool wasPlaying = (mFlags & PLAYING);
+    mAudioTearDownWasPlaying = (mFlags & PLAYING);
     KeyedVector<String8, String8> uriHeaders(mUriHeaders);
     sp<DataSource> fileSource(mFileSource);
 
@@ -2778,12 +2894,10 @@
     mStatsLock.unlock();
 
     // get current position so we can start recreated stream from here
-    int64_t position = 0;
-    getPosition(&position);
+    getPosition(&mAudioTearDownPosition);
 
     // Reset and recreate
     reset_l();
-    mFlags |= PREPARING;
 
     status_t err;
 
@@ -2794,6 +2908,7 @@
         err = setDataSource_l(uri, &uriHeaders);
     }
 
+    mFlags |= PREPARING;
     if ( err != OK ) {
         // This will force beingPrepareAsync_l() to notify
         // a MEDIA_ERROR to the client and abort the prepare
@@ -2803,21 +2918,8 @@
     mAudioTearDown = true;
     mIsAsyncPrepare = true;
 
-    // Call parepare for the host decoding
+    // Call prepare for the host decoding
     beginPrepareAsync_l();
-
-    if (mPrepareResult == OK) {
-        if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
-            seekTo_l(position);
-        }
-
-        if (wasPlaying) {
-            modifyFlags(CACHE_UNDERRUN, CLEAR);
-            play_l();
-        }
-    }
-
-    mAudioTearDown = false;
 }
 
 }  // namespace android
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 5a26b06..3017fe7 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -536,7 +536,7 @@
     if (mSurface != NULL) {
         // This CHECK is good, since we just passed the lock/unlock
         // check earlier by calling mCamera->setParameters().
-        CHECK_EQ((status_t)OK, mCamera->setPreviewTexture(mSurface));
+        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
     }
 
     // By default, do not store metadata in video buffers
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 20214e8..5772316 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -41,13 +41,15 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenFrameCaptureUs) {
+        int64_t timeBetweenFrameCaptureUs,
+        bool storeMetaDataInVideoBuffers) {
 
     CameraSourceTimeLapse *source = new
             CameraSourceTimeLapse(camera, proxy, cameraId,
                 clientName, clientUid,
                 videoSize, videoFrameRate, surface,
-                timeBetweenFrameCaptureUs);
+                timeBetweenFrameCaptureUs,
+                storeMetaDataInVideoBuffers);
 
     if (source != NULL) {
         if (source->initCheck() != OK) {
@@ -67,9 +69,11 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenFrameCaptureUs)
+        int64_t timeBetweenFrameCaptureUs,
+        bool storeMetaDataInVideoBuffers)
       : CameraSource(camera, proxy, cameraId, clientName, clientUid,
-                videoSize, videoFrameRate, surface, true),
+                videoSize, videoFrameRate, surface,
+                storeMetaDataInVideoBuffers),
       mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
       mLastTimeLapseFrameRealTimestampUs(0),
       mSkipCurrentFrame(false) {
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index fc6fd9c..97987e2 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -107,6 +107,7 @@
 
 Mutex DataSource::gSnifferMutex;
 List<DataSource::SnifferFunc> DataSource::gSniffers;
+bool DataSource::gSniffersRegistered = false;
 
 bool DataSource::sniff(
         String8 *mimeType, float *confidence, sp<AMessage> *meta) {
@@ -114,7 +115,13 @@
     *confidence = 0.0f;
     meta->clear();
 
-    Mutex::Autolock autoLock(gSnifferMutex);
+    {
+        Mutex::Autolock autoLock(gSnifferMutex);
+        if (!gSniffersRegistered) {
+            return false;
+        }
+    }
+
     for (List<SnifferFunc>::iterator it = gSniffers.begin();
          it != gSniffers.end(); ++it) {
         String8 newMimeType;
@@ -133,9 +140,7 @@
 }
 
 // static
-void DataSource::RegisterSniffer(SnifferFunc func) {
-    Mutex::Autolock autoLock(gSnifferMutex);
-
+void DataSource::RegisterSniffer_l(SnifferFunc func) {
     for (List<SnifferFunc>::iterator it = gSniffers.begin();
          it != gSniffers.end(); ++it) {
         if (*it == func) {
@@ -148,23 +153,29 @@
 
 // static
 void DataSource::RegisterDefaultSniffers() {
-    RegisterSniffer(SniffMPEG4);
-    RegisterSniffer(SniffMatroska);
-    RegisterSniffer(SniffOgg);
-    RegisterSniffer(SniffWAV);
-    RegisterSniffer(SniffFLAC);
-    RegisterSniffer(SniffAMR);
-    RegisterSniffer(SniffMPEG2TS);
-    RegisterSniffer(SniffMP3);
-    RegisterSniffer(SniffAAC);
-    RegisterSniffer(SniffMPEG2PS);
-    RegisterSniffer(SniffWVM);
+    Mutex::Autolock autoLock(gSnifferMutex);
+    if (gSniffersRegistered) {
+        return;
+    }
+
+    RegisterSniffer_l(SniffMPEG4);
+    RegisterSniffer_l(SniffMatroska);
+    RegisterSniffer_l(SniffOgg);
+    RegisterSniffer_l(SniffWAV);
+    RegisterSniffer_l(SniffFLAC);
+    RegisterSniffer_l(SniffAMR);
+    RegisterSniffer_l(SniffMPEG2TS);
+    RegisterSniffer_l(SniffMP3);
+    RegisterSniffer_l(SniffAAC);
+    RegisterSniffer_l(SniffMPEG2PS);
+    RegisterSniffer_l(SniffWVM);
 
     char value[PROPERTY_VALUE_MAX];
     if (property_get("drm.service.enabled", value, NULL)
             && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
-        RegisterSniffer(SniffDRM);
+        RegisterSniffer_l(SniffDRM);
     }
+    gSniffersRegistered = true;
 }
 
 // static
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index ad985ee..1ba1c6e 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -1377,19 +1377,33 @@
             } else {
                 // No size was specified. Pick a conservatively large size.
                 int32_t width, height;
-                if (mLastTrack->meta->findInt32(kKeyWidth, &width) &&
-                        mLastTrack->meta->findInt32(kKeyHeight, &height)) {
-                    mLastTrack->meta->setInt32(kKeyMaxInputSize, width * height * 3 / 2);
-                } else {
+                if (!mLastTrack->meta->findInt32(kKeyWidth, &width) ||
+                    !mLastTrack->meta->findInt32(kKeyHeight, &height)) {
                     ALOGE("No width or height, assuming worst case 1080p");
-                    mLastTrack->meta->setInt32(kKeyMaxInputSize, 3110400);
+                    width = 1920;
+                    height = 1080;
                 }
+
+                const char *mime;
+                CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime));
+                if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+                    // AVC requires compression ratio of at least 2, and uses
+                    // macroblocks
+                    max_size = ((width + 15) / 16) * ((height + 15) / 16) * 192;
+                } else {
+                    // For all other formats there is no minimum compression
+                    // ratio. Use compression ratio of 1.
+                    max_size = width * height * 3 / 2;
+                }
+                mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size);
             }
             *offset += chunk_size;
 
-            // Calculate average frame rate.
+            // NOTE: setting another piece of metadata invalidates any pointers (such as the
+            // mimetype) previously obtained, so don't cache them.
             const char *mime;
             CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime));
+            // Calculate average frame rate.
             if (!strncasecmp("video/", mime, 6)) {
                 size_t nSamples = mLastTrack->sampleTable->countSamples();
                 int64_t durationUs;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index f412dc8..c36dd7c 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,6 +31,7 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/ACodec.h>
 #include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
@@ -104,8 +105,24 @@
     bool needDedicatedLooper = false;
     if (nameIsType && !strncasecmp(name, "video/", 6)) {
         needDedicatedLooper = true;
-    } else if (!nameIsType && !strncmp(name, "OMX.TI.DUCATI1.VIDEO.", 21)) {
-        needDedicatedLooper = true;
+    } else {
+        AString tmp = name;
+        if (tmp.endsWith(".secure")) {
+            tmp.erase(tmp.size() - 7, 7);
+        }
+        const MediaCodecList *mcl = MediaCodecList::getInstance();
+        ssize_t codecIdx = mcl->findCodecByName(tmp.c_str());
+        if (codecIdx >= 0) {
+            Vector<AString> types;
+            if (mcl->getSupportedTypes(codecIdx, &types) == OK) {
+                for (int i = 0; i < types.size(); i++) {
+                    if (types[i].startsWith("video/")) {
+                        needDedicatedLooper = true;
+                        break;
+                    }
+                }
+            }
+        }
     }
 
     if (needDedicatedLooper) {
@@ -1483,7 +1500,8 @@
             info->mOwnedByClient = false;
 
             if (portIndex == kPortIndexInput) {
-                msg->setInt32("err", ERROR_END_OF_STREAM);
+                /* no error, just returning buffers */
+                msg->setInt32("err", OK);
             }
             msg->post();
         }
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index d24337f..6248e90 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -509,7 +509,8 @@
 status_t MediaCodecList::getCodecCapabilities(
         size_t index, const char *type,
         Vector<ProfileLevel> *profileLevels,
-        Vector<uint32_t> *colorFormats) const {
+        Vector<uint32_t> *colorFormats,
+        uint32_t *flags) const {
     profileLevels->clear();
     colorFormats->clear();
 
@@ -547,6 +548,8 @@
         colorFormats->push(caps.mColorFormats.itemAt(i));
     }
 
+    *flags = caps.mFlags;
+
     return OK;
 }
 
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 94ce5de..d87e910 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -103,6 +103,16 @@
     return OK;
 }
 
+status_t MediaMuxer::setLocation(int latitude, int longitude) {
+    Mutex::Autolock autoLock(mMuxerLock);
+    if (mState != INITIALIZED) {
+        ALOGE("setLocation() must be called before start().");
+        return INVALID_OPERATION;
+    }
+    ALOGV("Setting location: latitude = %d, longitude = %d", latitude, longitude);
+    return mWriter->setGeoData(latitude, longitude);
+}
+
 status_t MediaMuxer::start() {
     Mutex::Autolock autoLock(mMuxerLock);
     if (mState == INITIALIZED) {
diff --git a/media/libstagefright/MetaData.cpp b/media/libstagefright/MetaData.cpp
index ae6ae2d..7b60afc 100644
--- a/media/libstagefright/MetaData.cpp
+++ b/media/libstagefright/MetaData.cpp
@@ -89,6 +89,9 @@
     return setData(key, TYPE_RECT, &r, sizeof(r));
 }
 
+/**
+ * Note that the returned pointer becomes invalid when additional metadata is set.
+ */
 bool MetaData::findCString(uint32_t key, const char **value) {
     uint32_t type;
     const void *data;
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 9820ef5..9f9352d 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -69,6 +69,10 @@
     virtual status_t storeMetaDataInBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable);
 
+    virtual status_t prepareForAdaptivePlayback(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable,
+            OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
+
     virtual status_t enableGraphicBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable);
 
@@ -268,6 +272,13 @@
     return getOMX(node)->storeMetaDataInBuffers(node, port_index, enable);
 }
 
+status_t MuxOMX::prepareForAdaptivePlayback(
+        node_id node, OMX_U32 port_index, OMX_BOOL enable,
+        OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) {
+    return getOMX(node)->prepareForAdaptivePlayback(
+            node, port_index, enable, maxFrameWidth, maxFrameHeight);
+}
+
 status_t MuxOMX::enableGraphicBuffers(
         node_id node, OMX_U32 port_index, OMX_BOOL enable) {
     return getOMX(node)->enableGraphicBuffers(node, port_index, enable);
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 3de3c28..43736ad 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -359,12 +359,7 @@
             observer->setCodec(codec);
 
             err = codec->configureCodec(meta);
-
             if (err == OK) {
-                if (!strcmp("OMX.Nvidia.mpeg2v.decode", componentName)) {
-                    codec->mFlags |= kOnlySubmitOneInputBufferAtOneTime;
-                }
-
                 return codec;
             }
 
@@ -1346,8 +1341,7 @@
       mLeftOverBuffer(NULL),
       mPaused(false),
       mNativeWindow(
-              (!strncmp(componentName, "OMX.google.", 11)
-              || !strcmp(componentName, "OMX.Nvidia.mpeg2v.decode"))
+              (!strncmp(componentName, "OMX.google.", 11))
                         ? NULL : nativeWindow) {
     mPortStatus[kPortIndexInput] = ENABLED;
     mPortStatus[kPortIndexOutput] = ENABLED;
@@ -4567,7 +4561,7 @@
         CodecCapabilities *caps) {
     if (strncmp(componentName, "OMX.", 4)) {
         // Not an OpenMax component but a software codec.
-
+        caps->mFlags = 0;
         caps->mComponentName = componentName;
         return OK;
     }
@@ -4582,6 +4576,7 @@
 
     OMXCodec::setComponentRole(omx, node, isEncoder, mime);
 
+    caps->mFlags = 0;
     caps->mComponentName = componentName;
 
     OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
@@ -4619,6 +4614,16 @@
         caps->mColorFormats.push(portFormat.eColorFormat);
     }
 
+    if (!isEncoder && !strncmp(mime, "video/", 6)) {
+        if (omx->storeMetaDataInBuffers(
+                    node, 1 /* port index */, OMX_TRUE) == OK ||
+            omx->prepareForAdaptivePlayback(
+                    node, 1 /* port index */, OMX_TRUE,
+                    1280 /* width */, 720 /* height */) == OK) {
+            caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback;
+        }
+    }
+
     CHECK_EQ(omx->freeNode(node), (status_t)OK);
 
     return OK;
diff --git a/media/libstagefright/TimedEventQueue.cpp b/media/libstagefright/TimedEventQueue.cpp
index 7e9c4bf..1a9a26b 100644
--- a/media/libstagefright/TimedEventQueue.cpp
+++ b/media/libstagefright/TimedEventQueue.cpp
@@ -31,17 +31,29 @@
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
+#include <binder/IServiceManager.h>
+#include <powermanager/PowerManager.h>
+#include <binder/IPCThreadState.h>
+#include <utils/CallStack.h>
 
 namespace android {
 
+static int64_t kWakelockMinDelay = 100000ll;  // 100ms
+
 TimedEventQueue::TimedEventQueue()
     : mNextEventID(1),
       mRunning(false),
-      mStopped(false) {
+      mStopped(false),
+      mDeathRecipient(new PMDeathRecipient(this)),
+      mWakeLockCount(0) {
 }
 
 TimedEventQueue::~TimedEventQueue() {
     stop();
+    if (mPowerManager != 0) {
+        sp<IBinder> binder = mPowerManager->asBinder();
+        binder->unlinkToDeath(mDeathRecipient);
+    }
 }
 
 void TimedEventQueue::start() {
@@ -76,6 +88,9 @@
     void *dummy;
     pthread_join(mThread, &dummy);
 
+    // some events may be left in the queue if we did not flush and the wake lock
+    // must be released.
+    releaseWakeLock_l(true /*force*/);
     mQueue.clear();
 
     mRunning = false;
@@ -112,11 +127,16 @@
     QueueItem item;
     item.event = event;
     item.realtime_us = realtime_us;
+    item.has_wakelock = false;
 
     if (it == mQueue.begin()) {
         mQueueHeadChangedCondition.signal();
     }
 
+    if (realtime_us > ALooper::GetNowUs() + kWakelockMinDelay) {
+        acquireWakeLock_l();
+        item.has_wakelock = true;
+    }
     mQueue.insert(it, item);
 
     mQueueNotEmptyCondition.signal();
@@ -171,8 +191,10 @@
         ALOGV("cancelling event %d", (*it).event->eventID());
 
         (*it).event->setEventID(0);
+        if ((*it).has_wakelock) {
+            releaseWakeLock_l();
+        }
         it = mQueue.erase(it);
-
         if (stopAfterFirstMatch) {
             return;
         }
@@ -278,9 +300,10 @@
         if ((*it).event->eventID() == id) {
             sp<Event> event = (*it).event;
             event->setEventID(0);
-
+            if ((*it).has_wakelock) {
+                releaseWakeLock_l();
+            }
             mQueue.erase(it);
-
             return event;
         }
     }
@@ -290,5 +313,68 @@
     return NULL;
 }
 
+void TimedEventQueue::acquireWakeLock_l()
+{
+    if (mWakeLockCount++ == 0) {
+        CHECK(mWakeLockToken == 0);
+        if (mPowerManager == 0) {
+            // use checkService() to avoid blocking if power service is not up yet
+            sp<IBinder> binder =
+                defaultServiceManager()->checkService(String16("power"));
+            if (binder == 0) {
+                ALOGW("cannot connect to the power manager service");
+            } else {
+                mPowerManager = interface_cast<IPowerManager>(binder);
+                binder->linkToDeath(mDeathRecipient);
+            }
+        }
+        if (mPowerManager != 0) {
+            sp<IBinder> binder = new BBinder();
+            int64_t token = IPCThreadState::self()->clearCallingIdentity();
+            status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
+                                                             binder,
+                                                             String16("TimedEventQueue"),
+                                                             String16("media"));
+            IPCThreadState::self()->restoreCallingIdentity(token);
+            if (status == NO_ERROR) {
+                mWakeLockToken = binder;
+            }
+        }
+    }
+}
+
+void TimedEventQueue::releaseWakeLock_l(bool force)
+{
+    if (force) {
+        if (mWakeLockCount == 0) {
+            return;
+        }
+        // Force wakelock release below by setting reference count to 1.
+        mWakeLockCount = 1;
+    }
+    CHECK(mWakeLockCount != 0);
+    if (--mWakeLockCount == 0) {
+        CHECK(mWakeLockToken != 0);
+        if (mPowerManager != 0) {
+            int64_t token = IPCThreadState::self()->clearCallingIdentity();
+            mPowerManager->releaseWakeLock(mWakeLockToken, 0);
+            IPCThreadState::self()->restoreCallingIdentity(token);
+        }
+        mWakeLockToken.clear();
+    }
+}
+
+void TimedEventQueue::clearPowerManager()
+{
+    Mutex::Autolock _l(mLock);
+    releaseWakeLock_l(true /*force*/);
+    mPowerManager.clear();
+}
+
+void TimedEventQueue::PMDeathRecipient::binderDied(const wp<IBinder>& who)
+{
+    mQueue->clearPowerManager();
+}
+
 }  // namespace android
 
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 4db8e80..9041c21 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -540,7 +540,8 @@
     return BAD_VALUE;
 }
 
-bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo, bool isStreaming)
+bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo,
+                      bool isStreaming, audio_stream_type_t streamType)
 {
     const char *mime;
     CHECK(meta->findCString(kKeyMIMEType, &mime));
@@ -594,7 +595,7 @@
     info.bit_rate = brate;
 
 
-    info.stream_type = AUDIO_STREAM_MUSIC;
+    info.stream_type = streamType;
     info.has_video = hasVideo;
     info.is_streaming = isStreaming;
 
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
index 5749733..ff2b503 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
@@ -292,6 +292,10 @@
         return AOT_AAC_LC;
     } else if (profile == OMX_AUDIO_AACObjectHE) {
         return AOT_SBR;
+    } else if (profile == OMX_AUDIO_AACObjectHE_PS) {
+        return AOT_PS;
+    } else if (profile == OMX_AUDIO_AACObjectLD) {
+        return AOT_ER_AAC_LD;
     } else if (profile == OMX_AUDIO_AACObjectELD) {
         return AOT_ER_AAC_ELD;
     } else {
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index 5f2b5c8..8375cac 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -141,6 +141,7 @@
       mWidth(176),
       mHeight(144),
       mBitrate(192000),  // in bps
+      mBitrateUpdated(false),
       mBitrateControlMode(VPX_VBR),  // variable bitrate
       mFrameDurationUs(33333),  // Defaults to 30 fps
       mDCTPartitions(0),
@@ -536,6 +537,22 @@
             return OMX_ErrorNone;
         }
 
+        case OMX_IndexConfigVideoBitrate:
+        {
+            OMX_VIDEO_CONFIG_BITRATETYPE *params =
+                (OMX_VIDEO_CONFIG_BITRATETYPE *)_params;
+
+            if (params->nPortIndex != kOutputPortIndex) {
+                return OMX_ErrorBadPortIndex;
+            }
+
+            if (mBitrate != params->nEncodeBitrate) {
+                mBitrate = params->nEncodeBitrate;
+                mBitrateUpdated = true;
+            }
+            return OMX_ErrorNone;
+        }
+
         default:
             return SimpleSoftOMXComponent::setConfig(index, _params);
     }
@@ -779,6 +796,21 @@
             mKeyFrameRequested = false;
         }
 
+        if (mBitrateUpdated) {
+            mCodecConfiguration->rc_target_bitrate = mBitrate/1000;
+            vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext,
+                                                           mCodecConfiguration);
+            if (res != VPX_CODEC_OK) {
+                ALOGE("vp8 encoder failed to update bitrate: %s",
+                      vpx_codec_err_to_string(res));
+                notify(OMX_EventError,
+                       OMX_ErrorUndefined,
+                       0, // Extra notification data
+                       NULL); // Notification data pointer
+            }
+            mBitrateUpdated = false;
+        }
+
         codec_return = vpx_codec_encode(
                 mCodecContext,
                 &raw_frame,
@@ -803,6 +835,8 @@
             if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) {
                 outputBufferHeader->nTimeStamp = encoded_packet->data.frame.pts;
                 outputBufferHeader->nFlags = 0;
+                if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY)
+                  outputBufferHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
                 outputBufferHeader->nOffset = 0;
                 outputBufferHeader->nFilledLen = encoded_packet->data.frame.sz;
                 memcpy(outputBufferHeader->pBuffer,
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index 4ee5e51..076830f 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -128,7 +128,10 @@
     int32_t mHeight;
 
     // Target bitrate set for the encoder, in bits per second.
-    int32_t mBitrate;
+    uint32_t mBitrate;
+
+    // If a request for a change in bitrate has been received.
+    bool mBitrateUpdated;
 
     // Bitrate control mode, either constant or variable
     vpx_rc_mode mBitrateControlMode;
diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk
index 85bd492..f3529f9 100644
--- a/media/libstagefright/httplive/Android.mk
+++ b/media/libstagefright/httplive/Android.mk
@@ -14,6 +14,7 @@
 	$(TOP)/external/openssl/include
 
 LOCAL_SHARED_LIBRARIES := \
+        libbinder \
         libcrypto \
         libcutils \
         libmedia \
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index e91c60b..bd12ddc 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -59,6 +59,7 @@
       mStreamMask(0),
       mCheckBandwidthGeneration(0),
       mLastDequeuedTimeUs(0ll),
+      mRealTimeBaseUs(0ll),
       mReconfigurationInProgress(false),
       mDisconnectReplyID(0) {
     if (mUIDValid) {
@@ -122,11 +123,18 @@
               type,
               extra == NULL ? "NULL" : extra->debugString().c_str());
     } else if (err == OK) {
-        int64_t timeUs;
-        CHECK((*accessUnit)->meta()->findInt64("timeUs",  &timeUs));
-        ALOGV("[%s] read buffer at time %lld us", streamStr, timeUs);
+        if (stream == STREAMTYPE_AUDIO || stream == STREAMTYPE_VIDEO) {
+            int64_t timeUs;
+            CHECK((*accessUnit)->meta()->findInt64("timeUs",  &timeUs));
+            ALOGV("[%s] read buffer at time %lld us", streamStr, timeUs);
 
-        mLastDequeuedTimeUs = timeUs;
+            mLastDequeuedTimeUs = timeUs;
+            mRealTimeBaseUs = ALooper::GetNowUs() - timeUs;
+        } else if (stream == STREAMTYPE_SUBTITLES) {
+            (*accessUnit)->meta()->setInt32(
+                    "trackIndex", mPlaylist->getSelectedIndex());
+            (*accessUnit)->meta()->setInt64("baseUs", mRealTimeBaseUs);
+        }
     } else {
         ALOGI("[%s] encountered error %d", streamStr, err);
     }
@@ -325,6 +333,12 @@
             break;
         }
 
+        case kWhatChangeConfiguration:
+        {
+            onChangeConfiguration(msg);
+            break;
+        }
+
         case kWhatChangeConfiguration2:
         {
             onChangeConfiguration2(msg);
@@ -438,7 +452,8 @@
         mBandwidthItems.push(item);
     }
 
-    changeConfiguration(0ll /* timeUs */, initialBandwidthIndex);
+    changeConfiguration(
+            0ll /* timeUs */, initialBandwidthIndex, true /* pickTrack */);
 }
 
 void LiveSession::finishDisconnect() {
@@ -783,16 +798,31 @@
     return false;
 }
 
-void LiveSession::changeConfiguration(int64_t timeUs, size_t bandwidthIndex) {
+status_t LiveSession::getTrackInfo(Parcel *reply) const {
+    return mPlaylist->getTrackInfo(reply);
+}
+
+status_t LiveSession::selectTrack(size_t index, bool select) {
+    status_t err = mPlaylist->selectTrack(index, select);
+    if (err == OK) {
+        (new AMessage(kWhatChangeConfiguration, id()))->post();
+    }
+    return err;
+}
+
+void LiveSession::changeConfiguration(
+        int64_t timeUs, size_t bandwidthIndex, bool pickTrack) {
     CHECK(!mReconfigurationInProgress);
     mReconfigurationInProgress = true;
 
     mPrevBandwidthIndex = bandwidthIndex;
 
-    ALOGV("changeConfiguration => timeUs:%lld us, bwIndex:%d",
-          timeUs, bandwidthIndex);
+    ALOGV("changeConfiguration => timeUs:%lld us, bwIndex:%d, pickTrack:%d",
+          timeUs, bandwidthIndex, pickTrack);
 
-    mPlaylist->pickRandomMediaItems();
+    if (pickTrack) {
+        mPlaylist->pickRandomMediaItems();
+    }
 
     CHECK_LT(bandwidthIndex, mBandwidthItems.size());
     const BandwidthItem &item = mBandwidthItems.itemAt(bandwidthIndex);
@@ -862,6 +892,14 @@
     }
 }
 
+void LiveSession::onChangeConfiguration(const sp<AMessage> &msg) {
+    if (!mReconfigurationInProgress) {
+        changeConfiguration(-1ll /* timeUs */, getBandwidthIndex());
+    } else {
+        msg->post(1000000ll); // retry in 1 sec
+    }
+}
+
 void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {
     mContinuation.clear();
 
@@ -948,6 +986,7 @@
     if (timeUs < 0ll) {
         timeUs = mLastDequeuedTimeUs;
     }
+    mRealTimeBaseUs = ALooper::GetNowUs() - timeUs;
 
     mStreamMask = streamMask;
     mAudioURI = audioURI;
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index b134725..99b480a8 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -31,6 +31,7 @@
 struct LiveDataSource;
 struct M3UParser;
 struct PlaylistFetcher;
+struct Parcel;
 
 struct LiveSession : public AHandler {
     enum Flags {
@@ -60,6 +61,8 @@
     status_t seekTo(int64_t timeUs);
 
     status_t getDuration(int64_t *durationUs) const;
+    status_t getTrackInfo(Parcel *reply) const;
+    status_t selectTrack(size_t index, bool select);
 
     bool isSeekable() const;
     bool hasDynamicDuration() const;
@@ -85,6 +88,7 @@
         kWhatSeek                       = 'seek',
         kWhatFetcherNotify              = 'notf',
         kWhatCheckBandwidth             = 'bndw',
+        kWhatChangeConfiguration        = 'chC0',
         kWhatChangeConfiguration2       = 'chC2',
         kWhatChangeConfiguration3       = 'chC3',
         kWhatFinishDisconnect2          = 'fin2',
@@ -130,6 +134,7 @@
     sp<AMessage> mContinuation;
 
     int64_t mLastDequeuedTimeUs;
+    int64_t mRealTimeBaseUs;
 
     bool mReconfigurationInProgress;
     uint32_t mDisconnectReplyID;
@@ -151,7 +156,9 @@
 
     static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *);
 
-    void changeConfiguration(int64_t timeUs, size_t bandwidthIndex);
+    void changeConfiguration(
+            int64_t timeUs, size_t bandwidthIndex, bool pickTrack = false);
+    void onChangeConfiguration(const sp<AMessage> &msg);
     void onChangeConfiguration2(const sp<AMessage> &msg);
     void onChangeConfiguration3(const sp<AMessage> &msg);
 
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index be66252..243888c 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -19,11 +19,12 @@
 #include <utils/Log.h>
 
 #include "M3UParser.h"
-
+#include <binder/Parcel.h>
 #include <cutils/properties.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaErrors.h>
+#include <media/mediaplayer.h>
 
 namespace android {
 
@@ -55,6 +56,9 @@
     bool getActiveURI(AString *uri) const;
 
     void pickRandomMediaItems();
+    status_t selectTrack(size_t index, bool select);
+    void getTrackInfo(Parcel* reply) const;
+    size_t countTracks() const;
 
 protected:
     virtual ~MediaGroup();
@@ -150,6 +154,66 @@
 #endif
 }
 
+status_t M3UParser::MediaGroup::selectTrack(size_t index, bool select) {
+    if (mType != TYPE_SUBS) {
+        ALOGE("only select subtitile tracks for now!");
+        return INVALID_OPERATION;
+    }
+
+    if (select) {
+        if (index >= mMediaItems.size()) {
+            ALOGE("track %d does not exist", index);
+            return INVALID_OPERATION;
+        }
+        if (mSelectedIndex == index) {
+            ALOGE("track %d already selected", index);
+            return BAD_VALUE;
+        }
+        ALOGV("selected track %d", index);
+        mSelectedIndex = index;
+    } else {
+        if (mSelectedIndex != index) {
+            ALOGE("track %d is not selected", index);
+            return BAD_VALUE;
+        }
+        ALOGV("unselected track %d", index);
+        mSelectedIndex = -1;
+    }
+
+    return OK;
+}
+
+void M3UParser::MediaGroup::getTrackInfo(Parcel* reply) const {
+    for (size_t i = 0; i < mMediaItems.size(); ++i) {
+        reply->writeInt32(2); // 2 fields
+
+        if (mType == TYPE_AUDIO) {
+            reply->writeInt32(MEDIA_TRACK_TYPE_AUDIO);
+        } else if (mType == TYPE_VIDEO) {
+            reply->writeInt32(MEDIA_TRACK_TYPE_VIDEO);
+        } else if (mType == TYPE_SUBS) {
+            reply->writeInt32(MEDIA_TRACK_TYPE_SUBTITLE);
+        } else {
+            reply->writeInt32(MEDIA_TRACK_TYPE_UNKNOWN);
+        }
+
+        const Media &item = mMediaItems.itemAt(i);
+        const char *lang = item.mLanguage.empty() ? "und" : item.mLanguage.c_str();
+        reply->writeString16(String16(lang));
+
+        if (mType == TYPE_SUBS) {
+            // TODO: pass in a MediaFormat instead
+            reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_AUTOSELECT));
+            reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_DEFAULT));
+            reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_FORCED));
+        }
+    }
+}
+
+size_t M3UParser::MediaGroup::countTracks() const {
+    return mMediaItems.size();
+}
+
 bool M3UParser::MediaGroup::getActiveURI(AString *uri) const {
     for (size_t i = 0; i < mMediaItems.size(); ++i) {
         if (mSelectedIndex >= 0 && i == (size_t)mSelectedIndex) {
@@ -172,7 +236,8 @@
       mIsExtM3U(false),
       mIsVariantPlaylist(false),
       mIsComplete(false),
-      mIsEvent(false) {
+      mIsEvent(false),
+      mSelectedIndex(-1) {
     mInitCheck = parse(data, size);
 }
 
@@ -237,6 +302,39 @@
     }
 }
 
+status_t M3UParser::selectTrack(size_t index, bool select) {
+    for (size_t i = 0, ii = index; i < mMediaGroups.size(); ++i) {
+        sp<MediaGroup> group = mMediaGroups.valueAt(i);
+        size_t tracks = group->countTracks();
+        if (ii < tracks) {
+            status_t err = group->selectTrack(ii, select);
+            if (err == OK) {
+                mSelectedIndex = select ? index : -1;
+            }
+            return err;
+        }
+        ii -= tracks;
+    }
+    return INVALID_OPERATION;
+}
+
+status_t M3UParser::getTrackInfo(Parcel* reply) const {
+    size_t trackCount = 0;
+    for (size_t i = 0; i < mMediaGroups.size(); ++i) {
+        trackCount += mMediaGroups.valueAt(i)->countTracks();
+    }
+    reply->writeInt32(trackCount);
+
+    for (size_t i = 0; i < mMediaGroups.size(); ++i) {
+        mMediaGroups.valueAt(i)->getTrackInfo(reply);
+    }
+    return OK;
+}
+
+ssize_t M3UParser::getSelectedIndex() const {
+    return mSelectedIndex;
+}
+
 bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {
     if (!mIsVariantPlaylist) {
         *uri = mBaseURI;
diff --git a/media/libstagefright/httplive/M3UParser.h b/media/libstagefright/httplive/M3UParser.h
index abea286..5248004 100644
--- a/media/libstagefright/httplive/M3UParser.h
+++ b/media/libstagefright/httplive/M3UParser.h
@@ -41,6 +41,9 @@
     bool itemAt(size_t index, AString *uri, sp<AMessage> *meta = NULL);
 
     void pickRandomMediaItems();
+    status_t selectTrack(size_t index, bool select);
+    status_t getTrackInfo(Parcel* reply) const;
+    ssize_t getSelectedIndex() const;
 
     bool getAudioURI(size_t index, AString *uri) const;
     bool getVideoURI(size_t index, AString *uri) const;
@@ -67,6 +70,7 @@
 
     sp<AMessage> mMeta;
     Vector<Item> mItems;
+    ssize_t mSelectedIndex;
 
     // Media groups keyed by group ID.
     KeyedVector<AString, sp<MediaGroup> > mMediaGroups;
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 8ae70b7..973b779 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -462,7 +462,11 @@
         sp<AnotherPacketSource> packetSource =
             mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES);
 
-        downloadMore = packetSource->hasBufferAvailable(&finalResult);
+        int64_t bufferedDurationUs =
+                packetSource->getBufferedDurationUs(&finalResult);
+
+        downloadMore = (bufferedDurationUs < kMinBufferedDurationUs);
+        finalResult = OK;
     } else {
         bool first = true;
         int64_t minBufferedDurationUs = 0ll;
@@ -659,7 +663,7 @@
         }
     }
 
-    err = extractAndQueueAccessUnits(buffer);
+    err = extractAndQueueAccessUnits(buffer, itemMeta);
 
     if (err != OK) {
         notifyError(err);
@@ -706,7 +710,7 @@
 }
 
 status_t PlaylistFetcher::extractAndQueueAccessUnits(
-        const sp<ABuffer> &buffer) {
+        const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta) {
     if (buffer->size() > 0 && buffer->data()[0] == 0x47) {
         // Let's assume this is an MPEG2 transport stream.
 
@@ -802,7 +806,10 @@
         const sp<AnotherPacketSource> packetSource =
             mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES);
 
-        buffer->meta()->setInt64("timeUs", 0ll);
+        int64_t durationUs;
+        CHECK(itemMeta->findInt64("durationUs", &durationUs));
+        buffer->meta()->setInt64("timeUs", getSegmentStartTimeUs(mSeqNumber));
+        buffer->meta()->setInt64("durationUs", durationUs);
 
         packetSource->queueAccessUnit(buffer);
         return OK;
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index 5a2b901..1648e02 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -135,7 +135,8 @@
     void onMonitorQueue();
     void onDownloadNext();
 
-    status_t extractAndQueueAccessUnits(const sp<ABuffer> &buffer);
+    status_t extractAndQueueAccessUnits(
+            const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta);
 
     void notifyError(status_t err);
 
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index d3c74e2..271df8e 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -169,6 +169,8 @@
     sp<AwesomeRenderer> mVideoRenderer;
     bool mVideoRenderingStarted;
     bool mVideoRendererIsPreview;
+    int32_t mMediaRenderingStartGeneration;
+    int32_t mStartGeneration;
 
     ssize_t mActiveAudioTrackIndex;
     sp<MediaSource> mAudioTrack;
@@ -294,6 +296,7 @@
     void finishSeekIfNecessary(int64_t videoTimeUs);
     void ensureCacheIsFetching_l();
 
+    void notifyIfMediaStarted_l();
     void createAudioPlayer_l();
     status_t startAudioPlayer_l(bool sendErrorNotification = true);
 
@@ -339,6 +342,8 @@
 
     bool    mOffloadAudio;
     bool    mAudioTearDown;
+    bool    mAudioTearDownWasPlaying;
+    int64_t mAudioTearDownPosition;
 
     status_t setVideoScalingMode(int32_t mode);
     status_t setVideoScalingMode_l(int32_t mode);
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 7e53af3..31a5077 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -71,6 +71,10 @@
     virtual status_t storeMetaDataInBuffers(
             node_id node, OMX_U32 port_index, OMX_BOOL enable);
 
+    virtual status_t prepareForAdaptivePlayback(
+            node_id node, OMX_U32 portIndex, OMX_BOOL enable,
+            OMX_U32 max_frame_width, OMX_U32 max_frame_height);
+
     virtual status_t useBuffer(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
             buffer_id *buffer);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index ae498b4..339179e 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -58,6 +58,10 @@
 
     status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
 
+    status_t prepareForAdaptivePlayback(
+            OMX_U32 portIndex, OMX_BOOL enable,
+            OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
+
     status_t useBuffer(
             OMX_U32 portIndex, const sp<IMemory> &params,
             OMX::buffer_id *buffer);
diff --git a/media/libstagefright/include/TimedEventQueue.h b/media/libstagefright/include/TimedEventQueue.h
index 11f844c..38a08b1 100644
--- a/media/libstagefright/include/TimedEventQueue.h
+++ b/media/libstagefright/include/TimedEventQueue.h
@@ -23,6 +23,7 @@
 #include <utils/List.h>
 #include <utils/RefBase.h>
 #include <utils/threads.h>
+#include <powermanager/IPowerManager.h>
 
 namespace android {
 
@@ -57,6 +58,21 @@
         Event &operator=(const Event &);
     };
 
+    class PMDeathRecipient : public IBinder::DeathRecipient {
+    public:
+                    PMDeathRecipient(TimedEventQueue *queue) : mQueue(queue) {}
+        virtual     ~PMDeathRecipient() {}
+
+        // IBinder::DeathRecipient
+        virtual     void        binderDied(const wp<IBinder>& who);
+
+    private:
+                    PMDeathRecipient(const PMDeathRecipient&);
+                    PMDeathRecipient& operator = (const PMDeathRecipient&);
+
+                    TimedEventQueue *mQueue;
+    };
+
     TimedEventQueue();
     ~TimedEventQueue();
 
@@ -96,10 +112,13 @@
 
     static int64_t getRealTimeUs();
 
+    void clearPowerManager();
+
 private:
     struct QueueItem {
         sp<Event> event;
         int64_t realtime_us;
+        bool has_wakelock;
     };
 
     struct StopEvent : public TimedEventQueue::Event {
@@ -118,11 +137,19 @@
     bool mRunning;
     bool mStopped;
 
+    sp<IPowerManager>       mPowerManager;
+    sp<IBinder>             mWakeLockToken;
+    const sp<PMDeathRecipient> mDeathRecipient;
+    uint32_t                mWakeLockCount;
+
     static void *ThreadWrapper(void *me);
     void threadEntry();
 
     sp<Event> removeEventFromQueue_l(event_id id);
 
+    void acquireWakeLock_l();
+    void releaseWakeLock_l(bool force = false);
+
     TimedEventQueue(const TimedEventQueue &);
     TimedEventQueue &operator=(const TimedEventQueue &);
 };
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 9850a46..175a263 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -1193,7 +1193,10 @@
     unsigned sync_byte = br->getBits(8);
     CHECK_EQ(sync_byte, 0x47u);
 
-    MY_LOGV("transport_error_indicator = %u", br->getBits(1));
+    if (br->getBits(1)) {  // transport_error_indicator
+        // silently ignore.
+        return OK;
+    }
 
     unsigned payload_unit_start_indicator = br->getBits(1);
     ALOGV("payload_unit_start_indicator = %u", payload_unit_start_indicator);
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index 9f3b19c..8f9c9c8 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -504,15 +504,11 @@
 
         if (first) {
             timeUs = info->mTimestampUs;
+            first = false;
         }
 
         if (info->mLength > size) {
             info->mLength -= size;
-
-            if (first) {
-                info->mTimestampUs = -1;
-            }
-
             size = 0;
         } else {
             size -= info->mLength;
@@ -521,7 +517,6 @@
             info = NULL;
         }
 
-        first = false;
     }
 
     if (timeUs == 0ll) {
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 325ffcf..b8970ad 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -22,6 +22,7 @@
 
 #include <OMX_Core.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
 
 #include <media/hardware/MetadataBufferType.h>
 #include <ui/GraphicBuffer.h>
@@ -39,7 +40,13 @@
     mSuspended(false),
     mNumFramesAvailable(0),
     mEndOfStream(false),
-    mEndOfStreamSent(false) {
+    mEndOfStreamSent(false),
+    mRepeatAfterUs(-1ll),
+    mRepeatLastFrameGeneration(0),
+    mLatestSubmittedBufferId(-1),
+    mLatestSubmittedBufferFrameNum(0),
+    mLatestSubmittedBufferUseCount(0),
+    mRepeatBufferDeferred(false) {
 
     ALOGV("GraphicBufferSource w=%u h=%u c=%u",
             bufferWidth, bufferHeight, bufferCount);
@@ -123,6 +130,34 @@
     if (mEndOfStream && mNumFramesAvailable == 0) {
         submitEndOfInputStream_l();
     }
+
+    if (mRepeatAfterUs > 0ll && mLooper == NULL) {
+        mReflector = new AHandlerReflector<GraphicBufferSource>(this);
+
+        mLooper = new ALooper;
+        mLooper->registerHandler(mReflector);
+        mLooper->start();
+
+        if (mLatestSubmittedBufferId >= 0) {
+            sp<AMessage> msg =
+                new AMessage(kWhatRepeatLastFrame, mReflector->id());
+
+            msg->setInt32("generation", ++mRepeatLastFrameGeneration);
+            msg->post(mRepeatAfterUs);
+        }
+    }
+}
+
+void GraphicBufferSource::omxIdle() {
+    ALOGV("omxIdle");
+
+    Mutex::Autolock autoLock(mMutex);
+
+    if (mExecuting) {
+        // We are only interested in the transition from executing->idle,
+        // not loaded->idle.
+        mExecuting = false;
+    }
 }
 
 void GraphicBufferSource::omxLoaded(){
@@ -132,6 +167,14 @@
         ALOGW("Dropped back down to Loaded without Executing");
     }
 
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(mReflector->id());
+        mReflector.clear();
+
+        mLooper->stop();
+        mLooper.clear();
+    }
+
     ALOGV("--> loaded; avail=%d eos=%d eosSent=%d",
             mNumFramesAvailable, mEndOfStream, mEndOfStreamSent);
 
@@ -163,7 +206,9 @@
 void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
     Mutex::Autolock autoLock(mMutex);
 
-    CHECK(mExecuting);  // could this happen if app stop()s early?
+    if (!mExecuting) {
+        return;
+    }
 
     int cbi = findMatchingCodecBuffer_l(header);
     if (cbi < 0) {
@@ -182,7 +227,12 @@
     // see if the GraphicBuffer reference was null, which should only ever
     // happen for EOS.
     if (codecBuffer.mGraphicBuffer == NULL) {
-        CHECK(mEndOfStream && mEndOfStreamSent);
+        if (!(mEndOfStream && mEndOfStreamSent)) {
+            // This can happen when broken code sends us the same buffer
+            // twice in a row.
+            ALOGE("ERROR: codecBufferEmptied on non-EOS null buffer "
+                    "(buffer emptied twice?)");
+        }
         // No GraphicBuffer to deal with, no additional input or output is
         // expected, so just return.
         return;
@@ -211,8 +261,12 @@
         ALOGV("cbi %d matches bq slot %d, handle=%p",
                 cbi, id, mBufferSlot[id]->handle);
 
-        mBufferQueue->releaseBuffer(id, codecBuffer.mFrameNumber,
-                EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+        if (id == mLatestSubmittedBufferId) {
+            CHECK_GT(mLatestSubmittedBufferUseCount--, 0);
+        } else {
+            mBufferQueue->releaseBuffer(id, codecBuffer.mFrameNumber,
+                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+        }
     } else {
         ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d",
                 cbi);
@@ -232,7 +286,16 @@
         // send that.
         ALOGV("buffer freed, EOS pending");
         submitEndOfInputStream_l();
+    } else if (mRepeatBufferDeferred) {
+        bool success = repeatLatestSubmittedBuffer_l();
+        if (success) {
+            ALOGV("deferred repeatLatestSubmittedBuffer_l SUCCESS");
+        } else {
+            ALOGV("deferred repeatLatestSubmittedBuffer_l FAILURE");
+        }
+        mRepeatBufferDeferred = false;
     }
+
     return;
 }
 
@@ -264,6 +327,16 @@
     }
 
     mSuspended = false;
+
+    if (mExecuting && mNumFramesAvailable == 0 && mRepeatBufferDeferred) {
+        if (repeatLatestSubmittedBuffer_l()) {
+            ALOGV("suspend/deferred repeatLatestSubmittedBuffer_l SUCCESS");
+
+            mRepeatBufferDeferred = false;
+        } else {
+            ALOGV("suspend/deferred repeatLatestSubmittedBuffer_l FAILURE");
+        }
+    }
 }
 
 bool GraphicBufferSource::fillCodecBuffer_l() {
@@ -318,11 +391,85 @@
                 EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
     } else {
         ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi);
+        setLatestSubmittedBuffer_l(item);
     }
 
     return true;
 }
 
+bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
+    CHECK(mExecuting && mNumFramesAvailable == 0);
+
+    if (mLatestSubmittedBufferId < 0 || mSuspended) {
+        return false;
+    }
+    if (mBufferSlot[mLatestSubmittedBufferId] == NULL) {
+        // This can happen if the remote side disconnects, causing
+        // onBuffersReleased() to NULL out our copy of the slots.  The
+        // buffer is gone, so we have nothing to show.
+        //
+        // To be on the safe side we try to release the buffer.
+        ALOGD("repeatLatestSubmittedBuffer_l: slot was NULL");
+        mBufferQueue->releaseBuffer(
+                mLatestSubmittedBufferId,
+                mLatestSubmittedBufferFrameNum,
+                EGL_NO_DISPLAY,
+                EGL_NO_SYNC_KHR,
+                Fence::NO_FENCE);
+        mLatestSubmittedBufferId = -1;
+        mLatestSubmittedBufferFrameNum = 0;
+        return false;
+    }
+
+    int cbi = findAvailableCodecBuffer_l();
+    if (cbi < 0) {
+        // No buffers available, bail.
+        ALOGV("repeatLatestSubmittedBuffer_l: no codec buffers.");
+        return false;
+    }
+
+    BufferQueue::BufferItem item;
+    item.mBuf = mLatestSubmittedBufferId;
+    item.mFrameNumber = mLatestSubmittedBufferFrameNum;
+
+    status_t err = submitBuffer_l(item, cbi);
+
+    if (err != OK) {
+        return false;
+    }
+
+    ++mLatestSubmittedBufferUseCount;
+
+    return true;
+}
+
+void GraphicBufferSource::setLatestSubmittedBuffer_l(
+        const BufferQueue::BufferItem &item) {
+    ALOGV("setLatestSubmittedBuffer_l");
+
+    if (mLatestSubmittedBufferId >= 0) {
+        if (mLatestSubmittedBufferUseCount == 0) {
+            mBufferQueue->releaseBuffer(
+                    mLatestSubmittedBufferId,
+                    mLatestSubmittedBufferFrameNum,
+                    EGL_NO_DISPLAY,
+                    EGL_NO_SYNC_KHR,
+                    Fence::NO_FENCE);
+        }
+    }
+
+    mLatestSubmittedBufferId = item.mBuf;
+    mLatestSubmittedBufferFrameNum = item.mFrameNumber;
+    mLatestSubmittedBufferUseCount = 1;
+    mRepeatBufferDeferred = false;
+
+    if (mReflector != NULL) {
+        sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id());
+        msg->setInt32("generation", ++mRepeatLastFrameGeneration);
+        msg->post(mRepeatAfterUs);
+    }
+}
+
 status_t GraphicBufferSource::signalEndOfInputStream() {
     Mutex::Autolock autoLock(mMutex);
     ALOGV("signalEndOfInputStream: exec=%d avail=%d eos=%d",
@@ -470,6 +617,9 @@
 
     mNumFramesAvailable++;
 
+    mRepeatBufferDeferred = false;
+    ++mRepeatLastFrameGeneration;
+
     if (mExecuting) {
         fillCodecBuffer_l();
     }
@@ -495,4 +645,51 @@
     }
 }
 
+status_t GraphicBufferSource::setRepeatPreviousFrameDelayUs(
+        int64_t repeatAfterUs) {
+    Mutex::Autolock autoLock(mMutex);
+
+    if (mExecuting || repeatAfterUs <= 0ll) {
+        return INVALID_OPERATION;
+    }
+
+    mRepeatAfterUs = repeatAfterUs;
+
+    return OK;
+}
+
+void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatRepeatLastFrame:
+        {
+            Mutex::Autolock autoLock(mMutex);
+
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mRepeatLastFrameGeneration) {
+                // stale
+                break;
+            }
+
+            if (!mExecuting || mNumFramesAvailable > 0) {
+                break;
+            }
+
+            bool success = repeatLatestSubmittedBuffer_l();
+
+            if (success) {
+                ALOGV("repeatLatestSubmittedBuffer_l SUCCESS");
+            } else {
+                ALOGV("repeatLatestSubmittedBuffer_l FAILURE");
+                mRepeatBufferDeferred = true;
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
 }  // namespace android
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index ac73770..9e5eee6 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -25,6 +25,8 @@
 #include <OMX_Core.h>
 #include "../include/OMXNodeInstance.h"
 #include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandlerReflector.h>
+#include <media/stagefright/foundation/ALooper.h>
 
 namespace android {
 
@@ -67,6 +69,11 @@
     // sitting in the BufferQueue, this will send them to the codec.
     void omxExecuting();
 
+    // This is called when OMX transitions to OMX_StateIdle, indicating that
+    // the codec is meant to return all buffers back to the client for them
+    // to be freed. Do NOT submit any more buffers to the component.
+    void omxIdle();
+
     // This is called when OMX transitions to OMX_StateLoaded, indicating that
     // we are shutting down.
     void omxLoaded();
@@ -89,6 +96,15 @@
     // in the BufferQueue) will be discarded until the suspension is lifted.
     void suspend(bool suspend);
 
+    // Specifies the interval after which we requeue the buffer previously
+    // queued to the encoder. This is useful in the case of surface flinger
+    // providing the input surface if the resulting encoded stream is to
+    // be displayed "live". If we were not to push through the extra frame
+    // the decoder on the remote end would be unable to decode the latest frame.
+    // This API must be called before transitioning the encoder to "executing"
+    // state and once this behaviour is specified it cannot be reset.
+    status_t setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs);
+
 protected:
     // BufferQueue::ConsumerListener interface, called when a new frame of
     // data is available.  If we're executing and a codec buffer is
@@ -147,6 +163,9 @@
     // doing anything if we don't have a codec buffer available.
     void submitEndOfInputStream_l();
 
+    void setLatestSubmittedBuffer_l(const BufferQueue::BufferItem &item);
+    bool repeatLatestSubmittedBuffer_l();
+
     // Lock, covers all member variables.
     mutable Mutex mMutex;
 
@@ -181,6 +200,30 @@
     // Tracks codec buffers.
     Vector<CodecBuffer> mCodecBuffers;
 
+    ////
+    friend class AHandlerReflector<GraphicBufferSource>;
+
+    enum {
+        kWhatRepeatLastFrame,
+    };
+
+    int64_t mRepeatAfterUs;
+
+    sp<ALooper> mLooper;
+    sp<AHandlerReflector<GraphicBufferSource> > mReflector;
+
+    int32_t mRepeatLastFrameGeneration;
+
+    int mLatestSubmittedBufferId;
+    uint64_t mLatestSubmittedBufferFrameNum;
+    int32_t mLatestSubmittedBufferUseCount;
+
+    // The previously submitted buffer should've been repeated but
+    // no codec buffer was available at the time.
+    bool mRepeatBufferDeferred;
+
+    void onMessageReceived(const sp<AMessage> &msg);
+
     DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource);
 };
 
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index aaa9f89..84a0e10 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -331,6 +331,13 @@
     return findInstance(node)->storeMetaDataInBuffers(port_index, enable);
 }
 
+status_t OMX::prepareForAdaptivePlayback(
+        node_id node, OMX_U32 portIndex, OMX_BOOL enable,
+        OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) {
+    return findInstance(node)->prepareForAdaptivePlayback(
+            portIndex, enable, maxFrameWidth, maxFrameHeight);
+}
+
 status_t OMX::useBuffer(
         node_id node, OMX_U32 port_index, const sp<IMemory> &params,
         buffer_id *buffer) {
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 8d100f1..5f104fc 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -243,13 +243,18 @@
 status_t OMXNodeInstance::sendCommand(
         OMX_COMMANDTYPE cmd, OMX_S32 param) {
     const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
-    if (bufferSource != NULL
-            && cmd == OMX_CommandStateSet
-            && param == OMX_StateLoaded) {
-        // Initiating transition from Executing -> Loaded
-        // Buffers are about to be freed.
-        bufferSource->omxLoaded();
-        setGraphicBufferSource(NULL);
+    if (bufferSource != NULL && cmd == OMX_CommandStateSet) {
+        if (param == OMX_StateIdle) {
+            // Initiating transition from Executing -> Idle
+            // ACodec is waiting for all buffers to be returned, do NOT
+            // submit any more buffers to the codec.
+            bufferSource->omxIdle();
+        } else if (param == OMX_StateLoaded) {
+            // Initiating transition from Idle/Executing -> Loaded
+            // Buffers are about to be freed.
+            bufferSource->omxLoaded();
+            setGraphicBufferSource(NULL);
+        }
 
         // fall through
     }
@@ -417,6 +422,40 @@
     return err;
 }
 
+status_t OMXNodeInstance::prepareForAdaptivePlayback(
+        OMX_U32 portIndex, OMX_BOOL enable, OMX_U32 maxFrameWidth,
+        OMX_U32 maxFrameHeight) {
+    Mutex::Autolock autolock(mLock);
+
+    OMX_INDEXTYPE index;
+    OMX_STRING name = const_cast<OMX_STRING>(
+            "OMX.google.android.index.prepareForAdaptivePlayback");
+
+    OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+    if (err != OMX_ErrorNone) {
+        ALOGW_IF(enable, "OMX_GetExtensionIndex %s failed", name);
+        return StatusFromOMXError(err);
+    }
+
+    PrepareForAdaptivePlaybackParams params;
+    params.nSize = sizeof(params);
+    params.nVersion.s.nVersionMajor = 1;
+    params.nVersion.s.nVersionMinor = 0;
+    params.nVersion.s.nRevision = 0;
+    params.nVersion.s.nStep = 0;
+
+    params.nPortIndex = portIndex;
+    params.bEnable = enable;
+    params.nMaxFrameWidth = maxFrameWidth;
+    params.nMaxFrameHeight = maxFrameHeight;
+    if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
+        ALOGW("OMX_SetParameter failed for PrepareForAdaptivePlayback "
+              "with error %d (0x%08x)", err, err);
+        return UNKNOWN_ERROR;
+    }
+    return err;
+}
+
 status_t OMXNodeInstance::useBuffer(
         OMX_U32 portIndex, const sp<IMemory> &params,
         OMX::buffer_id *buffer) {
@@ -809,6 +848,7 @@
         size_t size) {
     switch (type) {
         case IOMX::INTERNAL_OPTION_SUSPEND:
+        case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
         {
             const sp<GraphicBufferSource> &bufferSource =
                 getGraphicBufferSource();
@@ -817,12 +857,22 @@
                 return ERROR_UNSUPPORTED;
             }
 
-            if (size != sizeof(bool)) {
-                return INVALID_OPERATION;
-            }
+            if (type == IOMX::INTERNAL_OPTION_SUSPEND) {
+                if (size != sizeof(bool)) {
+                    return INVALID_OPERATION;
+                }
 
-            bool suspend = *(bool *)data;
-            bufferSource->suspend(suspend);
+                bool suspend = *(bool *)data;
+                bufferSource->suspend(suspend);
+            } else {
+                if (size != sizeof(int64_t)) {
+                    return INVALID_OPERATION;
+                }
+
+                int64_t delayUs = *(int64_t *)data;
+
+                return bufferSource->setRepeatPreviousFrameDelayUs(delayUs);
+            }
 
             return OK;
         }
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 946f602..f4b5846 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -1681,6 +1681,26 @@
         return true;
     }
 
+    void handleFirstAccessUnit() {
+        if (mFirstAccessUnit) {
+            sp<AMessage> msg = mNotify->dup();
+            msg->setInt32("what", kWhatConnected);
+            msg->post();
+
+            if (mSeekable) {
+                for (size_t i = 0; i < mTracks.size(); ++i) {
+                    TrackInfo *info = &mTracks.editItemAt(i);
+
+                    postNormalPlayTimeMapping(
+                            i,
+                            info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
+                }
+            }
+
+            mFirstAccessUnit = false;
+        }
+    }
+
     void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
         ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = 0x%016llx",
              trackIndex, rtpTime, ntpTime);
@@ -1712,6 +1732,8 @@
             }
         }
         if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
+            handleFirstAccessUnit();
+
             // Time is now established, lets start timestamping immediately
             for (size_t i = 0; i < mTracks.size(); ++i) {
                 TrackInfo *trackInfo = &mTracks.editItemAt(i);
@@ -1745,23 +1767,7 @@
             return;
         }
 
-        if (mFirstAccessUnit) {
-            sp<AMessage> msg = mNotify->dup();
-            msg->setInt32("what", kWhatConnected);
-            msg->post();
-
-            if (mSeekable) {
-                for (size_t i = 0; i < mTracks.size(); ++i) {
-                    TrackInfo *info = &mTracks.editItemAt(i);
-
-                    postNormalPlayTimeMapping(
-                            i,
-                            info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
-                }
-            }
-
-            mFirstAccessUnit = false;
-        }
+        handleFirstAccessUnit();
 
         TrackInfo *track = &mTracks.editItemAt(trackIndex);
 
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
index a5459fe..49ffcd6 100644
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -23,6 +23,8 @@
 #include <fcntl.h>
 #include <unistd.h>
 
+#include <GLES2/gl2.h>
+
 #include <media/stagefright/SurfaceMediaSource.h>
 #include <media/mediarecorder.h>
 
diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk
index c7d107e..f70454a 100644
--- a/media/libstagefright/wifi-display/Android.mk
+++ b/media/libstagefright/wifi-display/Android.mk
@@ -3,16 +3,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_SRC_FILES:= \
-        MediaReceiver.cpp               \
         MediaSender.cpp                 \
         Parameters.cpp                  \
-        rtp/RTPAssembler.cpp            \
-        rtp/RTPReceiver.cpp             \
         rtp/RTPSender.cpp               \
-        sink/DirectRenderer.cpp         \
-        sink/WifiDisplaySink.cpp        \
-        SNTPClient.cpp                  \
-        TimeSyncer.cpp                  \
         source/Converter.cpp            \
         source/MediaPuller.cpp          \
         source/PlaybackSession.cpp      \
@@ -42,87 +35,3 @@
 LOCAL_MODULE_TAGS:= optional
 
 include $(BUILD_SHARED_LIBRARY)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        wfd.cpp                 \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= wfd
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        udptest.cpp                 \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= udptest
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        rtptest.cpp                 \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= rtptest
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        nettest.cpp                     \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= nettest
-
-include $(BUILD_EXECUTABLE)
diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp
deleted file mode 100644
index 5524235..0000000
--- a/media/libstagefright/wifi-display/MediaReceiver.cpp
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaReceiver"
-#include <utils/Log.h>
-
-#include "MediaReceiver.h"
-
-#include "AnotherPacketSource.h"
-#include "rtp/RTPReceiver.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-MediaReceiver::MediaReceiver(
-        const sp<ANetworkSession> &netSession,
-        const sp<AMessage> &notify)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mMode(MODE_UNDEFINED),
-      mGeneration(0),
-      mInitStatus(OK),
-      mInitDoneCount(0) {
-}
-
-MediaReceiver::~MediaReceiver() {
-}
-
-ssize_t MediaReceiver::addTrack(
-        RTPReceiver::TransportMode rtpMode,
-        RTPReceiver::TransportMode rtcpMode,
-        int32_t *localRTPPort) {
-    if (mMode != MODE_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    size_t trackIndex = mTrackInfos.size();
-
-    TrackInfo info;
-
-    sp<AMessage> notify = new AMessage(kWhatReceiverNotify, id());
-    notify->setInt32("generation", mGeneration);
-    notify->setSize("trackIndex", trackIndex);
-
-    info.mReceiver = new RTPReceiver(mNetSession, notify);
-    looper()->registerHandler(info.mReceiver);
-
-    info.mReceiver->registerPacketType(
-            33, RTPReceiver::PACKETIZATION_TRANSPORT_STREAM);
-
-    info.mReceiver->registerPacketType(
-            96, RTPReceiver::PACKETIZATION_AAC);
-
-    info.mReceiver->registerPacketType(
-            97, RTPReceiver::PACKETIZATION_H264);
-
-    status_t err = info.mReceiver->initAsync(
-            rtpMode,
-            rtcpMode,
-            localRTPPort);
-
-    if (err != OK) {
-        looper()->unregisterHandler(info.mReceiver->id());
-        info.mReceiver.clear();
-
-        return err;
-    }
-
-    mTrackInfos.push_back(info);
-
-    return trackIndex;
-}
-
-status_t MediaReceiver::connectTrack(
-        size_t trackIndex,
-        const char *remoteHost,
-        int32_t remoteRTPPort,
-        int32_t remoteRTCPPort) {
-    if (trackIndex >= mTrackInfos.size()) {
-        return -ERANGE;
-    }
-
-    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
-    return info->mReceiver->connect(remoteHost, remoteRTPPort, remoteRTCPPort);
-}
-
-status_t MediaReceiver::initAsync(Mode mode) {
-    if ((mode == MODE_TRANSPORT_STREAM || mode == MODE_TRANSPORT_STREAM_RAW)
-            && mTrackInfos.size() > 1) {
-        return INVALID_OPERATION;
-    }
-
-    sp<AMessage> msg = new AMessage(kWhatInit, id());
-    msg->setInt32("mode", mode);
-    msg->post();
-
-    return OK;
-}
-
-void MediaReceiver::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatInit:
-        {
-            int32_t mode;
-            CHECK(msg->findInt32("mode", &mode));
-
-            CHECK_EQ(mMode, MODE_UNDEFINED);
-            mMode = (Mode)mode;
-
-            if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) {
-                notifyInitDone(mInitStatus);
-            }
-
-            mTSParser = new ATSParser(
-                    ATSParser::ALIGNED_VIDEO_DATA
-                        | ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE);
-
-            mFormatKnownMask = 0;
-            break;
-        }
-
-        case kWhatReceiverNotify:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-            if (generation != mGeneration) {
-                break;
-            }
-
-            onReceiverNotify(msg);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void MediaReceiver::onReceiverNotify(const sp<AMessage> &msg) {
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case RTPReceiver::kWhatInitDone:
-        {
-            ++mInitDoneCount;
-
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            if (err != OK) {
-                mInitStatus = err;
-                ++mGeneration;
-            }
-
-            if (mMode != MODE_UNDEFINED) {
-                if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) {
-                    notifyInitDone(mInitStatus);
-                }
-            }
-            break;
-        }
-
-        case RTPReceiver::kWhatError:
-        {
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            notifyError(err);
-            break;
-        }
-
-        case RTPReceiver::kWhatAccessUnit:
-        {
-            size_t trackIndex;
-            CHECK(msg->findSize("trackIndex", &trackIndex));
-
-            sp<ABuffer> accessUnit;
-            CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-            int32_t followsDiscontinuity;
-            if (!msg->findInt32(
-                        "followsDiscontinuity", &followsDiscontinuity)) {
-                followsDiscontinuity = 0;
-            }
-
-            if (mMode == MODE_TRANSPORT_STREAM) {
-                if (followsDiscontinuity) {
-                    mTSParser->signalDiscontinuity(
-                            ATSParser::DISCONTINUITY_TIME, NULL /* extra */);
-                }
-
-                for (size_t offset = 0;
-                        offset < accessUnit->size(); offset += 188) {
-                    status_t err = mTSParser->feedTSPacket(
-                             accessUnit->data() + offset, 188);
-
-                    if (err != OK) {
-                        notifyError(err);
-                        break;
-                    }
-                }
-
-                drainPackets(0 /* trackIndex */, ATSParser::VIDEO);
-                drainPackets(1 /* trackIndex */, ATSParser::AUDIO);
-            } else {
-                postAccessUnit(trackIndex, accessUnit, NULL);
-            }
-            break;
-        }
-
-        case RTPReceiver::kWhatPacketLost:
-        {
-            notifyPacketLost();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void MediaReceiver::drainPackets(
-        size_t trackIndex, ATSParser::SourceType type) {
-    sp<AnotherPacketSource> source =
-        static_cast<AnotherPacketSource *>(
-                mTSParser->getSource(type).get());
-
-    if (source == NULL) {
-        return;
-    }
-
-    sp<AMessage> format;
-    if (!(mFormatKnownMask & (1ul << trackIndex))) {
-        sp<MetaData> meta = source->getFormat();
-        CHECK(meta != NULL);
-
-        CHECK_EQ((status_t)OK, convertMetaDataToMessage(meta, &format));
-
-        mFormatKnownMask |= 1ul << trackIndex;
-    }
-
-    status_t finalResult;
-    while (source->hasBufferAvailable(&finalResult)) {
-        sp<ABuffer> accessUnit;
-        status_t err = source->dequeueAccessUnit(&accessUnit);
-        if (err == OK) {
-            postAccessUnit(trackIndex, accessUnit, format);
-            format.clear();
-        } else if (err != INFO_DISCONTINUITY) {
-            notifyError(err);
-        }
-    }
-
-    if (finalResult != OK) {
-        notifyError(finalResult);
-    }
-}
-
-void MediaReceiver::notifyInitDone(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatInitDone);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void MediaReceiver::notifyError(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void MediaReceiver::notifyPacketLost() {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatPacketLost);
-    notify->post();
-}
-
-void MediaReceiver::postAccessUnit(
-        size_t trackIndex,
-        const sp<ABuffer> &accessUnit,
-        const sp<AMessage> &format) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatAccessUnit);
-    notify->setSize("trackIndex", trackIndex);
-    notify->setBuffer("accessUnit", accessUnit);
-
-    if (format != NULL) {
-        notify->setMessage("format", format);
-    }
-
-    notify->post();
-}
-
-status_t MediaReceiver::informSender(
-        size_t trackIndex, const sp<AMessage> &params) {
-    if (trackIndex >= mTrackInfos.size()) {
-        return -ERANGE;
-    }
-
-    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
-    return info->mReceiver->informSender(params);
-}
-
-}  // namespace android
-
-
diff --git a/media/libstagefright/wifi-display/MediaReceiver.h b/media/libstagefright/wifi-display/MediaReceiver.h
deleted file mode 100644
index afbb407..0000000
--- a/media/libstagefright/wifi-display/MediaReceiver.h
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/stagefright/foundation/AHandler.h>
-
-#include "ATSParser.h"
-#include "rtp/RTPReceiver.h"
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-struct AMessage;
-struct ATSParser;
-
-// This class facilitates receiving of media data for one or more tracks
-// over RTP. Either a 1:1 track to RTP channel mapping is used or a single
-// RTP channel provides the data for a transport stream that is consequently
-// demuxed and its track's data provided to the observer.
-struct MediaReceiver : public AHandler {
-    enum {
-        kWhatInitDone,
-        kWhatError,
-        kWhatAccessUnit,
-        kWhatPacketLost,
-    };
-
-    MediaReceiver(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify);
-
-    ssize_t addTrack(
-            RTPReceiver::TransportMode rtpMode,
-            RTPReceiver::TransportMode rtcpMode,
-            int32_t *localRTPPort);
-
-    status_t connectTrack(
-            size_t trackIndex,
-            const char *remoteHost,
-            int32_t remoteRTPPort,
-            int32_t remoteRTCPPort);
-
-    enum Mode {
-        MODE_UNDEFINED,
-        MODE_TRANSPORT_STREAM,
-        MODE_TRANSPORT_STREAM_RAW,
-        MODE_ELEMENTARY_STREAMS,
-    };
-    status_t initAsync(Mode mode);
-
-    status_t informSender(size_t trackIndex, const sp<AMessage> &params);
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~MediaReceiver();
-
-private:
-    enum {
-        kWhatInit,
-        kWhatReceiverNotify,
-    };
-
-    struct TrackInfo {
-        sp<RTPReceiver> mReceiver;
-    };
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-
-    Mode mMode;
-    int32_t mGeneration;
-
-    Vector<TrackInfo> mTrackInfos;
-
-    status_t mInitStatus;
-    size_t mInitDoneCount;
-
-    sp<ATSParser> mTSParser;
-    uint32_t mFormatKnownMask;
-
-    void onReceiverNotify(const sp<AMessage> &msg);
-
-    void drainPackets(size_t trackIndex, ATSParser::SourceType type);
-
-    void notifyInitDone(status_t err);
-    void notifyError(status_t err);
-    void notifyPacketLost();
-
-    void postAccessUnit(
-            size_t trackIndex,
-            const sp<ABuffer> &accessUnit,
-            const sp<AMessage> &format);
-
-    DISALLOW_EVIL_CONSTRUCTORS(MediaReceiver);
-};
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/SNTPClient.cpp b/media/libstagefright/wifi-display/SNTPClient.cpp
deleted file mode 100644
index 5c0af6a..0000000
--- a/media/libstagefright/wifi-display/SNTPClient.cpp
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "SNTPClient.h"
-
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/Utils.h>
-
-#include <arpa/inet.h>
-#include <netdb.h>
-#include <netinet/in.h>
-#include <sys/socket.h>
-#include <unistd.h>
-
-namespace android {
-
-SNTPClient::SNTPClient() {
-}
-
-status_t SNTPClient::requestTime(const char *host) {
-    struct hostent *ent;
-    int64_t requestTimeNTP, requestTimeUs;
-    ssize_t n;
-    int64_t responseTimeUs, responseTimeNTP;
-    int64_t originateTimeNTP, receiveTimeNTP, transmitTimeNTP;
-    int64_t roundTripTimeNTP, clockOffsetNTP;
-
-    status_t err = UNKNOWN_ERROR;
-
-    int s = socket(AF_INET, SOCK_DGRAM, 0);
-
-    if (s < 0) {
-        err = -errno;
-
-        goto bail;
-    }
-
-    ent = gethostbyname(host);
-
-    if (ent == NULL) {
-        err = -ENOENT;
-        goto bail2;
-    }
-
-    struct sockaddr_in hostAddr;
-    memset(hostAddr.sin_zero, 0, sizeof(hostAddr.sin_zero));
-    hostAddr.sin_family = AF_INET;
-    hostAddr.sin_port = htons(kNTPPort);
-    hostAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
-
-    uint8_t packet[kNTPPacketSize];
-    memset(packet, 0, sizeof(packet));
-
-    packet[0] = kNTPModeClient | (kNTPVersion << 3);
-
-    requestTimeNTP = getNowNTP();
-    requestTimeUs = ALooper::GetNowUs();
-    writeTimeStamp(&packet[kNTPTransmitTimeOffset], requestTimeNTP);
-
-    n = sendto(
-            s, packet, sizeof(packet), 0,
-            (const struct sockaddr *)&hostAddr, sizeof(hostAddr));
-
-    if (n < 0) {
-        err = -errno;
-        goto bail2;
-    }
-
-    memset(packet, 0, sizeof(packet));
-
-    do {
-        n = recv(s, packet, sizeof(packet), 0);
-    } while (n < 0 && errno == EINTR);
-
-    if (n < 0) {
-        err = -errno;
-        goto bail2;
-    }
-
-    responseTimeUs = ALooper::GetNowUs();
-
-    responseTimeNTP = requestTimeNTP + makeNTP(responseTimeUs - requestTimeUs);
-
-    originateTimeNTP = readTimeStamp(&packet[kNTPOriginateTimeOffset]);
-    receiveTimeNTP = readTimeStamp(&packet[kNTPReceiveTimeOffset]);
-    transmitTimeNTP = readTimeStamp(&packet[kNTPTransmitTimeOffset]);
-
-    roundTripTimeNTP =
-        makeNTP(responseTimeUs - requestTimeUs)
-            - (transmitTimeNTP - receiveTimeNTP);
-
-    clockOffsetNTP =
-        ((receiveTimeNTP - originateTimeNTP)
-            + (transmitTimeNTP - responseTimeNTP)) / 2;
-
-    mTimeReferenceNTP = responseTimeNTP + clockOffsetNTP;
-    mTimeReferenceUs = responseTimeUs;
-    mRoundTripTimeNTP = roundTripTimeNTP;
-
-    err = OK;
-
-bail2:
-    close(s);
-    s = -1;
-
-bail:
-    return err;
-}
-
-int64_t SNTPClient::adjustTimeUs(int64_t timeUs) const {
-    uint64_t nowNTP =
-        mTimeReferenceNTP + makeNTP(timeUs - mTimeReferenceUs);
-
-    int64_t nowUs =
-        (nowNTP >> 32) * 1000000ll
-        + ((nowNTP & 0xffffffff) * 1000000ll) / (1ll << 32);
-
-    nowUs -= ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
-
-    return nowUs;
-}
-
-// static
-void SNTPClient::writeTimeStamp(uint8_t *dst, uint64_t ntpTime) {
-    *dst++ = (ntpTime >> 56) & 0xff;
-    *dst++ = (ntpTime >> 48) & 0xff;
-    *dst++ = (ntpTime >> 40) & 0xff;
-    *dst++ = (ntpTime >> 32) & 0xff;
-    *dst++ = (ntpTime >> 24) & 0xff;
-    *dst++ = (ntpTime >> 16) & 0xff;
-    *dst++ = (ntpTime >> 8) & 0xff;
-    *dst++ = ntpTime & 0xff;
-}
-
-// static
-uint64_t SNTPClient::readTimeStamp(const uint8_t *dst) {
-    return U64_AT(dst);
-}
-
-// static
-uint64_t SNTPClient::getNowNTP() {
-    struct timeval tv;
-    gettimeofday(&tv, NULL /* time zone */);
-
-    uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec;
-
-    nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
-
-    return makeNTP(nowUs);
-}
-
-// static
-uint64_t SNTPClient::makeNTP(uint64_t deltaUs) {
-    uint64_t hi = deltaUs / 1000000ll;
-    uint64_t lo = ((1ll << 32) * (deltaUs % 1000000ll)) / 1000000ll;
-
-    return (hi << 32) | lo;
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/SNTPClient.h b/media/libstagefright/wifi-display/SNTPClient.h
deleted file mode 100644
index 967d1fc..0000000
--- a/media/libstagefright/wifi-display/SNTPClient.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SNTP_CLIENT_H_
-
-#define SNTP_CLIENT_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <utils/Errors.h>
-
-namespace android {
-
-// Implementation of the SNTP (Simple Network Time Protocol)
-struct SNTPClient {
-    SNTPClient();
-
-    status_t requestTime(const char *host);
-
-    // given a time obtained from ALooper::GetNowUs()
-    // return the number of us elapsed since Jan 1 1970 00:00:00 (UTC).
-    int64_t adjustTimeUs(int64_t timeUs) const;
-
-private:
-    enum {
-        kNTPPort = 123,
-        kNTPPacketSize = 48,
-        kNTPModeClient = 3,
-        kNTPVersion = 3,
-        kNTPTransmitTimeOffset = 40,
-        kNTPOriginateTimeOffset = 24,
-        kNTPReceiveTimeOffset = 32,
-    };
-
-    uint64_t mTimeReferenceNTP;
-    int64_t mTimeReferenceUs;
-    int64_t mRoundTripTimeNTP;
-
-    static void writeTimeStamp(uint8_t *dst, uint64_t ntpTime);
-    static uint64_t readTimeStamp(const uint8_t *dst);
-
-    static uint64_t getNowNTP();
-    static uint64_t makeNTP(uint64_t deltaUs);
-
-    DISALLOW_EVIL_CONSTRUCTORS(SNTPClient);
-};
-
-}  // namespace android
-
-#endif  // SNTP_CLIENT_H_
diff --git a/media/libstagefright/wifi-display/TimeSyncer.cpp b/media/libstagefright/wifi-display/TimeSyncer.cpp
deleted file mode 100644
index 0f4d93a..0000000
--- a/media/libstagefright/wifi-display/TimeSyncer.cpp
+++ /dev/null
@@ -1,337 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NEBUG 0
-#define LOG_TAG "TimeSyncer"
-#include <utils/Log.h>
-
-#include "TimeSyncer.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-TimeSyncer::TimeSyncer(
-        const sp<ANetworkSession> &netSession, const sp<AMessage> &notify)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mIsServer(false),
-      mConnected(false),
-      mUDPSession(0),
-      mSeqNo(0),
-      mTotalTimeUs(0.0),
-      mPendingT1(0ll),
-      mTimeoutGeneration(0) {
-}
-
-TimeSyncer::~TimeSyncer() {
-}
-
-void TimeSyncer::startServer(unsigned localPort) {
-    sp<AMessage> msg = new AMessage(kWhatStartServer, id());
-    msg->setInt32("localPort", localPort);
-    msg->post();
-}
-
-void TimeSyncer::startClient(const char *remoteHost, unsigned remotePort) {
-    sp<AMessage> msg = new AMessage(kWhatStartClient, id());
-    msg->setString("remoteHost", remoteHost);
-    msg->setInt32("remotePort", remotePort);
-    msg->post();
-}
-
-void TimeSyncer::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatStartClient:
-        {
-            AString remoteHost;
-            CHECK(msg->findString("remoteHost", &remoteHost));
-
-            int32_t remotePort;
-            CHECK(msg->findInt32("remotePort", &remotePort));
-
-            sp<AMessage> notify = new AMessage(kWhatUDPNotify, id());
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createUDPSession(
-                         0 /* localPort */,
-                         remoteHost.c_str(),
-                         remotePort,
-                         notify,
-                         &mUDPSession));
-
-            postSendPacket();
-            break;
-        }
-
-        case kWhatStartServer:
-        {
-            mIsServer = true;
-
-            int32_t localPort;
-            CHECK(msg->findInt32("localPort", &localPort));
-
-            sp<AMessage> notify = new AMessage(kWhatUDPNotify, id());
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createUDPSession(
-                         localPort, notify, &mUDPSession));
-
-            break;
-        }
-
-        case kWhatSendPacket:
-        {
-            if (mHistory.size() == 0) {
-                ALOGI("starting batch");
-            }
-
-            TimeInfo ti;
-            memset(&ti, 0, sizeof(ti));
-
-            ti.mT1 = ALooper::GetNowUs();
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->sendRequest(
-                         mUDPSession, &ti, sizeof(ti)));
-
-            mPendingT1 = ti.mT1;
-            postTimeout();
-            break;
-        }
-
-        case kWhatTimedOut:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mTimeoutGeneration) {
-                break;
-            }
-
-            ALOGI("timed out, sending another request");
-            postSendPacket();
-            break;
-        }
-
-        case kWhatUDPNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatError:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    AString detail;
-                    CHECK(msg->findString("detail", &detail));
-
-                    ALOGE("An error occurred in session %d (%d, '%s/%s').",
-                          sessionID,
-                          err,
-                          detail.c_str(),
-                          strerror(-err));
-
-                    mNetSession->destroySession(sessionID);
-
-                    cancelTimeout();
-
-                    notifyError(err);
-                    break;
-                }
-
-                case ANetworkSession::kWhatDatagram:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    sp<ABuffer> packet;
-                    CHECK(msg->findBuffer("data", &packet));
-
-                    int64_t arrivalTimeUs;
-                    CHECK(packet->meta()->findInt64(
-                                "arrivalTimeUs", &arrivalTimeUs));
-
-                    CHECK_EQ(packet->size(), sizeof(TimeInfo));
-
-                    TimeInfo *ti = (TimeInfo *)packet->data();
-
-                    if (mIsServer) {
-                        if (!mConnected) {
-                            AString fromAddr;
-                            CHECK(msg->findString("fromAddr", &fromAddr));
-
-                            int32_t fromPort;
-                            CHECK(msg->findInt32("fromPort", &fromPort));
-
-                            CHECK_EQ((status_t)OK,
-                                     mNetSession->connectUDPSession(
-                                         mUDPSession, fromAddr.c_str(), fromPort));
-
-                            mConnected = true;
-                        }
-
-                        ti->mT2 = arrivalTimeUs;
-                        ti->mT3 = ALooper::GetNowUs();
-
-                        CHECK_EQ((status_t)OK,
-                                 mNetSession->sendRequest(
-                                     mUDPSession, ti, sizeof(*ti)));
-                    } else {
-                        if (ti->mT1 != mPendingT1) {
-                            break;
-                        }
-
-                        cancelTimeout();
-                        mPendingT1 = 0;
-
-                        ti->mT4 = arrivalTimeUs;
-
-                        // One way delay for a packet to travel from client
-                        // to server or back (assumed to be the same either way).
-                        int64_t delay =
-                            (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2;
-
-                        // Offset between the client clock (T1, T4) and the
-                        // server clock (T2, T3) timestamps.
-                        int64_t offset =
-                            (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2;
-
-                        mHistory.push_back(*ti);
-
-                        ALOGV("delay = %lld us,\toffset %lld us",
-                               delay,
-                               offset);
-
-                        if (mHistory.size() < kNumPacketsPerBatch) {
-                            postSendPacket(1000000ll / 30);
-                        } else {
-                            notifyOffset();
-
-                            ALOGI("batch done");
-
-                            mHistory.clear();
-                            postSendPacket(kBatchDelayUs);
-                        }
-                    }
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void TimeSyncer::postSendPacket(int64_t delayUs) {
-    (new AMessage(kWhatSendPacket, id()))->post(delayUs);
-}
-
-void TimeSyncer::postTimeout() {
-    sp<AMessage> msg = new AMessage(kWhatTimedOut, id());
-    msg->setInt32("generation", mTimeoutGeneration);
-    msg->post(kTimeoutDelayUs);
-}
-
-void TimeSyncer::cancelTimeout() {
-    ++mTimeoutGeneration;
-}
-
-void TimeSyncer::notifyError(status_t err) {
-    if (mNotify == NULL) {
-        looper()->stop();
-        return;
-    }
-
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-// static
-int TimeSyncer::CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2) {
-    int64_t rt1 = ti1->mT4 - ti1->mT1;
-    int64_t rt2 = ti2->mT4 - ti2->mT1;
-
-    if (rt1 < rt2) {
-        return -1;
-    } else if (rt1 > rt2) {
-        return 1;
-    }
-
-    return 0;
-}
-
-void TimeSyncer::notifyOffset() {
-    mHistory.sort(CompareRountripTime);
-
-    int64_t sum = 0ll;
-    size_t count = 0;
-
-    // Only consider the third of the information associated with the best
-    // (smallest) roundtrip times.
-    for (size_t i = 0; i < mHistory.size() / 3; ++i) {
-        const TimeInfo *ti = &mHistory[i];
-
-#if 0
-        // One way delay for a packet to travel from client
-        // to server or back (assumed to be the same either way).
-        int64_t delay =
-            (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2;
-#endif
-
-        // Offset between the client clock (T1, T4) and the
-        // server clock (T2, T3) timestamps.
-        int64_t offset =
-            (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2;
-
-        ALOGV("(%d) RT: %lld us, offset: %lld us",
-              i, ti->mT4 - ti->mT1, offset);
-
-        sum += offset;
-        ++count;
-    }
-
-    if (mNotify == NULL) {
-        ALOGI("avg. offset is %lld", sum / count);
-        return;
-    }
-
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatTimeOffset);
-    notify->setInt64("offset", sum / count);
-    notify->post();
-}
-
-}  // namespace android
diff --git a/media/libstagefright/wifi-display/TimeSyncer.h b/media/libstagefright/wifi-display/TimeSyncer.h
deleted file mode 100644
index 4e7571f..0000000
--- a/media/libstagefright/wifi-display/TimeSyncer.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef TIME_SYNCER_H_
-
-#define TIME_SYNCER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ANetworkSession;
-
-/*
-   TimeSyncer allows us to synchronize time between a client and a server.
-   The client sends a UDP packet containing its send-time to the server,
-   the server sends that packet back to the client amended with information
-   about when it was received as well as the time the reply was sent back.
-   Finally the client receives the reply and has now enough information to
-   compute the clock offset between client and server assuming that packet
-   exchange is symmetric, i.e. time for a packet client->server and
-   server->client is roughly equal.
-   This exchange is repeated a number of times and the average offset computed
-   over the 30% of packets that had the lowest roundtrip times.
-   The offset is determined every 10 secs to account for slight differences in
-   clock frequency.
-*/
-struct TimeSyncer : public AHandler {
-    enum {
-        kWhatError,
-        kWhatTimeOffset,
-    };
-    TimeSyncer(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify);
-
-    void startServer(unsigned localPort);
-    void startClient(const char *remoteHost, unsigned remotePort);
-
-protected:
-    virtual ~TimeSyncer();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatStartServer,
-        kWhatStartClient,
-        kWhatUDPNotify,
-        kWhatSendPacket,
-        kWhatTimedOut,
-    };
-
-    struct TimeInfo {
-        int64_t mT1;  // client timestamp at send
-        int64_t mT2;  // server timestamp at receive
-        int64_t mT3;  // server timestamp at send
-        int64_t mT4;  // client timestamp at receive
-    };
-
-    enum {
-        kNumPacketsPerBatch = 30,
-    };
-    static const int64_t kTimeoutDelayUs = 500000ll;
-    static const int64_t kBatchDelayUs = 60000000ll;  // every minute
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-
-    bool mIsServer;
-    bool mConnected;
-    int32_t mUDPSession;
-    uint32_t mSeqNo;
-    double mTotalTimeUs;
-
-    Vector<TimeInfo> mHistory;
-
-    int64_t mPendingT1;
-    int32_t mTimeoutGeneration;
-
-    void postSendPacket(int64_t delayUs = 0ll);
-
-    void postTimeout();
-    void cancelTimeout();
-
-    void notifyError(status_t err);
-    void notifyOffset();
-
-    static int CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2);
-
-    DISALLOW_EVIL_CONSTRUCTORS(TimeSyncer);
-};
-
-}  // namespace android
-
-#endif  // TIME_SYNCER_H_
diff --git a/media/libstagefright/wifi-display/nettest.cpp b/media/libstagefright/wifi-display/nettest.cpp
deleted file mode 100644
index 73c0d80..0000000
--- a/media/libstagefright/wifi-display/nettest.cpp
+++ /dev/null
@@ -1,400 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NEBUG 0
-#define LOG_TAG "nettest"
-#include <utils/Log.h>
-
-#include "TimeSyncer.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/NuMediaExtractor.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-struct TestHandler : public AHandler {
-    TestHandler(const sp<ANetworkSession> &netSession);
-
-    void listen(int32_t port);
-    void connect(const char *host, int32_t port);
-
-protected:
-    virtual ~TestHandler();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kTimeSyncerPort = 8123,
-    };
-
-    enum {
-        kWhatListen,
-        kWhatConnect,
-        kWhatTimeSyncerNotify,
-        kWhatNetNotify,
-        kWhatSendMore,
-        kWhatStop,
-    };
-
-    sp<ANetworkSession> mNetSession;
-    sp<TimeSyncer> mTimeSyncer;
-
-    int32_t mServerSessionID;
-    int32_t mSessionID;
-
-    int64_t mTimeOffsetUs;
-    bool mTimeOffsetValid;
-
-    int32_t mCounter;
-
-    int64_t mMaxDelayMs;
-
-    void dumpDelay(int32_t counter, int64_t delayMs);
-
-    DISALLOW_EVIL_CONSTRUCTORS(TestHandler);
-};
-
-TestHandler::TestHandler(const sp<ANetworkSession> &netSession)
-    : mNetSession(netSession),
-      mServerSessionID(0),
-      mSessionID(0),
-      mTimeOffsetUs(-1ll),
-      mTimeOffsetValid(false),
-      mCounter(0),
-      mMaxDelayMs(-1ll) {
-}
-
-TestHandler::~TestHandler() {
-}
-
-void TestHandler::listen(int32_t port) {
-    sp<AMessage> msg = new AMessage(kWhatListen, id());
-    msg->setInt32("port", port);
-    msg->post();
-}
-
-void TestHandler::connect(const char *host, int32_t port) {
-    sp<AMessage> msg = new AMessage(kWhatConnect, id());
-    msg->setString("host", host);
-    msg->setInt32("port", port);
-    msg->post();
-}
-
-void TestHandler::dumpDelay(int32_t counter, int64_t delayMs) {
-    static const int64_t kMinDelayMs = 0;
-    static const int64_t kMaxDelayMs = 300;
-
-    const char *kPattern = "########################################";
-    size_t kPatternSize = strlen(kPattern);
-
-    int n = (kPatternSize * (delayMs - kMinDelayMs))
-                / (kMaxDelayMs - kMinDelayMs);
-
-    if (n < 0) {
-        n = 0;
-    } else if ((size_t)n > kPatternSize) {
-        n = kPatternSize;
-    }
-
-    if (delayMs > mMaxDelayMs) {
-        mMaxDelayMs = delayMs;
-    }
-
-    ALOGI("[%d] (%4lld ms / %4lld ms) %s",
-          counter,
-          delayMs,
-          mMaxDelayMs,
-          kPattern + kPatternSize - n);
-}
-
-void TestHandler::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatListen:
-        {
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-
-            notify = new AMessage(kWhatNetNotify, id());
-
-            int32_t port;
-            CHECK(msg->findInt32("port", &port));
-
-            struct in_addr ifaceAddr;
-            ifaceAddr.s_addr = INADDR_ANY;
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createTCPDatagramSession(
-                         ifaceAddr,
-                         port,
-                         notify,
-                         &mServerSessionID));
-            break;
-        }
-
-        case kWhatConnect:
-        {
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-            mTimeSyncer->startServer(kTimeSyncerPort);
-
-            AString host;
-            CHECK(msg->findString("host", &host));
-
-            int32_t port;
-            CHECK(msg->findInt32("port", &port));
-
-            notify = new AMessage(kWhatNetNotify, id());
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createTCPDatagramSession(
-                         0 /* localPort */,
-                         host.c_str(),
-                         port,
-                         notify,
-                         &mSessionID));
-            break;
-        }
-
-        case kWhatNetNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatConnected:
-                {
-                    ALOGI("kWhatConnected");
-
-                    (new AMessage(kWhatSendMore, id()))->post();
-                    break;
-                }
-
-                case ANetworkSession::kWhatClientConnected:
-                {
-                    ALOGI("kWhatClientConnected");
-
-                    CHECK_EQ(mSessionID, 0);
-                    CHECK(msg->findInt32("sessionID", &mSessionID));
-
-                    AString clientIP;
-                    CHECK(msg->findString("client-ip", &clientIP));
-
-                    mTimeSyncer->startClient(clientIP.c_str(), kTimeSyncerPort);
-                    break;
-                }
-
-                case ANetworkSession::kWhatDatagram:
-                {
-                    sp<ABuffer> packet;
-                    CHECK(msg->findBuffer("data", &packet));
-
-                    CHECK_EQ(packet->size(), 12u);
-
-                    int32_t counter = U32_AT(packet->data());
-                    int64_t timeUs = U64_AT(packet->data() + 4);
-
-                    if (mTimeOffsetValid) {
-                        timeUs -= mTimeOffsetUs;
-                        int64_t nowUs = ALooper::GetNowUs();
-                        int64_t delayMs = (nowUs - timeUs) / 1000ll;
-
-                        dumpDelay(counter, delayMs);
-                    } else {
-                        ALOGI("received %d", counter);
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatError:
-                {
-                    ALOGE("kWhatError");
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatTimeSyncerNotify:
-        {
-            CHECK(msg->findInt64("offset", &mTimeOffsetUs));
-            mTimeOffsetValid = true;
-            break;
-        }
-
-        case kWhatSendMore:
-        {
-            uint8_t buffer[4 + 8];
-            buffer[0] = mCounter >> 24;
-            buffer[1] = (mCounter >> 16) & 0xff;
-            buffer[2] = (mCounter >> 8) & 0xff;
-            buffer[3] = mCounter & 0xff;
-
-            int64_t nowUs = ALooper::GetNowUs();
-
-            buffer[4] = nowUs >> 56;
-            buffer[5] = (nowUs >> 48) & 0xff;
-            buffer[6] = (nowUs >> 40) & 0xff;
-            buffer[7] = (nowUs >> 32) & 0xff;
-            buffer[8] = (nowUs >> 24) & 0xff;
-            buffer[9] = (nowUs >> 16) & 0xff;
-            buffer[10] = (nowUs >> 8) & 0xff;
-            buffer[11] = nowUs & 0xff;
-
-            ++mCounter;
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->sendRequest(
-                         mSessionID,
-                         buffer,
-                         sizeof(buffer),
-                         true /* timeValid */,
-                         nowUs));
-
-            msg->post(100000ll);
-            break;
-        }
-
-        case kWhatStop:
-        {
-            if (mSessionID != 0) {
-                mNetSession->destroySession(mSessionID);
-                mSessionID = 0;
-            }
-
-            if (mServerSessionID != 0) {
-                mNetSession->destroySession(mServerSessionID);
-                mServerSessionID = 0;
-            }
-
-            looper()->stop();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-}  // namespace android
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage: %s -c host:port\tconnect to remote host\n"
-            "               -l port   \tlisten\n",
-            me);
-}
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    // srand(time(NULL));
-
-    ProcessState::self()->startThreadPool();
-
-    DataSource::RegisterDefaultSniffers();
-
-    int32_t connectToPort = -1;
-    AString connectToHost;
-
-    int32_t listenOnPort = -1;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l:")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    usage(argv[0]);
-                    exit(1);
-                }
-
-                connectToHost.setTo(optarg, colonPos - optarg);
-
-                char *end;
-                connectToPort = strtol(colonPos + 1, &end, 10);
-
-                if (*end != '\0' || end == colonPos + 1
-                        || connectToPort < 0 || connectToPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case 'l':
-            {
-                char *end;
-                listenOnPort = strtol(optarg, &end, 10);
-
-                if (*end != '\0' || end == optarg
-                        || listenOnPort < 0 || listenOnPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case '?':
-            case 'h':
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if ((listenOnPort < 0 && connectToPort < 0)
-            || (listenOnPort >= 0 && connectToPort >= 0)) {
-        fprintf(stderr,
-                "You need to select either client or server mode.\n");
-        exit(1);
-    }
-
-    sp<ANetworkSession> netSession = new ANetworkSession;
-    netSession->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<TestHandler> handler = new TestHandler(netSession);
-    looper->registerHandler(handler);
-
-    if (listenOnPort) {
-        handler->listen(listenOnPort);
-    }
-
-    if (connectToPort >= 0) {
-        handler->connect(connectToHost.c_str(), connectToPort);
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    return 0;
-}
diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp
deleted file mode 100644
index 7a96081..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RTPAssembler"
-#include <utils/Log.h>
-
-#include "RTPAssembler.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-
-namespace android {
-
-RTPReceiver::Assembler::Assembler(const sp<AMessage> &notify)
-    : mNotify(notify) {
-}
-
-void RTPReceiver::Assembler::postAccessUnit(
-        const sp<ABuffer> &accessUnit, bool followsDiscontinuity) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", RTPReceiver::kWhatAccessUnit);
-    notify->setBuffer("accessUnit", accessUnit);
-    notify->setInt32("followsDiscontinuity", followsDiscontinuity);
-    notify->post();
-}
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::TSAssembler::TSAssembler(const sp<AMessage> &notify)
-    : Assembler(notify),
-      mSawDiscontinuity(false) {
-}
-
-void RTPReceiver::TSAssembler::signalDiscontinuity() {
-    mSawDiscontinuity = true;
-}
-
-status_t RTPReceiver::TSAssembler::processPacket(const sp<ABuffer> &packet) {
-    int32_t rtpTime;
-    CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-
-    packet->meta()->setInt64("timeUs", (rtpTime * 100ll) / 9);
-
-    postAccessUnit(packet, mSawDiscontinuity);
-
-    if (mSawDiscontinuity) {
-        mSawDiscontinuity = false;
-    }
-
-    return OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::H264Assembler::H264Assembler(const sp<AMessage> &notify)
-    : Assembler(notify),
-      mState(0),
-      mIndicator(0),
-      mNALType(0),
-      mAccessUnitRTPTime(0) {
-}
-
-void RTPReceiver::H264Assembler::signalDiscontinuity() {
-    reset();
-}
-
-status_t RTPReceiver::H264Assembler::processPacket(const sp<ABuffer> &packet) {
-    status_t err = internalProcessPacket(packet);
-
-    if (err != OK) {
-        reset();
-    }
-
-    return err;
-}
-
-status_t RTPReceiver::H264Assembler::internalProcessPacket(
-        const sp<ABuffer> &packet) {
-    const uint8_t *data = packet->data();
-    size_t size = packet->size();
-
-    switch (mState) {
-        case 0:
-        {
-            if (size < 1 || (data[0] & 0x80)) {
-                ALOGV("Malformed H264 RTP packet (empty or F-bit set)");
-                return ERROR_MALFORMED;
-            }
-
-            unsigned nalType = data[0] & 0x1f;
-            if (nalType >= 1 && nalType <= 23) {
-                addSingleNALUnit(packet);
-                ALOGV("added single NAL packet");
-            } else if (nalType == 28) {
-                // FU-A
-                unsigned indicator = data[0];
-                CHECK((indicator & 0x1f) == 28);
-
-                if (size < 2) {
-                    ALOGV("Malformed H264 FU-A packet (single byte)");
-                    return ERROR_MALFORMED;
-                }
-
-                if (!(data[1] & 0x80)) {
-                    ALOGV("Malformed H264 FU-A packet (no start bit)");
-                    return ERROR_MALFORMED;
-                }
-
-                mIndicator = data[0];
-                mNALType = data[1] & 0x1f;
-                uint32_t nri = (data[0] >> 5) & 3;
-
-                clearAccumulator();
-
-                uint8_t byte = mNALType | (nri << 5);
-                appendToAccumulator(&byte, 1);
-                appendToAccumulator(data + 2, size - 2);
-
-                int32_t rtpTime;
-                CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-                mAccumulator->meta()->setInt32("rtp-time", rtpTime);
-
-                if (data[1] & 0x40) {
-                    // Huh? End bit also set on the first buffer.
-                    addSingleNALUnit(mAccumulator);
-                    clearAccumulator();
-
-                    ALOGV("added FU-A");
-                    break;
-                }
-
-                mState = 1;
-            } else if (nalType == 24) {
-                // STAP-A
-
-                status_t err = addSingleTimeAggregationPacket(packet);
-                if (err != OK) {
-                    return err;
-                }
-            } else {
-                ALOGV("Malformed H264 packet (unknown type %d)", nalType);
-                return ERROR_UNSUPPORTED;
-            }
-            break;
-        }
-
-        case 1:
-        {
-            if (size < 2
-                    || data[0] != mIndicator
-                    || (data[1] & 0x1f) != mNALType
-                    || (data[1] & 0x80)) {
-                ALOGV("Malformed H264 FU-A packet (indicator, "
-                      "type or start bit mismatch)");
-
-                return ERROR_MALFORMED;
-            }
-
-            appendToAccumulator(data + 2, size - 2);
-
-            if (data[1] & 0x40) {
-                addSingleNALUnit(mAccumulator);
-
-                clearAccumulator();
-                mState = 0;
-
-                ALOGV("added FU-A");
-            }
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-
-    int32_t marker;
-    CHECK(packet->meta()->findInt32("M", &marker));
-
-    if (marker) {
-        flushAccessUnit();
-    }
-
-    return OK;
-}
-
-void RTPReceiver::H264Assembler::reset() {
-    mNALUnits.clear();
-
-    clearAccumulator();
-    mState = 0;
-}
-
-void RTPReceiver::H264Assembler::clearAccumulator() {
-    if (mAccumulator != NULL) {
-        // XXX Too expensive.
-        mAccumulator.clear();
-    }
-}
-
-void RTPReceiver::H264Assembler::appendToAccumulator(
-        const void *data, size_t size) {
-    if (mAccumulator == NULL) {
-        mAccumulator = new ABuffer(size);
-        memcpy(mAccumulator->data(), data, size);
-        return;
-    }
-
-    if (mAccumulator->size() + size > mAccumulator->capacity()) {
-        sp<ABuffer> buf = new ABuffer(mAccumulator->size() + size);
-        memcpy(buf->data(), mAccumulator->data(), mAccumulator->size());
-        buf->setRange(0, mAccumulator->size());
-
-        int32_t rtpTime;
-        if (mAccumulator->meta()->findInt32("rtp-time", &rtpTime)) {
-            buf->meta()->setInt32("rtp-time", rtpTime);
-        }
-
-        mAccumulator = buf;
-    }
-
-    memcpy(mAccumulator->data() + mAccumulator->size(), data, size);
-    mAccumulator->setRange(0, mAccumulator->size() + size);
-}
-
-void RTPReceiver::H264Assembler::addSingleNALUnit(const sp<ABuffer> &packet) {
-    if (mNALUnits.empty()) {
-        int32_t rtpTime;
-        CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-
-        mAccessUnitRTPTime = rtpTime;
-    }
-
-    mNALUnits.push_back(packet);
-}
-
-void RTPReceiver::H264Assembler::flushAccessUnit() {
-    if (mNALUnits.empty()) {
-        return;
-    }
-
-    size_t totalSize = 0;
-    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
-            it != mNALUnits.end(); ++it) {
-        totalSize += 4 + (*it)->size();
-    }
-
-    sp<ABuffer> accessUnit = new ABuffer(totalSize);
-    size_t offset = 0;
-    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
-            it != mNALUnits.end(); ++it) {
-        const sp<ABuffer> nalUnit = *it;
-
-        memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
-
-        memcpy(accessUnit->data() + offset + 4,
-               nalUnit->data(),
-               nalUnit->size());
-
-        offset += 4 + nalUnit->size();
-    }
-
-    mNALUnits.clear();
-
-    accessUnit->meta()->setInt64("timeUs", mAccessUnitRTPTime * 100ll / 9ll);
-    postAccessUnit(accessUnit, false /* followsDiscontinuity */);
-}
-
-status_t RTPReceiver::H264Assembler::addSingleTimeAggregationPacket(
-        const sp<ABuffer> &packet) {
-    const uint8_t *data = packet->data();
-    size_t size = packet->size();
-
-    if (size < 3) {
-        ALOGV("Malformed H264 STAP-A packet (too small)");
-        return ERROR_MALFORMED;
-    }
-
-    int32_t rtpTime;
-    CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-
-    ++data;
-    --size;
-    while (size >= 2) {
-        size_t nalSize = (data[0] << 8) | data[1];
-
-        if (size < nalSize + 2) {
-            ALOGV("Malformed H264 STAP-A packet (incomplete NAL unit)");
-            return ERROR_MALFORMED;
-        }
-
-        sp<ABuffer> unit = new ABuffer(nalSize);
-        memcpy(unit->data(), &data[2], nalSize);
-
-        unit->meta()->setInt32("rtp-time", rtpTime);
-
-        addSingleNALUnit(unit);
-
-        data += 2 + nalSize;
-        size -= 2 + nalSize;
-    }
-
-    if (size != 0) {
-        ALOGV("Unexpected padding at end of STAP-A packet.");
-    }
-
-    ALOGV("added STAP-A");
-
-    return OK;
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.h b/media/libstagefright/wifi-display/rtp/RTPAssembler.h
deleted file mode 100644
index e456d32..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPAssembler.h
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_ASSEMBLER_H_
-
-#define RTP_ASSEMBLER_H_
-
-#include "RTPReceiver.h"
-
-namespace android {
-
-// A helper class to reassemble the payload of RTP packets into access
-// units depending on the packetization scheme.
-struct RTPReceiver::Assembler : public RefBase {
-    Assembler(const sp<AMessage> &notify);
-
-    virtual void signalDiscontinuity() = 0;
-    virtual status_t processPacket(const sp<ABuffer> &packet) = 0;
-
-protected:
-    virtual ~Assembler() {}
-
-    void postAccessUnit(
-            const sp<ABuffer> &accessUnit, bool followsDiscontinuity);
-
-private:
-    sp<AMessage> mNotify;
-
-    DISALLOW_EVIL_CONSTRUCTORS(Assembler);
-};
-
-struct RTPReceiver::TSAssembler : public RTPReceiver::Assembler {
-    TSAssembler(const sp<AMessage> &notify);
-
-    virtual void signalDiscontinuity();
-    virtual status_t processPacket(const sp<ABuffer> &packet);
-
-private:
-    bool mSawDiscontinuity;
-
-    DISALLOW_EVIL_CONSTRUCTORS(TSAssembler);
-};
-
-struct RTPReceiver::H264Assembler : public RTPReceiver::Assembler {
-    H264Assembler(const sp<AMessage> &notify);
-
-    virtual void signalDiscontinuity();
-    virtual status_t processPacket(const sp<ABuffer> &packet);
-
-private:
-    int32_t mState;
-
-    uint8_t mIndicator;
-    uint8_t mNALType;
-
-    sp<ABuffer> mAccumulator;
-
-    List<sp<ABuffer> > mNALUnits;
-    int32_t mAccessUnitRTPTime;
-
-    status_t internalProcessPacket(const sp<ABuffer> &packet);
-
-    void addSingleNALUnit(const sp<ABuffer> &packet);
-    status_t addSingleTimeAggregationPacket(const sp<ABuffer> &packet);
-
-    void flushAccessUnit();
-
-    void clearAccumulator();
-    void appendToAccumulator(const void *data, size_t size);
-
-    void reset();
-
-    DISALLOW_EVIL_CONSTRUCTORS(H264Assembler);
-};
-
-}  // namespace android
-
-#endif  // RTP_ASSEMBLER_H_
-
diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp
deleted file mode 100644
index 3b3bd63..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp
+++ /dev/null
@@ -1,1152 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RTPReceiver"
-#include <utils/Log.h>
-
-#include "RTPAssembler.h"
-#include "RTPReceiver.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-#define TRACK_PACKET_LOSS       0
-
-namespace android {
-
-////////////////////////////////////////////////////////////////////////////////
-
-struct RTPReceiver::Source : public AHandler {
-    Source(RTPReceiver *receiver, uint32_t ssrc);
-
-    void onPacketReceived(uint16_t seq, const sp<ABuffer> &buffer);
-
-    void addReportBlock(uint32_t ssrc, const sp<ABuffer> &buf);
-
-protected:
-    virtual ~Source();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatRetransmit,
-        kWhatDeclareLost,
-    };
-
-    static const uint32_t kMinSequential = 2;
-    static const uint32_t kMaxDropout = 3000;
-    static const uint32_t kMaxMisorder = 100;
-    static const uint32_t kRTPSeqMod = 1u << 16;
-    static const int64_t kReportIntervalUs = 10000000ll;
-
-    RTPReceiver *mReceiver;
-    uint32_t mSSRC;
-    bool mFirst;
-    uint16_t mMaxSeq;
-    uint32_t mCycles;
-    uint32_t mBaseSeq;
-    uint32_t mReceived;
-    uint32_t mExpectedPrior;
-    uint32_t mReceivedPrior;
-
-    int64_t mFirstArrivalTimeUs;
-    int64_t mFirstRTPTimeUs;
-
-    // Ordered by extended seq number.
-    List<sp<ABuffer> > mPackets;
-
-    enum StatusBits {
-        STATUS_DECLARED_LOST            = 1,
-        STATUS_REQUESTED_RETRANSMISSION = 2,
-        STATUS_ARRIVED_LATE             = 4,
-    };
-#if TRACK_PACKET_LOSS
-    KeyedVector<int32_t, uint32_t> mLostPackets;
-#endif
-
-    void modifyPacketStatus(int32_t extSeqNo, uint32_t mask);
-
-    int32_t mAwaitingExtSeqNo;
-    bool mRequestedRetransmission;
-
-    int32_t mActivePacketType;
-    sp<Assembler> mActiveAssembler;
-
-    int64_t mNextReportTimeUs;
-
-    int32_t mNumDeclaredLost;
-    int32_t mNumDeclaredLostPrior;
-
-    int32_t mRetransmitGeneration;
-    int32_t mDeclareLostGeneration;
-    bool mDeclareLostTimerPending;
-
-    void queuePacket(const sp<ABuffer> &packet);
-    void dequeueMore();
-
-    sp<ABuffer> getNextPacket();
-    void resync();
-
-    void postRetransmitTimer(int64_t delayUs);
-    void postDeclareLostTimer(int64_t delayUs);
-    void cancelTimers();
-
-    DISALLOW_EVIL_CONSTRUCTORS(Source);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::Source::Source(RTPReceiver *receiver, uint32_t ssrc)
-    : mReceiver(receiver),
-      mSSRC(ssrc),
-      mFirst(true),
-      mMaxSeq(0),
-      mCycles(0),
-      mBaseSeq(0),
-      mReceived(0),
-      mExpectedPrior(0),
-      mReceivedPrior(0),
-      mFirstArrivalTimeUs(-1ll),
-      mFirstRTPTimeUs(-1ll),
-      mAwaitingExtSeqNo(-1),
-      mRequestedRetransmission(false),
-      mActivePacketType(-1),
-      mNextReportTimeUs(-1ll),
-      mNumDeclaredLost(0),
-      mNumDeclaredLostPrior(0),
-      mRetransmitGeneration(0),
-      mDeclareLostGeneration(0),
-      mDeclareLostTimerPending(false) {
-}
-
-RTPReceiver::Source::~Source() {
-}
-
-void RTPReceiver::Source::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatRetransmit:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mRetransmitGeneration) {
-                break;
-            }
-
-            mRequestedRetransmission = true;
-            mReceiver->requestRetransmission(mSSRC, mAwaitingExtSeqNo);
-
-            modifyPacketStatus(
-                    mAwaitingExtSeqNo, STATUS_REQUESTED_RETRANSMISSION);
-            break;
-        }
-
-        case kWhatDeclareLost:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mDeclareLostGeneration) {
-                break;
-            }
-
-            cancelTimers();
-
-            ALOGV("Lost packet extSeqNo %d %s",
-                  mAwaitingExtSeqNo,
-                  mRequestedRetransmission ? "*" : "");
-
-            mRequestedRetransmission = false;
-            if (mActiveAssembler != NULL) {
-                mActiveAssembler->signalDiscontinuity();
-            }
-
-            modifyPacketStatus(mAwaitingExtSeqNo, STATUS_DECLARED_LOST);
-
-            // resync();
-            ++mAwaitingExtSeqNo;
-            ++mNumDeclaredLost;
-
-            mReceiver->notifyPacketLost();
-
-            dequeueMore();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void RTPReceiver::Source::onPacketReceived(
-        uint16_t seq, const sp<ABuffer> &buffer) {
-    if (mFirst) {
-        buffer->setInt32Data(mCycles | seq);
-        queuePacket(buffer);
-
-        mFirst = false;
-        mBaseSeq = seq;
-        mMaxSeq = seq;
-        ++mReceived;
-        return;
-    }
-
-    uint16_t udelta = seq - mMaxSeq;
-
-    if (udelta < kMaxDropout) {
-        // In order, with permissible gap.
-
-        if (seq < mMaxSeq) {
-            // Sequence number wrapped - count another 64K cycle
-            mCycles += kRTPSeqMod;
-        }
-
-        mMaxSeq = seq;
-
-        ++mReceived;
-    } else if (udelta <= kRTPSeqMod - kMaxMisorder) {
-        // The sequence number made a very large jump
-        return;
-    } else {
-        // Duplicate or reordered packet.
-    }
-
-    buffer->setInt32Data(mCycles | seq);
-    queuePacket(buffer);
-}
-
-void RTPReceiver::Source::queuePacket(const sp<ABuffer> &packet) {
-    int32_t newExtendedSeqNo = packet->int32Data();
-
-    if (mFirstArrivalTimeUs < 0ll) {
-        mFirstArrivalTimeUs = ALooper::GetNowUs();
-
-        uint32_t rtpTime;
-        CHECK(packet->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-
-        mFirstRTPTimeUs = (rtpTime * 100ll) / 9ll;
-    }
-
-    if (mAwaitingExtSeqNo >= 0 && newExtendedSeqNo < mAwaitingExtSeqNo) {
-        // We're no longer interested in these. They're old.
-        ALOGV("dropping stale extSeqNo %d", newExtendedSeqNo);
-
-        modifyPacketStatus(newExtendedSeqNo, STATUS_ARRIVED_LATE);
-        return;
-    }
-
-    if (mPackets.empty()) {
-        mPackets.push_back(packet);
-        dequeueMore();
-        return;
-    }
-
-    List<sp<ABuffer> >::iterator firstIt = mPackets.begin();
-    List<sp<ABuffer> >::iterator it = --mPackets.end();
-    for (;;) {
-        int32_t extendedSeqNo = (*it)->int32Data();
-
-        if (extendedSeqNo == newExtendedSeqNo) {
-            // Duplicate packet.
-            return;
-        }
-
-        if (extendedSeqNo < newExtendedSeqNo) {
-            // Insert new packet after the one at "it".
-            mPackets.insert(++it, packet);
-            break;
-        }
-
-        if (it == firstIt) {
-            // Insert new packet before the first existing one.
-            mPackets.insert(it, packet);
-            break;
-        }
-
-        --it;
-    }
-
-    dequeueMore();
-}
-
-void RTPReceiver::Source::dequeueMore() {
-    int64_t nowUs = ALooper::GetNowUs();
-    if (mNextReportTimeUs < 0ll || nowUs >= mNextReportTimeUs) {
-        if (mNextReportTimeUs >= 0ll) {
-            uint32_t expected = (mMaxSeq | mCycles) - mBaseSeq + 1;
-
-            uint32_t expectedInterval = expected - mExpectedPrior;
-            mExpectedPrior = expected;
-
-            uint32_t receivedInterval = mReceived - mReceivedPrior;
-            mReceivedPrior = mReceived;
-
-            int64_t lostInterval =
-                (int64_t)expectedInterval - (int64_t)receivedInterval;
-
-            int32_t declaredLostInterval =
-                mNumDeclaredLost - mNumDeclaredLostPrior;
-
-            mNumDeclaredLostPrior = mNumDeclaredLost;
-
-            if (declaredLostInterval > 0) {
-                ALOGI("lost %lld packets (%.2f %%), declared %d lost\n",
-                      lostInterval,
-                      100.0f * lostInterval / expectedInterval,
-                      declaredLostInterval);
-            }
-        }
-
-        mNextReportTimeUs = nowUs + kReportIntervalUs;
-
-#if TRACK_PACKET_LOSS
-        for (size_t i = 0; i < mLostPackets.size(); ++i) {
-            int32_t key = mLostPackets.keyAt(i);
-            uint32_t value = mLostPackets.valueAt(i);
-
-            AString status;
-            if (value & STATUS_REQUESTED_RETRANSMISSION) {
-                status.append("retrans ");
-            }
-            if (value & STATUS_ARRIVED_LATE) {
-                status.append("arrived-late ");
-            }
-            ALOGI("Packet %d declared lost %s", key, status.c_str());
-        }
-#endif
-    }
-
-    sp<ABuffer> packet;
-    while ((packet = getNextPacket()) != NULL) {
-        if (mDeclareLostTimerPending) {
-            cancelTimers();
-        }
-
-        CHECK_GE(mAwaitingExtSeqNo, 0);
-#if TRACK_PACKET_LOSS
-        mLostPackets.removeItem(mAwaitingExtSeqNo);
-#endif
-
-        int32_t packetType;
-        CHECK(packet->meta()->findInt32("PT", &packetType));
-
-        if (packetType != mActivePacketType) {
-            mActiveAssembler = mReceiver->makeAssembler(packetType);
-            mActivePacketType = packetType;
-        }
-
-        if (mActiveAssembler != NULL) {
-            status_t err = mActiveAssembler->processPacket(packet);
-            if (err != OK) {
-                ALOGV("assembler returned error %d", err);
-            }
-        }
-
-        ++mAwaitingExtSeqNo;
-    }
-
-    if (mDeclareLostTimerPending) {
-        return;
-    }
-
-    if (mPackets.empty()) {
-        return;
-    }
-
-    CHECK_GE(mAwaitingExtSeqNo, 0);
-
-    const sp<ABuffer> &firstPacket = *mPackets.begin();
-
-    uint32_t rtpTime;
-    CHECK(firstPacket->meta()->findInt32(
-                "rtp-time", (int32_t *)&rtpTime));
-
-
-    int64_t rtpUs = (rtpTime * 100ll) / 9ll;
-
-    int64_t maxArrivalTimeUs =
-        mFirstArrivalTimeUs + rtpUs - mFirstRTPTimeUs;
-
-    nowUs = ALooper::GetNowUs();
-
-    CHECK_LT(mAwaitingExtSeqNo, firstPacket->int32Data());
-
-    ALOGV("waiting for %d, comparing against %d, %lld us left",
-          mAwaitingExtSeqNo,
-          firstPacket->int32Data(),
-          maxArrivalTimeUs - nowUs);
-
-    postDeclareLostTimer(maxArrivalTimeUs + kPacketLostAfterUs);
-
-    if (kRequestRetransmissionAfterUs > 0ll) {
-        postRetransmitTimer(
-                maxArrivalTimeUs + kRequestRetransmissionAfterUs);
-    }
-}
-
-sp<ABuffer> RTPReceiver::Source::getNextPacket() {
-    if (mPackets.empty()) {
-        return NULL;
-    }
-
-    int32_t extSeqNo = (*mPackets.begin())->int32Data();
-
-    if (mAwaitingExtSeqNo < 0) {
-        mAwaitingExtSeqNo = extSeqNo;
-    } else if (extSeqNo != mAwaitingExtSeqNo) {
-        return NULL;
-    }
-
-    sp<ABuffer> packet = *mPackets.begin();
-    mPackets.erase(mPackets.begin());
-
-    return packet;
-}
-
-void RTPReceiver::Source::resync() {
-    mAwaitingExtSeqNo = -1;
-}
-
-void RTPReceiver::Source::addReportBlock(
-        uint32_t ssrc, const sp<ABuffer> &buf) {
-    uint32_t extMaxSeq = mMaxSeq | mCycles;
-    uint32_t expected = extMaxSeq - mBaseSeq + 1;
-
-    int64_t lost = (int64_t)expected - (int64_t)mReceived;
-    if (lost > 0x7fffff) {
-        lost = 0x7fffff;
-    } else if (lost < -0x800000) {
-        lost = -0x800000;
-    }
-
-    uint32_t expectedInterval = expected - mExpectedPrior;
-    mExpectedPrior = expected;
-
-    uint32_t receivedInterval = mReceived - mReceivedPrior;
-    mReceivedPrior = mReceived;
-
-    int64_t lostInterval = expectedInterval - receivedInterval;
-
-    uint8_t fractionLost;
-    if (expectedInterval == 0 || lostInterval <=0) {
-        fractionLost = 0;
-    } else {
-        fractionLost = (lostInterval << 8) / expectedInterval;
-    }
-
-    uint8_t *ptr = buf->data() + buf->size();
-
-    ptr[0] = ssrc >> 24;
-    ptr[1] = (ssrc >> 16) & 0xff;
-    ptr[2] = (ssrc >> 8) & 0xff;
-    ptr[3] = ssrc & 0xff;
-
-    ptr[4] = fractionLost;
-
-    ptr[5] = (lost >> 16) & 0xff;
-    ptr[6] = (lost >> 8) & 0xff;
-    ptr[7] = lost & 0xff;
-
-    ptr[8] = extMaxSeq >> 24;
-    ptr[9] = (extMaxSeq >> 16) & 0xff;
-    ptr[10] = (extMaxSeq >> 8) & 0xff;
-    ptr[11] = extMaxSeq & 0xff;
-
-    // XXX TODO:
-
-    ptr[12] = 0x00;  // interarrival jitter
-    ptr[13] = 0x00;
-    ptr[14] = 0x00;
-    ptr[15] = 0x00;
-
-    ptr[16] = 0x00;  // last SR
-    ptr[17] = 0x00;
-    ptr[18] = 0x00;
-    ptr[19] = 0x00;
-
-    ptr[20] = 0x00;  // delay since last SR
-    ptr[21] = 0x00;
-    ptr[22] = 0x00;
-    ptr[23] = 0x00;
-
-    buf->setRange(buf->offset(), buf->size() + 24);
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::RTPReceiver(
-        const sp<ANetworkSession> &netSession,
-        const sp<AMessage> &notify,
-        uint32_t flags)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mFlags(flags),
-      mRTPMode(TRANSPORT_UNDEFINED),
-      mRTCPMode(TRANSPORT_UNDEFINED),
-      mRTPSessionID(0),
-      mRTCPSessionID(0),
-      mRTPConnected(false),
-      mRTCPConnected(false),
-      mRTPClientSessionID(0),
-      mRTCPClientSessionID(0) {
-}
-
-RTPReceiver::~RTPReceiver() {
-    if (mRTCPClientSessionID != 0) {
-        mNetSession->destroySession(mRTCPClientSessionID);
-        mRTCPClientSessionID = 0;
-    }
-
-    if (mRTPClientSessionID != 0) {
-        mNetSession->destroySession(mRTPClientSessionID);
-        mRTPClientSessionID = 0;
-    }
-
-    if (mRTCPSessionID != 0) {
-        mNetSession->destroySession(mRTCPSessionID);
-        mRTCPSessionID = 0;
-    }
-
-    if (mRTPSessionID != 0) {
-        mNetSession->destroySession(mRTPSessionID);
-        mRTPSessionID = 0;
-    }
-}
-
-status_t RTPReceiver::initAsync(
-        TransportMode rtpMode,
-        TransportMode rtcpMode,
-        int32_t *outLocalRTPPort) {
-    if (mRTPMode != TRANSPORT_UNDEFINED
-            || rtpMode == TRANSPORT_UNDEFINED
-            || rtpMode == TRANSPORT_NONE
-            || rtcpMode == TRANSPORT_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED);
-    CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED);
-
-    sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id());
-
-    sp<AMessage> rtcpNotify;
-    if (rtcpMode != TRANSPORT_NONE) {
-        rtcpNotify = new AMessage(kWhatRTCPNotify, id());
-    }
-
-    CHECK_EQ(mRTPSessionID, 0);
-    CHECK_EQ(mRTCPSessionID, 0);
-
-    int32_t localRTPPort;
-
-    struct in_addr ifaceAddr;
-    ifaceAddr.s_addr = INADDR_ANY;
-
-    for (;;) {
-        localRTPPort = PickRandomRTPPort();
-
-        status_t err;
-        if (rtpMode == TRANSPORT_UDP) {
-            err = mNetSession->createUDPSession(
-                    localRTPPort,
-                    rtpNotify,
-                    &mRTPSessionID);
-        } else {
-            CHECK_EQ(rtpMode, TRANSPORT_TCP);
-            err = mNetSession->createTCPDatagramSession(
-                    ifaceAddr,
-                    localRTPPort,
-                    rtpNotify,
-                    &mRTPSessionID);
-        }
-
-        if (err != OK) {
-            continue;
-        }
-
-        if (rtcpMode == TRANSPORT_NONE) {
-            break;
-        } else if (rtcpMode == TRANSPORT_UDP) {
-            err = mNetSession->createUDPSession(
-                    localRTPPort + 1,
-                    rtcpNotify,
-                    &mRTCPSessionID);
-        } else {
-            CHECK_EQ(rtpMode, TRANSPORT_TCP);
-            err = mNetSession->createTCPDatagramSession(
-                    ifaceAddr,
-                    localRTPPort + 1,
-                    rtcpNotify,
-                    &mRTCPSessionID);
-        }
-
-        if (err == OK) {
-            break;
-        }
-
-        mNetSession->destroySession(mRTPSessionID);
-        mRTPSessionID = 0;
-    }
-
-    mRTPMode = rtpMode;
-    mRTCPMode = rtcpMode;
-    *outLocalRTPPort = localRTPPort;
-
-    return OK;
-}
-
-status_t RTPReceiver::connect(
-        const char *remoteHost, int32_t remoteRTPPort, int32_t remoteRTCPPort) {
-    status_t err;
-
-    if (mRTPMode == TRANSPORT_UDP) {
-        CHECK(!mRTPConnected);
-
-        err = mNetSession->connectUDPSession(
-                mRTPSessionID, remoteHost, remoteRTPPort);
-
-        if (err != OK) {
-            notifyInitDone(err);
-            return err;
-        }
-
-        ALOGI("connectUDPSession RTP successful.");
-
-        mRTPConnected = true;
-    }
-
-    if (mRTCPMode == TRANSPORT_UDP) {
-        CHECK(!mRTCPConnected);
-
-        err = mNetSession->connectUDPSession(
-                mRTCPSessionID, remoteHost, remoteRTCPPort);
-
-        if (err != OK) {
-            notifyInitDone(err);
-            return err;
-        }
-
-        scheduleSendRR();
-
-        ALOGI("connectUDPSession RTCP successful.");
-
-        mRTCPConnected = true;
-    }
-
-    if (mRTPConnected
-            && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) {
-        notifyInitDone(OK);
-    }
-
-    return OK;
-}
-
-status_t RTPReceiver::informSender(const sp<AMessage> &params) {
-    if (!mRTCPConnected) {
-        return INVALID_OPERATION;
-    }
-
-    int64_t avgLatencyUs;
-    CHECK(params->findInt64("avgLatencyUs", &avgLatencyUs));
-
-    int64_t maxLatencyUs;
-    CHECK(params->findInt64("maxLatencyUs", &maxLatencyUs));
-
-    sp<ABuffer> buf = new ABuffer(28);
-
-    uint8_t *ptr = buf->data();
-    ptr[0] = 0x80 | 0;
-    ptr[1] = 204;  // APP
-    ptr[2] = 0;
-
-    CHECK((buf->size() % 4) == 0u);
-    ptr[3] = (buf->size() / 4) - 1;
-
-    ptr[4] = kSourceID >> 24;  // SSRC
-    ptr[5] = (kSourceID >> 16) & 0xff;
-    ptr[6] = (kSourceID >> 8) & 0xff;
-    ptr[7] = kSourceID & 0xff;
-    ptr[8] = 'l';
-    ptr[9] = 'a';
-    ptr[10] = 't';
-    ptr[11] = 'e';
-
-    ptr[12] = avgLatencyUs >> 56;
-    ptr[13] = (avgLatencyUs >> 48) & 0xff;
-    ptr[14] = (avgLatencyUs >> 40) & 0xff;
-    ptr[15] = (avgLatencyUs >> 32) & 0xff;
-    ptr[16] = (avgLatencyUs >> 24) & 0xff;
-    ptr[17] = (avgLatencyUs >> 16) & 0xff;
-    ptr[18] = (avgLatencyUs >> 8) & 0xff;
-    ptr[19] = avgLatencyUs & 0xff;
-
-    ptr[20] = maxLatencyUs >> 56;
-    ptr[21] = (maxLatencyUs >> 48) & 0xff;
-    ptr[22] = (maxLatencyUs >> 40) & 0xff;
-    ptr[23] = (maxLatencyUs >> 32) & 0xff;
-    ptr[24] = (maxLatencyUs >> 24) & 0xff;
-    ptr[25] = (maxLatencyUs >> 16) & 0xff;
-    ptr[26] = (maxLatencyUs >> 8) & 0xff;
-    ptr[27] = maxLatencyUs & 0xff;
-
-    mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-
-    return OK;
-}
-
-void RTPReceiver::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatRTPNotify:
-        case kWhatRTCPNotify:
-            onNetNotify(msg->what() == kWhatRTPNotify, msg);
-            break;
-
-        case kWhatSendRR:
-        {
-            onSendRR();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void RTPReceiver::onNetNotify(bool isRTP, const sp<AMessage> &msg) {
-    int32_t reason;
-    CHECK(msg->findInt32("reason", &reason));
-
-    switch (reason) {
-        case ANetworkSession::kWhatError:
-        {
-            int32_t sessionID;
-            CHECK(msg->findInt32("sessionID", &sessionID));
-
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            int32_t errorOccuredDuringSend;
-            CHECK(msg->findInt32("send", &errorOccuredDuringSend));
-
-            AString detail;
-            CHECK(msg->findString("detail", &detail));
-
-            ALOGE("An error occurred during %s in session %d "
-                  "(%d, '%s' (%s)).",
-                  errorOccuredDuringSend ? "send" : "receive",
-                  sessionID,
-                  err,
-                  detail.c_str(),
-                  strerror(-err));
-
-            mNetSession->destroySession(sessionID);
-
-            if (sessionID == mRTPSessionID) {
-                mRTPSessionID = 0;
-            } else if (sessionID == mRTCPSessionID) {
-                mRTCPSessionID = 0;
-            } else if (sessionID == mRTPClientSessionID) {
-                mRTPClientSessionID = 0;
-            } else if (sessionID == mRTCPClientSessionID) {
-                mRTCPClientSessionID = 0;
-            }
-
-            if (!mRTPConnected
-                    || (mRTCPMode != TRANSPORT_NONE && !mRTCPConnected)) {
-                notifyInitDone(err);
-                break;
-            }
-
-            notifyError(err);
-            break;
-        }
-
-        case ANetworkSession::kWhatDatagram:
-        {
-            sp<ABuffer> data;
-            CHECK(msg->findBuffer("data", &data));
-
-            if (isRTP) {
-                if (mFlags & FLAG_AUTO_CONNECT) {
-                    AString fromAddr;
-                    CHECK(msg->findString("fromAddr", &fromAddr));
-
-                    int32_t fromPort;
-                    CHECK(msg->findInt32("fromPort", &fromPort));
-
-                    CHECK_EQ((status_t)OK,
-                             connect(
-                                 fromAddr.c_str(), fromPort, fromPort + 1));
-
-                    mFlags &= ~FLAG_AUTO_CONNECT;
-                }
-
-                onRTPData(data);
-            } else {
-                onRTCPData(data);
-            }
-            break;
-        }
-
-        case ANetworkSession::kWhatClientConnected:
-        {
-            int32_t sessionID;
-            CHECK(msg->findInt32("sessionID", &sessionID));
-
-            if (isRTP) {
-                CHECK_EQ(mRTPMode, TRANSPORT_TCP);
-
-                if (mRTPClientSessionID != 0) {
-                    // We only allow a single client connection.
-                    mNetSession->destroySession(sessionID);
-                    sessionID = 0;
-                    break;
-                }
-
-                mRTPClientSessionID = sessionID;
-                mRTPConnected = true;
-            } else {
-                CHECK_EQ(mRTCPMode, TRANSPORT_TCP);
-
-                if (mRTCPClientSessionID != 0) {
-                    // We only allow a single client connection.
-                    mNetSession->destroySession(sessionID);
-                    sessionID = 0;
-                    break;
-                }
-
-                mRTCPClientSessionID = sessionID;
-                mRTCPConnected = true;
-            }
-
-            if (mRTPConnected
-                    && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) {
-                notifyInitDone(OK);
-            }
-            break;
-        }
-    }
-}
-
-void RTPReceiver::notifyInitDone(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatInitDone);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void RTPReceiver::notifyError(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void RTPReceiver::notifyPacketLost() {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatPacketLost);
-    notify->post();
-}
-
-status_t RTPReceiver::onRTPData(const sp<ABuffer> &buffer) {
-    size_t size = buffer->size();
-    if (size < 12) {
-        // Too short to be a valid RTP header.
-        return ERROR_MALFORMED;
-    }
-
-    const uint8_t *data = buffer->data();
-
-    if ((data[0] >> 6) != 2) {
-        // Unsupported version.
-        return ERROR_UNSUPPORTED;
-    }
-
-    if (data[0] & 0x20) {
-        // Padding present.
-
-        size_t paddingLength = data[size - 1];
-
-        if (paddingLength + 12 > size) {
-            // If we removed this much padding we'd end up with something
-            // that's too short to be a valid RTP header.
-            return ERROR_MALFORMED;
-        }
-
-        size -= paddingLength;
-    }
-
-    int numCSRCs = data[0] & 0x0f;
-
-    size_t payloadOffset = 12 + 4 * numCSRCs;
-
-    if (size < payloadOffset) {
-        // Not enough data to fit the basic header and all the CSRC entries.
-        return ERROR_MALFORMED;
-    }
-
-    if (data[0] & 0x10) {
-        // Header eXtension present.
-
-        if (size < payloadOffset + 4) {
-            // Not enough data to fit the basic header, all CSRC entries
-            // and the first 4 bytes of the extension header.
-
-            return ERROR_MALFORMED;
-        }
-
-        const uint8_t *extensionData = &data[payloadOffset];
-
-        size_t extensionLength =
-            4 * (extensionData[2] << 8 | extensionData[3]);
-
-        if (size < payloadOffset + 4 + extensionLength) {
-            return ERROR_MALFORMED;
-        }
-
-        payloadOffset += 4 + extensionLength;
-    }
-
-    uint32_t srcId = U32_AT(&data[8]);
-    uint32_t rtpTime = U32_AT(&data[4]);
-    uint16_t seqNo = U16_AT(&data[2]);
-
-    sp<AMessage> meta = buffer->meta();
-    meta->setInt32("ssrc", srcId);
-    meta->setInt32("rtp-time", rtpTime);
-    meta->setInt32("PT", data[1] & 0x7f);
-    meta->setInt32("M", data[1] >> 7);
-
-    buffer->setRange(payloadOffset, size - payloadOffset);
-
-    ssize_t index = mSources.indexOfKey(srcId);
-    sp<Source> source;
-    if (index < 0) {
-        source = new Source(this, srcId);
-        looper()->registerHandler(source);
-
-        mSources.add(srcId, source);
-    } else {
-        source = mSources.valueAt(index);
-    }
-
-    source->onPacketReceived(seqNo, buffer);
-
-    return OK;
-}
-
-status_t RTPReceiver::onRTCPData(const sp<ABuffer> &data) {
-    ALOGI("onRTCPData");
-    return OK;
-}
-
-void RTPReceiver::addSDES(const sp<ABuffer> &buffer) {
-    uint8_t *data = buffer->data() + buffer->size();
-    data[0] = 0x80 | 1;
-    data[1] = 202;  // SDES
-    data[4] = kSourceID >> 24;  // SSRC
-    data[5] = (kSourceID >> 16) & 0xff;
-    data[6] = (kSourceID >> 8) & 0xff;
-    data[7] = kSourceID & 0xff;
-
-    size_t offset = 8;
-
-    data[offset++] = 1;  // CNAME
-
-    AString cname = "stagefright@somewhere";
-    data[offset++] = cname.size();
-
-    memcpy(&data[offset], cname.c_str(), cname.size());
-    offset += cname.size();
-
-    data[offset++] = 6;  // TOOL
-
-    AString tool = "stagefright/1.0";
-    data[offset++] = tool.size();
-
-    memcpy(&data[offset], tool.c_str(), tool.size());
-    offset += tool.size();
-
-    data[offset++] = 0;
-
-    if ((offset % 4) > 0) {
-        size_t count = 4 - (offset % 4);
-        switch (count) {
-            case 3:
-                data[offset++] = 0;
-            case 2:
-                data[offset++] = 0;
-            case 1:
-                data[offset++] = 0;
-        }
-    }
-
-    size_t numWords = (offset / 4) - 1;
-    data[2] = numWords >> 8;
-    data[3] = numWords & 0xff;
-
-    buffer->setRange(buffer->offset(), buffer->size() + offset);
-}
-
-void RTPReceiver::scheduleSendRR() {
-    (new AMessage(kWhatSendRR, id()))->post(5000000ll);
-}
-
-void RTPReceiver::onSendRR() {
-    sp<ABuffer> buf = new ABuffer(kMaxUDPPacketSize);
-    buf->setRange(0, 0);
-
-    uint8_t *ptr = buf->data();
-    ptr[0] = 0x80 | 0;
-    ptr[1] = 201;  // RR
-    ptr[2] = 0;
-    ptr[3] = 1;
-    ptr[4] = kSourceID >> 24;  // SSRC
-    ptr[5] = (kSourceID >> 16) & 0xff;
-    ptr[6] = (kSourceID >> 8) & 0xff;
-    ptr[7] = kSourceID & 0xff;
-
-    buf->setRange(0, 8);
-
-    size_t numReportBlocks = 0;
-    for (size_t i = 0; i < mSources.size(); ++i) {
-        uint32_t ssrc = mSources.keyAt(i);
-        sp<Source> source = mSources.valueAt(i);
-
-        if (numReportBlocks > 31 || buf->size() + 24 > buf->capacity()) {
-            // Cannot fit another report block.
-            break;
-        }
-
-        source->addReportBlock(ssrc, buf);
-        ++numReportBlocks;
-    }
-
-    ptr[0] |= numReportBlocks;  // 5 bit
-
-    size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks;
-    ptr[2] = sizeInWordsMinus1 >> 8;
-    ptr[3] = sizeInWordsMinus1 & 0xff;
-
-    buf->setRange(0, (sizeInWordsMinus1 + 1) * 4);
-
-    addSDES(buf);
-
-    mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-
-    scheduleSendRR();
-}
-
-status_t RTPReceiver::registerPacketType(
-        uint8_t packetType, PacketizationMode mode) {
-    mPacketTypes.add(packetType, mode);
-
-    return OK;
-}
-
-sp<RTPReceiver::Assembler> RTPReceiver::makeAssembler(uint8_t packetType) {
-    ssize_t index = mPacketTypes.indexOfKey(packetType);
-    if (index < 0) {
-        return NULL;
-    }
-
-    PacketizationMode mode = mPacketTypes.valueAt(index);
-
-    switch (mode) {
-        case PACKETIZATION_NONE:
-        case PACKETIZATION_TRANSPORT_STREAM:
-            return new TSAssembler(mNotify);
-
-        case PACKETIZATION_H264:
-            return new H264Assembler(mNotify);
-
-        default:
-            return NULL;
-    }
-}
-
-void RTPReceiver::requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo) {
-    int32_t blp = 0;
-
-    sp<ABuffer> buf = new ABuffer(16);
-    buf->setRange(0, 0);
-
-    uint8_t *ptr = buf->data();
-    ptr[0] = 0x80 | 1;  // generic NACK
-    ptr[1] = 205;  // TSFB
-    ptr[2] = 0;
-    ptr[3] = 3;
-    ptr[8] = (senderSSRC >> 24) & 0xff;
-    ptr[9] = (senderSSRC >> 16) & 0xff;
-    ptr[10] = (senderSSRC >> 8) & 0xff;
-    ptr[11] = (senderSSRC & 0xff);
-    ptr[8] = (kSourceID >> 24) & 0xff;
-    ptr[9] = (kSourceID >> 16) & 0xff;
-    ptr[10] = (kSourceID >> 8) & 0xff;
-    ptr[11] = (kSourceID & 0xff);
-    ptr[12] = (extSeqNo >> 8) & 0xff;
-    ptr[13] = (extSeqNo & 0xff);
-    ptr[14] = (blp >> 8) & 0xff;
-    ptr[15] = (blp & 0xff);
-
-    buf->setRange(0, 16);
-
-     mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-}
-
-void RTPReceiver::Source::modifyPacketStatus(int32_t extSeqNo, uint32_t mask) {
-#if TRACK_PACKET_LOSS
-    ssize_t index = mLostPackets.indexOfKey(extSeqNo);
-    if (index < 0) {
-        mLostPackets.add(extSeqNo, mask);
-    } else {
-        mLostPackets.editValueAt(index) |= mask;
-    }
-#endif
-}
-
-void RTPReceiver::Source::postRetransmitTimer(int64_t timeUs) {
-    int64_t delayUs = timeUs - ALooper::GetNowUs();
-    sp<AMessage> msg = new AMessage(kWhatRetransmit, id());
-    msg->setInt32("generation", mRetransmitGeneration);
-    msg->post(delayUs);
-}
-
-void RTPReceiver::Source::postDeclareLostTimer(int64_t timeUs) {
-    CHECK(!mDeclareLostTimerPending);
-    mDeclareLostTimerPending = true;
-
-    int64_t delayUs = timeUs - ALooper::GetNowUs();
-    sp<AMessage> msg = new AMessage(kWhatDeclareLost, id());
-    msg->setInt32("generation", mDeclareLostGeneration);
-    msg->post(delayUs);
-}
-
-void RTPReceiver::Source::cancelTimers() {
-    ++mRetransmitGeneration;
-    ++mDeclareLostGeneration;
-    mDeclareLostTimerPending = false;
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h
deleted file mode 100644
index 240ab2e..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPReceiver.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_RECEIVER_H_
-
-#define RTP_RECEIVER_H_
-
-#include "RTPBase.h"
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-
-// An object of this class facilitates receiving of media data on an RTP
-// channel. The channel is established over a UDP or TCP connection depending
-// on which "TransportMode" was chosen. In addition different RTP packetization
-// schemes are supported such as "Transport Stream Packets over RTP",
-// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)"
-struct RTPReceiver : public RTPBase, public AHandler {
-    enum {
-        kWhatInitDone,
-        kWhatError,
-        kWhatAccessUnit,
-        kWhatPacketLost,
-    };
-
-    enum Flags {
-        FLAG_AUTO_CONNECT = 1,
-    };
-    RTPReceiver(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify,
-            uint32_t flags = 0);
-
-    status_t registerPacketType(
-            uint8_t packetType, PacketizationMode mode);
-
-    status_t initAsync(
-            TransportMode rtpMode,
-            TransportMode rtcpMode,
-            int32_t *outLocalRTPPort);
-
-    status_t connect(
-            const char *remoteHost,
-            int32_t remoteRTPPort,
-            int32_t remoteRTCPPort);
-
-    status_t informSender(const sp<AMessage> &params);
-
-protected:
-    virtual ~RTPReceiver();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatRTPNotify,
-        kWhatRTCPNotify,
-        kWhatSendRR,
-    };
-
-    enum {
-        kSourceID                       = 0xdeadbeef,
-        kPacketLostAfterUs              = 100000,
-        kRequestRetransmissionAfterUs   = -1,
-    };
-
-    struct Assembler;
-    struct H264Assembler;
-    struct Source;
-    struct TSAssembler;
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-    uint32_t mFlags;
-    TransportMode mRTPMode;
-    TransportMode mRTCPMode;
-    int32_t mRTPSessionID;
-    int32_t mRTCPSessionID;
-    bool mRTPConnected;
-    bool mRTCPConnected;
-
-    int32_t mRTPClientSessionID;  // in TRANSPORT_TCP mode.
-    int32_t mRTCPClientSessionID;  // in TRANSPORT_TCP mode.
-
-    KeyedVector<uint8_t, PacketizationMode> mPacketTypes;
-    KeyedVector<uint32_t, sp<Source> > mSources;
-
-    void onNetNotify(bool isRTP, const sp<AMessage> &msg);
-    status_t onRTPData(const sp<ABuffer> &data);
-    status_t onRTCPData(const sp<ABuffer> &data);
-    void onSendRR();
-
-    void scheduleSendRR();
-    void addSDES(const sp<ABuffer> &buffer);
-
-    void notifyInitDone(status_t err);
-    void notifyError(status_t err);
-    void notifyPacketLost();
-
-    sp<Assembler> makeAssembler(uint8_t packetType);
-
-    void requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo);
-
-    DISALLOW_EVIL_CONSTRUCTORS(RTPReceiver);
-};
-
-}  // namespace android
-
-#endif  // RTP_RECEIVER_H_
diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp
deleted file mode 100644
index b902f29..0000000
--- a/media/libstagefright/wifi-display/rtptest.cpp
+++ /dev/null
@@ -1,565 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NEBUG 0
-#define LOG_TAG "rtptest"
-#include <utils/Log.h>
-
-#include "rtp/RTPSender.h"
-#include "rtp/RTPReceiver.h"
-#include "TimeSyncer.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/NuMediaExtractor.h>
-#include <media/stagefright/Utils.h>
-
-#define MEDIA_FILENAME "/sdcard/Frame Counter HD 30FPS_1080p.mp4"
-
-namespace android {
-
-struct PacketSource : public RefBase {
-    PacketSource() {}
-
-    virtual sp<ABuffer> getNextAccessUnit() = 0;
-
-protected:
-    virtual ~PacketSource() {}
-
-private:
-    DISALLOW_EVIL_CONSTRUCTORS(PacketSource);
-};
-
-struct MediaPacketSource : public PacketSource {
-    MediaPacketSource()
-        : mMaxSampleSize(1024 * 1024) {
-        mExtractor = new NuMediaExtractor;
-        CHECK_EQ((status_t)OK,
-                 mExtractor->setDataSource(MEDIA_FILENAME));
-
-        bool haveVideo = false;
-        for (size_t i = 0; i < mExtractor->countTracks(); ++i) {
-            sp<AMessage> format;
-            CHECK_EQ((status_t)OK, mExtractor->getTrackFormat(i, &format));
-
-            AString mime;
-            CHECK(format->findString("mime", &mime));
-
-            if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str())) {
-                mExtractor->selectTrack(i);
-                haveVideo = true;
-                break;
-            }
-        }
-
-        CHECK(haveVideo);
-    }
-
-    virtual sp<ABuffer> getNextAccessUnit() {
-        int64_t timeUs;
-        status_t err = mExtractor->getSampleTime(&timeUs);
-
-        if (err != OK) {
-            return NULL;
-        }
-
-        sp<ABuffer> accessUnit = new ABuffer(mMaxSampleSize);
-        CHECK_EQ((status_t)OK, mExtractor->readSampleData(accessUnit));
-
-        accessUnit->meta()->setInt64("timeUs", timeUs);
-
-        CHECK_EQ((status_t)OK, mExtractor->advance());
-
-        return accessUnit;
-    }
-
-protected:
-    virtual ~MediaPacketSource() {
-    }
-
-private:
-    sp<NuMediaExtractor> mExtractor;
-    size_t mMaxSampleSize;
-
-    DISALLOW_EVIL_CONSTRUCTORS(MediaPacketSource);
-};
-
-struct SimplePacketSource : public PacketSource {
-    SimplePacketSource()
-        : mCounter(0) {
-    }
-
-    virtual sp<ABuffer> getNextAccessUnit() {
-        sp<ABuffer> buffer = new ABuffer(4);
-        uint8_t *dst = buffer->data();
-        dst[0] = mCounter >> 24;
-        dst[1] = (mCounter >> 16) & 0xff;
-        dst[2] = (mCounter >> 8) & 0xff;
-        dst[3] = mCounter & 0xff;
-
-        buffer->meta()->setInt64("timeUs", mCounter * 1000000ll / kFrameRate);
-
-        ++mCounter;
-
-        return buffer;
-    }
-
-protected:
-    virtual ~SimplePacketSource() {
-    }
-
-private:
-    enum {
-        kFrameRate = 30
-    };
-
-    uint32_t mCounter;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SimplePacketSource);
-};
-
-struct TestHandler : public AHandler {
-    TestHandler(const sp<ANetworkSession> &netSession);
-
-    void listen();
-    void connect(const char *host, int32_t port);
-
-protected:
-    virtual ~TestHandler();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatListen,
-        kWhatConnect,
-        kWhatReceiverNotify,
-        kWhatSenderNotify,
-        kWhatSendMore,
-        kWhatStop,
-        kWhatTimeSyncerNotify,
-    };
-
-#if 1
-    static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_UDP;
-    static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_UDP;
-#else
-    static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_TCP;
-    static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_NONE;
-#endif
-
-#if 1
-    static const RTPBase::PacketizationMode kPacketizationMode
-        = RTPBase::PACKETIZATION_H264;
-#else
-    static const RTPBase::PacketizationMode kPacketizationMode
-        = RTPBase::PACKETIZATION_NONE;
-#endif
-
-    sp<ANetworkSession> mNetSession;
-    sp<PacketSource> mSource;
-    sp<RTPSender> mSender;
-    sp<RTPReceiver> mReceiver;
-
-    sp<TimeSyncer> mTimeSyncer;
-    bool mTimeSyncerStarted;
-
-    int64_t mFirstTimeRealUs;
-    int64_t mFirstTimeMediaUs;
-
-    int64_t mTimeOffsetUs;
-    bool mTimeOffsetValid;
-
-    status_t readMore();
-
-    DISALLOW_EVIL_CONSTRUCTORS(TestHandler);
-};
-
-TestHandler::TestHandler(const sp<ANetworkSession> &netSession)
-    : mNetSession(netSession),
-      mTimeSyncerStarted(false),
-      mFirstTimeRealUs(-1ll),
-      mFirstTimeMediaUs(-1ll),
-      mTimeOffsetUs(-1ll),
-      mTimeOffsetValid(false) {
-}
-
-TestHandler::~TestHandler() {
-}
-
-void TestHandler::listen() {
-    sp<AMessage> msg = new AMessage(kWhatListen, id());
-    msg->post();
-}
-
-void TestHandler::connect(const char *host, int32_t port) {
-    sp<AMessage> msg = new AMessage(kWhatConnect, id());
-    msg->setString("host", host);
-    msg->setInt32("port", port);
-    msg->post();
-}
-
-static void dumpDelay(int64_t delayMs) {
-    static const int64_t kMinDelayMs = 0;
-    static const int64_t kMaxDelayMs = 300;
-
-    const char *kPattern = "########################################";
-    size_t kPatternSize = strlen(kPattern);
-
-    int n = (kPatternSize * (delayMs - kMinDelayMs))
-                / (kMaxDelayMs - kMinDelayMs);
-
-    if (n < 0) {
-        n = 0;
-    } else if ((size_t)n > kPatternSize) {
-        n = kPatternSize;
-    }
-
-    ALOGI("(%4lld ms) %s\n",
-          delayMs,
-          kPattern + kPatternSize - n);
-}
-
-void TestHandler::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatListen:
-        {
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-
-            notify = new AMessage(kWhatReceiverNotify, id());
-            mReceiver = new RTPReceiver(
-                    mNetSession, notify, RTPReceiver::FLAG_AUTO_CONNECT);
-            looper()->registerHandler(mReceiver);
-
-            CHECK_EQ((status_t)OK,
-                     mReceiver->registerPacketType(33, kPacketizationMode));
-
-            int32_t receiverRTPPort;
-            CHECK_EQ((status_t)OK,
-                     mReceiver->initAsync(
-                         kRTPMode,
-                         kRTCPMode,
-                         &receiverRTPPort));
-
-            printf("picked receiverRTPPort %d\n", receiverRTPPort);
-
-#if 0
-            CHECK_EQ((status_t)OK,
-                     mReceiver->connect(
-                         "127.0.0.1", senderRTPPort, senderRTPPort + 1));
-#endif
-            break;
-        }
-
-        case kWhatConnect:
-        {
-            AString host;
-            CHECK(msg->findString("host", &host));
-
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-            mTimeSyncer->startServer(8123);
-
-            int32_t receiverRTPPort;
-            CHECK(msg->findInt32("port", &receiverRTPPort));
-
-#if 1
-            mSource = new MediaPacketSource;
-#else
-            mSource = new SimplePacketSource;
-#endif
-
-            notify = new AMessage(kWhatSenderNotify, id());
-            mSender = new RTPSender(mNetSession, notify);
-
-            looper()->registerHandler(mSender);
-
-            int32_t senderRTPPort;
-            CHECK_EQ((status_t)OK,
-                     mSender->initAsync(
-                         host.c_str(),
-                         receiverRTPPort,
-                         kRTPMode,
-                         kRTCPMode == RTPBase::TRANSPORT_NONE
-                            ? -1 : receiverRTPPort + 1,
-                         kRTCPMode,
-                         &senderRTPPort));
-
-            printf("picked senderRTPPort %d\n", senderRTPPort);
-            break;
-        }
-
-        case kWhatSenderNotify:
-        {
-            ALOGI("kWhatSenderNotify");
-
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            switch (what) {
-                case RTPSender::kWhatInitDone:
-                {
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    ALOGI("RTPSender::initAsync completed w/ err %d", err);
-
-                    if (err == OK) {
-                        err = readMore();
-
-                        if (err != OK) {
-                            (new AMessage(kWhatStop, id()))->post();
-                        }
-                    }
-                    break;
-                }
-
-                case RTPSender::kWhatError:
-                    break;
-            }
-            break;
-        }
-
-        case kWhatReceiverNotify:
-        {
-            ALOGV("kWhatReceiverNotify");
-
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            switch (what) {
-                case RTPReceiver::kWhatInitDone:
-                {
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    ALOGI("RTPReceiver::initAsync completed w/ err %d", err);
-                    break;
-                }
-
-                case RTPReceiver::kWhatError:
-                    break;
-
-                case RTPReceiver::kWhatAccessUnit:
-                {
-#if 0
-                    if (!mTimeSyncerStarted) {
-                        mTimeSyncer->startClient("172.18.41.216", 8123);
-                        mTimeSyncerStarted = true;
-                    }
-
-                    sp<ABuffer> accessUnit;
-                    CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-                    int64_t timeUs;
-                    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-                    if (mTimeOffsetValid) {
-                        timeUs -= mTimeOffsetUs;
-                        int64_t nowUs = ALooper::GetNowUs();
-                        int64_t delayMs = (nowUs - timeUs) / 1000ll;
-
-                        dumpDelay(delayMs);
-                    }
-#endif
-                    break;
-                }
-
-                case RTPReceiver::kWhatPacketLost:
-                    ALOGV("kWhatPacketLost");
-                    break;
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatSendMore:
-        {
-            sp<ABuffer> accessUnit;
-            CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-            CHECK_EQ((status_t)OK,
-                     mSender->queueBuffer(
-                         accessUnit,
-                         33,
-                         kPacketizationMode));
-
-            status_t err = readMore();
-
-            if (err != OK) {
-                (new AMessage(kWhatStop, id()))->post();
-            }
-            break;
-        }
-
-        case kWhatStop:
-        {
-            if (mReceiver != NULL) {
-                looper()->unregisterHandler(mReceiver->id());
-                mReceiver.clear();
-            }
-
-            if (mSender != NULL) {
-                looper()->unregisterHandler(mSender->id());
-                mSender.clear();
-            }
-
-            mSource.clear();
-
-            looper()->stop();
-            break;
-        }
-
-        case kWhatTimeSyncerNotify:
-        {
-            CHECK(msg->findInt64("offset", &mTimeOffsetUs));
-            mTimeOffsetValid = true;
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-status_t TestHandler::readMore() {
-    sp<ABuffer> accessUnit = mSource->getNextAccessUnit();
-
-    if (accessUnit == NULL) {
-        return ERROR_END_OF_STREAM;
-    }
-
-    int64_t timeUs;
-    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-    int64_t nowUs = ALooper::GetNowUs();
-    int64_t whenUs;
-
-    if (mFirstTimeRealUs < 0ll) {
-        mFirstTimeRealUs = whenUs = nowUs;
-        mFirstTimeMediaUs = timeUs;
-    } else {
-        whenUs = mFirstTimeRealUs + timeUs - mFirstTimeMediaUs;
-    }
-
-    accessUnit->meta()->setInt64("timeUs", whenUs);
-
-    sp<AMessage> msg = new AMessage(kWhatSendMore, id());
-    msg->setBuffer("accessUnit", accessUnit);
-    msg->post(whenUs - nowUs);
-
-    return OK;
-}
-
-}  // namespace android
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage: %s -c host:port\tconnect to remote host\n"
-            "               -l       \tlisten\n",
-            me);
-}
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    // srand(time(NULL));
-
-    ProcessState::self()->startThreadPool();
-
-    DataSource::RegisterDefaultSniffers();
-
-    bool listen = false;
-    int32_t connectToPort = -1;
-    AString connectToHost;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    usage(argv[0]);
-                    exit(1);
-                }
-
-                connectToHost.setTo(optarg, colonPos - optarg);
-
-                char *end;
-                connectToPort = strtol(colonPos + 1, &end, 10);
-
-                if (*end != '\0' || end == colonPos + 1
-                        || connectToPort < 1 || connectToPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case 'l':
-            {
-                listen = true;
-                break;
-            }
-
-            case '?':
-            case 'h':
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if (!listen && connectToPort < 0) {
-        fprintf(stderr,
-                "You need to select either client or server mode.\n");
-        exit(1);
-    }
-
-    sp<ANetworkSession> netSession = new ANetworkSession;
-    netSession->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<TestHandler> handler = new TestHandler(netSession);
-    looper->registerHandler(handler);
-
-    if (listen) {
-        handler->listen();
-    }
-
-    if (connectToPort >= 0) {
-        handler->connect(connectToHost.c_str(), connectToPort);
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    return 0;
-}
-
diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp
deleted file mode 100644
index cdb2267..0000000
--- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp
+++ /dev/null
@@ -1,653 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "DirectRenderer"
-#include <utils/Log.h>
-
-#include "DirectRenderer.h"
-
-#include <gui/SurfaceComposerClient.h>
-#include <gui/Surface.h>
-#include <media/AudioTrack.h>
-#include <media/ICrypto.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaCodec.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-
-namespace android {
-
-/*
-   Drives the decoding process using a MediaCodec instance. Input buffers
-   queued by calls to "queueInputBuffer" are fed to the decoder as soon
-   as the decoder is ready for them, the client is notified about output
-   buffers as the decoder spits them out.
-*/
-struct DirectRenderer::DecoderContext : public AHandler {
-    enum {
-        kWhatOutputBufferReady,
-    };
-    DecoderContext(const sp<AMessage> &notify);
-
-    status_t init(
-            const sp<AMessage> &format,
-            const sp<IGraphicBufferProducer> &surfaceTex);
-
-    void queueInputBuffer(const sp<ABuffer> &accessUnit);
-
-    status_t renderOutputBufferAndRelease(size_t index);
-    status_t releaseOutputBuffer(size_t index);
-
-protected:
-    virtual ~DecoderContext();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatDecoderNotify,
-    };
-
-    sp<AMessage> mNotify;
-    sp<ALooper> mDecoderLooper;
-    sp<MediaCodec> mDecoder;
-    Vector<sp<ABuffer> > mDecoderInputBuffers;
-    Vector<sp<ABuffer> > mDecoderOutputBuffers;
-    List<size_t> mDecoderInputBuffersAvailable;
-    bool mDecoderNotificationPending;
-
-    List<sp<ABuffer> > mAccessUnits;
-
-    void onDecoderNotify();
-    void scheduleDecoderNotification();
-    void queueDecoderInputBuffers();
-
-    void queueOutputBuffer(
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-    DISALLOW_EVIL_CONSTRUCTORS(DecoderContext);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-/*
-   A "push" audio renderer. The primary function of this renderer is to use
-   an AudioTrack in push mode and making sure not to block the event loop
-   be ensuring that calls to AudioTrack::write never block. This is done by
-   estimating an upper bound of data that can be written to the AudioTrack
-   buffer without delay.
-*/
-struct DirectRenderer::AudioRenderer : public AHandler {
-    AudioRenderer(const sp<DecoderContext> &decoderContext);
-
-    void queueInputBuffer(
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-protected:
-    virtual ~AudioRenderer();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatPushAudio,
-    };
-
-    struct BufferInfo {
-        size_t mIndex;
-        int64_t mTimeUs;
-        sp<ABuffer> mBuffer;
-    };
-
-    sp<DecoderContext> mDecoderContext;
-    sp<AudioTrack> mAudioTrack;
-
-    List<BufferInfo> mInputBuffers;
-    bool mPushPending;
-
-    size_t mNumFramesWritten;
-
-    void schedulePushIfNecessary();
-    void onPushAudio();
-
-    ssize_t writeNonBlocking(const uint8_t *data, size_t size);
-
-    DISALLOW_EVIL_CONSTRUCTORS(AudioRenderer);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-DirectRenderer::DecoderContext::DecoderContext(const sp<AMessage> &notify)
-    : mNotify(notify),
-      mDecoderNotificationPending(false) {
-}
-
-DirectRenderer::DecoderContext::~DecoderContext() {
-    if (mDecoder != NULL) {
-        mDecoder->release();
-        mDecoder.clear();
-
-        mDecoderLooper->stop();
-        mDecoderLooper.clear();
-    }
-}
-
-status_t DirectRenderer::DecoderContext::init(
-        const sp<AMessage> &format,
-        const sp<IGraphicBufferProducer> &surfaceTex) {
-    CHECK(mDecoder == NULL);
-
-    AString mime;
-    CHECK(format->findString("mime", &mime));
-
-    mDecoderLooper = new ALooper;
-    mDecoderLooper->setName("video codec looper");
-
-    mDecoderLooper->start(
-            false /* runOnCallingThread */,
-            false /* canCallJava */,
-            PRIORITY_DEFAULT);
-
-    mDecoder = MediaCodec::CreateByType(
-            mDecoderLooper, mime.c_str(), false /* encoder */);
-
-    CHECK(mDecoder != NULL);
-
-    status_t err = mDecoder->configure(
-            format,
-            surfaceTex == NULL
-                ? NULL : new Surface(surfaceTex),
-            NULL /* crypto */,
-            0 /* flags */);
-    CHECK_EQ(err, (status_t)OK);
-
-    err = mDecoder->start();
-    CHECK_EQ(err, (status_t)OK);
-
-    err = mDecoder->getInputBuffers(
-            &mDecoderInputBuffers);
-    CHECK_EQ(err, (status_t)OK);
-
-    err = mDecoder->getOutputBuffers(
-            &mDecoderOutputBuffers);
-    CHECK_EQ(err, (status_t)OK);
-
-    scheduleDecoderNotification();
-
-    return OK;
-}
-
-void DirectRenderer::DecoderContext::queueInputBuffer(
-        const sp<ABuffer> &accessUnit) {
-    CHECK(mDecoder != NULL);
-
-    mAccessUnits.push_back(accessUnit);
-    queueDecoderInputBuffers();
-}
-
-status_t DirectRenderer::DecoderContext::renderOutputBufferAndRelease(
-        size_t index) {
-    return mDecoder->renderOutputBufferAndRelease(index);
-}
-
-status_t DirectRenderer::DecoderContext::releaseOutputBuffer(size_t index) {
-    return mDecoder->releaseOutputBuffer(index);
-}
-
-void DirectRenderer::DecoderContext::queueDecoderInputBuffers() {
-    if (mDecoder == NULL) {
-        return;
-    }
-
-    bool submittedMore = false;
-
-    while (!mAccessUnits.empty()
-            && !mDecoderInputBuffersAvailable.empty()) {
-        size_t index = *mDecoderInputBuffersAvailable.begin();
-
-        mDecoderInputBuffersAvailable.erase(
-                mDecoderInputBuffersAvailable.begin());
-
-        sp<ABuffer> srcBuffer = *mAccessUnits.begin();
-        mAccessUnits.erase(mAccessUnits.begin());
-
-        const sp<ABuffer> &dstBuffer =
-            mDecoderInputBuffers.itemAt(index);
-
-        memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size());
-
-        int64_t timeUs;
-        CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
-
-        status_t err = mDecoder->queueInputBuffer(
-                index,
-                0 /* offset */,
-                srcBuffer->size(),
-                timeUs,
-                0 /* flags */);
-        CHECK_EQ(err, (status_t)OK);
-
-        submittedMore = true;
-    }
-
-    if (submittedMore) {
-        scheduleDecoderNotification();
-    }
-}
-
-void DirectRenderer::DecoderContext::onMessageReceived(
-        const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatDecoderNotify:
-        {
-            onDecoderNotify();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void DirectRenderer::DecoderContext::onDecoderNotify() {
-    mDecoderNotificationPending = false;
-
-    for (;;) {
-        size_t index;
-        status_t err = mDecoder->dequeueInputBuffer(&index);
-
-        if (err == OK) {
-            mDecoderInputBuffersAvailable.push_back(index);
-        } else if (err == -EAGAIN) {
-            break;
-        } else {
-            TRESPASS();
-        }
-    }
-
-    queueDecoderInputBuffers();
-
-    for (;;) {
-        size_t index;
-        size_t offset;
-        size_t size;
-        int64_t timeUs;
-        uint32_t flags;
-        status_t err = mDecoder->dequeueOutputBuffer(
-                &index,
-                &offset,
-                &size,
-                &timeUs,
-                &flags);
-
-        if (err == OK) {
-            queueOutputBuffer(
-                    index, timeUs, mDecoderOutputBuffers.itemAt(index));
-        } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
-            err = mDecoder->getOutputBuffers(
-                    &mDecoderOutputBuffers);
-            CHECK_EQ(err, (status_t)OK);
-        } else if (err == INFO_FORMAT_CHANGED) {
-            // We don't care.
-        } else if (err == -EAGAIN) {
-            break;
-        } else {
-            TRESPASS();
-        }
-    }
-
-    scheduleDecoderNotification();
-}
-
-void DirectRenderer::DecoderContext::scheduleDecoderNotification() {
-    if (mDecoderNotificationPending) {
-        return;
-    }
-
-    sp<AMessage> notify =
-        new AMessage(kWhatDecoderNotify, id());
-
-    mDecoder->requestActivityNotification(notify);
-    mDecoderNotificationPending = true;
-}
-
-void DirectRenderer::DecoderContext::queueOutputBuffer(
-        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
-    sp<AMessage> msg = mNotify->dup();
-    msg->setInt32("what", kWhatOutputBufferReady);
-    msg->setSize("index", index);
-    msg->setInt64("timeUs", timeUs);
-    msg->setBuffer("buffer", buffer);
-    msg->post();
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-DirectRenderer::AudioRenderer::AudioRenderer(
-        const sp<DecoderContext> &decoderContext)
-    : mDecoderContext(decoderContext),
-      mPushPending(false),
-      mNumFramesWritten(0) {
-    mAudioTrack = new AudioTrack(
-            AUDIO_STREAM_DEFAULT,
-            48000.0f,
-            AUDIO_FORMAT_PCM,
-            AUDIO_CHANNEL_OUT_STEREO,
-            (int)0 /* frameCount */);
-
-    CHECK_EQ((status_t)OK, mAudioTrack->initCheck());
-
-    mAudioTrack->start();
-}
-
-DirectRenderer::AudioRenderer::~AudioRenderer() {
-}
-
-void DirectRenderer::AudioRenderer::queueInputBuffer(
-        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
-    BufferInfo info;
-    info.mIndex = index;
-    info.mTimeUs = timeUs;
-    info.mBuffer = buffer;
-
-    mInputBuffers.push_back(info);
-    schedulePushIfNecessary();
-}
-
-void DirectRenderer::AudioRenderer::onMessageReceived(
-        const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatPushAudio:
-        {
-            onPushAudio();
-            break;
-        }
-
-        default:
-            break;
-    }
-}
-
-void DirectRenderer::AudioRenderer::schedulePushIfNecessary() {
-    if (mPushPending || mInputBuffers.empty()) {
-        return;
-    }
-
-    mPushPending = true;
-
-    uint32_t numFramesPlayed;
-    CHECK_EQ(mAudioTrack->getPosition(&numFramesPlayed),
-             (status_t)OK);
-
-    uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed;
-
-    // This is how long the audio sink will have data to
-    // play back.
-    const float msecsPerFrame = 1000.0f / mAudioTrack->getSampleRate();
-
-    int64_t delayUs =
-        msecsPerFrame * numFramesPendingPlayout * 1000ll;
-
-    // Let's give it more data after about half that time
-    // has elapsed.
-    (new AMessage(kWhatPushAudio, id()))->post(delayUs / 2);
-}
-
-void DirectRenderer::AudioRenderer::onPushAudio() {
-    mPushPending = false;
-
-    while (!mInputBuffers.empty()) {
-        const BufferInfo &info = *mInputBuffers.begin();
-
-        ssize_t n = writeNonBlocking(
-                info.mBuffer->data(), info.mBuffer->size());
-
-        if (n < (ssize_t)info.mBuffer->size()) {
-            CHECK_GE(n, 0);
-
-            info.mBuffer->setRange(
-                    info.mBuffer->offset() + n, info.mBuffer->size() - n);
-            break;
-        }
-
-        mDecoderContext->releaseOutputBuffer(info.mIndex);
-
-        mInputBuffers.erase(mInputBuffers.begin());
-    }
-
-    schedulePushIfNecessary();
-}
-
-ssize_t DirectRenderer::AudioRenderer::writeNonBlocking(
-        const uint8_t *data, size_t size) {
-    uint32_t numFramesPlayed;
-    status_t err = mAudioTrack->getPosition(&numFramesPlayed);
-    if (err != OK) {
-        return err;
-    }
-
-    ssize_t numFramesAvailableToWrite =
-        mAudioTrack->frameCount() - (mNumFramesWritten - numFramesPlayed);
-
-    size_t numBytesAvailableToWrite =
-        numFramesAvailableToWrite * mAudioTrack->frameSize();
-
-    if (size > numBytesAvailableToWrite) {
-        size = numBytesAvailableToWrite;
-    }
-
-    CHECK_EQ(mAudioTrack->write(data, size), (ssize_t)size);
-
-    size_t numFramesWritten = size / mAudioTrack->frameSize();
-    mNumFramesWritten += numFramesWritten;
-
-    return size;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-DirectRenderer::DirectRenderer(
-        const sp<IGraphicBufferProducer> &bufferProducer)
-    : mSurfaceTex(bufferProducer),
-      mVideoRenderPending(false),
-      mNumFramesLate(0),
-      mNumFrames(0) {
-}
-
-DirectRenderer::~DirectRenderer() {
-}
-
-void DirectRenderer::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatDecoderNotify:
-        {
-            onDecoderNotify(msg);
-            break;
-        }
-
-        case kWhatRenderVideo:
-        {
-            onRenderVideo();
-            break;
-        }
-
-        case kWhatQueueAccessUnit:
-            onQueueAccessUnit(msg);
-            break;
-
-        case kWhatSetFormat:
-            onSetFormat(msg);
-            break;
-
-        default:
-            TRESPASS();
-    }
-}
-
-void DirectRenderer::setFormat(size_t trackIndex, const sp<AMessage> &format) {
-    sp<AMessage> msg = new AMessage(kWhatSetFormat, id());
-    msg->setSize("trackIndex", trackIndex);
-    msg->setMessage("format", format);
-    msg->post();
-}
-
-void DirectRenderer::onSetFormat(const sp<AMessage> &msg) {
-    size_t trackIndex;
-    CHECK(msg->findSize("trackIndex", &trackIndex));
-
-    sp<AMessage> format;
-    CHECK(msg->findMessage("format", &format));
-
-    internalSetFormat(trackIndex, format);
-}
-
-void DirectRenderer::internalSetFormat(
-        size_t trackIndex, const sp<AMessage> &format) {
-    CHECK_LT(trackIndex, 2u);
-
-    CHECK(mDecoderContext[trackIndex] == NULL);
-
-    sp<AMessage> notify = new AMessage(kWhatDecoderNotify, id());
-    notify->setSize("trackIndex", trackIndex);
-
-    mDecoderContext[trackIndex] = new DecoderContext(notify);
-    looper()->registerHandler(mDecoderContext[trackIndex]);
-
-    CHECK_EQ((status_t)OK,
-             mDecoderContext[trackIndex]->init(
-                 format, trackIndex == 0 ? mSurfaceTex : NULL));
-
-    if (trackIndex == 1) {
-        // Audio
-        mAudioRenderer = new AudioRenderer(mDecoderContext[1]);
-        looper()->registerHandler(mAudioRenderer);
-    }
-}
-
-void DirectRenderer::queueAccessUnit(
-        size_t trackIndex, const sp<ABuffer> &accessUnit) {
-    sp<AMessage> msg = new AMessage(kWhatQueueAccessUnit, id());
-    msg->setSize("trackIndex", trackIndex);
-    msg->setBuffer("accessUnit", accessUnit);
-    msg->post();
-}
-
-void DirectRenderer::onQueueAccessUnit(const sp<AMessage> &msg) {
-    size_t trackIndex;
-    CHECK(msg->findSize("trackIndex", &trackIndex));
-
-    sp<ABuffer> accessUnit;
-    CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-    CHECK_LT(trackIndex, 2u);
-    CHECK(mDecoderContext[trackIndex] != NULL);
-
-    mDecoderContext[trackIndex]->queueInputBuffer(accessUnit);
-}
-
-void DirectRenderer::onDecoderNotify(const sp<AMessage> &msg) {
-    size_t trackIndex;
-    CHECK(msg->findSize("trackIndex", &trackIndex));
-
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case DecoderContext::kWhatOutputBufferReady:
-        {
-            size_t index;
-            CHECK(msg->findSize("index", &index));
-
-            int64_t timeUs;
-            CHECK(msg->findInt64("timeUs", &timeUs));
-
-            sp<ABuffer> buffer;
-            CHECK(msg->findBuffer("buffer", &buffer));
-
-            queueOutputBuffer(trackIndex, index, timeUs, buffer);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void DirectRenderer::queueOutputBuffer(
-        size_t trackIndex,
-        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
-    if (trackIndex == 1) {
-        // Audio
-        mAudioRenderer->queueInputBuffer(index, timeUs, buffer);
-        return;
-    }
-
-    OutputInfo info;
-    info.mIndex = index;
-    info.mTimeUs = timeUs;
-    info.mBuffer = buffer;
-    mVideoOutputBuffers.push_back(info);
-
-    scheduleVideoRenderIfNecessary();
-}
-
-void DirectRenderer::scheduleVideoRenderIfNecessary() {
-    if (mVideoRenderPending || mVideoOutputBuffers.empty()) {
-        return;
-    }
-
-    mVideoRenderPending = true;
-
-    int64_t timeUs = (*mVideoOutputBuffers.begin()).mTimeUs;
-    int64_t nowUs = ALooper::GetNowUs();
-
-    int64_t delayUs = timeUs - nowUs;
-
-    (new AMessage(kWhatRenderVideo, id()))->post(delayUs);
-}
-
-void DirectRenderer::onRenderVideo() {
-    mVideoRenderPending = false;
-
-    int64_t nowUs = ALooper::GetNowUs();
-
-    while (!mVideoOutputBuffers.empty()) {
-        const OutputInfo &info = *mVideoOutputBuffers.begin();
-
-        if (info.mTimeUs > nowUs) {
-            break;
-        }
-
-        if (info.mTimeUs + 15000ll < nowUs) {
-            ++mNumFramesLate;
-        }
-        ++mNumFrames;
-
-        status_t err =
-            mDecoderContext[0]->renderOutputBufferAndRelease(info.mIndex);
-        CHECK_EQ(err, (status_t)OK);
-
-        mVideoOutputBuffers.erase(mVideoOutputBuffers.begin());
-    }
-
-    scheduleVideoRenderIfNecessary();
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h
deleted file mode 100644
index 07c2170..0000000
--- a/media/libstagefright/wifi-display/sink/DirectRenderer.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DIRECT_RENDERER_H_
-
-#define DIRECT_RENDERER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct IGraphicBufferProducer;
-
-// Renders audio and video data queued by calls to "queueAccessUnit".
-struct DirectRenderer : public AHandler {
-    DirectRenderer(const sp<IGraphicBufferProducer> &bufferProducer);
-
-    void setFormat(size_t trackIndex, const sp<AMessage> &format);
-    void queueAccessUnit(size_t trackIndex, const sp<ABuffer> &accessUnit);
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~DirectRenderer();
-
-private:
-    struct DecoderContext;
-    struct AudioRenderer;
-
-    enum {
-        kWhatDecoderNotify,
-        kWhatRenderVideo,
-        kWhatQueueAccessUnit,
-        kWhatSetFormat,
-    };
-
-    struct OutputInfo {
-        size_t mIndex;
-        int64_t mTimeUs;
-        sp<ABuffer> mBuffer;
-    };
-
-    sp<IGraphicBufferProducer> mSurfaceTex;
-
-    sp<DecoderContext> mDecoderContext[2];
-    List<OutputInfo> mVideoOutputBuffers;
-
-    bool mVideoRenderPending;
-
-    sp<AudioRenderer> mAudioRenderer;
-
-    int32_t mNumFramesLate;
-    int32_t mNumFrames;
-
-    void onDecoderNotify(const sp<AMessage> &msg);
-
-    void queueOutputBuffer(
-            size_t trackIndex,
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-    void scheduleVideoRenderIfNecessary();
-    void onRenderVideo();
-
-    void onSetFormat(const sp<AMessage> &msg);
-    void onQueueAccessUnit(const sp<AMessage> &msg);
-
-    void internalSetFormat(size_t trackIndex, const sp<AMessage> &format);
-
-    DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer);
-};
-
-}  // namespace android
-
-#endif  // DIRECT_RENDERER_H_
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
deleted file mode 100644
index bc88f1e..0000000
--- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
+++ /dev/null
@@ -1,917 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "WifiDisplaySink"
-#include <utils/Log.h>
-
-#include "WifiDisplaySink.h"
-
-#include "DirectRenderer.h"
-#include "MediaReceiver.h"
-#include "TimeSyncer.h"
-
-#include <cutils/properties.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ParsedMessage.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-// static
-const AString WifiDisplaySink::sUserAgent = MakeUserAgent();
-
-WifiDisplaySink::WifiDisplaySink(
-        uint32_t flags,
-        const sp<ANetworkSession> &netSession,
-        const sp<IGraphicBufferProducer> &bufferProducer,
-        const sp<AMessage> &notify)
-    : mState(UNDEFINED),
-      mFlags(flags),
-      mNetSession(netSession),
-      mSurfaceTex(bufferProducer),
-      mNotify(notify),
-      mUsingTCPTransport(false),
-      mUsingTCPInterleaving(false),
-      mSessionID(0),
-      mNextCSeq(1),
-      mIDRFrameRequestPending(false),
-      mTimeOffsetUs(0ll),
-      mTimeOffsetValid(false),
-      mSetupDeferred(false),
-      mLatencyCount(0),
-      mLatencySumUs(0ll),
-      mLatencyMaxUs(0ll),
-      mMaxDelayMs(-1ll) {
-    // We support any and all resolutions, but prefer 720p30
-    mSinkSupportedVideoFormats.setNativeResolution(
-            VideoFormats::RESOLUTION_CEA, 5);  // 1280 x 720 p30
-
-    mSinkSupportedVideoFormats.enableAll();
-}
-
-WifiDisplaySink::~WifiDisplaySink() {
-}
-
-void WifiDisplaySink::start(const char *sourceHost, int32_t sourcePort) {
-    sp<AMessage> msg = new AMessage(kWhatStart, id());
-    msg->setString("sourceHost", sourceHost);
-    msg->setInt32("sourcePort", sourcePort);
-    msg->post();
-}
-
-void WifiDisplaySink::start(const char *uri) {
-    sp<AMessage> msg = new AMessage(kWhatStart, id());
-    msg->setString("setupURI", uri);
-    msg->post();
-}
-
-// static
-bool WifiDisplaySink::ParseURL(
-        const char *url, AString *host, int32_t *port, AString *path,
-        AString *user, AString *pass) {
-    host->clear();
-    *port = 0;
-    path->clear();
-    user->clear();
-    pass->clear();
-
-    if (strncasecmp("rtsp://", url, 7)) {
-        return false;
-    }
-
-    const char *slashPos = strchr(&url[7], '/');
-
-    if (slashPos == NULL) {
-        host->setTo(&url[7]);
-        path->setTo("/");
-    } else {
-        host->setTo(&url[7], slashPos - &url[7]);
-        path->setTo(slashPos);
-    }
-
-    ssize_t atPos = host->find("@");
-
-    if (atPos >= 0) {
-        // Split of user:pass@ from hostname.
-
-        AString userPass(*host, 0, atPos);
-        host->erase(0, atPos + 1);
-
-        ssize_t colonPos = userPass.find(":");
-
-        if (colonPos < 0) {
-            *user = userPass;
-        } else {
-            user->setTo(userPass, 0, colonPos);
-            pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1);
-        }
-    }
-
-    const char *colonPos = strchr(host->c_str(), ':');
-
-    if (colonPos != NULL) {
-        char *end;
-        unsigned long x = strtoul(colonPos + 1, &end, 10);
-
-        if (end == colonPos + 1 || *end != '\0' || x >= 65536) {
-            return false;
-        }
-
-        *port = x;
-
-        size_t colonOffset = colonPos - host->c_str();
-        size_t trailing = host->size() - colonOffset;
-        host->erase(colonOffset, trailing);
-    } else {
-        *port = 554;
-    }
-
-    return true;
-}
-
-void WifiDisplaySink::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatStart:
-        {
-            sleep(2);  // XXX
-
-            int32_t sourcePort;
-            CHECK(msg->findString("sourceHost", &mRTSPHost));
-            CHECK(msg->findInt32("sourcePort", &sourcePort));
-
-            sp<AMessage> notify = new AMessage(kWhatRTSPNotify, id());
-
-            status_t err = mNetSession->createRTSPClient(
-                    mRTSPHost.c_str(), sourcePort, notify, &mSessionID);
-            CHECK_EQ(err, (status_t)OK);
-
-            mState = CONNECTING;
-            break;
-        }
-
-        case kWhatRTSPNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatError:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    AString detail;
-                    CHECK(msg->findString("detail", &detail));
-
-                    ALOGE("An error occurred in session %d (%d, '%s/%s').",
-                          sessionID,
-                          err,
-                          detail.c_str(),
-                          strerror(-err));
-
-                    if (sessionID == mSessionID) {
-                        ALOGI("Lost control connection.");
-
-                        // The control connection is dead now.
-                        mNetSession->destroySession(mSessionID);
-                        mSessionID = 0;
-
-                        if (mNotify == NULL) {
-                            looper()->stop();
-                        } else {
-                            sp<AMessage> notify = mNotify->dup();
-                            notify->setInt32("what", kWhatDisconnected);
-                            notify->post();
-                        }
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatConnected:
-                {
-                    ALOGI("We're now connected.");
-                    mState = CONNECTED;
-
-                    if (mFlags & FLAG_SPECIAL_MODE) {
-                        sp<AMessage> notify = new AMessage(
-                                kWhatTimeSyncerNotify, id());
-
-                        mTimeSyncer = new TimeSyncer(mNetSession, notify);
-                        looper()->registerHandler(mTimeSyncer);
-
-                        mTimeSyncer->startClient(mRTSPHost.c_str(), 8123);
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatData:
-                {
-                    onReceiveClientData(msg);
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatStop:
-        {
-            looper()->stop();
-            break;
-        }
-
-        case kWhatMediaReceiverNotify:
-        {
-            onMediaReceiverNotify(msg);
-            break;
-        }
-
-        case kWhatTimeSyncerNotify:
-        {
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            if (what == TimeSyncer::kWhatTimeOffset) {
-                CHECK(msg->findInt64("offset", &mTimeOffsetUs));
-                mTimeOffsetValid = true;
-
-                if (mSetupDeferred) {
-                    CHECK_EQ((status_t)OK,
-                             sendSetup(
-                                mSessionID,
-                                "rtsp://x.x.x.x:x/wfd1.0/streamid=0"));
-
-                    mSetupDeferred = false;
-                }
-            }
-            break;
-        }
-
-        case kWhatReportLateness:
-        {
-            if (mLatencyCount > 0) {
-                int64_t avgLatencyUs = mLatencySumUs / mLatencyCount;
-
-                ALOGV("avg. latency = %lld ms (max %lld ms)",
-                      avgLatencyUs / 1000ll,
-                      mLatencyMaxUs / 1000ll);
-
-                sp<AMessage> params = new AMessage;
-                params->setInt64("avgLatencyUs", avgLatencyUs);
-                params->setInt64("maxLatencyUs", mLatencyMaxUs);
-                mMediaReceiver->informSender(0 /* trackIndex */, params);
-            }
-
-            mLatencyCount = 0;
-            mLatencySumUs = 0ll;
-            mLatencyMaxUs = 0ll;
-
-            msg->post(kReportLatenessEveryUs);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void WifiDisplaySink::dumpDelay(size_t trackIndex, int64_t timeUs) {
-    int64_t delayMs = (ALooper::GetNowUs() - timeUs) / 1000ll;
-
-    if (delayMs > mMaxDelayMs) {
-        mMaxDelayMs = delayMs;
-    }
-
-    static const int64_t kMinDelayMs = 0;
-    static const int64_t kMaxDelayMs = 300;
-
-    const char *kPattern = "########################################";
-    size_t kPatternSize = strlen(kPattern);
-
-    int n = (kPatternSize * (delayMs - kMinDelayMs))
-                / (kMaxDelayMs - kMinDelayMs);
-
-    if (n < 0) {
-        n = 0;
-    } else if ((size_t)n > kPatternSize) {
-        n = kPatternSize;
-    }
-
-    ALOGI("[%lld]: (%4lld ms / %4lld ms) %s",
-          timeUs / 1000,
-          delayMs,
-          mMaxDelayMs,
-          kPattern + kPatternSize - n);
-}
-
-void WifiDisplaySink::onMediaReceiverNotify(const sp<AMessage> &msg) {
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case MediaReceiver::kWhatInitDone:
-        {
-            status_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            ALOGI("MediaReceiver initialization completed w/ err %d", err);
-            break;
-        }
-
-        case MediaReceiver::kWhatError:
-        {
-            status_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            ALOGE("MediaReceiver signaled error %d", err);
-            break;
-        }
-
-        case MediaReceiver::kWhatAccessUnit:
-        {
-            if (mRenderer == NULL) {
-                mRenderer = new DirectRenderer(mSurfaceTex);
-                looper()->registerHandler(mRenderer);
-            }
-
-            sp<ABuffer> accessUnit;
-            CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-            int64_t timeUs;
-            CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-            if (!mTimeOffsetValid && !(mFlags & FLAG_SPECIAL_MODE)) {
-                mTimeOffsetUs = timeUs - ALooper::GetNowUs();
-                mTimeOffsetValid = true;
-            }
-
-            CHECK(mTimeOffsetValid);
-
-            // We are the timesync _client_,
-            // client time = server time - time offset.
-            timeUs -= mTimeOffsetUs;
-
-            size_t trackIndex;
-            CHECK(msg->findSize("trackIndex", &trackIndex));
-
-            int64_t nowUs = ALooper::GetNowUs();
-            int64_t delayUs = nowUs - timeUs;
-
-            mLatencySumUs += delayUs;
-            if (mLatencyCount == 0 || delayUs > mLatencyMaxUs) {
-                mLatencyMaxUs = delayUs;
-            }
-            ++mLatencyCount;
-
-            // dumpDelay(trackIndex, timeUs);
-
-            timeUs += 220000ll;  // Assume 220 ms of latency
-            accessUnit->meta()->setInt64("timeUs", timeUs);
-
-            sp<AMessage> format;
-            if (msg->findMessage("format", &format)) {
-                mRenderer->setFormat(trackIndex, format);
-            }
-
-            mRenderer->queueAccessUnit(trackIndex, accessUnit);
-            break;
-        }
-
-        case MediaReceiver::kWhatPacketLost:
-        {
-#if 0
-            if (!mIDRFrameRequestPending) {
-                ALOGI("requesting IDR frame");
-
-                sendIDRFrameRequest(mSessionID);
-            }
-#endif
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void WifiDisplaySink::registerResponseHandler(
-        int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) {
-    ResponseID id;
-    id.mSessionID = sessionID;
-    id.mCSeq = cseq;
-    mResponseHandlers.add(id, func);
-}
-
-status_t WifiDisplaySink::sendM2(int32_t sessionID) {
-    AString request = "OPTIONS * RTSP/1.0\r\n";
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append(
-            "Require: org.wfa.wfd1.0\r\n"
-            "\r\n");
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySink::onReceiveM2Response);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySink::onReceiveM2Response(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    return OK;
-}
-
-status_t WifiDisplaySink::onReceiveSetupResponse(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    if (!msg->findString("session", &mPlaybackSessionID)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (!ParsedMessage::GetInt32Attribute(
-                mPlaybackSessionID.c_str(),
-                "timeout",
-                &mPlaybackSessionTimeoutSecs)) {
-        mPlaybackSessionTimeoutSecs = -1;
-    }
-
-    ssize_t colonPos = mPlaybackSessionID.find(";");
-    if (colonPos >= 0) {
-        // Strip any options from the returned session id.
-        mPlaybackSessionID.erase(
-                colonPos, mPlaybackSessionID.size() - colonPos);
-    }
-
-    status_t err = configureTransport(msg);
-
-    if (err != OK) {
-        return err;
-    }
-
-    mState = PAUSED;
-
-    return sendPlay(
-            sessionID,
-            "rtsp://x.x.x.x:x/wfd1.0/streamid=0");
-}
-
-status_t WifiDisplaySink::configureTransport(const sp<ParsedMessage> &msg) {
-    if (mUsingTCPTransport && !(mFlags & FLAG_SPECIAL_MODE)) {
-        // In "special" mode we still use a UDP RTCP back-channel that
-        // needs connecting.
-        return OK;
-    }
-
-    AString transport;
-    if (!msg->findString("transport", &transport)) {
-        ALOGE("Missing 'transport' field in SETUP response.");
-        return ERROR_MALFORMED;
-    }
-
-    AString sourceHost;
-    if (!ParsedMessage::GetAttribute(
-                transport.c_str(), "source", &sourceHost)) {
-        sourceHost = mRTSPHost;
-    }
-
-    AString serverPortStr;
-    if (!ParsedMessage::GetAttribute(
-                transport.c_str(), "server_port", &serverPortStr)) {
-        ALOGE("Missing 'server_port' in Transport field.");
-        return ERROR_MALFORMED;
-    }
-
-    int rtpPort, rtcpPort;
-    if (sscanf(serverPortStr.c_str(), "%d-%d", &rtpPort, &rtcpPort) != 2
-            || rtpPort <= 0 || rtpPort > 65535
-            || rtcpPort <=0 || rtcpPort > 65535
-            || rtcpPort != rtpPort + 1) {
-        ALOGE("Invalid server_port description '%s'.",
-                serverPortStr.c_str());
-
-        return ERROR_MALFORMED;
-    }
-
-    if (rtpPort & 1) {
-        ALOGW("Server picked an odd numbered RTP port.");
-    }
-
-    return mMediaReceiver->connectTrack(
-            0 /* trackIndex */, sourceHost.c_str(), rtpPort, rtcpPort);
-}
-
-status_t WifiDisplaySink::onReceivePlayResponse(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    mState = PLAYING;
-
-    (new AMessage(kWhatReportLateness, id()))->post(kReportLatenessEveryUs);
-
-    return OK;
-}
-
-status_t WifiDisplaySink::onReceiveIDRFrameRequestResponse(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    CHECK(mIDRFrameRequestPending);
-    mIDRFrameRequestPending = false;
-
-    return OK;
-}
-
-void WifiDisplaySink::onReceiveClientData(const sp<AMessage> &msg) {
-    int32_t sessionID;
-    CHECK(msg->findInt32("sessionID", &sessionID));
-
-    sp<RefBase> obj;
-    CHECK(msg->findObject("data", &obj));
-
-    sp<ParsedMessage> data =
-        static_cast<ParsedMessage *>(obj.get());
-
-    ALOGV("session %d received '%s'",
-          sessionID, data->debugString().c_str());
-
-    AString method;
-    AString uri;
-    data->getRequestField(0, &method);
-
-    int32_t cseq;
-    if (!data->findInt32("cseq", &cseq)) {
-        sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */);
-        return;
-    }
-
-    if (method.startsWith("RTSP/")) {
-        // This is a response.
-
-        ResponseID id;
-        id.mSessionID = sessionID;
-        id.mCSeq = cseq;
-
-        ssize_t index = mResponseHandlers.indexOfKey(id);
-
-        if (index < 0) {
-            ALOGW("Received unsolicited server response, cseq %d", cseq);
-            return;
-        }
-
-        HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index);
-        mResponseHandlers.removeItemsAt(index);
-
-        status_t err = (this->*func)(sessionID, data);
-        CHECK_EQ(err, (status_t)OK);
-    } else {
-        AString version;
-        data->getRequestField(2, &version);
-        if (!(version == AString("RTSP/1.0"))) {
-            sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq);
-            return;
-        }
-
-        if (method == "OPTIONS") {
-            onOptionsRequest(sessionID, cseq, data);
-        } else if (method == "GET_PARAMETER") {
-            onGetParameterRequest(sessionID, cseq, data);
-        } else if (method == "SET_PARAMETER") {
-            onSetParameterRequest(sessionID, cseq, data);
-        } else {
-            sendErrorResponse(sessionID, "405 Method Not Allowed", cseq);
-        }
-    }
-}
-
-void WifiDisplaySink::onOptionsRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-    response.append("Public: org.wfa.wfd1.0, GET_PARAMETER, SET_PARAMETER\r\n");
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-
-    err = sendM2(sessionID);
-    CHECK_EQ(err, (status_t)OK);
-}
-
-void WifiDisplaySink::onGetParameterRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    AString body;
-
-    if (mState == CONNECTED) {
-        mUsingTCPTransport = false;
-        mUsingTCPInterleaving = false;
-
-        char val[PROPERTY_VALUE_MAX];
-        if (property_get("media.wfd-sink.tcp-mode", val, NULL)) {
-            if (!strcasecmp("true", val) || !strcmp("1", val)) {
-                ALOGI("Using TCP unicast transport.");
-                mUsingTCPTransport = true;
-                mUsingTCPInterleaving = false;
-            } else if (!strcasecmp("interleaved", val)) {
-                ALOGI("Using TCP interleaved transport.");
-                mUsingTCPTransport = true;
-                mUsingTCPInterleaving = true;
-            }
-        } else if (mFlags & FLAG_SPECIAL_MODE) {
-            mUsingTCPTransport = true;
-        }
-
-        body = "wfd_video_formats: ";
-        body.append(mSinkSupportedVideoFormats.getFormatSpec());
-
-        body.append(
-                "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n"
-                "wfd_client_rtp_ports: RTP/AVP/");
-
-        if (mUsingTCPTransport) {
-            body.append("TCP;");
-            if (mUsingTCPInterleaving) {
-                body.append("interleaved");
-            } else {
-                body.append("unicast 19000 0");
-            }
-        } else {
-            body.append("UDP;unicast 19000 0");
-        }
-
-        body.append(" mode=play\r\n");
-    }
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-    response.append("Content-Type: text/parameters\r\n");
-    response.append(StringPrintf("Content-Length: %d\r\n", body.size()));
-    response.append("\r\n");
-    response.append(body);
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-}
-
-status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) {
-    sp<AMessage> notify = new AMessage(kWhatMediaReceiverNotify, id());
-
-    mMediaReceiverLooper = new ALooper;
-    mMediaReceiverLooper->setName("media_receiver");
-
-    mMediaReceiverLooper->start(
-            false /* runOnCallingThread */,
-            false /* canCallJava */,
-            PRIORITY_AUDIO);
-
-    mMediaReceiver = new MediaReceiver(mNetSession, notify);
-    mMediaReceiverLooper->registerHandler(mMediaReceiver);
-
-    RTPReceiver::TransportMode rtpMode = RTPReceiver::TRANSPORT_UDP;
-    if (mUsingTCPTransport) {
-        if (mUsingTCPInterleaving) {
-            rtpMode = RTPReceiver::TRANSPORT_TCP_INTERLEAVED;
-        } else {
-            rtpMode = RTPReceiver::TRANSPORT_TCP;
-        }
-    }
-
-    int32_t localRTPPort;
-    status_t err = mMediaReceiver->addTrack(
-            rtpMode, RTPReceiver::TRANSPORT_UDP /* rtcpMode */, &localRTPPort);
-
-    if (err == OK) {
-        err = mMediaReceiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM);
-    }
-
-    if (err != OK) {
-        mMediaReceiverLooper->unregisterHandler(mMediaReceiver->id());
-        mMediaReceiver.clear();
-
-        mMediaReceiverLooper->stop();
-        mMediaReceiverLooper.clear();
-
-        return err;
-    }
-
-    AString request = StringPrintf("SETUP %s RTSP/1.0\r\n", uri);
-
-    AppendCommonResponse(&request, mNextCSeq);
-
-    if (rtpMode == RTPReceiver::TRANSPORT_TCP_INTERLEAVED) {
-        request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n");
-    } else if (rtpMode == RTPReceiver::TRANSPORT_TCP) {
-        if (mFlags & FLAG_SPECIAL_MODE) {
-            // This isn't quite true, since the RTP connection is through TCP
-            // and the RTCP connection through UDP...
-            request.append(
-                    StringPrintf(
-                        "Transport: RTP/AVP/TCP;unicast;client_port=%d-%d\r\n",
-                        localRTPPort, localRTPPort + 1));
-        } else {
-            request.append(
-                    StringPrintf(
-                        "Transport: RTP/AVP/TCP;unicast;client_port=%d\r\n",
-                        localRTPPort));
-        }
-    } else {
-        request.append(
-                StringPrintf(
-                    "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n",
-                    localRTPPort,
-                    localRTPPort + 1));
-    }
-
-    request.append("\r\n");
-
-    ALOGV("request = '%s'", request.c_str());
-
-    err = mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySink::onReceiveSetupResponse);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) {
-    AString request = StringPrintf("PLAY %s RTSP/1.0\r\n", uri);
-
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str()));
-    request.append("\r\n");
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySink::onReceivePlayResponse);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) {
-    CHECK(!mIDRFrameRequestPending);
-
-    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
-
-    AppendCommonResponse(&request, mNextCSeq);
-
-    AString content = "wfd_idr_request\r\n";
-
-    request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str()));
-    request.append(StringPrintf("Content-Length: %d\r\n", content.size()));
-    request.append("\r\n");
-    request.append(content);
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID,
-            mNextCSeq,
-            &WifiDisplaySink::onReceiveIDRFrameRequestResponse);
-
-    ++mNextCSeq;
-
-    mIDRFrameRequestPending = true;
-
-    return OK;
-}
-
-void WifiDisplaySink::onSetParameterRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    const char *content = data->getContent();
-
-    if (strstr(content, "wfd_trigger_method: SETUP\r\n") != NULL) {
-        if ((mFlags & FLAG_SPECIAL_MODE) && !mTimeOffsetValid) {
-            mSetupDeferred = true;
-        } else {
-            status_t err =
-                sendSetup(
-                        sessionID,
-                        "rtsp://x.x.x.x:x/wfd1.0/streamid=0");
-
-            CHECK_EQ(err, (status_t)OK);
-        }
-    }
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-}
-
-void WifiDisplaySink::sendErrorResponse(
-        int32_t sessionID,
-        const char *errorDetail,
-        int32_t cseq) {
-    AString response;
-    response.append("RTSP/1.0 ");
-    response.append(errorDetail);
-    response.append("\r\n");
-
-    AppendCommonResponse(&response, cseq);
-
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-}
-
-// static
-void WifiDisplaySink::AppendCommonResponse(AString *response, int32_t cseq) {
-    time_t now = time(NULL);
-    struct tm *now2 = gmtime(&now);
-    char buf[128];
-    strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2);
-
-    response->append("Date: ");
-    response->append(buf);
-    response->append("\r\n");
-
-    response->append(StringPrintf("User-Agent: %s\r\n", sUserAgent.c_str()));
-
-    if (cseq >= 0) {
-        response->append(StringPrintf("CSeq: %d\r\n", cseq));
-    }
-}
-
-}  // namespace android
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h
deleted file mode 100644
index dc1fc32..0000000
--- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef WIFI_DISPLAY_SINK_H_
-
-#define WIFI_DISPLAY_SINK_H_
-
-#include "VideoFormats.h"
-
-#include <gui/Surface.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-
-namespace android {
-
-struct AMessage;
-struct DirectRenderer;
-struct MediaReceiver;
-struct ParsedMessage;
-struct TimeSyncer;
-
-// Represents the RTSP client acting as a wifi display sink.
-// Connects to a wifi display source and renders the incoming
-// transport stream using a MediaPlayer instance.
-struct WifiDisplaySink : public AHandler {
-    enum {
-        kWhatDisconnected,
-    };
-
-    enum Flags {
-        FLAG_SPECIAL_MODE = 1,
-    };
-
-    // If no notification message is specified (notify == NULL)
-    // the sink will stop its looper() once the session ends,
-    // otherwise it will post an appropriate notification but leave
-    // the looper() running.
-    WifiDisplaySink(
-            uint32_t flags,
-            const sp<ANetworkSession> &netSession,
-            const sp<IGraphicBufferProducer> &bufferProducer = NULL,
-            const sp<AMessage> &notify = NULL);
-
-    void start(const char *sourceHost, int32_t sourcePort);
-    void start(const char *uri);
-
-protected:
-    virtual ~WifiDisplaySink();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum State {
-        UNDEFINED,
-        CONNECTING,
-        CONNECTED,
-        PAUSED,
-        PLAYING,
-    };
-
-    enum {
-        kWhatStart,
-        kWhatRTSPNotify,
-        kWhatStop,
-        kWhatMediaReceiverNotify,
-        kWhatTimeSyncerNotify,
-        kWhatReportLateness,
-    };
-
-    struct ResponseID {
-        int32_t mSessionID;
-        int32_t mCSeq;
-
-        bool operator<(const ResponseID &other) const {
-            return mSessionID < other.mSessionID
-                || (mSessionID == other.mSessionID
-                        && mCSeq < other.mCSeq);
-        }
-    };
-
-    typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    static const int64_t kReportLatenessEveryUs = 1000000ll;
-
-    static const AString sUserAgent;
-
-    State mState;
-    uint32_t mFlags;
-    VideoFormats mSinkSupportedVideoFormats;
-    sp<ANetworkSession> mNetSession;
-    sp<IGraphicBufferProducer> mSurfaceTex;
-    sp<AMessage> mNotify;
-    sp<TimeSyncer> mTimeSyncer;
-    bool mUsingTCPTransport;
-    bool mUsingTCPInterleaving;
-    AString mRTSPHost;
-    int32_t mSessionID;
-
-    int32_t mNextCSeq;
-
-    KeyedVector<ResponseID, HandleRTSPResponseFunc> mResponseHandlers;
-
-    sp<ALooper> mMediaReceiverLooper;
-    sp<MediaReceiver> mMediaReceiver;
-    sp<DirectRenderer> mRenderer;
-
-    AString mPlaybackSessionID;
-    int32_t mPlaybackSessionTimeoutSecs;
-
-    bool mIDRFrameRequestPending;
-
-    int64_t mTimeOffsetUs;
-    bool mTimeOffsetValid;
-
-    bool mSetupDeferred;
-
-    size_t mLatencyCount;
-    int64_t mLatencySumUs;
-    int64_t mLatencyMaxUs;
-
-    int64_t mMaxDelayMs;
-
-    status_t sendM2(int32_t sessionID);
-    status_t sendSetup(int32_t sessionID, const char *uri);
-    status_t sendPlay(int32_t sessionID, const char *uri);
-    status_t sendIDRFrameRequest(int32_t sessionID);
-
-    status_t onReceiveM2Response(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveSetupResponse(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t configureTransport(const sp<ParsedMessage> &msg);
-
-    status_t onReceivePlayResponse(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveIDRFrameRequestResponse(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    void registerResponseHandler(
-            int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func);
-
-    void onReceiveClientData(const sp<AMessage> &msg);
-
-    void onOptionsRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void onGetParameterRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void onSetParameterRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void onMediaReceiverNotify(const sp<AMessage> &msg);
-
-    void sendErrorResponse(
-            int32_t sessionID,
-            const char *errorDetail,
-            int32_t cseq);
-
-    static void AppendCommonResponse(AString *response, int32_t cseq);
-
-    bool ParseURL(
-            const char *url, AString *host, int32_t *port, AString *path,
-            AString *user, AString *pass);
-
-    void dumpDelay(size_t trackIndex, int64_t timeUs);
-
-    DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySink);
-};
-
-}  // namespace android
-
-#endif  // WIFI_DISPLAY_SINK_H_
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
index 6f23854..753b3ec 100644
--- a/media/libstagefright/wifi-display/source/Converter.cpp
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -833,7 +833,7 @@
 void Converter::setVideoBitrate(int32_t bitRate) {
     if (mIsVideo && mEncoder != NULL && bitRate != mPrevVideoBitrate) {
         sp<AMessage> params = new AMessage;
-        params->setInt32("videoBitrate", bitRate);
+        params->setInt32("video-bitrate", bitRate);
 
         mEncoder->setParameters(params);
 
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index 0aa4ee5..286ea13 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -939,7 +939,8 @@
     if (isVideo) {
         format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
         format->setInt32("store-metadata-in-buffers", true);
-        format->setInt32("store-metadata-in-buffers-output", (mHDCP != NULL));
+        format->setInt32("store-metadata-in-buffers-output", (mHDCP != NULL)
+                && (mHDCP->getCaps() & HDCPModule::HDCP_CAPS_ENCRYPT_NATIVE));
         format->setInt32(
                 "color-format", OMX_COLOR_FormatAndroidOpaque);
         format->setInt32("profile-idc", profileIdc);
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
index 4b59e62..05e4018 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
@@ -22,7 +22,6 @@
 #include "PlaybackSession.h"
 #include "Parameters.h"
 #include "rtp/RTPSender.h"
-#include "TimeSyncer.h"
 
 #include <binder/IServiceManager.h>
 #include <gui/IGraphicBufferProducer.h>
@@ -173,15 +172,7 @@
                 }
             }
 
-            if (err == OK) {
-                sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-                mTimeSyncer = new TimeSyncer(mNetSession, notify);
-                looper()->registerHandler(mTimeSyncer);
-
-                mTimeSyncer->startServer(8123);
-
-                mState = AWAITING_CLIENT_CONNECTION;
-            }
+            mState = AWAITING_CLIENT_CONNECTION;
 
             sp<AMessage> response = new AMessage;
             response->setInt32("err", err);
@@ -416,7 +407,8 @@
                                 0, // height,
                                 mUsingHDCP
                                     ? IRemoteDisplayClient::kDisplayFlagSecure
-                                    : 0);
+                                    : 0,
+                                0);
                     } else {
                         size_t width, height;
 
@@ -435,7 +427,8 @@
                                 height,
                                 mUsingHDCP
                                     ? IRemoteDisplayClient::kDisplayFlagSecure
-                                    : 0);
+                                    : 0,
+                                playbackSessionID);
                     }
                 }
 
@@ -554,11 +547,6 @@
             break;
         }
 
-        case kWhatTimeSyncerNotify:
-        {
-            break;
-        }
-
         default:
             TRESPASS();
     }
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
index 4f11712..750265f 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
@@ -30,7 +30,6 @@
 struct IHDCP;
 struct IRemoteDisplayClient;
 struct ParsedMessage;
-struct TimeSyncer;
 
 // Represents the RTSP server acting as a wifi display source.
 // Manages incoming connections, sets up Playback sessions as necessary.
@@ -83,7 +82,6 @@
         kWhatHDCPNotify,
         kWhatFinishStop2,
         kWhatTeardownTriggerTimedOut,
-        kWhatTimeSyncerNotify,
     };
 
     struct ResponseID {
@@ -120,7 +118,6 @@
     sp<ANetworkSession> mNetSession;
     sp<IRemoteDisplayClient> mClient;
     AString mMediaPath;
-    sp<TimeSyncer> mTimeSyncer;
     struct in_addr mInterfaceAddr;
     int32_t mSessionID;
 
diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp
deleted file mode 100644
index 61eb9f9..0000000
--- a/media/libstagefright/wifi-display/udptest.cpp
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NEBUG 0
-#define LOG_TAG "udptest"
-#include <utils/Log.h>
-
-#include "TimeSyncer.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-
-namespace android {
-
-}  // namespace android
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage: %s -c host[:port]\tconnect to test server\n"
-            "           -l            \tcreate a test server\n",
-            me);
-}
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    ProcessState::self()->startThreadPool();
-
-    int32_t localPort = -1;
-    int32_t connectToPort = -1;
-    AString connectToHost;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l:")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    connectToHost = optarg;
-                    connectToPort = 49152;
-                } else {
-                    connectToHost.setTo(optarg, colonPos - optarg);
-
-                    char *end;
-                    connectToPort = strtol(colonPos + 1, &end, 10);
-
-                    if (*end != '\0' || end == colonPos + 1
-                            || connectToPort < 1 || connectToPort > 65535) {
-                        fprintf(stderr, "Illegal port specified.\n");
-                        exit(1);
-                    }
-                }
-                break;
-            }
-
-            case 'l':
-            {
-                char *end;
-                localPort = strtol(optarg, &end, 10);
-
-                if (*end != '\0' || end == optarg
-                        || localPort < 1 || localPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case '?':
-            case 'h':
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if (localPort < 0 && connectToPort < 0) {
-        fprintf(stderr,
-                "You need to select either client or server mode.\n");
-        exit(1);
-    }
-
-    sp<ANetworkSession> netSession = new ANetworkSession;
-    netSession->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<TimeSyncer> handler = new TimeSyncer(netSession, NULL /* notify */);
-    looper->registerHandler(handler);
-
-    if (localPort >= 0) {
-        handler->startServer(localPort);
-    } else {
-        handler->startClient(connectToHost.c_str(), connectToPort);
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    return 0;
-}
-
diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp
deleted file mode 100644
index 4607606..0000000
--- a/media/libstagefright/wifi-display/wfd.cpp
+++ /dev/null
@@ -1,387 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "wfd"
-#include <utils/Log.h>
-
-#include "sink/WifiDisplaySink.h"
-#include "source/WifiDisplaySource.h"
-
-#include <binder/ProcessState.h>
-#include <binder/IServiceManager.h>
-#include <gui/ISurfaceComposer.h>
-#include <gui/SurfaceComposerClient.h>
-#include <media/AudioSystem.h>
-#include <media/IMediaPlayerService.h>
-#include <media/IRemoteDisplay.h>
-#include <media/IRemoteDisplayClient.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <ui/DisplayInfo.h>
-
-namespace android {
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage:\n"
-            "           %s -c host[:port]\tconnect to wifi source\n"
-            "               -u uri        \tconnect to an rtsp uri\n"
-            "               -l ip[:port] \tlisten on the specified port "
-            "               -f(ilename)  \tstream media "
-            "(create a sink)\n"
-            "               -s(pecial)   \trun in 'special' mode\n",
-            me);
-}
-
-struct RemoteDisplayClient : public BnRemoteDisplayClient {
-    RemoteDisplayClient();
-
-    virtual void onDisplayConnected(
-            const sp<IGraphicBufferProducer> &bufferProducer,
-            uint32_t width,
-            uint32_t height,
-            uint32_t flags);
-
-    virtual void onDisplayDisconnected();
-    virtual void onDisplayError(int32_t error);
-
-    void waitUntilDone();
-
-protected:
-    virtual ~RemoteDisplayClient();
-
-private:
-    Mutex mLock;
-    Condition mCondition;
-
-    bool mDone;
-
-    sp<SurfaceComposerClient> mComposerClient;
-    sp<IGraphicBufferProducer> mSurfaceTexture;
-    sp<IBinder> mDisplayBinder;
-
-    DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplayClient);
-};
-
-RemoteDisplayClient::RemoteDisplayClient()
-    : mDone(false) {
-    mComposerClient = new SurfaceComposerClient;
-    CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
-}
-
-RemoteDisplayClient::~RemoteDisplayClient() {
-}
-
-void RemoteDisplayClient::onDisplayConnected(
-        const sp<IGraphicBufferProducer> &bufferProducer,
-        uint32_t width,
-        uint32_t height,
-        uint32_t flags) {
-    ALOGI("onDisplayConnected width=%u, height=%u, flags = 0x%08x",
-          width, height, flags);
-
-    if (bufferProducer != NULL) {
-        mSurfaceTexture = bufferProducer;
-        mDisplayBinder = mComposerClient->createDisplay(
-                String8("foo"), false /* secure */);
-
-        SurfaceComposerClient::openGlobalTransaction();
-        mComposerClient->setDisplaySurface(mDisplayBinder, mSurfaceTexture);
-
-        Rect layerStackRect(1280, 720);  // XXX fix this.
-        Rect displayRect(1280, 720);
-
-        mComposerClient->setDisplayProjection(
-                mDisplayBinder, 0 /* 0 degree rotation */,
-                layerStackRect,
-                displayRect);
-
-        SurfaceComposerClient::closeGlobalTransaction();
-    }
-}
-
-void RemoteDisplayClient::onDisplayDisconnected() {
-    ALOGI("onDisplayDisconnected");
-
-    Mutex::Autolock autoLock(mLock);
-    mDone = true;
-    mCondition.broadcast();
-}
-
-void RemoteDisplayClient::onDisplayError(int32_t error) {
-    ALOGI("onDisplayError error=%d", error);
-
-    Mutex::Autolock autoLock(mLock);
-    mDone = true;
-    mCondition.broadcast();
-}
-
-void RemoteDisplayClient::waitUntilDone() {
-    Mutex::Autolock autoLock(mLock);
-    while (!mDone) {
-        mCondition.wait(mLock);
-    }
-}
-
-static status_t enableAudioSubmix(bool enable) {
-    status_t err = AudioSystem::setDeviceConnectionState(
-            AUDIO_DEVICE_IN_REMOTE_SUBMIX,
-            enable
-                ? AUDIO_POLICY_DEVICE_STATE_AVAILABLE
-                : AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-            NULL /* device_address */);
-
-    if (err != OK) {
-        return err;
-    }
-
-    err = AudioSystem::setDeviceConnectionState(
-            AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
-            enable
-                ? AUDIO_POLICY_DEVICE_STATE_AVAILABLE
-                : AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-            NULL /* device_address */);
-
-    return err;
-}
-
-static void createSource(const AString &addr, int32_t port) {
-    sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> binder = sm->getService(String16("media.player"));
-    sp<IMediaPlayerService> service =
-        interface_cast<IMediaPlayerService>(binder);
-
-    CHECK(service.get() != NULL);
-
-    enableAudioSubmix(true /* enable */);
-
-    String8 iface;
-    iface.append(addr.c_str());
-    iface.append(StringPrintf(":%d", port).c_str());
-
-    sp<RemoteDisplayClient> client = new RemoteDisplayClient;
-    sp<IRemoteDisplay> display =
-        service->listenForRemoteDisplay(client, iface);
-
-    client->waitUntilDone();
-
-    display->dispose();
-    display.clear();
-
-    enableAudioSubmix(false /* enable */);
-}
-
-static void createFileSource(
-        const AString &addr, int32_t port, const char *path) {
-    sp<ANetworkSession> session = new ANetworkSession;
-    session->start();
-
-    sp<ALooper> looper = new ALooper;
-    looper->start();
-
-    sp<RemoteDisplayClient> client = new RemoteDisplayClient;
-    sp<WifiDisplaySource> source = new WifiDisplaySource(session, client, path);
-    looper->registerHandler(source);
-
-    AString iface = StringPrintf("%s:%d", addr.c_str(), port);
-    CHECK_EQ((status_t)OK, source->start(iface.c_str()));
-
-    client->waitUntilDone();
-
-    source->stop();
-}
-
-}  // namespace android
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    ProcessState::self()->startThreadPool();
-
-    DataSource::RegisterDefaultSniffers();
-
-    AString connectToHost;
-    int32_t connectToPort = -1;
-    AString uri;
-
-    AString listenOnAddr;
-    int32_t listenOnPort = -1;
-
-    AString path;
-
-    bool specialMode = false;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l:u:f:s")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    connectToHost = optarg;
-                    connectToPort = WifiDisplaySource::kWifiDisplayDefaultPort;
-                } else {
-                    connectToHost.setTo(optarg, colonPos - optarg);
-
-                    char *end;
-                    connectToPort = strtol(colonPos + 1, &end, 10);
-
-                    if (*end != '\0' || end == colonPos + 1
-                            || connectToPort < 1 || connectToPort > 65535) {
-                        fprintf(stderr, "Illegal port specified.\n");
-                        exit(1);
-                    }
-                }
-                break;
-            }
-
-            case 'u':
-            {
-                uri = optarg;
-                break;
-            }
-
-            case 'f':
-            {
-                path = optarg;
-                break;
-            }
-
-            case 'l':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    listenOnAddr = optarg;
-                    listenOnPort = WifiDisplaySource::kWifiDisplayDefaultPort;
-                } else {
-                    listenOnAddr.setTo(optarg, colonPos - optarg);
-
-                    char *end;
-                    listenOnPort = strtol(colonPos + 1, &end, 10);
-
-                    if (*end != '\0' || end == colonPos + 1
-                            || listenOnPort < 1 || listenOnPort > 65535) {
-                        fprintf(stderr, "Illegal port specified.\n");
-                        exit(1);
-                    }
-                }
-                break;
-            }
-
-            case 's':
-            {
-                specialMode = true;
-                break;
-            }
-
-            case '?':
-            case 'h':
-            default:
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if (connectToPort >= 0 && listenOnPort >= 0) {
-        fprintf(stderr,
-                "You can connect to a source or create one, "
-                "but not both at the same time.\n");
-        exit(1);
-    }
-
-    if (listenOnPort >= 0) {
-        if (path.empty()) {
-            createSource(listenOnAddr, listenOnPort);
-        } else {
-            createFileSource(listenOnAddr, listenOnPort, path.c_str());
-        }
-
-        exit(0);
-    }
-
-    if (connectToPort < 0 && uri.empty()) {
-        fprintf(stderr,
-                "You need to select either source host or uri.\n");
-
-        exit(1);
-    }
-
-    if (connectToPort >= 0 && !uri.empty()) {
-        fprintf(stderr,
-                "You need to either connect to a wfd host or an rtsp url, "
-                "not both.\n");
-        exit(1);
-    }
-
-    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-    CHECK_EQ(composerClient->initCheck(), (status_t)OK);
-
-    sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
-            ISurfaceComposer::eDisplayIdMain));
-    DisplayInfo info;
-    SurfaceComposerClient::getDisplayInfo(display, &info);
-    ssize_t displayWidth = info.w;
-    ssize_t displayHeight = info.h;
-
-    ALOGV("display is %d x %d\n", displayWidth, displayHeight);
-
-    sp<SurfaceControl> control =
-        composerClient->createSurface(
-                String8("A Surface"),
-                displayWidth,
-                displayHeight,
-                PIXEL_FORMAT_RGB_565,
-                0);
-
-    CHECK(control != NULL);
-    CHECK(control->isValid());
-
-    SurfaceComposerClient::openGlobalTransaction();
-    CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
-    CHECK_EQ(control->show(), (status_t)OK);
-    SurfaceComposerClient::closeGlobalTransaction();
-
-    sp<Surface> surface = control->getSurface();
-    CHECK(surface != NULL);
-
-    sp<ANetworkSession> session = new ANetworkSession;
-    session->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<WifiDisplaySink> sink = new WifiDisplaySink(
-            specialMode ? WifiDisplaySink::FLAG_SPECIAL_MODE : 0 /* flags */,
-            session,
-            surface->getIGraphicBufferProducer());
-
-    looper->registerHandler(sink);
-
-    if (connectToPort >= 0) {
-        sink->start(connectToHost.c_str(), connectToPort);
-    } else {
-        sink->start(uri.c_str());
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    composerClient->dispose();
-
-    return 0;
-}
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3d65c44..3132e54 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -98,6 +98,10 @@
 size_t AudioFlinger::mTeeSinkTrackFrames = kTeeSinkTrackFramesDefault;
 #endif
 
+// In order to avoid invalidating offloaded tracks each time a Visualizer is turned on and off
+// we define a minimum time during which a global effect is considered enabled.
+static const nsecs_t kMinGlobalEffectEnabletimeNs = seconds(7200);
+
 // ----------------------------------------------------------------------------
 
 static int load_audio_interface(const char *if_name, audio_hw_device_t **dev)
@@ -141,7 +145,8 @@
       mMode(AUDIO_MODE_INVALID),
       mBtNrecIsOff(false),
       mIsLowRamDevice(true),
-      mIsDeviceTypeKnown(false)
+      mIsDeviceTypeKnown(false),
+      mGlobalEffectEnableTime(0)
 {
     getpid_cached = getpid();
     char value[PROPERTY_VALUE_MAX];
@@ -259,6 +264,12 @@
         }
     }
 
+    result.append("Notification Clients:\n");
+    for (size_t i = 0; i < mNotificationClients.size(); ++i) {
+        snprintf(buffer, SIZE, "  pid: %d\n", mNotificationClients.keyAt(i));
+        result.append(buffer);
+    }
+
     result.append("Global session refs:\n");
     result.append(" session pid count\n");
     for (size_t i = 0; i < mAudioSessionRefs.size(); i++) {
@@ -437,6 +448,7 @@
         pid_t tid,
         int *sessionId,
         String8& name,
+        int clientUid,
         status_t *status)
 {
     sp<PlaybackThread::Track> track;
@@ -472,6 +484,7 @@
         }
 
         pid_t pid = IPCThreadState::self()->getCallingPid();
+
         client = registerPid_l(pid);
 
         ALOGV("createTrack() sessionId: %d", (sessionId == NULL) ? -2 : *sessionId);
@@ -499,7 +512,7 @@
         ALOGV("createTrack() lSessionId: %d", lSessionId);
 
         track = thread->createTrack_l(client, streamType, sampleRate, format,
-                channelMask, frameCount, sharedBuffer, lSessionId, flags, tid, &lStatus);
+                channelMask, frameCount, sharedBuffer, lSessionId, flags, tid, clientUid, &lStatus);
 
         // move effect chain to this output thread if an effect on same session was waiting
         // for a track to be created
@@ -1206,6 +1219,10 @@
 
 // ----------------------------------------------------------------------------
 
+static bool deviceRequiresCaptureAudioOutputPermission(audio_devices_t inDevice) {
+    return audio_is_remote_submix_device(inDevice);
+}
+
 sp<IAudioRecord> AudioFlinger::openRecord(
         audio_io_handle_t input,
         uint32_t sampleRate,
@@ -1227,6 +1244,7 @@
 
     // check calling permissions
     if (!recordingAllowed()) {
+        ALOGE("openRecord() permission denied: recording not allowed");
         lStatus = PERMISSION_DENIED;
         goto Exit;
     }
@@ -1242,10 +1260,18 @@
         Mutex::Autolock _l(mLock);
         thread = checkRecordThread_l(input);
         if (thread == NULL) {
+            ALOGE("openRecord() checkRecordThread_l failed");
             lStatus = BAD_VALUE;
             goto Exit;
         }
 
+        if (deviceRequiresCaptureAudioOutputPermission(thread->inDevice())
+                && !captureAudioOutputAllowed()) {
+            ALOGE("openRecord() permission denied: capture not allowed");
+            lStatus = PERMISSION_DENIED;
+            goto Exit;
+        }
+
         pid_t pid = IPCThreadState::self()->getCallingPid();
         client = registerPid_l(pid);
 
@@ -1260,8 +1286,12 @@
         }
         // create new record track.
         // The record track uses one track in mHardwareMixerThread by convention.
+        // TODO: the uid should be passed in as a parameter to openRecord
         recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask,
-                                                  frameCount, lSessionId, flags, tid, &lStatus);
+                                                  frameCount, lSessionId,
+                                                  IPCThreadState::self()->getCallingUid(),
+                                                  flags, tid, &lStatus);
+        LOG_ALWAYS_FATAL_IF((recordTrack != 0) != (lStatus == NO_ERROR));
     }
     if (lStatus != NO_ERROR) {
         // remove local strong reference to Client before deleting the RecordTrack so that the
@@ -1835,6 +1865,16 @@
     Mutex::Autolock _l(mLock);
     pid_t caller = IPCThreadState::self()->getCallingPid();
     ALOGV("acquiring %d from %d", audioSession, caller);
+
+    // Ignore requests received from processes not known as notification client. The request
+    // is likely proxied by mediaserver (e.g CameraService) and releaseAudioSessionId() can be
+    // called from a different pid leaving a stale session reference.  Also we don't know how
+    // to clear this reference if the client process dies.
+    if (mNotificationClients.indexOfKey(caller) < 0) {
+        ALOGV("acquireAudioSessionId() unknown client %d for session %d", caller, audioSession);
+        return;
+    }
+
     size_t num = mAudioSessionRefs.size();
     for (size_t i = 0; i< num; i++) {
         AudioSessionRef *ref = mAudioSessionRefs.editItemAt(i);
@@ -1867,7 +1907,9 @@
             return;
         }
     }
-    ALOGW("session id %d not found for pid %d", audioSession, caller);
+    // If the caller is mediaserver it is likely that the session being released was acquired
+    // on behalf of a process not in notification clients and we ignore the warning.
+    ALOGW_IF(caller != getpid_cached, "session id %d not found for pid %d", audioSession, caller);
 }
 
 void AudioFlinger::purgeStaleEffects_l() {
@@ -2068,24 +2110,7 @@
         goto Exit;
     }
 
-    if (io == 0) {
-        if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) {
-            // output must be specified by AudioPolicyManager when using session
-            // AUDIO_SESSION_OUTPUT_STAGE
-            lStatus = BAD_VALUE;
-            goto Exit;
-        } else if (sessionId == AUDIO_SESSION_OUTPUT_MIX) {
-            // if the output returned by getOutputForEffect() is removed before we lock the
-            // mutex below, the call to checkPlaybackThread_l(io) below will detect it
-            // and we will exit safely
-            io = AudioSystem::getOutputForEffect(&desc);
-        }
-    }
-
     {
-        Mutex::Autolock _l(mLock);
-
-
         if (!EffectIsNullUuid(&pDesc->uuid)) {
             // if uuid is specified, request effect descriptor
             lStatus = EffectGetDescriptor(&pDesc->uuid, &desc);
@@ -2158,6 +2183,15 @@
 
         // return effect descriptor
         *pDesc = desc;
+        if (io == 0 && sessionId == AUDIO_SESSION_OUTPUT_MIX) {
+            // if the output returned by getOutputForEffect() is removed before we lock the
+            // mutex below, the call to checkPlaybackThread_l(io) below will detect it
+            // and we will exit safely
+            io = AudioSystem::getOutputForEffect(&desc);
+            ALOGV("createEffect got output %d", io);
+        }
+
+        Mutex::Autolock _l(mLock);
 
         // If output is not specified try to find a matching audio session ID in one of the
         // output threads.
@@ -2165,6 +2199,12 @@
         // because of code checking output when entering the function.
         // Note: io is never 0 when creating an effect on an input
         if (io == 0) {
+            if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) {
+                // output must be specified by AudioPolicyManager when using session
+                // AUDIO_SESSION_OUTPUT_STAGE
+                lStatus = BAD_VALUE;
+                goto Exit;
+            }
             // look for the thread where the specified audio session is present
             for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
                 if (mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId) != 0) {
@@ -2238,9 +2278,7 @@
 
     Mutex::Autolock _dl(dstThread->mLock);
     Mutex::Autolock _sl(srcThread->mLock);
-    moveEffectChain_l(sessionId, srcThread, dstThread, false);
-
-    return NO_ERROR;
+    return moveEffectChain_l(sessionId, srcThread, dstThread, false);
 }
 
 // moveEffectChain_l must be called with both srcThread and dstThread mLocks held
@@ -2267,13 +2305,18 @@
 
     // transfer all effects one by one so that new effect chain is created on new thread with
     // correct buffer sizes and audio parameters and effect engines reconfigured accordingly
-    audio_io_handle_t dstOutput = dstThread->id();
     sp<EffectChain> dstChain;
     uint32_t strategy = 0; // prevent compiler warning
     sp<EffectModule> effect = chain->getEffectFromId_l(0);
+    Vector< sp<EffectModule> > removed;
+    status_t status = NO_ERROR;
     while (effect != 0) {
         srcThread->removeEffect_l(effect);
-        dstThread->addEffect_l(effect);
+        removed.add(effect);
+        status = dstThread->addEffect_l(effect);
+        if (status != NO_ERROR) {
+            break;
+        }
         // removeEffect_l() has stopped the effect if it was active so it must be restarted
         if (effect->state() == EffectModule::ACTIVE ||
                 effect->state() == EffectModule::STOPPING) {
@@ -2285,23 +2328,71 @@
             dstChain = effect->chain().promote();
             if (dstChain == 0) {
                 ALOGW("moveEffectChain_l() cannot get chain from effect %p", effect.get());
-                srcThread->addEffect_l(effect);
-                return NO_INIT;
+                status = NO_INIT;
+                break;
             }
             strategy = dstChain->strategy();
         }
         if (reRegister) {
             AudioSystem::unregisterEffect(effect->id());
             AudioSystem::registerEffect(&effect->desc(),
-                                        dstOutput,
+                                        dstThread->id(),
                                         strategy,
                                         sessionId,
                                         effect->id());
+            AudioSystem::setEffectEnabled(effect->id(), effect->isEnabled());
         }
         effect = chain->getEffectFromId_l(0);
     }
 
-    return NO_ERROR;
+    if (status != NO_ERROR) {
+        for (size_t i = 0; i < removed.size(); i++) {
+            srcThread->addEffect_l(removed[i]);
+            if (dstChain != 0 && reRegister) {
+                AudioSystem::unregisterEffect(removed[i]->id());
+                AudioSystem::registerEffect(&removed[i]->desc(),
+                                            srcThread->id(),
+                                            strategy,
+                                            sessionId,
+                                            removed[i]->id());
+                AudioSystem::setEffectEnabled(effect->id(), effect->isEnabled());
+            }
+        }
+    }
+
+    return status;
+}
+
+bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l()
+{
+    if (mGlobalEffectEnableTime != 0 &&
+            ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
+        return true;
+    }
+
+    for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+        sp<EffectChain> ec =
+                mPlaybackThreads.valueAt(i)->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
+        if (ec != 0 && ec->isNonOffloadableEnabled()) {
+            return true;
+        }
+    }
+    return false;
+}
+
+void AudioFlinger::onNonOffloadableGlobalEffectEnable()
+{
+    Mutex::Autolock _l(mLock);
+
+    mGlobalEffectEnableTime = systemTime();
+
+    for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+        sp<PlaybackThread> t = mPlaybackThreads.valueAt(i);
+        if (t->mType == ThreadBase::OFFLOAD) {
+            t->invalidateTracks(AUDIO_STREAM_MUSIC);
+        }
+    }
+
 }
 
 struct Entry {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index e5e4113..53e238e 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -109,6 +109,7 @@
                                 pid_t tid,
                                 int *sessionId,
                                 String8& name,
+                                int clientUid,
                                 status_t *status);
 
     virtual sp<IAudioRecord> openRecord(
@@ -411,6 +412,8 @@
         virtual status_t    setMediaTimeTransform(const LinearTransform& xform,
                                                   int target);
         virtual status_t    setParameters(const String8& keyValuePairs);
+        virtual status_t    getTimestamp(AudioTimestamp& timestamp);
+        virtual void        signal(); // signal playback thread for a change in control block
 
         virtual status_t onTransact(
             uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
@@ -465,6 +468,9 @@
                 void        removeClient_l(pid_t pid);
                 void        removeNotificationClient(pid_t pid);
 
+                bool isNonOffloadableGlobalEffectEnabled_l();
+                void onNonOffloadableGlobalEffectEnable();
+
     class AudioHwDevice {
     public:
         enum Flags {
@@ -640,6 +646,7 @@
 private:
     bool    mIsLowRamDevice;
     bool    mIsDeviceTypeKnown;
+    nsecs_t mGlobalEffectEnableTime;  // when a global effect was last enabled
 };
 
 #undef INCLUDING_FROM_AUDIOFLINGER_H
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index 900b411..35e816b 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -296,9 +296,14 @@
         return 0;
     }
     // already checked by client, but double-check in case the client wrapper is bypassed
-    if (uint32_t(inputSource) >= AUDIO_SOURCE_CNT) {
+    if (inputSource >= AUDIO_SOURCE_CNT && inputSource != AUDIO_SOURCE_HOTWORD) {
         return 0;
     }
+
+    if ((inputSource == AUDIO_SOURCE_HOTWORD) && !captureHotwordAllowed()) {
+        return 0;
+    }
+
     Mutex::Autolock _l(mLock);
     // the audio_in_acoustics_t parameter is ignored by get_input()
     audio_io_handle_t input = mpAudioPolicy->get_input(mpAudioPolicy, inputSource, samplingRate,
@@ -308,7 +313,10 @@
         return input;
     }
     // create audio pre processors according to input source
-    ssize_t index = mInputSources.indexOfKey(inputSource);
+    audio_source_t aliasSource = (inputSource == AUDIO_SOURCE_HOTWORD) ?
+                                    AUDIO_SOURCE_VOICE_RECOGNITION : inputSource;
+
+    ssize_t index = mInputSources.indexOfKey(aliasSource);
     if (index < 0) {
         return input;
     }
@@ -967,7 +975,6 @@
 {
     ssize_t i;  // not size_t because i will count down to -1
     Vector <AudioCommand *> removedCommands;
-    nsecs_t time = 0;
     command->mTime = systemTime() + milliseconds(delayMs);
 
     // acquire wake lock to make sure delayed commands are processed
@@ -1013,7 +1020,10 @@
             } else {
                 data2->mKeyValuePairs = param2.toString();
             }
-            time = command2->mTime;
+            command->mTime = command2->mTime;
+            // force delayMs to non 0 so that code below does not request to wait for
+            // command status as the command is now delayed
+            delayMs = 1;
         } break;
 
         case SET_VOLUME: {
@@ -1024,7 +1034,10 @@
             ALOGV("Filtering out volume command on output %d for stream %d",
                     data->mIO, data->mStream);
             removedCommands.add(command2);
-            time = command2->mTime;
+            command->mTime = command2->mTime;
+            // force delayMs to non 0 so that code below does not request to wait for
+            // command status as the command is now delayed
+            delayMs = 1;
         } break;
         case START_TONE:
         case STOP_TONE:
@@ -1046,16 +1059,12 @@
     }
     removedCommands.clear();
 
-    // wait for status only if delay is 0 and command time was not modified above
-    if (delayMs == 0 && time == 0) {
+    // wait for status only if delay is 0
+    if (delayMs == 0) {
         command->mWaitStatus = true;
     } else {
         command->mWaitStatus = false;
     }
-    // update command time if modified above
-    if (time != 0) {
-        command->mTime = time;
-    }
 
     // insert command at the right place according to its time stamp
     ALOGV("inserting command: %d at index %d, num commands %d",
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index d5a21a7..a8a5169 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -593,17 +593,6 @@
                 h->setEnabled(enabled);
             }
         }
-//EL_FIXME not sure why this is needed?
-//        sp<ThreadBase> thread = mThread.promote();
-//        if (thread == 0) {
-//            return NO_ERROR;
-//        }
-//
-//        if ((thread->type() == ThreadBase::OFFLOAD) && (enabled)) {
-//            PlaybackThread *p = (PlaybackThread *)thread.get();
-//            ALOGV("setEnabled: Offload, invalidate tracks");
-//            p->invalidateTracks(AUDIO_STREAM_MUSIC);
-//        }
     }
     return NO_ERROR;
 }
@@ -775,6 +764,46 @@
     return enabled;
 }
 
+status_t AudioFlinger::EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io)
+{
+    Mutex::Autolock _l(mLock);
+    if (mStatus != NO_ERROR) {
+        return mStatus;
+    }
+    status_t status = NO_ERROR;
+    if ((mDescriptor.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) != 0) {
+        status_t cmdStatus;
+        uint32_t size = sizeof(status_t);
+        effect_offload_param_t cmd;
+
+        cmd.isOffload = offloaded;
+        cmd.ioHandle = io;
+        status = (*mEffectInterface)->command(mEffectInterface,
+                                              EFFECT_CMD_OFFLOAD,
+                                              sizeof(effect_offload_param_t),
+                                              &cmd,
+                                              &size,
+                                              &cmdStatus);
+        if (status == NO_ERROR) {
+            status = cmdStatus;
+        }
+        mOffloaded = (status == NO_ERROR) ? offloaded : false;
+    } else {
+        if (offloaded) {
+            status = INVALID_OPERATION;
+        }
+        mOffloaded = false;
+    }
+    ALOGV("setOffloaded() offloaded %d io %d status %d", offloaded, io, status);
+    return status;
+}
+
+bool AudioFlinger::EffectModule::isOffloaded() const
+{
+    Mutex::Autolock _l(mLock);
+    return mOffloaded;
+}
+
 void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args)
 {
     const size_t SIZE = 256;
@@ -942,6 +971,23 @@
             thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId());
         }
         mEnabled = false;
+    } else {
+        if (thread != 0) {
+            if (thread->type() == ThreadBase::OFFLOAD) {
+                PlaybackThread *t = (PlaybackThread *)thread.get();
+                Mutex::Autolock _l(t->mLock);
+                t->broadcast_l();
+            }
+            if (!mEffect->isOffloadable()) {
+                if (thread->type() == ThreadBase::OFFLOAD) {
+                    PlaybackThread *t = (PlaybackThread *)thread.get();
+                    t->invalidateTracks(AUDIO_STREAM_MUSIC);
+                }
+                if (mEffect->sessionId() == AUDIO_SESSION_OUTPUT_MIX) {
+                    thread->mAudioFlinger->onNonOffloadableGlobalEffectEnable();
+                }
+            }
+        }
     }
     return status;
 }
@@ -970,6 +1016,11 @@
     sp<ThreadBase> thread = mEffect->thread().promote();
     if (thread != 0) {
         thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId());
+        if (thread->type() == ThreadBase::OFFLOAD) {
+            PlaybackThread *t = (PlaybackThread *)thread.get();
+            Mutex::Autolock _l(t->mLock);
+            t->broadcast_l();
+        }
     }
 
     return status;
@@ -1240,9 +1291,10 @@
     }
     bool isGlobalSession = (mSessionId == AUDIO_SESSION_OUTPUT_MIX) ||
             (mSessionId == AUDIO_SESSION_OUTPUT_STAGE);
-    // always process effects unless no more tracks are on the session and the effect tail
-    // has been rendered
-    bool doProcess = true;
+    // never process effects when:
+    // - on an OFFLOAD thread
+    // - no more tracks are on the session and the effect tail has been rendered
+    bool doProcess = (thread->type() != ThreadBase::OFFLOAD);
     if (!isGlobalSession) {
         bool tracksOnSession = (trackCnt() != 0);
 
@@ -1728,4 +1780,16 @@
     }
 }
 
+bool AudioFlinger::EffectChain::isNonOffloadableEnabled()
+{
+    Mutex::Autolock _l(mLock);
+    size_t size = mEffects.size();
+    for (size_t i = 0; i < size; i++) {
+        if (mEffects[i]->isEnabled() && !mEffects[i]->isOffloadable()) {
+            return true;
+        }
+    }
+    return false;
+}
+
 }; // namespace android
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 0b7fb83..b717857 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -25,6 +25,10 @@
 // state changes or resource modifications. Always respect the following order
 // if multiple mutexes must be acquired to avoid cross deadlock:
 // AudioFlinger -> ThreadBase -> EffectChain -> EffectModule
+// In addition, methods that lock the AudioPolicyService mutex (getOutputForEffect(),
 +// startOutput()...) should never be called with AudioFlinger or ThreadBase mutex locked
+// to avoid cross deadlock with other clients calling AudioPolicyService methods that in turn
+// call AudioFlinger thus locking the same mutexes in the reverse order.
 
 // The EffectModule class is a wrapper object controlling the effect engine implementation
 // in the effect library. It prevents concurrent calls to process() and command() functions
@@ -111,6 +115,10 @@
     bool             purgeHandles();
     void             lock() { mLock.lock(); }
     void             unlock() { mLock.unlock(); }
+    bool             isOffloadable() const
+                        { return (mDescriptor.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) != 0; }
+    status_t         setOffloaded(bool offloaded, audio_io_handle_t io);
+    bool             isOffloaded() const;
 
     void             dump(int fd, const Vector<String16>& args);
 
@@ -144,6 +152,7 @@
                                     // sending disable command.
     uint32_t mDisableWaitCnt;       // current process() calls count during disable period.
     bool     mSuspended;            // effect is suspended: temporarily disabled by framework
+    bool     mOffloaded;            // effect is currently offloaded to the audio DSP
 };
 
 // The EffectHandle class implements the IEffect interface. It provides resources
@@ -303,6 +312,10 @@
 
     void clearInputBuffer();
 
+    // At least one non offloadable effect in the chain is enabled
+    bool isNonOffloadableEnabled();
+
+
     void dump(int fd, const Vector<String16>& args);
 
 protected:
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index ad9f4f2..f27ea17 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -96,6 +96,12 @@
     uint32_t warmupCycles = 0;  // counter of number of loop cycles required to warmup
     NBAIO_Sink* teeSink = NULL; // if non-NULL, then duplicate write() to this non-blocking sink
     NBLog::Writer dummyLogWriter, *logWriter = &dummyLogWriter;
+    uint32_t totalNativeFramesWritten = 0;  // copied to dumpState->mFramesWritten
+
+    // next 2 fields are valid only when timestampStatus == NO_ERROR
+    AudioTimestamp timestamp;
+    uint32_t nativeFramesWrittenButNotPresented = 0;    // the = 0 is to silence the compiler
+    status_t timestampStatus = INVALID_OPERATION;
 
     for (;;) {
 
@@ -192,6 +198,7 @@
                 full = false;
 #endif
                 oldTsValid = !clock_gettime(CLOCK_MONOTONIC, &oldTs);
+                timestampStatus = INVALID_OPERATION;
             } else {
                 sleepNs = FAST_HOT_IDLE_NS;
             }
@@ -382,6 +389,31 @@
                 i = __builtin_ctz(currentTrackMask);
                 currentTrackMask &= ~(1 << i);
                 const FastTrack* fastTrack = &current->mFastTracks[i];
+
+                // Refresh the per-track timestamp
+                if (timestampStatus == NO_ERROR) {
+                    uint32_t trackFramesWrittenButNotPresented;
+                    uint32_t trackSampleRate = fastTrack->mSampleRate;
 +                    // There is currently no sample rate conversion for fast tracks
+                    if (trackSampleRate != 0 && trackSampleRate != sampleRate) {
+                        trackFramesWrittenButNotPresented =
+                                ((int64_t) nativeFramesWrittenButNotPresented * trackSampleRate) /
+                                sampleRate;
+                    } else {
+                        trackFramesWrittenButNotPresented = nativeFramesWrittenButNotPresented;
+                    }
+                    uint32_t trackFramesWritten = fastTrack->mBufferProvider->framesReleased();
+                    // Can't provide an AudioTimestamp before first frame presented,
+                    // or during the brief 32-bit wraparound window
+                    if (trackFramesWritten >= trackFramesWrittenButNotPresented) {
+                        AudioTimestamp perTrackTimestamp;
+                        perTrackTimestamp.mPosition =
+                                trackFramesWritten - trackFramesWrittenButNotPresented;
+                        perTrackTimestamp.mTime = timestamp.mTime;
+                        fastTrack->mBufferProvider->onTimestamp(perTrackTimestamp);
+                    }
+                }
+
                 int name = fastTrackNames[i];
                 ALOG_ASSERT(name >= 0);
                 if (fastTrack->mVolumeProvider != NULL) {
@@ -455,7 +487,8 @@
             dumpState->mWriteSequence++;
             if (framesWritten >= 0) {
                 ALOG_ASSERT((size_t) framesWritten <= frameCount);
-                dumpState->mFramesWritten += framesWritten;
+                totalNativeFramesWritten += framesWritten;
+                dumpState->mFramesWritten = totalNativeFramesWritten;
                 //if ((size_t) framesWritten == frameCount) {
                 //    didFullWrite = true;
                 //}
@@ -464,6 +497,18 @@
             }
             attemptedWrite = true;
             // FIXME count # of writes blocked excessively, CPU usage, etc. for dump
+
+            timestampStatus = outputSink->getTimestamp(timestamp);
+            if (timestampStatus == NO_ERROR) {
+                uint32_t totalNativeFramesPresented = timestamp.mPosition;
+                if (totalNativeFramesPresented <= totalNativeFramesWritten) {
+                    nativeFramesWrittenButNotPresented =
+                        totalNativeFramesWritten - totalNativeFramesPresented;
+                } else {
+                    // HAL reported that more frames were presented than were written
+                    timestampStatus = INVALID_OPERATION;
+                }
+            }
         }
 
         // To be exactly periodic, compute the next sleep time based on current time.
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 5600411c..43b77f3 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -31,6 +31,7 @@
                                 size_t frameCount,
                                 const sp<IMemory>& sharedBuffer,
                                 int sessionId,
+                                int uid,
                                 IAudioFlinger::track_flags_t flags);
     virtual             ~Track();
 
@@ -59,6 +60,8 @@
             void        setMainBuffer(int16_t *buffer) { mMainBuffer = buffer; }
             int16_t     *mainBuffer() const { return mMainBuffer; }
             int         auxEffectId() const { return mAuxEffectId; }
+    virtual status_t    getTimestamp(AudioTimestamp& timestamp);
+            void        signal();
 
 // implement FastMixerState::VolumeProvider interface
     virtual uint32_t    getVolumeLR();
@@ -80,7 +83,9 @@
                                    int64_t pts = kInvalidPTS);
     // releaseBuffer() not overridden
 
+    // ExtendedAudioBufferProvider interface
     virtual size_t framesReady() const;
+    virtual size_t framesReleased() const;
 
     bool isPausing() const { return mState == PAUSING; }
     bool isPaused() const { return mState == PAUSED; }
@@ -114,7 +119,10 @@
     enum {FS_INVALID, FS_FILLING, FS_FILLED, FS_ACTIVE};
     mutable uint8_t     mFillingUpStatus;
     int8_t              mRetryCount;
-    const sp<IMemory>   mSharedBuffer;
+
+    // see comment at AudioFlinger::PlaybackThread::Track::~Track for why this can't be const
+    sp<IMemory>         mSharedBuffer;
+
     bool                mResetDone;
     const audio_stream_type_t mStreamType;
     int                 mName;      // track name on the normal mixer,
@@ -158,7 +166,8 @@
                                  audio_channel_mask_t channelMask,
                                  size_t frameCount,
                                  const sp<IMemory>& sharedBuffer,
-                                 int sessionId);
+                                 int sessionId,
+                                 int uid);
     virtual ~TimedTrack();
 
     class TimedBuffer {
@@ -201,7 +210,8 @@
                audio_channel_mask_t channelMask,
                size_t frameCount,
                const sp<IMemory>& sharedBuffer,
-               int sessionId);
+               int sessionId,
+               int uid);
 
     void timedYieldSamples_l(AudioBufferProvider::Buffer* buffer);
     void timedYieldSilence_l(uint32_t numFrames,
@@ -248,7 +258,8 @@
                                 uint32_t sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
-                                size_t frameCount);
+                                size_t frameCount,
+                                int uid);
     virtual             ~OutputTrack();
 
     virtual status_t    start(AudioSystem::sync_event_t event =
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index ffe3e9f..57de568 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -28,7 +28,8 @@
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
                                 size_t frameCount,
-                                int sessionId);
+                                int sessionId,
+                                int uid);
     virtual             ~RecordTrack();
 
     virtual status_t    start(AudioSystem::sync_event_t event, int triggerSession);
@@ -36,6 +37,7 @@
 
             void        destroy();
 
+            void        invalidate();
             // clear the buffer overflow flag
             void        clearOverflow() { mOverflow = false; }
             // set the buffer overflow flag and return previous value
diff --git a/services/audioflinger/ServiceUtilities.cpp b/services/audioflinger/ServiceUtilities.cpp
index d15bd04..152455d 100644
--- a/services/audioflinger/ServiceUtilities.cpp
+++ b/services/audioflinger/ServiceUtilities.cpp
@@ -34,6 +34,22 @@
     return ok;
 }
 
+bool captureAudioOutputAllowed() {
+    if (getpid_cached == IPCThreadState::self()->getCallingPid()) return true;
+    static const String16 sCaptureAudioOutput("android.permission.CAPTURE_AUDIO_OUTPUT");
+    // don't use PermissionCache; this is not a system permission
+    bool ok = checkCallingPermission(sCaptureAudioOutput);
+    if (!ok) ALOGE("Request requires android.permission.CAPTURE_AUDIO_OUTPUT");
+    return ok;
+}
+
+bool captureHotwordAllowed() {
+    static const String16 sCaptureHotwordAllowed("android.permission.CAPTURE_AUDIO_HOTWORD");
+    bool ok = checkCallingPermission(sCaptureHotwordAllowed);
+    if (!ok) ALOGE("android.permission.CAPTURE_AUDIO_HOTWORD");
+    return ok;
+}
+
 bool settingsAllowed() {
     if (getpid_cached == IPCThreadState::self()->getCallingPid()) return true;
     static const String16 sAudioSettings("android.permission.MODIFY_AUDIO_SETTINGS");
diff --git a/services/audioflinger/ServiceUtilities.h b/services/audioflinger/ServiceUtilities.h
index 80cecba..531bc56 100644
--- a/services/audioflinger/ServiceUtilities.h
+++ b/services/audioflinger/ServiceUtilities.h
@@ -21,6 +21,8 @@
 extern pid_t getpid_cached;
 
 bool recordingAllowed();
+bool captureAudioOutputAllowed();
+bool captureHotwordAllowed();
 bool settingsAllowed();
 bool dumpAllowed();
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 2c2931f..bf85b51 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -109,6 +109,9 @@
 // maximum normal mix buffer size
 static const uint32_t kMaxNormalMixBufferSizeMs = 24;
 
+// Offloaded output thread standby delay: allows track transition without going to standby
+static const nsecs_t kOffloadStandbyDelayNs = seconds(1);
+
 // Whether to use fast mixer
 static const enum {
     FastMixer_Never,    // never initialize or use: for debugging only
@@ -269,6 +272,7 @@
         // mSampleRate, mFrameCount, mChannelMask, mChannelCount, mFrameSize, and mFormat are
         // set by PlaybackThread::readOutputParameters() or RecordThread::readInputParameters()
         mParamStatus(NO_ERROR),
+        //FIXME: mStandby should be true here. Is this some kind of hack?
         mStandby(false), mOutDevice(outDevice), mInDevice(inDevice),
         mAudioSource(AUDIO_SOURCE_DEFAULT), mId(id),
         // mName will be set by concrete (non-virtual) subclass
@@ -473,31 +477,49 @@
     }
 }
 
-void AudioFlinger::ThreadBase::acquireWakeLock()
+void AudioFlinger::ThreadBase::acquireWakeLock(int uid)
 {
     Mutex::Autolock _l(mLock);
-    acquireWakeLock_l();
+    acquireWakeLock_l(uid);
 }
 
-void AudioFlinger::ThreadBase::acquireWakeLock_l()
+String16 AudioFlinger::ThreadBase::getWakeLockTag()
 {
-    if (mPowerManager == 0) {
-        // use checkService() to avoid blocking if power service is not up yet
-        sp<IBinder> binder =
-            defaultServiceManager()->checkService(String16("power"));
-        if (binder == 0) {
-            ALOGW("Thread %s cannot connect to the power manager service", mName);
-        } else {
-            mPowerManager = interface_cast<IPowerManager>(binder);
-            binder->linkToDeath(mDeathRecipient);
-        }
+    switch (mType) {
+        case MIXER:
+            return String16("AudioMix");
+        case DIRECT:
+            return String16("AudioDirectOut");
+        case DUPLICATING:
+            return String16("AudioDup");
+        case RECORD:
+            return String16("AudioIn");
+        case OFFLOAD:
+            return String16("AudioOffload");
+        default:
+            ALOG_ASSERT(false);
+            return String16("AudioUnknown");
     }
+}
+
+void AudioFlinger::ThreadBase::acquireWakeLock_l(int uid)
+{
+    getPowerManager_l();
     if (mPowerManager != 0) {
         sp<IBinder> binder = new BBinder();
-        status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
-                                                         binder,
-                                                         String16(mName),
-                                                         String16("media"));
+        status_t status;
+        if (uid >= 0) {
+            status = mPowerManager->acquireWakeLockWithUid(POWERMANAGER_PARTIAL_WAKE_LOCK,
+                    binder,
+                    getWakeLockTag(),
+                    String16("media"),
+                    uid);
+        } else {
+            status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
+                    binder,
+                    getWakeLockTag(),
+                    String16("media"));
+        }
         if (status == NO_ERROR) {
             mWakeLockToken = binder;
         }
@@ -522,6 +544,41 @@
     }
 }
 
+void AudioFlinger::ThreadBase::updateWakeLockUids(const SortedVector<int> &uids) {
+    Mutex::Autolock _l(mLock);
+    updateWakeLockUids_l(uids);
+}
+
+void AudioFlinger::ThreadBase::getPowerManager_l() {
+
+    if (mPowerManager == 0) {
+        // use checkService() to avoid blocking if power service is not up yet
+        sp<IBinder> binder =
+            defaultServiceManager()->checkService(String16("power"));
+        if (binder == 0) {
+            ALOGW("Thread %s cannot connect to the power manager service", mName);
+        } else {
+            mPowerManager = interface_cast<IPowerManager>(binder);
+            binder->linkToDeath(mDeathRecipient);
+        }
+    }
+}
+
+void AudioFlinger::ThreadBase::updateWakeLockUids_l(const SortedVector<int> &uids) {
+
+    getPowerManager_l();
+    if (mWakeLockToken == NULL) {
+        ALOGE("no wake lock to update!");
+        return;
+    }
+    if (mPowerManager != 0) {
+        sp<IBinder> binder = new BBinder();
+        status_t status;
+        status = mPowerManager->updateWakeLockUids(mWakeLockToken, uids.size(), uids.array());
+        ALOGV("acquireWakeLock_l() %s status %d", mName, status);
+    }
+}
+
 void AudioFlinger::ThreadBase::clearPowerManager()
 {
     Mutex::Autolock _l(mLock);
@@ -699,14 +756,22 @@
         goto Exit;
     }
 
-    // Do not allow effects with session ID 0 on direct output or duplicating threads
-    // TODO: add rule for hw accelerated effects on direct outputs with non PCM format
-    if (sessionId == AUDIO_SESSION_OUTPUT_MIX && mType != MIXER) {
-        ALOGW("createEffect_l() Cannot add auxiliary effect %s to session %d",
-                desc->name, sessionId);
-        lStatus = BAD_VALUE;
-        goto Exit;
+    // Allow global effects only on offloaded and mixer threads
+    if (sessionId == AUDIO_SESSION_OUTPUT_MIX) {
+        switch (mType) {
+        case MIXER:
+        case OFFLOAD:
+            break;
+        case DIRECT:
+        case DUPLICATING:
+        case RECORD:
+        default:
+            ALOGW("createEffect_l() Cannot add global effect %s on thread %s", desc->name, mName);
+            lStatus = BAD_VALUE;
+            goto Exit;
+        }
     }
+
     // Only Pre processor effects are allowed on input threads and only on input threads
     if ((mType == RECORD) != ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC)) {
         ALOGW("createEffect_l() effect %s (flags %08x) created on wrong thread type %d",
@@ -749,6 +814,8 @@
             if (lStatus != NO_ERROR) {
                 goto Exit;
             }
+            effect->setOffloaded(mType == OFFLOAD, mId);
+
             lStatus = chain->addEffect_l(effect);
             if (lStatus != NO_ERROR) {
                 goto Exit;
@@ -810,6 +877,10 @@
     sp<EffectChain> chain = getEffectChain_l(sessionId);
     bool chainCreated = false;
 
+    ALOGD_IF((mType == OFFLOAD) && !effect->isOffloadable(),
+             "addEffect_l() on offloaded thread %p: effect %s does not support offload flags %x",
+                    this, effect->desc().name, effect->desc().flags);
+
     if (chain == 0) {
         // create a new chain for this session
         ALOGV("addEffect_l() new effect chain for session %d", sessionId);
@@ -826,6 +897,8 @@
         return BAD_VALUE;
     }
 
+    effect->setOffloaded(mType == OFFLOAD, mId);
+
     status_t status = chain->addEffect_l(effect);
     if (status != NO_ERROR) {
         if (chainCreated) {
@@ -930,6 +1003,7 @@
     :   ThreadBase(audioFlinger, id, device, AUDIO_DEVICE_NONE, type),
         mNormalFrameCount(0), mMixBuffer(NULL),
         mAllocMixBuffer(NULL), mSuspended(0), mBytesWritten(0),
+        mActiveTracksGeneration(0),
         // mStreamTypes[] initialized in constructor body
         mOutput(output),
         mLastWriteTime(0), mNumWrites(0), mNumDelayedWrites(0), mInWrite(false),
@@ -939,11 +1013,14 @@
         mBytesRemaining(0),
         mCurrentWriteLength(0),
         mUseAsyncWrite(false),
-        mWriteBlocked(false),
-        mDraining(false),
+        mWriteAckSequence(0),
+        mDrainSequence(0),
+        mSignalPending(false),
         mScreenState(AudioFlinger::mScreenState),
         // index 0 is reserved for normal mixer's submix
-        mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1)
+        mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1),
+        // mLatchD, mLatchQ,
+        mLatchDValid(false), mLatchQValid(false)
 {
     snprintf(mName, kNameLength, "AudioOut_%X", id);
     mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mName);
@@ -1110,6 +1187,7 @@
         int sessionId,
         IAudioFlinger::track_flags_t *flags,
         pid_t tid,
+        int uid,
         status_t *status)
 {
     sp<Track> track;
@@ -1243,10 +1321,10 @@
 
         if (!isTimed) {
             track = new Track(this, client, streamType, sampleRate, format,
-                    channelMask, frameCount, sharedBuffer, sessionId, *flags);
+                    channelMask, frameCount, sharedBuffer, sessionId, uid, *flags);
         } else {
             track = TimedTrack::create(this, client, streamType, sampleRate, format,
-                    channelMask, frameCount, sharedBuffer, sessionId);
+                    channelMask, frameCount, sharedBuffer, sessionId, uid);
         }
         if (track == 0 || track->getCblk() == NULL || track->name() < 0) {
             lStatus = NO_MEMORY;
@@ -1327,14 +1405,14 @@
 {
     Mutex::Autolock _l(mLock);
     mStreamTypes[stream].volume = value;
-    signal_l();
+    broadcast_l();
 }
 
 void AudioFlinger::PlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
 {
     Mutex::Autolock _l(mLock);
     mStreamTypes[stream].mute = muted;
-    signal_l();
+    broadcast_l();
 }
 
 float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) const
@@ -1382,6 +1460,9 @@
         track->mResetDone = false;
         track->mPresentationCompleteFrames = 0;
         mActiveTracks.add(track);
+        mWakeLockUids.add(track->uid());
+        mActiveTracksGeneration++;
+        mLatestActiveTrack = track;
         sp<EffectChain> chain = getEffectChain_l(track->sessionId());
         if (chain != 0) {
             ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(),
@@ -1392,8 +1473,8 @@
         status = NO_ERROR;
     }
 
-    ALOGV("mWaitWorkCV.broadcast");
-    mWaitWorkCV.broadcast();
+    ALOGV("signal playback thread");
+    broadcast_l();
 
     return status;
 }
@@ -1434,14 +1515,14 @@
     }
 }
 
-void AudioFlinger::PlaybackThread::signal_l()
+void AudioFlinger::PlaybackThread::broadcast_l()
 {
     // Thread could be blocked waiting for async
     // so signal it to handle state changes immediately
     // If threadLoop is currently unlocked a signal of mWaitWorkCV will
     // be lost so we also flag to prevent it blocking on mWaitWorkCV
     mSignalPending = true;
-    mWaitWorkCV.signal();
+    mWaitWorkCV.broadcast();
 }
 
 String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys)
@@ -1489,29 +1570,31 @@
 void AudioFlinger::PlaybackThread::writeCallback()
 {
     ALOG_ASSERT(mCallbackThread != 0);
-    mCallbackThread->setWriteBlocked(false);
+    mCallbackThread->resetWriteBlocked();
 }
 
 void AudioFlinger::PlaybackThread::drainCallback()
 {
     ALOG_ASSERT(mCallbackThread != 0);
-    mCallbackThread->setDraining(false);
+    mCallbackThread->resetDraining();
 }
 
-void AudioFlinger::PlaybackThread::setWriteBlocked(bool value)
+void AudioFlinger::PlaybackThread::resetWriteBlocked(uint32_t sequence)
 {
     Mutex::Autolock _l(mLock);
-    mWriteBlocked = value;
-    if (!value) {
+    // reject out of sequence requests
+    if ((mWriteAckSequence & 1) && (sequence == mWriteAckSequence)) {
+        mWriteAckSequence &= ~1;
         mWaitWorkCV.signal();
     }
 }
 
-void AudioFlinger::PlaybackThread::setDraining(bool value)
+void AudioFlinger::PlaybackThread::resetDraining(uint32_t sequence)
 {
     Mutex::Autolock _l(mLock);
-    mDraining = value;
-    if (!value) {
+    // reject out of sequence requests
+    if ((mDrainSequence & 1) && (sequence == mDrainSequence)) {
+        mDrainSequence &= ~1;
         mWaitWorkCV.signal();
     }
 }
@@ -1570,6 +1653,7 @@
         if (mOutput->stream->set_callback(mOutput->stream,
                                       AudioFlinger::PlaybackThread::asyncCallback, this) == 0) {
             mUseAsyncWrite = true;
+            mCallbackThread = new AudioFlinger::AsyncCallbackThread(this);
         }
     }
 
@@ -1818,29 +1902,41 @@
         } else {
             bytesWritten = framesWritten;
         }
+        status_t status = mNormalSink->getTimestamp(mLatchD.mTimestamp);
+        if (status == NO_ERROR) {
+            size_t totalFramesWritten = mNormalSink->framesWritten();
+            if (totalFramesWritten >= mLatchD.mTimestamp.mPosition) {
+                mLatchD.mUnpresentedFrames = totalFramesWritten - mLatchD.mTimestamp.mPosition;
+                mLatchDValid = true;
+            }
+        }
     // otherwise use the HAL / AudioStreamOut directly
     } else {
         // Direct output and offload threads
         size_t offset = (mCurrentWriteLength - mBytesRemaining) / sizeof(int16_t);
         if (mUseAsyncWrite) {
-            mWriteBlocked = true;
+            ALOGW_IF(mWriteAckSequence & 1, "threadLoop_write(): out of sequence write request");
+            mWriteAckSequence += 2;
+            mWriteAckSequence |= 1;
             ALOG_ASSERT(mCallbackThread != 0);
-            mCallbackThread->setWriteBlocked(true);
+            mCallbackThread->setWriteBlocked(mWriteAckSequence);
         }
+        // FIXME We should have an implementation of timestamps for direct output threads.
+        // They are used e.g for multichannel PCM playback over HDMI.
         bytesWritten = mOutput->stream->write(mOutput->stream,
                                                    mMixBuffer + offset, mBytesRemaining);
         if (mUseAsyncWrite &&
                 ((bytesWritten < 0) || (bytesWritten == (ssize_t)mBytesRemaining))) {
             // do not wait for async callback in case of error of full write
-            mWriteBlocked = false;
+            mWriteAckSequence &= ~1;
             ALOG_ASSERT(mCallbackThread != 0);
-            mCallbackThread->setWriteBlocked(false);
+            mCallbackThread->setWriteBlocked(mWriteAckSequence);
         }
     }
 
     mNumWrites++;
     mInWrite = false;
-
+    mStandby = false;
     return bytesWritten;
 }
 
@@ -1849,9 +1945,10 @@
     if (mOutput->stream->drain) {
         ALOGV("draining %s", (mMixerStatus == MIXER_DRAIN_TRACK) ? "early" : "full");
         if (mUseAsyncWrite) {
-            mDraining = true;
+            ALOGW_IF(mDrainSequence & 1, "threadLoop_drain(): out of sequence drain request");
+            mDrainSequence |= 1;
             ALOG_ASSERT(mCallbackThread != 0);
-            mCallbackThread->setDraining(true);
+            mCallbackThread->setDraining(mDrainSequence);
         }
         mOutput->stream->drain(mOutput->stream,
             (mMixerStatus == MIXER_DRAIN_TRACK) ? AUDIO_DRAIN_EARLY_NOTIFY
@@ -2061,6 +2158,8 @@
     // FIXME could this be made local to while loop?
     writeFrames = 0;
 
+    int lastGeneration = 0;
+
     cacheParameters_l();
     sleepTime = idleSleepTime;
 
@@ -2078,6 +2177,8 @@
     // and then that string will be logged at the next convenient opportunity.
     const char *logString = NULL;
 
+    checkSilentMode_l();
+
     while (!exitPending())
     {
         cpuStats.sample(myName);
@@ -2096,12 +2197,17 @@
                 logString = NULL;
             }
 
+            if (mLatchDValid) {
+                mLatchQ = mLatchD;
+                mLatchDValid = false;
+                mLatchQValid = true;
+            }
+
             if (checkForNewParameters_l()) {
                 cacheParameters_l();
             }
 
             saveOutputTracks();
-
             if (mSignalPending) {
                 // A signal was raised while we were unlocked
                 mSignalPending = false;
@@ -2110,18 +2216,18 @@
                     break;
                 }
                 releaseWakeLock_l();
+                mWakeLockUids.clear();
+                mActiveTracksGeneration++;
                 ALOGV("wait async completion");
                 mWaitWorkCV.wait(mLock);
                 ALOGV("async completion/wake");
                 acquireWakeLock_l();
-                if (exitPending()) {
-                    break;
-                }
-                if (!mActiveTracks.size() && (systemTime() > standbyTime)) {
-                    continue;
-                }
+                standbyTime = systemTime() + standbyDelay;
                 sleepTime = 0;
-            } else if ((!mActiveTracks.size() && systemTime() > standbyTime) ||
+
+                continue;
+            }
+            if ((!mActiveTracks.size() && systemTime() > standbyTime) ||
                                    isSuspended()) {
                 // put audio hardware into standby after short delay
                 if (shouldStandby_l()) {
@@ -2142,6 +2248,8 @@
                     }
 
                     releaseWakeLock_l();
+                    mWakeLockUids.clear();
+                    mActiveTracksGeneration++;
                     // wait until we have something to do...
                     ALOGV("%s going to sleep", myName.string());
                     mWaitWorkCV.wait(mLock);
@@ -2163,15 +2271,21 @@
                     continue;
                 }
             }
-
             // mMixerStatusIgnoringFastTracks is also updated internally
             mMixerStatus = prepareTracks_l(&tracksToRemove);
 
+            // compare with previously applied list
+            if (lastGeneration != mActiveTracksGeneration) {
+                // update wakelock
+                updateWakeLockUids_l(mWakeLockUids);
+                lastGeneration = mActiveTracksGeneration;
+            }
+
             // prevent any changes in effect chain list and in each effect chain
             // during mixing and effect process as the audio buffers could be deleted
             // or modified if an effect is created or deleted
             lockEffectChains_l(effectChains);
-        }
+        } // mLock scope ends
 
         if (mBytesRemaining == 0) {
             mCurrentWriteLength = 0;
@@ -2196,12 +2310,21 @@
             }
 
             // only process effects if we're going to write
-            if (sleepTime == 0) {
+            if (sleepTime == 0 && mType != OFFLOAD) {
                 for (size_t i = 0; i < effectChains.size(); i ++) {
                     effectChains[i]->process_l();
                 }
             }
         }
+        // Process effect chains for offloaded thread even if no audio
+        // was read from audio track: process only updates effect state
 +        // and thus does not have to be synchronized with audio writes but may have
+        // to be called while waiting for async write callback
+        if (mType == OFFLOAD) {
+            for (size_t i = 0; i < effectChains.size(); i ++) {
+                effectChains[i]->process_l();
+            }
+        }
 
         // enable changes in effect chain
         unlockEffectChains(effectChains);
@@ -2236,7 +2359,6 @@
                 }
 }
 
-                mStandby = false;
             } else {
                 usleep(sleepTime);
             }
@@ -2272,6 +2394,8 @@
     }
 
     releaseWakeLock();
+    mWakeLockUids.clear();
+    mActiveTracksGeneration++;
 
     ALOGV("Thread %p type %d exiting", this, mType);
     return false;
@@ -2285,6 +2409,8 @@
         for (size_t i=0 ; i<count ; i++) {
             const sp<Track>& track = tracksToRemove.itemAt(i);
             mActiveTracks.remove(track);
+            mWakeLockUids.remove(track->uid());
+            mActiveTracksGeneration++;
             ALOGV("removeTracks_l removing track on session %d", track->sessionId());
             sp<EffectChain> chain = getEffectChain_l(track->sessionId());
             if (chain != 0) {
@@ -2300,6 +2426,22 @@
 
 }
 
+status_t AudioFlinger::PlaybackThread::getTimestamp_l(AudioTimestamp& timestamp)
+{
+    if (mNormalSink != 0) {
+        return mNormalSink->getTimestamp(timestamp);
+    }
+    if (mType == OFFLOAD && mOutput->stream->get_presentation_position) {
+        uint64_t position64;
+        int ret = mOutput->stream->get_presentation_position(
+                                                mOutput->stream, &position64, &timestamp.mTime);
+        if (ret == 0) {
+            timestamp.mPosition = (uint32_t)position64;
+            return NO_ERROR;
+        }
+    }
+    return INVALID_OPERATION;
+}
 // ----------------------------------------------------------------------------
 
 AudioFlinger::MixerThread::MixerThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
@@ -2595,11 +2737,12 @@
     ALOGV("Audio hardware entering standby, mixer %p, suspend count %d", this, mSuspended);
     mOutput->stream->common.standby(&mOutput->stream->common);
     if (mUseAsyncWrite != 0) {
-        mWriteBlocked = false;
-        mDraining = false;
+        // discard any pending drain or write ack by incrementing sequence
+        mWriteAckSequence = (mWriteAckSequence + 2) & ~1;
+        mDrainSequence = (mDrainSequence + 2) & ~1;
         ALOG_ASSERT(mCallbackThread != 0);
-        mCallbackThread->setWriteBlocked(false);
-        mCallbackThread->setDraining(false);
+        mCallbackThread->setWriteBlocked(mWriteAckSequence);
+        mCallbackThread->setDraining(mDrainSequence);
     }
 }
 
@@ -3463,6 +3606,12 @@
 
         Track* const track = t.get();
         audio_track_cblk_t* cblk = track->cblk();
+        // Only consider last track started for volume and mixer state control.
+        // In theory an older track could underrun and restart after the new one starts
+        // but as we only care about the transition phase between two tracks on a
+        // direct output, it is not a problem to ignore the underrun case.
+        sp<Track> l = mLatestActiveTrack.promote();
+        bool last = l.get() == track;
 
         // The first time a track is added we wait
         // for all its buffers to be filled before processing it
@@ -3472,11 +3621,6 @@
         } else {
             minFrames = 1;
         }
-        // Only consider last track started for volume and mixer state control.
-        // This is the last entry in mActiveTracks unless a track underruns.
-        // As we only care about the transition phase between two tracks on a
-        // direct output, it is not a problem to ignore the underrun case.
-        bool last = (i == (count - 1));
 
         if ((track->framesReady() >= minFrames) && track->isReady() &&
                 !track->isPaused() && !track->isTerminated())
@@ -3485,7 +3629,8 @@
 
             if (track->mFillingUpStatus == Track::FS_FILLED) {
                 track->mFillingUpStatus = Track::FS_ACTIVE;
-                mLeftVolFloat = mRightVolFloat = 0;
+                // make sure processVolume_l() will apply new volume even if 0
+                mLeftVolFloat = mRightVolFloat = -1.0;
                 if (track->mState == TrackBase::RESUMING) {
                     track->mState = TrackBase::ACTIVE;
                 }
@@ -3502,7 +3647,7 @@
         } else {
             // clear effect chain input buffer if the last active track started underruns
             // to avoid sending previous audio buffer again to effects
-            if (!mEffectChains.isEmpty() && (i == (count -1))) {
+            if (!mEffectChains.isEmpty() && last) {
                 mEffectChains[0]->clearInputBuffer();
             }
 
@@ -3514,7 +3659,8 @@
                 // TODO: implement behavior for compressed audio
                 size_t audioHALFrames = (latency_l() * mSampleRate) / 1000;
                 size_t framesWritten = mBytesWritten / mFrameSize;
-                if (mStandby || track->presentationComplete(framesWritten, audioHALFrames)) {
+                if (mStandby || !last ||
+                        track->presentationComplete(framesWritten, audioHALFrames)) {
                     if (track->isStopped()) {
                         track->reset();
                     }
@@ -3527,6 +3673,9 @@
                 if (--(track->mRetryCount) <= 0) {
                     ALOGV("BUFFER TIMEOUT: remove(%d) from active list", track->name());
                     tracksToRemove->add(track);
+                    // indicate to client process that the track was disabled because of underrun;
+                    // it will then automatically call start() when data is available
+                    android_atomic_or(CBLK_DISABLED, &cblk->mFlags);
                 } else if (last) {
                     mixerStatus = MIXER_TRACKS_ENABLED;
                 }
@@ -3677,17 +3826,21 @@
 
     // use shorter standby delay as on normal output to release
     // hardware resources as soon as possible
-    standbyDelay = microseconds(activeSleepTime*2);
+    if (audio_is_linear_pcm(mFormat)) {
+        standbyDelay = microseconds(activeSleepTime*2);
+    } else {
+        standbyDelay = kOffloadStandbyDelayNs;
+    }
 }
 
 // ----------------------------------------------------------------------------
 
 AudioFlinger::AsyncCallbackThread::AsyncCallbackThread(
-        const sp<AudioFlinger::OffloadThread>& offloadThread)
+        const wp<AudioFlinger::PlaybackThread>& playbackThread)
     :   Thread(false /*canCallJava*/),
-        mOffloadThread(offloadThread),
-        mWriteBlocked(false),
-        mDraining(false)
+        mPlaybackThread(playbackThread),
+        mWriteAckSequence(0),
+        mDrainSequence(0)
 {
 }
 
@@ -3703,8 +3856,8 @@
 bool AudioFlinger::AsyncCallbackThread::threadLoop()
 {
     while (!exitPending()) {
-        bool writeBlocked;
-        bool draining;
+        uint32_t writeAckSequence;
+        uint32_t drainSequence;
 
         {
             Mutex::Autolock _l(mLock);
@@ -3712,18 +3865,21 @@
             if (exitPending()) {
                 break;
             }
-            writeBlocked = mWriteBlocked;
-            draining = mDraining;
-            ALOGV("AsyncCallbackThread mWriteBlocked %d mDraining %d", mWriteBlocked, mDraining);
+            ALOGV("AsyncCallbackThread mWriteAckSequence %d mDrainSequence %d",
+                  mWriteAckSequence, mDrainSequence);
+            writeAckSequence = mWriteAckSequence;
+            mWriteAckSequence &= ~1;
+            drainSequence = mDrainSequence;
+            mDrainSequence &= ~1;
         }
         {
-            sp<AudioFlinger::OffloadThread> offloadThread = mOffloadThread.promote();
-            if (offloadThread != 0) {
-                if (writeBlocked == false) {
-                    offloadThread->setWriteBlocked(false);
+            sp<AudioFlinger::PlaybackThread> playbackThread = mPlaybackThread.promote();
+            if (playbackThread != 0) {
+                if (writeAckSequence & 1) {
+                    playbackThread->resetWriteBlocked(writeAckSequence >> 1);
                 }
-                if (draining == false) {
-                    offloadThread->setDraining(false);
+                if (drainSequence & 1) {
+                    playbackThread->resetDraining(drainSequence >> 1);
                 }
             }
         }
@@ -3739,20 +3895,36 @@
     mWaitWorkCV.broadcast();
 }
 
-void AudioFlinger::AsyncCallbackThread::setWriteBlocked(bool value)
+void AudioFlinger::AsyncCallbackThread::setWriteBlocked(uint32_t sequence)
 {
     Mutex::Autolock _l(mLock);
-    mWriteBlocked = value;
-    if (!value) {
+    // bit 0 is cleared
+    mWriteAckSequence = sequence << 1;
+}
+
+void AudioFlinger::AsyncCallbackThread::resetWriteBlocked()
+{
+    Mutex::Autolock _l(mLock);
+    // ignore unexpected callbacks
+    if (mWriteAckSequence & 2) {
+        mWriteAckSequence |= 1;
         mWaitWorkCV.signal();
     }
 }
 
-void AudioFlinger::AsyncCallbackThread::setDraining(bool value)
+void AudioFlinger::AsyncCallbackThread::setDraining(uint32_t sequence)
 {
     Mutex::Autolock _l(mLock);
-    mDraining = value;
-    if (!value) {
+    // bit 0 is cleared
+    mDrainSequence = sequence << 1;
+}
+
+void AudioFlinger::AsyncCallbackThread::resetDraining()
+{
+    Mutex::Autolock _l(mLock);
+    // ignore unexpected callbacks
+    if (mDrainSequence & 2) {
+        mDrainSequence |= 1;
         mWaitWorkCV.signal();
     }
 }
@@ -3763,14 +3935,11 @@
         AudioStreamOut* output, audio_io_handle_t id, uint32_t device)
     :   DirectOutputThread(audioFlinger, output, id, device, OFFLOAD),
         mHwPaused(false),
+        mFlushPending(false),
         mPausedBytesRemaining(0)
 {
-    mCallbackThread = new AudioFlinger::AsyncCallbackThread(this);
-}
-
-AudioFlinger::OffloadThread::~OffloadThread()
-{
-    mPreviousTrack.clear();
+    //FIXME: mStandby should be set to true by ThreadBase constructor
+    mStandby = true;
 }
 
 void AudioFlinger::OffloadThread::threadLoop_exit()
@@ -3790,14 +3959,14 @@
     Vector< sp<Track> > *tracksToRemove
 )
 {
-    ALOGV("OffloadThread::prepareTracks_l");
     size_t count = mActiveTracks.size();
 
     mixer_state mixerStatus = MIXER_IDLE;
-    if (mFlushPending) {
-        flushHw_l();
-        mFlushPending = false;
-    }
+    bool doHwPause = false;
+    bool doHwResume = false;
+
+    ALOGV("OffloadThread::prepareTracks_l active tracks %d", count);
+
     // find out which tracks need to be processed
     for (size_t i = 0; i < count; i++) {
         sp<Track> t = mActiveTracks[i].promote();
@@ -3807,29 +3976,18 @@
         }
         Track* const track = t.get();
         audio_track_cblk_t* cblk = track->cblk();
-        if (mPreviousTrack != NULL) {
-            if (t != mPreviousTrack) {
-                // Flush any data still being written from last track
-                mBytesRemaining = 0;
-                if (mPausedBytesRemaining) {
-                    // Last track was paused so we also need to flush saved
-                    // mixbuffer state and invalidate track so that it will
-                    // re-submit that unwritten data when it is next resumed
-                    mPausedBytesRemaining = 0;
-                    // Invalidate is a bit drastic - would be more efficient
-                    // to have a flag to tell client that some of the
-                    // previously written data was lost
-                    mPreviousTrack->invalidate();
-                }
-            }
-        }
-        mPreviousTrack = t;
-        bool last = (i == (count - 1));
+        // Only consider last track started for volume and mixer state control.
+        // In theory an older track could underrun and restart after the new one starts
+        // but as we only care about the transition phase between two tracks on a
+        // direct output, it is not a problem to ignore the underrun case.
+        sp<Track> l = mLatestActiveTrack.promote();
+        bool last = l.get() == track;
+
         if (track->isPausing()) {
             track->setPaused();
             if (last) {
                 if (!mHwPaused) {
-                    mOutput->stream->pause(mOutput->stream);
+                    doHwPause = true;
                     mHwPaused = true;
                 }
                 // If we were part way through writing the mixbuffer to
@@ -3844,29 +4002,59 @@
             }
             tracksToRemove->add(track);
         } else if (track->framesReady() && track->isReady() &&
-                !track->isPaused() && !track->isTerminated()) {
+                !track->isPaused() && !track->isTerminated() && !track->isStopping_2()) {
             ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->mServer);
             if (track->mFillingUpStatus == Track::FS_FILLED) {
                 track->mFillingUpStatus = Track::FS_ACTIVE;
-                mLeftVolFloat = mRightVolFloat = 0;
+                // make sure processVolume_l() will apply new volume even if 0
+                mLeftVolFloat = mRightVolFloat = -1.0;
                 if (track->mState == TrackBase::RESUMING) {
-                    if (mPausedBytesRemaining) {
-                        // Need to continue write that was interrupted
-                        mCurrentWriteLength = mPausedWriteLength;
-                        mBytesRemaining = mPausedBytesRemaining;
-                        mPausedBytesRemaining = 0;
-                    }
                     track->mState = TrackBase::ACTIVE;
+                    if (last) {
+                        if (mPausedBytesRemaining) {
+                            // Need to continue write that was interrupted
+                            mCurrentWriteLength = mPausedWriteLength;
+                            mBytesRemaining = mPausedBytesRemaining;
+                            mPausedBytesRemaining = 0;
+                        }
+                        if (mHwPaused) {
+                            doHwResume = true;
+                            mHwPaused = false;
+                            // threadLoop_mix() will handle the case that we need to
+                            // resume an interrupted write
+                        }
+                        // enable write to audio HAL
+                        sleepTime = 0;
+                    }
                 }
             }
 
             if (last) {
-                if (mHwPaused) {
-                    mOutput->stream->resume(mOutput->stream);
-                    mHwPaused = false;
-                    // threadLoop_mix() will handle the case that we need to
-                    // resume an interrupted write
+                sp<Track> previousTrack = mPreviousTrack.promote();
+                if (previousTrack != 0) {
+                    if (track != previousTrack.get()) {
+                        // Flush any data still being written from last track
+                        mBytesRemaining = 0;
+                        if (mPausedBytesRemaining) {
+                            // Last track was paused so we also need to flush saved
+                            // mixbuffer state and invalidate track so that it will
+                            // re-submit that unwritten data when it is next resumed
+                            mPausedBytesRemaining = 0;
+                            // Invalidate is a bit drastic - would be more efficient
+                            // to have a flag to tell client that some of the
+                            // previously written data was lost
+                            previousTrack->invalidate();
+                        }
+                        // flush data already sent to the DSP if changing audio session as audio
+                        // comes from a different source. Also invalidate previous track to force a
+                        // seek when resuming.
+                        if (previousTrack->sessionId() != track->sessionId()) {
+                            previousTrack->invalidate();
+                            mFlushPending = true;
+                        }
+                    }
                 }
+                mPreviousTrack = track;
                 // reset retry count
                 track->mRetryCount = kMaxTrackRetriesOffload;
                 mActiveTrack = t;
@@ -3883,21 +4071,27 @@
                     // has been written
                     ALOGV("OffloadThread: underrun and STOPPING_1 -> draining, STOPPING_2");
                     track->mState = TrackBase::STOPPING_2; // so presentation completes after drain
-                    sleepTime = 0;
-                    standbyTime = systemTime() + standbyDelay;
-                    if (last) {
-                        mixerStatus = MIXER_DRAIN_TRACK;
+                    // do not drain if no data was ever sent to HAL (mStandby == true)
+                    if (last && !mStandby) {
+                        // do not modify drain sequence if we are already draining. This happens
+                        // when resuming from pause after drain.
+                        if ((mDrainSequence & 1) == 0) {
+                            sleepTime = 0;
+                            standbyTime = systemTime() + standbyDelay;
+                            mixerStatus = MIXER_DRAIN_TRACK;
+                            mDrainSequence += 2;
+                        }
                         if (mHwPaused) {
                             // It is possible to move from PAUSED to STOPPING_1 without
                             // a resume so we must ensure hardware is running
-                            mOutput->stream->resume(mOutput->stream);
+                            doHwResume = true;
                             mHwPaused = false;
                         }
                     }
                 }
             } else if (track->isStopping_2()) {
-                // Drain has completed, signal presentation complete
-                if (!mDraining || !last) {
+                // Drain has completed or we are in standby, signal presentation complete
+                if (!(mDrainSequence & 1) || !last || mStandby) {
                     track->mState = TrackBase::STOPPED;
                     size_t audioHALFrames =
                             (mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000;
@@ -3914,6 +4108,9 @@
                     ALOGV("OffloadThread: BUFFER TIMEOUT: remove(%d) from active list",
                           track->name());
                     tracksToRemove->add(track);
+                    // indicate to client process that the track was disabled because of underrun;
+                    // it will then automatically call start() when data is available
+                    android_atomic_or(CBLK_DISABLED, &cblk->mFlags);
                 } else if (last){
                     mixerStatus = MIXER_TRACKS_ENABLED;
                 }
@@ -3922,6 +4119,25 @@
         // compute volume for this track
         processVolume_l(track, last);
     }
+
+    // make sure the pause/flush/resume sequence is executed in the right order.
+    // If a flush is pending and a track is active but the HW is not paused, force a HW pause
+    // before flush and then resume HW. This can happen in case of pause/flush/resume
+    // if resume is received before pause is executed.
+    if (!mStandby && (doHwPause || (mFlushPending && !mHwPaused && (count != 0)))) {
+        mOutput->stream->pause(mOutput->stream);
+        if (!doHwPause) {
+            doHwResume = true;
+        }
+    }
+    if (mFlushPending) {
+        flushHw_l();
+        mFlushPending = false;
+    }
+    if (!mStandby && doHwResume) {
+        mOutput->stream->resume(mOutput->stream);
+    }
+
     // remove all the tracks that need to be...
     removeTracks_l(*tracksToRemove);
 
@@ -3936,8 +4152,9 @@
 // must be called with thread mutex locked
 bool AudioFlinger::OffloadThread::waitingAsyncCallback_l()
 {
-    ALOGV("waitingAsyncCallback_l mWriteBlocked %d mDraining %d", mWriteBlocked, mDraining);
-    if (mUseAsyncWrite && (mWriteBlocked || mDraining)) {
+    ALOGVV("waitingAsyncCallback_l mWriteAckSequence %d mDrainSequence %d",
+          mWriteAckSequence, mDrainSequence);
+    if (mUseAsyncWrite && ((mWriteAckSequence & 1) || (mDrainSequence & 1))) {
         return true;
     }
     return false;
@@ -3973,11 +4190,12 @@
     mPausedWriteLength = 0;
     mPausedBytesRemaining = 0;
     if (mUseAsyncWrite) {
-        mWriteBlocked = false;
-        mDraining = false;
+        // discard any pending drain or write ack by incrementing sequence
+        mWriteAckSequence = (mWriteAckSequence + 2) & ~1;
+        mDrainSequence = (mDrainSequence + 2) & ~1;
         ALOG_ASSERT(mCallbackThread != 0);
-        mCallbackThread->setWriteBlocked(false);
-        mCallbackThread->setDraining(false);
+        mCallbackThread->setWriteBlocked(mWriteAckSequence);
+        mCallbackThread->setDraining(mDrainSequence);
     }
 }
 
@@ -4038,6 +4256,7 @@
     for (size_t i = 0; i < outputTracks.size(); i++) {
         outputTracks[i]->write(mMixBuffer, writeFrames);
     }
+    mStandby = false;
     return (ssize_t)mixBufferSize;
 }
 
@@ -4069,7 +4288,8 @@
                                             mSampleRate,
                                             mFormat,
                                             mChannelMask,
-                                            frameCount);
+                                            frameCount,
+                                            IPCThreadState::self()->getCallingUid());
     if (outputTrack->cblk() != NULL) {
         thread->setStreamVolume(AUDIO_STREAM_CNT, 1.0f);
         mOutputTracks.add(outputTrack);
@@ -4171,7 +4391,6 @@
     snprintf(mName, kNameLength, "AudioIn_%X", id);
 
     readInputParameters();
-
 }
 
 
@@ -4203,7 +4422,11 @@
     nsecs_t lastWarning = 0;
 
     inputStandBy();
-    acquireWakeLock();
+    {
+        Mutex::Autolock _l(mLock);
+        activeTrack = mActiveTrack;
+        acquireWakeLock_l(activeTrack != 0 ? activeTrack->uid() : -1);
+    }
 
     // used to verify we've read at least once before evaluating how many bytes were read
     bool readOnce = false;
@@ -4216,6 +4439,12 @@
         { // scope for mLock
             Mutex::Autolock _l(mLock);
             checkForNewParameters_l();
+            if (mActiveTrack != 0 && activeTrack != mActiveTrack) {
+                SortedVector<int> tmp;
+                tmp.add(mActiveTrack->uid());
+                updateWakeLockUids_l(tmp);
+            }
+            activeTrack = mActiveTrack;
             if (mActiveTrack == 0 && mConfigEvents.isEmpty()) {
                 standby();
 
@@ -4228,7 +4457,7 @@
                 // go to sleep
                 mWaitWorkCV.wait(mLock);
                 ALOGV("RecordThread: loop starting");
-                acquireWakeLock_l();
+                acquireWakeLock_l(mActiveTrack != 0 ? mActiveTrack->uid() : -1);
                 continue;
             }
             if (mActiveTrack != 0) {
@@ -4256,6 +4485,7 @@
                     mStandby = false;
                 }
             }
+
             lockEffectChains_l(effectChains);
         }
 
@@ -4403,6 +4633,10 @@
 
     {
         Mutex::Autolock _l(mLock);
+        for (size_t i = 0; i < mTracks.size(); i++) {
+            sp<RecordTrack> track = mTracks[i];
+            track->invalidate();
+        }
         mActiveTrack.clear();
         mStartStopCond.broadcast();
     }
@@ -4433,6 +4667,7 @@
         audio_channel_mask_t channelMask,
         size_t frameCount,
         int sessionId,
+        int uid,
         IAudioFlinger::track_flags_t *flags,
         pid_t tid,
         status_t *status)
@@ -4442,10 +4677,9 @@
 
     lStatus = initCheck();
     if (lStatus != NO_ERROR) {
-        ALOGE("Audio driver not initialized.");
+        ALOGE("createRecordTrack_l() audio driver not initialized");
         goto Exit;
     }
-
     // client expresses a preference for FAST, but we get the final say
     if (*flags & IAudioFlinger::TRACK_FAST) {
       if (
@@ -4503,10 +4737,12 @@
         Mutex::Autolock _l(mLock);
 
         track = new RecordTrack(this, client, sampleRate,
-                      format, channelMask, frameCount, sessionId);
+                      format, channelMask, frameCount, sessionId, uid);
 
         if (track->getCblk() == 0) {
+            ALOGE("createRecordTrack_l() no control block");
             lStatus = NO_MEMORY;
+            track.clear();
             goto Exit;
         }
         mTracks.add(track);
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 31d5323..207f1eb 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -237,10 +237,13 @@
                     effect_uuid_t mType;    // effect type UUID
                 };
 
-                void        acquireWakeLock();
-                void        acquireWakeLock_l();
+                void        acquireWakeLock(int uid = -1);
+                void        acquireWakeLock_l(int uid = -1);
                 void        releaseWakeLock();
                 void        releaseWakeLock_l();
+                void        updateWakeLockUids(const SortedVector<int> &uids);
+                void        updateWakeLockUids_l(const SortedVector<int> &uids);
+                void        getPowerManager_l();
                 void setEffectSuspended_l(const effect_uuid_t *type,
                                           bool suspend,
                                           int sessionId);
@@ -251,6 +254,8 @@
                 // check if some effects must be suspended when an effect chain is added
                 void checkSuspendOnAddEffectChain_l(const sp<EffectChain>& chain);
 
+                String16 getWakeLockTag();
+
     virtual     void        preExit() { }
 
     friend class AudioFlinger;      // for mEffectChains
@@ -377,9 +382,9 @@
                 void        removeTracks_l(const Vector< sp<Track> >& tracksToRemove);
 
                 void        writeCallback();
-                void        setWriteBlocked(bool value);
+                void        resetWriteBlocked(uint32_t sequence);
                 void        drainCallback();
-                void        setDraining(bool value);
+                void        resetDraining(uint32_t sequence);
 
     static      int         asyncCallback(stream_callback_event_t event, void *param, void *cookie);
 
@@ -419,6 +424,7 @@
                                 int sessionId,
                                 IAudioFlinger::track_flags_t *flags,
                                 pid_t tid,
+                                int uid,
                                 status_t *status);
 
                 AudioStreamOut* getOutput() const;
@@ -466,6 +472,8 @@
                 // Return's the HAL's frame count i.e. fast mixer buffer size.
                 size_t      frameCountHAL() const { return mFrameCount; }
 
+                status_t         getTimestamp_l(AudioTimestamp& timestamp);
+
 protected:
     // updated by readOutputParameters()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -491,6 +499,9 @@
                 void        setMasterMute_l(bool muted) { mMasterMute = muted; }
 protected:
     SortedVector< wp<Track> >       mActiveTracks;  // FIXME check if this could be sp<>
+    SortedVector<int>               mWakeLockUids;
+    int                             mActiveTracksGeneration;
+    wp<Track>                       mLatestActiveTrack; // latest track added to mActiveTracks
 
     // Allocate a track name for a given channel mask.
     //   Returns name >= 0 if successful, -1 on failure.
@@ -526,7 +537,7 @@
     status_t    addTrack_l(const sp<Track>& track);
     bool        destroyTrack_l(const sp<Track>& track);
     void        removeTrack_l(const sp<Track>& track);
-    void        signal_l();
+    void        broadcast_l();
 
     void        readOutputParameters();
 
@@ -577,8 +588,21 @@
     size_t                          mBytesRemaining;
     size_t                          mCurrentWriteLength;
     bool                            mUseAsyncWrite;
-    bool                            mWriteBlocked;
-    bool                            mDraining;
+    // mWriteAckSequence contains current write sequence on bits 31-1. The write sequence is
+    // incremented each time a write(), a flush() or a standby() occurs.
+    // Bit 0 is set when a write blocks and indicates a callback is expected.
+    // Bit 0 is reset by the async callback thread calling resetWriteBlocked(). Out of sequence
+    // callbacks are ignored.
+    uint32_t                        mWriteAckSequence;
+    // mDrainSequence contains current drain sequence on bits 31-1. The drain sequence is
+    // incremented each time a drain is requested or a flush() or standby() occurs.
+    // Bit 0 is set when the drain() command is called at the HAL and indicates a callback is
+    // expected.
+    // Bit 0 is reset by the async callback thread calling resetDraining(). Out of sequence
+    // callbacks are ignored.
+    uint32_t                        mDrainSequence;
+    // A condition that must be evaluated by prepareTracks_l() has changed and we must not wait
+    // for async write callback in the thread loop before evaluating it
     bool                            mSignalPending;
     sp<AsyncCallbackThread>         mCallbackThread;
 
@@ -606,6 +630,17 @@
                 // accessed by both binder threads and within threadLoop(), lock on mutex needed
                 unsigned    mFastTrackAvailMask;    // bit i set if fast track [i] is available
     virtual     void        flushOutput_l();
+
+private:
+    // timestamp latch:
+    //  D input is written by threadLoop_write while mutex is unlocked, and read while locked
+    //  Q output is written while locked, and read while locked
+    struct {
+        AudioTimestamp  mTimestamp;
+        uint32_t        mUnpresentedFrames;
+    } mLatchD, mLatchQ;
+    bool mLatchDValid;  // true means mLatchD is valid, and clock it into latch at next opportunity
+    bool mLatchQValid;  // true means mLatchQ is valid
 };
 
 class MixerThread : public PlaybackThread {
@@ -707,7 +742,7 @@
 
     OffloadThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
                         audio_io_handle_t id, uint32_t device);
-    virtual                 ~OffloadThread();
+    virtual                 ~OffloadThread() {};
 
 protected:
     // threadLoop snippets
@@ -727,13 +762,13 @@
     bool        mFlushPending;
     size_t      mPausedWriteLength;     // length in bytes of write interrupted by pause
     size_t      mPausedBytesRemaining;  // bytes still waiting in mixbuffer after resume
-    sp<Track>   mPreviousTrack;         // used to detect track switch
+    wp<Track>   mPreviousTrack;         // used to detect track switch
 };
 
 class AsyncCallbackThread : public Thread {
 public:
 
-    AsyncCallbackThread(const sp<OffloadThread>& offloadThread);
+    AsyncCallbackThread(const wp<PlaybackThread>& playbackThread);
 
     virtual             ~AsyncCallbackThread();
 
@@ -744,15 +779,23 @@
     virtual void        onFirstRef();
 
             void        exit();
-            void        setWriteBlocked(bool value);
-            void        setDraining(bool value);
+            void        setWriteBlocked(uint32_t sequence);
+            void        resetWriteBlocked();
+            void        setDraining(uint32_t sequence);
+            void        resetDraining();
 
 private:
-    wp<OffloadThread>   mOffloadThread;
-    bool                mWriteBlocked;
-    bool                mDraining;
-    Condition           mWaitWorkCV;
-    Mutex               mLock;
+    const wp<PlaybackThread>   mPlaybackThread;
+    // mWriteAckSequence corresponds to the last write sequence passed by the offload thread via
+    // setWriteBlocked(). The sequence is shifted one bit to the left and the lsb is used
+    // to indicate that the callback has been received via resetWriteBlocked()
+    uint32_t                   mWriteAckSequence;
+    // mDrainSequence corresponds to the last drain sequence passed by the offload thread via
+    // setDraining(). The sequence is shifted one bit to the left and the lsb is used
+    // to indicate that the callback has been received via resetDraining()
+    uint32_t                   mDrainSequence;
+    Condition                  mWaitWorkCV;
+    Mutex                      mLock;
 };
 
 class DuplicatingThread : public MixerThread {
@@ -837,6 +880,7 @@
                     audio_channel_mask_t channelMask,
                     size_t frameCount,
                     int sessionId,
+                    int uid,
                     IAudioFlinger::track_flags_t *flags,
                     pid_t tid,
                     status_t *status);
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 523e4b2..cd201d9 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -45,6 +45,7 @@
                                 size_t frameCount,
                                 const sp<IMemory>& sharedBuffer,
                                 int sessionId,
+                                int uid,
                                 bool isOut);
     virtual             ~TrackBase();
 
@@ -54,6 +55,7 @@
             sp<IMemory> getCblk() const { return mCblkMemory; }
             audio_track_cblk_t* cblk() const { return mCblk; }
             int         sessionId() const { return mSessionId; }
+            int         uid() const { return mUid; }
     virtual status_t    setSyncEvent(const sp<SyncEvent>& event);
 
 protected:
@@ -132,6 +134,7 @@
                                     // openRecord(), and then adjusted as needed
 
     const int           mSessionId;
+    int                 mUid;
     Vector < sp<SyncEvent> >mSyncEvents;
     const bool          mIsOut;
     ServerProxy*        mServerProxy;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index e676365..af04ce7 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -68,6 +68,7 @@
             size_t frameCount,
             const sp<IMemory>& sharedBuffer,
             int sessionId,
+            int clientUid,
             bool isOut)
     :   RefBase(),
         mThread(thread),
@@ -88,6 +89,18 @@
         mId(android_atomic_inc(&nextTrackId)),
         mTerminated(false)
 {
+    // if the caller is us, trust the specified uid
+    if (IPCThreadState::self()->getCallingPid() != getpid_cached || clientUid == -1) {
+        int newclientUid = IPCThreadState::self()->getCallingUid();
+        if (clientUid != -1 && clientUid != newclientUid) {
+            ALOGW("uid %d tried to pass itself off as %d", newclientUid, clientUid);
+        }
+        clientUid = newclientUid;
+    }
+    // clientUid contains the uid of the app that is responsible for this track, so we can blame
+    // battery usage on it.
+    mUid = clientUid;
+
     // client == 0 implies sharedBuffer == 0
     ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
 
@@ -283,6 +296,17 @@
     return mTrack->setParameters(keyValuePairs);
 }
 
+status_t AudioFlinger::TrackHandle::getTimestamp(AudioTimestamp& timestamp)
+{
+    return mTrack->getTimestamp(timestamp);
+}
+
+
+void AudioFlinger::TrackHandle::signal()
+{
+    return mTrack->signal();
+}
+
 status_t AudioFlinger::TrackHandle::onTransact(
     uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
 {
@@ -302,9 +326,10 @@
             size_t frameCount,
             const sp<IMemory>& sharedBuffer,
             int sessionId,
+            int uid,
             IAudioFlinger::track_flags_t flags)
     :   TrackBase(thread, client, sampleRate, format, channelMask, frameCount, sharedBuffer,
-            sessionId, true /*isOut*/),
+            sessionId, uid, true /*isOut*/),
     mFillingUpStatus(FS_INVALID),
     // mRetryCount initialized later when needed
     mSharedBuffer(sharedBuffer),
@@ -359,6 +384,16 @@
 AudioFlinger::PlaybackThread::Track::~Track()
 {
     ALOGV("PlaybackThread::Track destructor");
+
+    // The destructor would clear mSharedBuffer,
+    // but it will not push the decremented reference count,
+    // leaving the client's IMemory dangling indefinitely.
+    // This prevents that leak.
+    if (mSharedBuffer != 0) {
+        mSharedBuffer.clear();
+        // flush the binder command buffer
+        IPCThreadState::self()->flushCommands();
+    }
 }
 
 void AudioFlinger::PlaybackThread::Track::destroy()
@@ -387,7 +422,7 @@
 
 /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result)
 {
-    result.append("   Name Client Type Fmt Chn mask Session fCount S F SRate  "
+    result.append("   Name Client Type      Fmt Chn mask Session fCount S F SRate  "
                   "L dB  R dB    Server Main buf  Aux Buf Flags UndFrmCnt\n");
 }
 
@@ -452,7 +487,7 @@
         nowInUnderrun = '?';
         break;
     }
-    snprintf(&buffer[7], size-7, " %6u %4u %3u %08X %7u %6u %1c %1d %5u %5.2g %5.2g  "
+    snprintf(&buffer[7], size-7, " %6u %4u %08X %08X %7u %6u %1c %1d %5u %5.2g %5.2g  "
                                  "%08X %08X %08X 0x%03X %9u%c\n",
             (mClient == 0) ? getpid_cached : mClient->pid(),
             mStreamType,
@@ -493,6 +528,10 @@
     return status;
 }
 
+// releaseBuffer() is not overridden
+
+// ExtendedAudioBufferProvider interface
+
 // Note that framesReady() takes a mutex on the control block using tryLock().
 // This could result in priority inversion if framesReady() is called by the normal mixer,
 // as the normal mixer thread runs at lower
@@ -505,6 +544,11 @@
     return mAudioTrackServerProxy->framesReady();
 }
 
+size_t AudioFlinger::PlaybackThread::Track::framesReleased() const
+{
+    return mAudioTrackServerProxy->framesReleased();
+}
+
 // Don't call for fast tracks; the framesReady() could result in priority inversion
 bool AudioFlinger::PlaybackThread::Track::isReady() const {
     if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing()) {
@@ -529,7 +573,17 @@
 
     sp<ThreadBase> thread = mThread.promote();
     if (thread != 0) {
-        Mutex::Autolock _l(thread->mLock);
+        if (isOffloaded()) {
+            Mutex::Autolock _laf(thread->mAudioFlinger->mLock);
+            Mutex::Autolock _lth(thread->mLock);
+            sp<EffectChain> ec = thread->getEffectChain_l(mSessionId);
+            if (thread->mAudioFlinger->isNonOffloadableGlobalEffectEnabled_l() ||
+                    (ec != 0 && ec->isNonOffloadableEnabled())) {
+                invalidate();
+                return PERMISSION_DENIED;
+            }
+        }
+        Mutex::Autolock _lth(thread->mLock);
         track_state state = mState;
         // here the track could be either new, or restarted
         // in both cases "unstop" the track
@@ -560,6 +614,15 @@
         // track was already in the active list, not a problem
         if (status == ALREADY_EXISTS) {
             status = NO_ERROR;
+        } else {
+            // Acknowledge any pending flush(), so that subsequent new data isn't discarded.
+            // It is usually unsafe to access the server proxy from a binder thread.
+            // But in this case we know the mixer thread (whether normal mixer or fast mixer)
+            // isn't looking at this track yet:  we still hold the normal mixer thread lock,
+            // and for fast tracks the track is not yet in the fast mixer thread's active set.
+            ServerProxy::Buffer buffer;
+            buffer.mFrameCount = 1;
+            (void) mAudioTrackServerProxy->obtainBuffer(&buffer, true /*ackFlush*/);
         }
     } else {
         status = BAD_VALUE;
@@ -617,7 +680,7 @@
         case RESUMING:
             mState = PAUSING;
             ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get());
-            playbackThread->signal_l();
+            playbackThread->broadcast_l();
             break;
 
         default:
@@ -677,7 +740,7 @@
         // before mixer thread can run. This is important when offloading
         // because the hardware buffer could hold a large amount of audio
         playbackThread->flushOutput_l();
-        playbackThread->signal_l();
+        playbackThread->broadcast_l();
     }
 }
 
@@ -711,6 +774,37 @@
     }
 }
 
+status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& timestamp)
+{
+    // Client should implement this using SSQ; the unpresented frame count in latch is irrelevant
+    if (isFastTrack()) {
+        return INVALID_OPERATION;
+    }
+    sp<ThreadBase> thread = mThread.promote();
+    if (thread == 0) {
+        return INVALID_OPERATION;
+    }
+    Mutex::Autolock _l(thread->mLock);
+    PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
+    if (!isOffloaded()) {
+        if (!playbackThread->mLatchQValid) {
+            return INVALID_OPERATION;
+        }
+        uint32_t unpresentedFrames =
+                ((int64_t) playbackThread->mLatchQ.mUnpresentedFrames * mSampleRate) /
+                playbackThread->mSampleRate;
+        uint32_t framesWritten = mAudioTrackServerProxy->framesReleased();
+        if (framesWritten < unpresentedFrames) {
+            return INVALID_OPERATION;
+        }
+        timestamp.mPosition = framesWritten - unpresentedFrames;
+        timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime;
+        return NO_ERROR;
+    }
+
+    return playbackThread->getTimestamp_l(timestamp);
+}
+
 status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId)
 {
     status_t status = DEAD_OBJECT;
@@ -736,7 +830,11 @@
                 return INVALID_OPERATION;
             }
             srcThread->removeEffect_l(effect);
-            playbackThread->addEffect_l(effect);
+            status = playbackThread->addEffect_l(effect);
+            if (status != NO_ERROR) {
+                srcThread->addEffect_l(effect);
+                return INVALID_OPERATION;
+            }
             // removeEffect_l() has stopped the effect if it was active so it must be restarted
             if (effect->state() == EffectModule::ACTIVE ||
                     effect->state() == EffectModule::STOPPING) {
@@ -754,6 +852,7 @@
                                         dstChain->strategy(),
                                         AUDIO_SESSION_OUTPUT_MIX,
                                         effect->id());
+            AudioSystem::setEffectEnabled(effect->id(), effect->isEnabled());
         }
         status = playbackThread->attachAuxEffect(this, EffectId);
     }
@@ -857,6 +956,16 @@
     mIsInvalid = true;
 }
 
+void AudioFlinger::PlaybackThread::Track::signal()
+{
+    sp<ThreadBase> thread = mThread.promote();
+    if (thread != 0) {
+        PlaybackThread *t = (PlaybackThread *)thread.get();
+        Mutex::Autolock _l(t->mLock);
+        t->broadcast_l();
+    }
+}
+
 // ----------------------------------------------------------------------------
 
 sp<AudioFlinger::PlaybackThread::TimedTrack>
@@ -869,13 +978,14 @@
             audio_channel_mask_t channelMask,
             size_t frameCount,
             const sp<IMemory>& sharedBuffer,
-            int sessionId) {
+            int sessionId,
+            int uid) {
     if (!client->reserveTimedTrack())
         return 0;
 
     return new TimedTrack(
         thread, client, streamType, sampleRate, format, channelMask, frameCount,
-        sharedBuffer, sessionId);
+        sharedBuffer, sessionId, uid);
 }
 
 AudioFlinger::PlaybackThread::TimedTrack::TimedTrack(
@@ -887,9 +997,10 @@
             audio_channel_mask_t channelMask,
             size_t frameCount,
             const sp<IMemory>& sharedBuffer,
-            int sessionId)
+            int sessionId,
+            int uid)
     : Track(thread, client, streamType, sampleRate, format, channelMask,
-            frameCount, sharedBuffer, sessionId, IAudioFlinger::TRACK_TIMED),
+            frameCount, sharedBuffer, sessionId, uid, IAudioFlinger::TRACK_TIMED),
       mQueueHeadInFlight(false),
       mTrimQueueHeadOnRelease(false),
       mFramesPendingInQueue(0),
@@ -1382,9 +1493,10 @@
             uint32_t sampleRate,
             audio_format_t format,
             audio_channel_mask_t channelMask,
-            size_t frameCount)
+            size_t frameCount,
+            int uid)
     :   Track(playbackThread, NULL, AUDIO_STREAM_CNT, sampleRate, format, channelMask, frameCount,
-                NULL, 0, IAudioFlinger::TRACK_DEFAULT),
+                NULL, 0, uid, IAudioFlinger::TRACK_DEFAULT),
     mActive(false), mSourceThread(sourceThread), mClientProxy(NULL)
 {
 
@@ -1644,9 +1756,10 @@
             audio_format_t format,
             audio_channel_mask_t channelMask,
             size_t frameCount,
-            int sessionId)
+            int sessionId,
+            int uid)
     :   TrackBase(thread, client, sampleRate, format,
-                  channelMask, frameCount, 0 /*sharedBuffer*/, sessionId, false /*isOut*/),
+                  channelMask, frameCount, 0 /*sharedBuffer*/, sessionId, uid, false /*isOut*/),
         mOverflow(false)
 {
     ALOGV("RecordTrack constructor");
@@ -1719,6 +1832,16 @@
     }
 }
 
+void AudioFlinger::RecordThread::RecordTrack::invalidate()
+{
+    // FIXME should use proxy, and needs work
+    audio_track_cblk_t* cblk = mCblk;
+    android_atomic_or(CBLK_INVALID, &cblk->mFlags);
+    android_atomic_release_store(0x40000000, &cblk->mFutex);
+    // client is not in server, so FUTEX_WAKE is needed instead of FUTEX_WAKE_PRIVATE
+    (void) __futex_syscall3(&cblk->mFutex, FUTEX_WAKE, INT_MAX);
+}
+
 
 /*static*/ void AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result)
 {
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index d659ebb..51ba698 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -33,7 +33,9 @@
     device3/Camera3InputStream.cpp \
     device3/Camera3OutputStream.cpp \
     device3/Camera3ZslStream.cpp \
+    device3/StatusTracker.cpp \
     gui/RingBufferConsumer.cpp \
+    utils/CameraTraces.cpp \
 
 LOCAL_SHARED_LIBRARIES:= \
     libui \
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 359b3ca..eeedfc9 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -42,6 +42,7 @@
 #include "api1/Camera2Client.h"
 #include "api_pro/ProCamera2Client.h"
 #include "api2/CameraDeviceClient.h"
+#include "utils/CameraTraces.h"
 #include "CameraDeviceFactory.h"
 
 namespace android {
@@ -211,7 +212,7 @@
 status_t CameraService::getCameraInfo(int cameraId,
                                       struct CameraInfo* cameraInfo) {
     if (!mModule) {
-        return NO_INIT;
+        return -ENODEV;
     }
 
     if (cameraId < 0 || cameraId >= mNumberOfCameras) {
@@ -225,6 +226,49 @@
     return rc;
 }
 
+status_t CameraService::getCameraCharacteristics(int cameraId,
+                                                CameraMetadata* cameraInfo) {
+    if (!cameraInfo) {
+        ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (!mModule) {
+        ALOGE("%s: camera hardware module doesn't exist", __FUNCTION__);
+        return -ENODEV;
+    }
+
+    if (mModule->common.module_api_version < CAMERA_MODULE_API_VERSION_2_0) {
+        // TODO: Remove this check once HAL1 shim is in place.
+        ALOGE("%s: Only HAL module version V2 or higher supports static metadata", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (cameraId < 0 || cameraId >= mNumberOfCameras) {
+        ALOGE("%s: Invalid camera id: %d", __FUNCTION__, cameraId);
+        return BAD_VALUE;
+    }
+
+    int facing;
+    if (getDeviceVersion(cameraId, &facing) == CAMERA_DEVICE_API_VERSION_1_0) {
+        // TODO: Remove this check once HAL1 shim is in place.
+        ALOGE("%s: HAL1 doesn't support static metadata yet", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (getDeviceVersion(cameraId, &facing) <= CAMERA_DEVICE_API_VERSION_2_1) {
+        // Disable HAL2.x support for camera2 API for now.
+        ALOGW("%s: HAL2.x doesn't support getCameraCharacteristics for now", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    struct camera_info info;
+    status_t ret = mModule->get_camera_info(cameraId, &info);
+    *cameraInfo = info.static_camera_characteristics;
+
+    return ret;
+}
+
 int CameraService::getDeviceVersion(int cameraId, int* facing) {
     struct camera_info info;
     if (mModule->get_camera_info(cameraId, &info) != OK) {
@@ -262,7 +306,7 @@
     return false;
 }
 
-bool CameraService::validateConnect(int cameraId,
+status_t CameraService::validateConnect(int cameraId,
                                     /*inout*/
                                     int& clientUid) const {
 
@@ -275,19 +319,19 @@
         if (callingPid != getpid()) {
             ALOGE("CameraService::connect X (pid %d) rejected (don't trust clientUid)",
                     callingPid);
-            return false;
+            return PERMISSION_DENIED;
         }
     }
 
     if (!mModule) {
         ALOGE("Camera HAL module not loaded");
-        return false;
+        return -ENODEV;
     }
 
     if (cameraId < 0 || cameraId >= mNumberOfCameras) {
         ALOGE("CameraService::connect X (pid %d) rejected (invalid cameraId %d).",
             callingPid, cameraId);
-        return false;
+        return -ENODEV;
     }
 
     char value[PROPERTY_VALUE_MAX];
@@ -295,23 +339,23 @@
     if (strcmp(value, "1") == 0) {
         // Camera is disabled by DevicePolicyManager.
         ALOGI("Camera is disabled. connect X (pid %d) rejected", callingPid);
-        return false;
+        return -EACCES;
     }
 
     ICameraServiceListener::Status currentStatus = getStatus(cameraId);
     if (currentStatus == ICameraServiceListener::STATUS_NOT_PRESENT) {
         ALOGI("Camera is not plugged in,"
                " connect X (pid %d) rejected", callingPid);
-        return false;
+        return -ENODEV;
     } else if (currentStatus == ICameraServiceListener::STATUS_ENUMERATING) {
         ALOGI("Camera is enumerating,"
                " connect X (pid %d) rejected", callingPid);
-        return false;
+        return -EBUSY;
     }
     // Else don't check for STATUS_NOT_AVAILABLE.
     //  -- It's done implicitly in canConnectUnsafe /w the mBusy array
 
-    return true;
+    return OK;
 }
 
 bool CameraService::canConnectUnsafe(int cameraId,
@@ -358,11 +402,13 @@
     return true;
 }
 
-sp<ICamera> CameraService::connect(
+status_t CameraService::connect(
         const sp<ICameraClient>& cameraClient,
         int cameraId,
         const String16& clientPackageName,
-        int clientUid) {
+        int clientUid,
+        /*out*/
+        sp<ICamera>& device) {
 
     String8 clientName8(clientPackageName);
     int callingPid = getCallingPid();
@@ -370,8 +416,9 @@
     LOG1("CameraService::connect E (pid %d \"%s\", id %d)", callingPid,
             clientName8.string(), cameraId);
 
-    if (!validateConnect(cameraId, /*inout*/clientUid)) {
-        return NULL;
+    status_t status = validateConnect(cameraId, /*inout*/clientUid);
+    if (status != OK) {
+        return status;
     }
 
 
@@ -382,9 +429,10 @@
         if (!canConnectUnsafe(cameraId, clientPackageName,
                               cameraClient->asBinder(),
                               /*out*/clientTmp)) {
-            return NULL;
+            return -EBUSY;
         } else if (client.get() != NULL) {
-            return static_cast<Client*>(clientTmp.get());
+            device = static_cast<Client*>(clientTmp.get());
+            return OK;
         }
 
         int facing = -1;
@@ -414,19 +462,18 @@
             break;
           case -1:
             ALOGE("Invalid camera id %d", cameraId);
-            return NULL;
+            return BAD_VALUE;
           default:
             ALOGE("Unknown camera device HAL version: %d", deviceVersion);
-            return NULL;
+            return INVALID_OPERATION;
         }
 
-        if (!connectFinishUnsafe(client,
-                                 client->getRemote())) {
+        status_t status = connectFinishUnsafe(client, client->getRemote());
+        if (status != OK) {
             // this is probably not recoverable.. maybe the client can try again
             // OK: we can only get here if we were originally in PRESENT state
             updateStatus(ICameraServiceListener::STATUS_PRESENT, cameraId);
-
-            return NULL;
+            return status;
         }
 
         mClient[cameraId] = client;
@@ -436,34 +483,38 @@
     // important: release the mutex here so the client can call back
     //    into the service from its destructor (can be at the end of the call)
 
-    return client;
+    device = client;
+    return OK;
 }
 
-bool CameraService::connectFinishUnsafe(const sp<BasicClient>& client,
-                                        const sp<IBinder>& remoteCallback) {
-    if (client->initialize(mModule) != OK) {
-        return false;
+status_t CameraService::connectFinishUnsafe(const sp<BasicClient>& client,
+                                            const sp<IBinder>& remoteCallback) {
+    status_t status = client->initialize(mModule);
+    if (status != OK) {
+        return status;
     }
 
     remoteCallback->linkToDeath(this);
 
-    return true;
+    return OK;
 }
 
-sp<IProCameraUser> CameraService::connect(
+status_t CameraService::connectPro(
                                         const sp<IProCameraCallbacks>& cameraCb,
                                         int cameraId,
                                         const String16& clientPackageName,
-                                        int clientUid)
+                                        int clientUid,
+                                        /*out*/
+                                        sp<IProCameraUser>& device)
 {
     String8 clientName8(clientPackageName);
     int callingPid = getCallingPid();
 
     LOG1("CameraService::connectPro E (pid %d \"%s\", id %d)", callingPid,
             clientName8.string(), cameraId);
-
-    if (!validateConnect(cameraId, /*inout*/clientUid)) {
-        return NULL;
+    status_t status = validateConnect(cameraId, /*inout*/clientUid);
+    if (status != OK) {
+        return status;
     }
 
     sp<ProClient> client;
@@ -474,7 +525,7 @@
             if (!canConnectUnsafe(cameraId, clientPackageName,
                                   cameraCb->asBinder(),
                                   /*out*/client)) {
-                return NULL;
+                return -EBUSY;
             }
         }
 
@@ -485,7 +536,7 @@
           case CAMERA_DEVICE_API_VERSION_1_0:
             ALOGE("Camera id %d uses HALv1, doesn't support ProCamera",
                   cameraId);
-            return NULL;
+            return -EOPNOTSUPP;
             break;
           case CAMERA_DEVICE_API_VERSION_2_0:
           case CAMERA_DEVICE_API_VERSION_2_1:
@@ -495,14 +546,15 @@
             break;
           case -1:
             ALOGE("Invalid camera id %d", cameraId);
-            return NULL;
+            return BAD_VALUE;
           default:
             ALOGE("Unknown camera device HAL version: %d", deviceVersion);
-            return NULL;
+            return INVALID_OPERATION;
         }
 
-        if (!connectFinishUnsafe(client, client->getRemote())) {
-            return NULL;
+        status_t status = connectFinishUnsafe(client, client->getRemote());
+        if (status != OK) {
+            return status;
         }
 
         mProClientList[cameraId].push(client);
@@ -512,18 +564,18 @@
     }
     // important: release the mutex here so the client can call back
     //    into the service from its destructor (can be at the end of the call)
-
-    return client;
+    device = client;
+    return OK;
 }
 
-sp<ICameraDeviceUser> CameraService::connect(
+status_t CameraService::connectDevice(
         const sp<ICameraDeviceCallbacks>& cameraCb,
         int cameraId,
         const String16& clientPackageName,
-        int clientUid)
+        int clientUid,
+        /*out*/
+        sp<ICameraDeviceUser>& device)
 {
-    // TODO: this function needs to return status_t
-    // so that we have an error code when things go wrong and the client is NULL
 
     String8 clientName8(clientPackageName);
     int callingPid = getCallingPid();
@@ -531,8 +583,9 @@
     LOG1("CameraService::connectDevice E (pid %d \"%s\", id %d)", callingPid,
             clientName8.string(), cameraId);
 
-    if (!validateConnect(cameraId, /*inout*/clientUid)) {
-        return NULL;
+    status_t status = validateConnect(cameraId, /*inout*/clientUid);
+    if (status != OK) {
+        return status;
     }
 
     sp<CameraDeviceClient> client;
@@ -543,7 +596,7 @@
             if (!canConnectUnsafe(cameraId, clientPackageName,
                                   cameraCb->asBinder(),
                                   /*out*/client)) {
-                return NULL;
+                return -EBUSY;
             }
         }
 
@@ -560,10 +613,8 @@
 
         switch(deviceVersion) {
           case CAMERA_DEVICE_API_VERSION_1_0:
-            ALOGE("Camera id %d uses old HAL, doesn't support CameraDevice",
-                  cameraId);
-            return NULL;
-            break;
+            ALOGW("Camera using old HAL version: %d", deviceVersion);
+            return -EOPNOTSUPP;
            // TODO: don't allow 2.0  Only allow 2.1 and higher
           case CAMERA_DEVICE_API_VERSION_2_0:
           case CAMERA_DEVICE_API_VERSION_2_1:
@@ -573,17 +624,18 @@
             break;
           case -1:
             ALOGE("Invalid camera id %d", cameraId);
-            return NULL;
+            return BAD_VALUE;
           default:
             ALOGE("Unknown camera device HAL version: %d", deviceVersion);
-            return NULL;
+            return INVALID_OPERATION;
         }
 
-        if (!connectFinishUnsafe(client, client->getRemote())) {
+        status_t status = connectFinishUnsafe(client, client->getRemote());
+        if (status != OK) {
             // this is probably not recoverable.. maybe the client can try again
             // OK: we can only get here if we were originally in PRESENT state
             updateStatus(ICameraServiceListener::STATUS_PRESENT, cameraId);
-            return NULL;
+            return status;
         }
 
         LOG1("CameraService::connectDevice X (id %d, this pid is %d)", cameraId,
@@ -594,7 +646,8 @@
     // important: release the mutex here so the client can call back
     //    into the service from its destructor (can be at the end of the call)
 
-    return client;
+    device = client;
+    return OK;
 }
 
 
@@ -1167,6 +1220,10 @@
 
         if (locked) mServiceLock.unlock();
 
+        // Dump camera traces if there were any
+        write(fd, "\n", 1);
+        camera3::CameraTraces::dump(fd, args);
+
         // change logging level
         int n = args.size();
         for (int i = 0; i + 1 < n; i++) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 980eb97..ad6a582 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -71,16 +71,26 @@
     virtual int32_t     getNumberOfCameras();
     virtual status_t    getCameraInfo(int cameraId,
                                       struct CameraInfo* cameraInfo);
+    virtual status_t    getCameraCharacteristics(int cameraId,
+                                                 CameraMetadata* cameraInfo);
 
-    virtual sp<ICamera> connect(const sp<ICameraClient>& cameraClient, int cameraId,
-            const String16& clientPackageName, int clientUid);
-    virtual sp<IProCameraUser> connect(const sp<IProCameraCallbacks>& cameraCb,
-            int cameraId, const String16& clientPackageName, int clientUid);
-    virtual sp<ICameraDeviceUser> connect(
+    virtual status_t connect(const sp<ICameraClient>& cameraClient, int cameraId,
+            const String16& clientPackageName, int clientUid,
+            /*out*/
+            sp<ICamera>& device);
+
+    virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb,
+            int cameraId, const String16& clientPackageName, int clientUid,
+            /*out*/
+            sp<IProCameraUser>& device);
+
+    virtual status_t connectDevice(
             const sp<ICameraDeviceCallbacks>& cameraCb,
             int cameraId,
             const String16& clientPackageName,
-            int clientUid);
+            int clientUid,
+            /*out*/
+            sp<ICameraDeviceUser>& device);
 
     virtual status_t    addListener(const sp<ICameraServiceListener>& listener);
     virtual status_t    removeListener(
@@ -204,8 +214,7 @@
         virtual status_t      connect(const sp<ICameraClient>& client) = 0;
         virtual status_t      lock() = 0;
         virtual status_t      unlock() = 0;
-        virtual status_t      setPreviewDisplay(const sp<Surface>& surface) = 0;
-        virtual status_t      setPreviewTexture(const sp<IGraphicBufferProducer>& bufferProducer)=0;
+        virtual status_t      setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)=0;
         virtual void          setPreviewCallbackFlag(int flag) = 0;
         virtual status_t      setPreviewCallbackTarget(
                 const sp<IGraphicBufferProducer>& callbackProducer) = 0;
@@ -308,7 +317,7 @@
     virtual void onFirstRef();
 
     // Step 1. Check if we can connect, before we acquire the service lock.
-    bool                validateConnect(int cameraId,
+    status_t            validateConnect(int cameraId,
                                         /*inout*/
                                         int& clientUid) const;
 
@@ -320,7 +329,7 @@
                                          sp<BasicClient> &client);
 
     // When connection is successful, initialize client and track its death
-    bool                connectFinishUnsafe(const sp<BasicClient>& client,
+    status_t            connectFinishUnsafe(const sp<BasicClient>& client,
                                             const sp<IBinder>& remoteCallback);
 
     virtual sp<BasicClient>  getClientByRemote(const wp<IBinder>& cameraClient);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 46aa60c..0b6ca5c 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "Camera2"
+#define LOG_TAG "Camera2Client"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
@@ -76,13 +76,15 @@
         return res;
     }
 
-    SharedParameters::Lock l(mParameters);
+    {
+        SharedParameters::Lock l(mParameters);
 
-    res = l.mParameters.initialize(&(mDevice->info()));
-    if (res != OK) {
-        ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
-                __FUNCTION__, mCameraId, strerror(-res), res);
-        return NO_INIT;
+        res = l.mParameters.initialize(&(mDevice->info()));
+        if (res != OK) {
+            ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
+                    __FUNCTION__, mCameraId, strerror(-res), res);
+            return NO_INIT;
+        }
     }
 
     String8 threadName;
@@ -135,6 +137,7 @@
     mCallbackProcessor->run(threadName.string());
 
     if (gLogLevel >= 1) {
+        SharedParameters::Lock l(mParameters);
         ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
               mCameraId);
         ALOGD("%s", l.mParameters.paramsFlattened.string());
@@ -283,6 +286,7 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
@@ -326,6 +330,10 @@
     result.appendFormat("    Video stabilization is %s\n",
             p.videoStabilization ? "enabled" : "disabled");
 
+    result.appendFormat("    Selected still capture FPS range: %d - %d\n",
+            p.fastInfo.bestStillCaptureFpsRange[0],
+            p.fastInfo.bestStillCaptureFpsRange[1]);
+
     result.append("  Current streams:\n");
     result.appendFormat("    Preview stream ID: %d\n",
             getPreviewStreamId());
@@ -348,6 +356,10 @@
         result.appendFormat("    meteringCropRegion\n");
         haveQuirk = true;
     }
+    if (p.quirks.partialResults) {
+        result.appendFormat("    usePartialResult\n");
+        haveQuirk = true;
+    }
     if (!haveQuirk) {
         result.appendFormat("    none\n");
     }
@@ -491,25 +503,7 @@
     return EBUSY;
 }
 
-status_t Camera2Client::setPreviewDisplay(
-        const sp<Surface>& surface) {
-    ATRACE_CALL();
-    ALOGV("%s: E", __FUNCTION__);
-    Mutex::Autolock icl(mBinderSerializationLock);
-    status_t res;
-    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
-
-    sp<IBinder> binder;
-    sp<ANativeWindow> window;
-    if (surface != 0) {
-        binder = surface->getIGraphicBufferProducer()->asBinder();
-        window = surface;
-    }
-
-    return setPreviewWindowL(binder,window);
-}
-
-status_t Camera2Client::setPreviewTexture(
+status_t Camera2Client::setPreviewTarget(
         const sp<IGraphicBufferProducer>& bufferProducer) {
     ATRACE_CALL();
     ALOGV("%s: E", __FUNCTION__);
@@ -521,7 +515,10 @@
     sp<ANativeWindow> window;
     if (bufferProducer != 0) {
         binder = bufferProducer->asBinder();
-        window = new Surface(bufferProducer);
+        // Using controlledByApp flag to ensure that the buffer queue remains in
+        // async mode for the old camera API, where many applications depend
+        // on that behavior.
+        window = new Surface(bufferProducer, /*controlledByApp*/ true);
     }
     return setPreviewWindowL(binder, window);
 }
@@ -619,7 +616,7 @@
     }
     if (params.previewCallbackFlags != (uint32_t)flag) {
 
-        if (flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
+        if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
             // Disable any existing preview callback window when enabling
             // preview callback flags
             res = mCallbackProcessor->setCallbackWindow(NULL);
@@ -758,7 +755,7 @@
         return res;
     }
 
-    Vector<uint8_t> outputStreams;
+    Vector<int32_t> outputStreams;
     bool callbacksEnabled = (params.previewCallbackFlags &
             CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
             params.previewCallbackSurface;
@@ -869,6 +866,7 @@
             // no break
         case Parameters::RECORD:
         case Parameters::PREVIEW:
+            syncWithDevice();
             res = stopStream();
             if (res != OK) {
                 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
@@ -1013,7 +1011,7 @@
         return res;
     }
 
-    Vector<uint8_t> outputStreams;
+    Vector<int32_t> outputStreams;
     outputStreams.push(getPreviewStreamId());
     outputStreams.push(getRecordingStreamId());
 
@@ -1158,6 +1156,8 @@
         l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter;
         triggerId = l.mParameters.currentAfTriggerId;
     }
+    ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId);
+
     syncWithDevice();
 
     mDevice->triggerAutofocus(triggerId);
@@ -1180,6 +1180,12 @@
                 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
             return OK;
         }
+
+        // An active AF trigger is canceled
+        if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) {
+            ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId);
+        }
+
         triggerId = ++l.mParameters.afTriggerCounter;
 
         // When using triggerAfWithAuto quirk, may need to reset focus mode to
@@ -1208,6 +1214,7 @@
     status_t res;
     if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
 
+    int takePictureCounter;
     {
         SharedParameters::Lock l(mParameters);
         switch (l.mParameters.state) {
@@ -1246,8 +1253,11 @@
                     __FUNCTION__, mCameraId, strerror(-res), res);
             return res;
         }
+        takePictureCounter = ++l.mParameters.takePictureCounter;
     }
 
+    ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter);
+
     // Need HAL to have correct settings before (possibly) triggering precapture
     syncWithDevice();
 
@@ -1475,7 +1485,24 @@
     bool afInMotion = false;
     {
         SharedParameters::Lock l(mParameters);
+        // Trace end of AF state
+        char tmp[32];
+        if (l.mParameters.afStateCounter > 0) {
+            camera_metadata_enum_snprint(
+                ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
+            ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter);
+        }
+
+        // Update state
         l.mParameters.focusState = newState;
+        l.mParameters.afStateCounter++;
+
+        // Trace start of AF state
+
+        camera_metadata_enum_snprint(
+            ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
+        ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter);
+
         switch (l.mParameters.focusMode) {
             case Parameters::FOCUS_MODE_AUTO:
             case Parameters::FOCUS_MODE_MACRO:
@@ -1497,6 +1524,7 @@
                     case ANDROID_CONTROL_AF_STATE_INACTIVE:
                     case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                     case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+                    case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
                     default:
                         // Unexpected in AUTO/MACRO mode
                         ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
@@ -1539,6 +1567,7 @@
                         afInMotion = true;
                         // no break
                     case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+                    case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
                         // Stop passive scan, inform upstream
                         if (l.mParameters.enableFocusMoveMessages) {
                             sendMovingMessage = true;
@@ -1567,6 +1596,7 @@
         }
     }
     if (sendCompletedMessage) {
+        ATRACE_ASYNC_END(kAutofocusLabel, triggerId);
         SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
         if (l.mRemoteCallback != 0) {
             l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
@@ -1776,4 +1806,7 @@
     return res;
 }
 
+const char* Camera2Client::kAutofocusLabel = "autofocus";
+const char* Camera2Client::kTakepictureLabel = "take_picture";
+
 } // namespace android
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index ed448f3..fe0bf74 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -57,8 +57,7 @@
     virtual status_t        connect(const sp<ICameraClient>& client);
     virtual status_t        lock();
     virtual status_t        unlock();
-    virtual status_t        setPreviewDisplay(const sp<Surface>& surface);
-    virtual status_t        setPreviewTexture(
+    virtual status_t        setPreviewTarget(
         const sp<IGraphicBufferProducer>& bufferProducer);
     virtual void            setPreviewCallbackFlag(int flag);
     virtual status_t        setPreviewCallbackTarget(
@@ -137,6 +136,10 @@
     static const int32_t kCaptureRequestIdStart = 30000000;
     static const int32_t kCaptureRequestIdEnd   = 40000000;
 
+    // Constant strings for ATRACE logging
+    static const char* kAutofocusLabel;
+    static const char* kTakepictureLabel;
+
 private:
     /** ICamera interface-related private members */
     typedef camera2::Parameters Parameters;
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index ad8856b..bd6805d 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -308,26 +308,20 @@
     return result;
 }
 
-// set the Surface that the preview will use
-status_t CameraClient::setPreviewDisplay(const sp<Surface>& surface) {
-    LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid());
-
-    sp<IBinder> binder(surface != 0 ? surface->getIGraphicBufferProducer()->asBinder() : 0);
-    sp<ANativeWindow> window(surface);
-    return setPreviewWindow(binder, window);
-}
-
-// set the SurfaceTextureClient that the preview will use
-status_t CameraClient::setPreviewTexture(
+// set the buffer consumer that the preview will use
+status_t CameraClient::setPreviewTarget(
         const sp<IGraphicBufferProducer>& bufferProducer) {
-    LOG1("setPreviewTexture(%p) (pid %d)", bufferProducer.get(),
+    LOG1("setPreviewTarget(%p) (pid %d)", bufferProducer.get(),
             getCallingPid());
 
     sp<IBinder> binder;
     sp<ANativeWindow> window;
     if (bufferProducer != 0) {
         binder = bufferProducer->asBinder();
-        window = new Surface(bufferProducer);
+        // Using controlledByApp flag to ensure that the buffer queue remains in
+        // async mode for the old camera API, where many applications depend
+        // on that behavior.
+        window = new Surface(bufferProducer, /*controlledByApp*/ true);
     }
     return setPreviewWindow(binder, window);
 }
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
index abde75a..4b89564 100644
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ b/services/camera/libcameraservice/api1/CameraClient.h
@@ -37,8 +37,7 @@
     virtual status_t        connect(const sp<ICameraClient>& client);
     virtual status_t        lock();
     virtual status_t        unlock();
-    virtual status_t        setPreviewDisplay(const sp<Surface>& surface);
-    virtual status_t        setPreviewTexture(const sp<IGraphicBufferProducer>& bufferProducer);
+    virtual status_t        setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer);
     virtual void            setPreviewCallbackFlag(int flag);
     virtual status_t        setPreviewCallbackTarget(
             const sp<IGraphicBufferProducer>& callbackProducer);
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 9d8c4a1..d2ac79c 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -312,6 +312,16 @@
             return OK;
         }
 
+        if (imgBuffer.width != static_cast<uint32_t>(l.mParameters.previewWidth) ||
+                imgBuffer.height != static_cast<uint32_t>(l.mParameters.previewHeight)) {
+            ALOGW("%s: The preview size has changed to %d x %d from %d x %d, this buffer is"
+                    " no longer valid, dropping",__FUNCTION__,
+                    l.mParameters.previewWidth, l.mParameters.previewHeight,
+                    imgBuffer.width, imgBuffer.height);
+            mCallbackConsumer->unlockBuffer(imgBuffer);
+            return OK;
+        }
+
         previewFormat = l.mParameters.previewFormat;
         useFlexibleYuv = l.mParameters.fastInfo.useFlexibleYuv &&
                 (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP ||
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index ad1590a..8a4ce4e 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -43,6 +43,7 @@
         mShutterNotified(false),
         mClient(client),
         mCaptureState(IDLE),
+        mStateTransitionCount(0),
         mTriggerId(0),
         mTimeoutCount(0),
         mCaptureId(Camera2Client::kCaptureRequestIdStart),
@@ -103,12 +104,12 @@
     }
 }
 
-void CaptureSequencer::onFrameAvailable(int32_t frameId,
+void CaptureSequencer::onFrameAvailable(int32_t requestId,
         const CameraMetadata &frame) {
     ALOGV("%s: Listener found new frame", __FUNCTION__);
     ATRACE_CALL();
     Mutex::Autolock l(mInputMutex);
-    mNewFrameId = frameId;
+    mNewFrameId = requestId;
     mNewFrame = frame;
     if (!mNewFrameReceived) {
         mNewFrameReceived = true;
@@ -198,8 +199,14 @@
 
     Mutex::Autolock l(mStateMutex);
     if (currentState != mCaptureState) {
+        if (mCaptureState != IDLE) {
+            ATRACE_ASYNC_END(kStateNames[mCaptureState], mStateTransitionCount);
+        }
         mCaptureState = currentState;
-        ATRACE_INT("cam2_capt_state", mCaptureState);
+        mStateTransitionCount++;
+        if (mCaptureState != IDLE) {
+            ATRACE_ASYNC_BEGIN(kStateNames[mCaptureState], mStateTransitionCount);
+        }
         ALOGV("Camera %d: New capture state %s",
                 client->getCameraId(), kStateNames[mCaptureState]);
         mStateChanged.signal();
@@ -243,6 +250,7 @@
         mBusy = false;
     }
 
+    int takePictureCounter = 0;
     {
         SharedParameters::Lock l(client->getParameters());
         switch (l.mParameters.state) {
@@ -270,6 +278,7 @@
                         Parameters::getStateName(l.mParameters.state));
                 res = INVALID_OPERATION;
         }
+        takePictureCounter = l.mParameters.takePictureCounter;
     }
     sp<ZslProcessorInterface> processor = mZslProcessor.promote();
     if (processor != 0) {
@@ -282,6 +291,8 @@
      * Fire the jpegCallback in Camera#takePicture(..., jpegCallback)
      */
     if (mCaptureBuffer != 0 && res == OK) {
+        ATRACE_ASYNC_END(Camera2Client::kTakepictureLabel, takePictureCounter);
+
         Camera2Client::SharedCameraCallbacks::Lock
             l(client->mSharedCameraCallbacks);
         ALOGV("%s: Sending still image to client", __FUNCTION__);
@@ -379,11 +390,23 @@
         sp<Camera2Client> &client) {
     ATRACE_CALL();
 
+    bool isAeConverged = false;
     // Get the onFrameAvailable callback when the requestID == mCaptureId
     client->registerFrameListener(mCaptureId, mCaptureId + 1,
             this);
+
+    {
+        Mutex::Autolock l(mInputMutex);
+        isAeConverged = (mAEState == ANDROID_CONTROL_AE_STATE_CONVERGED);
+    }
+
     {
         SharedParameters::Lock l(client->getParameters());
+        // Skip AE precapture when it is already converged and not in force flash mode.
+        if (l.mParameters.flashMode != Parameters::FLASH_MODE_ON && isAeConverged) {
+            return STANDARD_CAPTURE;
+        }
+
         mTriggerId = l.mParameters.precaptureTriggerCounter++;
     }
     client->getCameraDevice()->triggerPrecaptureMetering(mTriggerId);
@@ -437,7 +460,8 @@
     status_t res;
     ATRACE_CALL();
     SharedParameters::Lock l(client->getParameters());
-    Vector<uint8_t> outputStreams;
+    Vector<int32_t> outputStreams;
+    uint8_t captureIntent = static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
 
     /**
      * Set up output streams in the request
@@ -456,6 +480,7 @@
 
     if (l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
         outputStreams.push(client->getRecordingStreamId());
+        captureIntent = static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
     }
 
     res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
@@ -465,6 +490,10 @@
                 &mCaptureId, 1);
     }
     if (res == OK) {
+        res = mCaptureRequest.update(ANDROID_CONTROL_CAPTURE_INTENT,
+                &captureIntent, 1);
+    }
+    if (res == OK) {
         res = mCaptureRequest.sort();
     }
 
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
index 76750aa..9fb4ee7 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
@@ -62,7 +62,7 @@
     void notifyAutoExposure(uint8_t newState, int triggerId);
 
     // Notifications from the frame processor
-    virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame);
+    virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
 
     // Notifications from the JPEG processor
     void onCaptureAvailable(nsecs_t timestamp, sp<MemoryBase> captureBuffer);
@@ -100,7 +100,7 @@
      * Internal to CaptureSequencer
      */
     static const nsecs_t kWaitDuration = 100000000; // 100 ms
-    static const int kMaxTimeoutsForPrecaptureStart = 2; // 200 ms
+    static const int kMaxTimeoutsForPrecaptureStart = 10; // 1 sec
     static const int kMaxTimeoutsForPrecaptureEnd = 20;  // 2 sec
     static const int kMaxTimeoutsForCaptureEnd    = 40;  // 4 sec
 
@@ -125,6 +125,7 @@
         NUM_CAPTURE_STATES
     } mCaptureState;
     static const char* kStateNames[];
+    int mStateTransitionCount;
     Mutex mStateMutex; // Guards mCaptureState
     Condition mStateChanged;
 
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index c34cb12..19acae4 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -29,13 +29,27 @@
 namespace camera2 {
 
 FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
-                               wp<Camera2Client> client) :
+                               sp<Camera2Client> client) :
     FrameProcessorBase(device),
     mClient(client),
-    mLastFrameNumberOfFaces(0) {
+    mLastFrameNumberOfFaces(0),
+    mLast3AFrameNumber(-1) {
 
     sp<CameraDeviceBase> d = device.promote();
     mSynthesize3ANotify = !(d->willNotify3A());
+
+    {
+        SharedParameters::Lock l(client->getParameters());
+        mUsePartialQuirk = l.mParameters.quirks.partialResults;
+
+        // Initialize starting 3A state
+        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
+        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
+        // Check if lens is fixed-focus
+        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
+            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
+        }
+    }
 }
 
 FrameProcessor::~FrameProcessor() {
@@ -49,20 +63,25 @@
         return false;
     }
 
-    if (processFaceDetect(frame, client) != OK) {
+    bool partialResult = false;
+    if (mUsePartialQuirk) {
+        camera_metadata_entry_t entry;
+        entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+        if (entry.count > 0 &&
+                entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+            partialResult = true;
+        }
+    }
+
+    if (!partialResult && processFaceDetect(frame, client) != OK) {
         return false;
     }
 
     if (mSynthesize3ANotify) {
-        // Ignoring missing fields for now
         process3aState(frame, client);
     }
 
-    if (!FrameProcessorBase::processSingleFrame(frame, device)) {
-        return false;
-    }
-
-    return true;
+    return FrameProcessorBase::processSingleFrame(frame, device);
 }
 
 status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
@@ -198,86 +217,75 @@
 
     ATRACE_CALL();
     camera_metadata_ro_entry_t entry;
-    int mId = client->getCameraId();
+    int cameraId = client->getCameraId();
 
     entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
     int32_t frameNumber = entry.data.i32[0];
 
+    // Don't send 3A notifications for the same frame number twice
+    if (frameNumber <= mLast3AFrameNumber) {
+        ALOGV("%s: Already sent 3A for frame number %d, skipping",
+                __FUNCTION__, frameNumber);
+        return OK;
+    }
+
+    mLast3AFrameNumber = frameNumber;
+
     // Get 3A states from result metadata
     bool gotAllStates = true;
 
     AlgState new3aState;
 
-    entry = frame.find(ANDROID_CONTROL_AE_STATE);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AE state provided by HAL for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        new3aState.aeState =
-                static_cast<camera_metadata_enum_android_control_ae_state>(
-                    entry.data.u8[0]);
-    }
+    // TODO: Also use AE mode, AE trigger ID
 
-    entry = frame.find(ANDROID_CONTROL_AF_STATE);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AF state provided by HAL for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        new3aState.afState =
-                static_cast<camera_metadata_enum_android_control_af_state>(
-                    entry.data.u8[0]);
-    }
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_MODE,
+            &new3aState.afMode, frameNumber, cameraId);
 
-    entry = frame.find(ANDROID_CONTROL_AWB_STATE);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AWB state provided by HAL for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        new3aState.awbState =
-                static_cast<camera_metadata_enum_android_control_awb_state>(
-                    entry.data.u8[0]);
-    }
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_MODE,
+            &new3aState.awbMode, frameNumber, cameraId);
 
-    int32_t afTriggerId = 0;
-    entry = frame.find(ANDROID_CONTROL_AF_TRIGGER_ID);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AF trigger ID provided by HAL for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        afTriggerId = entry.data.i32[0];
-    }
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AE_STATE,
+            &new3aState.aeState, frameNumber, cameraId);
 
-    int32_t aeTriggerId = 0;
-    entry = frame.find(ANDROID_CONTROL_AE_PRECAPTURE_ID);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AE precapture trigger ID provided by HAL"
-                " for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        aeTriggerId = entry.data.i32[0];
-    }
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_STATE,
+            &new3aState.afState, frameNumber, cameraId);
+
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_STATE,
+            &new3aState.awbState, frameNumber, cameraId);
+
+    gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AF_TRIGGER_ID,
+            &new3aState.afTriggerId, frameNumber, cameraId);
+
+    gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+            &new3aState.aeTriggerId, frameNumber, cameraId);
 
     if (!gotAllStates) return BAD_VALUE;
 
     if (new3aState.aeState != m3aState.aeState) {
-        ALOGV("%s: AE state changed from 0x%x to 0x%x",
-                __FUNCTION__, m3aState.aeState, new3aState.aeState);
-        client->notifyAutoExposure(new3aState.aeState, aeTriggerId);
+        ALOGV("%s: Camera %d: AE state %d->%d",
+                __FUNCTION__, cameraId,
+                m3aState.aeState, new3aState.aeState);
+        client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
     }
-    if (new3aState.afState != m3aState.afState) {
-        ALOGV("%s: AF state changed from 0x%x to 0x%x",
-                __FUNCTION__, m3aState.afState, new3aState.afState);
-        client->notifyAutoFocus(new3aState.afState, afTriggerId);
+
+    if (new3aState.afState != m3aState.afState ||
+        new3aState.afMode != m3aState.afMode ||
+        new3aState.afTriggerId != m3aState.afTriggerId) {
+        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
+                __FUNCTION__, cameraId,
+                m3aState.afState, new3aState.afState,
+                m3aState.afMode, new3aState.afMode,
+                m3aState.afTriggerId, new3aState.afTriggerId);
+        client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
     }
-    if (new3aState.awbState != m3aState.awbState) {
-        ALOGV("%s: AWB state changed from 0x%x to 0x%x",
-                __FUNCTION__, m3aState.awbState, new3aState.awbState);
-        client->notifyAutoWhitebalance(new3aState.awbState, aeTriggerId);
+    if (new3aState.awbState != m3aState.awbState ||
+        new3aState.awbMode != m3aState.awbMode) {
+        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
+                __FUNCTION__, cameraId,
+                m3aState.awbState, new3aState.awbState,
+                m3aState.awbMode, new3aState.awbMode);
+        client->notifyAutoWhitebalance(new3aState.awbState,
+                new3aState.aeTriggerId);
     }
 
     m3aState = new3aState;
@@ -285,6 +293,39 @@
     return OK;
 }
 
+template<typename Src, typename T>
+bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
+        T* value, int32_t frameNumber, int cameraId) {
+    camera_metadata_ro_entry_t entry;
+    if (value == NULL) {
+        ALOGE("%s: Camera %d: Value to write to is NULL",
+                __FUNCTION__, cameraId);
+        return false;
+    }
+
+    entry = result.find(tag);
+    if (entry.count == 0) {
+        ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
+                __FUNCTION__, cameraId,
+                get_camera_metadata_tag_name(tag), frameNumber);
+        return false;
+    } else {
+        switch(sizeof(Src)){
+            case sizeof(uint8_t):
+                *value = static_cast<T>(entry.data.u8[0]);
+                break;
+            case sizeof(int32_t):
+                *value = static_cast<T>(entry.data.i32[0]);
+                break;
+            default:
+                ALOGE("%s: Camera %d: Unsupported source",
+                        __FUNCTION__, cameraId);
+                return false;
+        }
+    }
+    return true;
+}
+
 
 void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
                                      const camera_frame_metadata &metadata) {
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index 2a17d45..856ad32 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -39,7 +39,7 @@
  */
 class FrameProcessor : public FrameProcessorBase {
   public:
-    FrameProcessor(wp<CameraDeviceBase> device, wp<Camera2Client> client);
+    FrameProcessor(wp<CameraDeviceBase> device, sp<Camera2Client> client);
     ~FrameProcessor();
 
   private:
@@ -61,18 +61,44 @@
     status_t process3aState(const CameraMetadata &frame,
             const sp<Camera2Client> &client);
 
+    // Helper for process3aState
+    template<typename Src, typename T>
+    bool get3aResult(const CameraMetadata& result, int32_t tag, T* value,
+            int32_t frameNumber, int cameraId);
+
+
     struct AlgState {
+        // TODO: also track AE mode
+        camera_metadata_enum_android_control_af_mode   afMode;
+        camera_metadata_enum_android_control_awb_mode  awbMode;
+
         camera_metadata_enum_android_control_ae_state  aeState;
         camera_metadata_enum_android_control_af_state  afState;
         camera_metadata_enum_android_control_awb_state awbState;
 
+        int32_t                                        afTriggerId;
+        int32_t                                        aeTriggerId;
+
+        // These defaults need to match those in Parameters.cpp
         AlgState() :
+                afMode(ANDROID_CONTROL_AF_MODE_AUTO),
+                awbMode(ANDROID_CONTROL_AWB_MODE_AUTO),
                 aeState(ANDROID_CONTROL_AE_STATE_INACTIVE),
                 afState(ANDROID_CONTROL_AF_STATE_INACTIVE),
-                awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE) {
+                awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE),
+                afTriggerId(0),
+                aeTriggerId(0) {
         }
     } m3aState;
 
+    // Whether the partial result quirk is enabled for this device
+    bool mUsePartialQuirk;
+
+    // Track most recent frame number for which 3A notifications were sent for.
+    // Used to filter against sending 3A notifications for the same frame
+    // several times.
+    int32_t mLast3AFrameNumber;
+
     // Emit FaceDetection event to java if faces changed
     void callbackFaceDetection(sp<Camera2Client> client,
                                const camera_frame_metadata &metadata);
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 0459866..08af566 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -58,13 +58,13 @@
     res = buildQuirks();
     if (res != OK) return res;
 
-    camera_metadata_ro_entry_t availableProcessedSizes =
-        staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 2);
-    if (!availableProcessedSizes.count) return NO_INIT;
+    const Size MAX_PREVIEW_SIZE = { MAX_PREVIEW_WIDTH, MAX_PREVIEW_HEIGHT };
+    res = getFilteredPreviewSizes(MAX_PREVIEW_SIZE, &availablePreviewSizes);
+    if (res != OK) return res;
 
     // TODO: Pick more intelligently
-    previewWidth = availableProcessedSizes.data.i32[0];
-    previewHeight = availableProcessedSizes.data.i32[1];
+    previewWidth = availablePreviewSizes[0].width;
+    previewHeight = availablePreviewSizes[0].height;
     videoWidth = previewWidth;
     videoHeight = previewHeight;
 
@@ -75,12 +75,13 @@
                     previewWidth, previewHeight));
     {
         String8 supportedPreviewSizes;
-        for (size_t i=0; i < availableProcessedSizes.count; i += 2) {
+        for (size_t i = 0; i < availablePreviewSizes.size(); i++) {
             if (i != 0) supportedPreviewSizes += ",";
             supportedPreviewSizes += String8::format("%dx%d",
-                    availableProcessedSizes.data.i32[i],
-                    availableProcessedSizes.data.i32[i+1]);
+                    availablePreviewSizes[i].width,
+                    availablePreviewSizes[i].height);
         }
+        ALOGV("Supported preview sizes are: %s", supportedPreviewSizes.string());
         params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
                 supportedPreviewSizes);
         params.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES,
@@ -182,7 +183,7 @@
     // still have to do something sane for them
 
     // NOTE: Not scaled like FPS range values are.
-    previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]);
+    int previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]);
     params.set(CameraParameters::KEY_PREVIEW_FRAME_RATE,
             previewFps);
 
@@ -248,9 +249,17 @@
         staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 4);
     if (!availableJpegThumbnailSizes.count) return NO_INIT;
 
-    // TODO: Pick default thumbnail size sensibly
-    jpegThumbSize[0] = availableJpegThumbnailSizes.data.i32[0];
-    jpegThumbSize[1] = availableJpegThumbnailSizes.data.i32[1];
+    // Pick the largest thumbnail size that matches still image aspect ratio.
+    ALOG_ASSERT(pictureWidth > 0 && pictureHeight > 0,
+            "Invalid picture size, %d x %d", pictureWidth, pictureHeight);
+    float picAspectRatio = static_cast<float>(pictureWidth) / pictureHeight;
+    Size thumbnailSize =
+            getMaxSizeForRatio(
+                    picAspectRatio,
+                    &availableJpegThumbnailSizes.data.i32[0],
+                    availableJpegThumbnailSizes.count);
+    jpegThumbSize[0] = thumbnailSize.width;
+    jpegThumbSize[1] = thumbnailSize.height;
 
     params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
             jpegThumbSize[0]);
@@ -795,30 +804,25 @@
 
     enableFocusMoveMessages = false;
     afTriggerCounter = 1;
+    afStateCounter = 0;
     currentAfTriggerId = -1;
     afInMotion = false;
 
     precaptureTriggerCounter = 1;
 
+    takePictureCounter = 0;
+
     previewCallbackFlags = 0;
     previewCallbackOneShot = false;
     previewCallbackSurface = false;
 
-    camera_metadata_ro_entry_t supportedHardwareLevel =
-        staticInfo(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 0, 0, false);
-    if (!supportedHardwareLevel.count || (supportedHardwareLevel.data.u8[0] ==
-            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED)) {
-        ALOGI("Camera %d: ZSL mode disabled for limited mode HALs", cameraId);
+    char value[PROPERTY_VALUE_MAX];
+    property_get("camera.disable_zsl_mode", value, "0");
+    if (!strcmp(value,"1")) {
+        ALOGI("Camera %d: Disabling ZSL mode", cameraId);
         zslMode = false;
     } else {
-        char value[PROPERTY_VALUE_MAX];
-        property_get("camera.disable_zsl_mode", value, "0");
-        if (!strcmp(value,"1")) {
-            ALOGI("Camera %d: Disabling ZSL mode", cameraId);
-            zslMode = false;
-        } else {
-            zslMode = true;
-        }
+        zslMode = true;
     }
 
     lightFx = LIGHTFX_NONE;
@@ -851,6 +855,33 @@
         arrayHeight = activeArraySize.data.i32[3];
     } else return NO_INIT;
 
+    // We'll set the target FPS range for still captures to be as wide
+    // as possible to give the HAL maximum latitude for exposure selection
+    camera_metadata_ro_entry_t availableFpsRanges =
+        staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
+    if (availableFpsRanges.count < 2 || availableFpsRanges.count % 2 != 0) {
+        return NO_INIT;
+    }
+
+    int32_t bestStillCaptureFpsRange[2] = {
+        availableFpsRanges.data.i32[0], availableFpsRanges.data.i32[1]
+    };
+    int32_t curRange =
+            bestStillCaptureFpsRange[1] - bestStillCaptureFpsRange[0];
+    for (size_t i = 2; i < availableFpsRanges.count; i += 2) {
+        int32_t nextRange =
+                availableFpsRanges.data.i32[i + 1] -
+                availableFpsRanges.data.i32[i];
+        if ( (nextRange > curRange) ||       // Maximize size of FPS range first
+                (nextRange == curRange &&    // Then minimize low-end FPS
+                 bestStillCaptureFpsRange[0] > availableFpsRanges.data.i32[i])) {
+
+            bestStillCaptureFpsRange[0] = availableFpsRanges.data.i32[i];
+            bestStillCaptureFpsRange[1] = availableFpsRanges.data.i32[i + 1];
+            curRange = nextRange;
+        }
+    }
+
     camera_metadata_ro_entry_t availableFaceDetectModes =
         staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 0, 0,
                 false);
@@ -970,6 +1001,8 @@
 
     fastInfo.arrayWidth = arrayWidth;
     fastInfo.arrayHeight = arrayHeight;
+    fastInfo.bestStillCaptureFpsRange[0] = bestStillCaptureFpsRange[0];
+    fastInfo.bestStillCaptureFpsRange[1] = bestStillCaptureFpsRange[1];
     fastInfo.bestFaceDetectMode = bestFaceDetectMode;
     fastInfo.maxFaces = maxFaces;
 
@@ -1013,6 +1046,11 @@
     ALOGV_IF(quirks.meteringCropRegion, "Camera %d: Quirk meteringCropRegion"
                 " enabled", cameraId);
 
+    entry = info->find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
+    quirks.partialResults = (entry.count != 0 && entry.data.u8[0] == 1);
+    ALOGV_IF(quirks.partialResults, "Camera %d: Quirk usePartialResult"
+                " enabled", cameraId);
+
     return OK;
 }
 
@@ -1072,15 +1110,13 @@
                     validatedParams.previewWidth, validatedParams.previewHeight);
             return BAD_VALUE;
         }
-        camera_metadata_ro_entry_t availablePreviewSizes =
-            staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
-        for (i = 0; i < availablePreviewSizes.count; i += 2 ) {
-            if ((availablePreviewSizes.data.i32[i] ==
+        for (i = 0; i < availablePreviewSizes.size(); i++) {
+            if ((availablePreviewSizes[i].width ==
                     validatedParams.previewWidth) &&
-                (availablePreviewSizes.data.i32[i+1] ==
+                (availablePreviewSizes[i].height ==
                     validatedParams.previewHeight)) break;
         }
-        if (i == availablePreviewSizes.count) {
+        if (i == availablePreviewSizes.size()) {
             ALOGE("%s: Requested preview size %d x %d is not supported",
                     __FUNCTION__, validatedParams.previewWidth,
                     validatedParams.previewHeight);
@@ -1097,13 +1133,22 @@
 
     // PREVIEW_FPS_RANGE
     bool fpsRangeChanged = false;
+    int32_t lastSetFpsRange[2];
+
+    params.getPreviewFpsRange(&lastSetFpsRange[0], &lastSetFpsRange[1]);
+    lastSetFpsRange[0] /= kFpsToApiScale;
+    lastSetFpsRange[1] /= kFpsToApiScale;
+
     newParams.getPreviewFpsRange(&validatedParams.previewFpsRange[0],
             &validatedParams.previewFpsRange[1]);
     validatedParams.previewFpsRange[0] /= kFpsToApiScale;
     validatedParams.previewFpsRange[1] /= kFpsToApiScale;
 
-    if (validatedParams.previewFpsRange[0] != previewFpsRange[0] ||
-            validatedParams.previewFpsRange[1] != previewFpsRange[1]) {
+    // Compare the FPS range value from the last set() to the current set()
+    // to determine if the client has changed it
+    if (validatedParams.previewFpsRange[0] != lastSetFpsRange[0] ||
+            validatedParams.previewFpsRange[1] != lastSetFpsRange[1]) {
+
         fpsRangeChanged = true;
         camera_metadata_ro_entry_t availablePreviewFpsRanges =
             staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
@@ -1121,10 +1166,6 @@
                     validatedParams.previewFpsRange[1]);
             return BAD_VALUE;
         }
-        validatedParams.previewFps =
-            fpsFromRange(validatedParams.previewFpsRange[0],
-                         validatedParams.previewFpsRange[1]);
-        newParams.setPreviewFrameRate(validatedParams.previewFps);
     }
 
     // PREVIEW_FORMAT
@@ -1159,12 +1200,14 @@
         }
     }
 
-    // PREVIEW_FRAME_RATE
-    // Deprecated, only use if the preview fps range is unchanged this time.
-    // The single-value FPS is the same as the minimum of the range.
+    // PREVIEW_FRAME_RATE Deprecated, only use if the preview fps range is
+    // unchanged this time.  The single-value FPS is the same as the minimum of
+    // the range.  To detect whether the application has changed the value of
+    // previewFps, compare against their last-set preview FPS.
     if (!fpsRangeChanged) {
-        validatedParams.previewFps = newParams.getPreviewFrameRate();
-        if (validatedParams.previewFps != previewFps || recordingHintChanged) {
+        int previewFps = newParams.getPreviewFrameRate();
+        int lastSetPreviewFps = params.getPreviewFrameRate();
+        if (previewFps != lastSetPreviewFps || recordingHintChanged) {
             camera_metadata_ro_entry_t availableFrameRates =
                 staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
             /**
@@ -1177,8 +1220,8 @@
               * Either way, in case of multiple ranges, break the tie by
               * selecting the smaller range.
               */
-            int targetFps = validatedParams.previewFps;
-            // all ranges which have targetFps
+
+            // all ranges which have previewFps
             Vector<Range> candidateRanges;
             for (i = 0; i < availableFrameRates.count; i+=2) {
                 Range r = {
@@ -1186,13 +1229,13 @@
                             availableFrameRates.data.i32[i+1]
                 };
 
-                if (r.min <= targetFps && targetFps <= r.max) {
+                if (r.min <= previewFps && previewFps <= r.max) {
                     candidateRanges.push(r);
                 }
             }
             if (candidateRanges.isEmpty()) {
                 ALOGE("%s: Requested preview frame rate %d is not supported",
-                        __FUNCTION__, validatedParams.previewFps);
+                        __FUNCTION__, previewFps);
                 return BAD_VALUE;
             }
             // most applicable range with targetFps
@@ -1231,11 +1274,6 @@
                 validatedParams.previewFpsRange[1],
                 validatedParams.recordingHint);
         }
-        newParams.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,
-                String8::format("%d,%d",
-                        validatedParams.previewFpsRange[0] * kFpsToApiScale,
-                        validatedParams.previewFpsRange[1] * kFpsToApiScale));
-
     }
 
     // PICTURE_SIZE
@@ -1618,15 +1656,13 @@
                     __FUNCTION__);
             return BAD_VALUE;
         }
-        camera_metadata_ro_entry_t availableVideoSizes =
-            staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
-        for (i = 0; i < availableVideoSizes.count; i += 2 ) {
-            if ((availableVideoSizes.data.i32[i] ==
+        for (i = 0; i < availablePreviewSizes.size(); i++) {
+            if ((availablePreviewSizes[i].width ==
                     validatedParams.videoWidth) &&
-                (availableVideoSizes.data.i32[i+1] ==
+                (availablePreviewSizes[i].height ==
                     validatedParams.videoHeight)) break;
         }
-        if (i == availableVideoSizes.count) {
+        if (i == availablePreviewSizes.size()) {
             ALOGE("%s: Requested video size %d x %d is not supported",
                     __FUNCTION__, validatedParams.videoWidth,
                     validatedParams.videoHeight);
@@ -1712,8 +1748,15 @@
             &metadataMode, 1);
     if (res != OK) return res;
 
-    res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
-            previewFpsRange, 2);
+    camera_metadata_entry_t intent =
+            request->find(ANDROID_CONTROL_CAPTURE_INTENT);
+    if (intent.data.u8[0] == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
+        res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                fastInfo.bestStillCaptureFpsRange, 2);
+    } else {
+        res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                previewFpsRange, 2);
+    }
     if (res != OK) return res;
 
     uint8_t reqWbLock = autoWhiteBalanceLock ?
@@ -2447,6 +2490,64 @@
     return cropYToArray(normalizedYToCrop(y));
 }
 
+status_t Parameters::getFilteredPreviewSizes(Size limit, Vector<Size> *sizes) {
+    if (info == NULL) {
+        ALOGE("%s: Static metadata is not initialized", __FUNCTION__);
+        return NO_INIT;
+    }
+    if (sizes == NULL) {
+        ALOGE("%s: Input size is null", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    const size_t SIZE_COUNT = sizeof(Size) / sizeof(int);
+    camera_metadata_ro_entry_t availableProcessedSizes =
+        staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, SIZE_COUNT);
+    if (availableProcessedSizes.count < SIZE_COUNT) return BAD_VALUE;
+
+    Size previewSize;
+    for (size_t i = 0; i < availableProcessedSizes.count; i += SIZE_COUNT) {
+        previewSize.width = availableProcessedSizes.data.i32[i];
+        previewSize.height = availableProcessedSizes.data.i32[i+1];
+            // Need skip the preview sizes that are too large.
+            if (previewSize.width <= limit.width &&
+                    previewSize.height <= limit.height) {
+                sizes->push(previewSize);
+            }
+    }
+    if (sizes->isEmpty()) {
+        ALOGE("generated preview size list is empty!!");
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+Parameters::Size Parameters::getMaxSizeForRatio(
+        float ratio, const int32_t* sizeArray, size_t count) {
+    ALOG_ASSERT(sizeArray != NULL, "size array shouldn't be NULL");
+    ALOG_ASSERT(count >= 2 && count % 2 == 0, "count must be a positive even number");
+
+    Size maxSize = {0, 0};
+    for (size_t i = 0; i < count; i += 2) {
+        if (sizeArray[i] > 0 && sizeArray[i+1] > 0) {
+            float curRatio = static_cast<float>(sizeArray[i]) / sizeArray[i+1];
+            if (fabs(curRatio - ratio) < ASPECT_RATIO_TOLERANCE && maxSize.width < sizeArray[i]) {
+                maxSize.width = sizeArray[i];
+                maxSize.height = sizeArray[i+1];
+            }
+        }
+    }
+
+    if (maxSize.width == 0 || maxSize.height == 0) {
+        maxSize.width = sizeArray[0];
+        maxSize.height = sizeArray[1];
+        ALOGW("Unable to find the size to match the given aspect ratio %f."
+                "Fall back to %d x %d", ratio, maxSize.width, maxSize.height);
+    }
+
+    return maxSize;
+}
+
 Parameters::CropRegion Parameters::calculateCropRegion(
                             Parameters::CropRegion::Outputs outputs) const {
 
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 464830c..32dbd42 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -46,7 +46,6 @@
 
     int previewWidth, previewHeight;
     int32_t previewFpsRange[2];
-    int previewFps; // deprecated, here only for tracking changes
     int previewFormat;
 
     int previewTransform; // set by CAMERA_CMD_SET_DISPLAY_ORIENTATION
@@ -105,6 +104,11 @@
     };
     Vector<Area> focusingAreas;
 
+    struct Size {
+        int32_t width;
+        int32_t height;
+    };
+
     int32_t exposureCompensation;
     bool autoExposureLock;
     bool autoWhiteBalanceLock;
@@ -135,11 +139,14 @@
 
     bool enableFocusMoveMessages;
     int afTriggerCounter;
+    int afStateCounter;
     int currentAfTriggerId;
     bool afInMotion;
 
     int precaptureTriggerCounter;
 
+    int takePictureCounter;
+
     uint32_t previewCallbackFlags;
     bool previewCallbackOneShot;
     bool previewCallbackSurface;
@@ -159,6 +166,11 @@
 
     // Number of zoom steps to simulate
     static const unsigned int NUM_ZOOM_STEPS = 100;
+    // Max preview size allowed
+    static const unsigned int MAX_PREVIEW_WIDTH = 1920;
+    static const unsigned int MAX_PREVIEW_HEIGHT = 1080;
+    // Aspect ratio tolerance
+    static const float ASPECT_RATIO_TOLERANCE = 0.001;
 
     // Full static camera info, object owned by someone else, such as
     // Camera2Device.
@@ -171,6 +183,7 @@
     struct DeviceInfo {
         int32_t arrayWidth;
         int32_t arrayHeight;
+        int32_t bestStillCaptureFpsRange[2];
         uint8_t bestFaceDetectMode;
         int32_t maxFaces;
         struct OverrideModes {
@@ -194,6 +207,7 @@
         bool triggerAfWithAuto;
         bool useZslFormat;
         bool meteringCropRegion;
+        bool partialResults;
     } quirks;
 
     /**
@@ -317,6 +331,12 @@
     int cropYToNormalized(int y) const;
     int normalizedXToCrop(int x) const;
     int normalizedYToCrop(int y) const;
+
+    Vector<Size> availablePreviewSizes;
+    // Get size list (that are no larger than limit) from static metadata.
+    status_t getFilteredPreviewSizes(Size limit, Vector<Size> *sizes);
+    // Get max size (from the size array) that matches the given aspect ratio.
+    Size getMaxSizeForRatio(float ratio, const int32_t* sizeArray, size_t count);
 };
 
 // This class encapsulates the Parameters class so that it can only be accessed
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index dfe8580..6076dae 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -412,7 +412,7 @@
 }
 
 status_t StreamingProcessor::startStream(StreamType type,
-        const Vector<uint8_t> &outputStreams) {
+        const Vector<int32_t> &outputStreams) {
     ATRACE_CALL();
     status_t res;
 
@@ -830,8 +830,8 @@
     mRecordingHeapFree = mRecordingHeapCount;
 }
 
-bool StreamingProcessor::isStreamActive(const Vector<uint8_t> &streams,
-        uint8_t recordingStreamId) {
+bool StreamingProcessor::isStreamActive(const Vector<int32_t> &streams,
+        int32_t recordingStreamId) {
     for (size_t i = 0; i < streams.size(); i++) {
         if (streams[i] == recordingStreamId) {
             return true;
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index d879b83..833bb8f 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -64,7 +64,7 @@
         RECORD
     };
     status_t startStream(StreamType type,
-            const Vector<uint8_t> &outputStreams);
+            const Vector<int32_t> &outputStreams);
 
     // Toggle between paused and unpaused. Stream must be started first.
     status_t togglePauseStream(bool pause);
@@ -97,7 +97,7 @@
     StreamType mActiveRequest;
     bool mPaused;
 
-    Vector<uint8_t> mActiveStreamIds;
+    Vector<int32_t> mActiveStreamIds;
 
     // Preview-related members
     int32_t mPreviewRequestId;
@@ -132,8 +132,8 @@
     void releaseAllRecordingFramesLocked();
 
     // Determine if the specified stream is currently in use
-    static bool isStreamActive(const Vector<uint8_t> &streams,
-            uint8_t recordingStreamId);
+    static bool isStreamActive(const Vector<int32_t> &streams,
+            int32_t recordingStreamId);
 };
 
 
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 3b118f4..4207ba9 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -71,7 +71,7 @@
     }
 }
 
-void ZslProcessor::onFrameAvailable(int32_t /*frameId*/,
+void ZslProcessor::onFrameAvailable(int32_t /*requestId*/,
         const CameraMetadata &frame) {
     Mutex::Autolock l(mInputMutex);
     camera_metadata_ro_entry_t entry;
@@ -300,12 +300,12 @@
         uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
         res = request.update(ANDROID_REQUEST_TYPE,
                 &requestType, 1);
-        uint8_t inputStreams[1] =
-                { static_cast<uint8_t>(mZslReprocessStreamId) };
+        int32_t inputStreams[1] =
+                { mZslReprocessStreamId };
         if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
                 inputStreams, 1);
-        uint8_t outputStreams[1] =
-                { static_cast<uint8_t>(client->getCaptureStreamId()) };
+        int32_t outputStreams[1] =
+                { client->getCaptureStreamId() };
         if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                 outputStreams, 1);
         res = request.update(ANDROID_REQUEST_ID,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 5fb178f..6d3cb85 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -54,7 +54,7 @@
     // From mZslConsumer
     virtual void onFrameAvailable();
     // From FrameProcessor
-    virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame);
+    virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
 
     virtual void onBufferReleased(buffer_handle_t *handle);
 
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 7c4da50..776ebe2 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -61,7 +61,7 @@
     deleteStream();
 }
 
-void ZslProcessor3::onFrameAvailable(int32_t /*frameId*/,
+void ZslProcessor3::onFrameAvailable(int32_t /*requestId*/,
                                      const CameraMetadata &frame) {
     Mutex::Autolock l(mInputMutex);
     camera_metadata_ro_entry_t entry;
@@ -247,13 +247,13 @@
         uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
         res = request.update(ANDROID_REQUEST_TYPE,
                 &requestType, 1);
-        uint8_t inputStreams[1] =
-                { static_cast<uint8_t>(mZslStreamId) };
+        int32_t inputStreams[1] =
+                { mZslStreamId };
         if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
                 inputStreams, 1);
         // TODO: Shouldn't we also update the latest preview frame?
-        uint8_t outputStreams[1] =
-                { static_cast<uint8_t>(client->getCaptureStreamId()) };
+        int32_t outputStreams[1] =
+                { client->getCaptureStreamId() };
         if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                 outputStreams, 1);
         res = request.update(ANDROID_REQUEST_ID,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
index 35b85f5..d2f8322 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
@@ -51,7 +51,7 @@
     ~ZslProcessor3();
 
     // From FrameProcessor
-    virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame);
+    virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame);
 
     /**
      ****************************************
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 414316d..1cdf8dc 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -45,14 +45,6 @@
                 cameraId, cameraFacing, clientPid, clientUid, servicePid),
     mRemoteCallback(remoteCallback) {
 }
-void CameraDeviceClientBase::notifyError() {
-    // Thread safe. Don't bother locking.
-    sp<ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
-
-    if (remoteCb != 0) {
-        remoteCb->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0);
-    }
-}
 
 // Interface used by CameraService
 
@@ -89,7 +81,8 @@
 
     mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID,
                                       FRAME_PROCESSOR_LISTENER_MAX_ID,
-                                      /*listener*/this);
+                                      /*listener*/this,
+                                      /*quirkSendPartials*/true);
 
     return OK;
 }
@@ -137,7 +130,7 @@
      * Write in the output stream IDs which we calculate from
      * the capture request's list of surface targets
      */
-    Vector<uint8_t> outputStreamIds;
+    Vector<int32_t> outputStreamIds;
     outputStreamIds.setCapacity(request->mSurfaceList.size());
     for (size_t i = 0; i < request->mSurfaceList.size(); ++i) {
         sp<Surface> surface = request->mSurfaceList[i];
@@ -164,7 +157,6 @@
     metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
                     outputStreamIds.size());
 
-    // TODO: @hide ANDROID_REQUEST_ID, or use another request token
     int32_t requestId = mRequestIdCounter++;
     metadata.update(ANDROID_REQUEST_ID, &requestId, /*size*/1);
     ALOGV("%s: Camera %d: Submitting request with ID %d",
@@ -298,11 +290,28 @@
         }
     }
 
+    // HACK b/10949105
+    // Query consumer usage bits to set async operation mode for
+    // GLConsumer using controlledByApp parameter.
+    bool useAsync = false;
+    int32_t consumerUsage;
+    if ((res = bufferProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+            &consumerUsage)) != OK) {
+        ALOGE("%s: Camera %d: Failed to query consumer usage", __FUNCTION__,
+              mCameraId);
+        return res;
+    }
+    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
+        ALOGW("%s: Camera %d: Forcing asynchronous mode for stream",
+                __FUNCTION__, mCameraId);
+        useAsync = true;
+    }
+
     sp<IBinder> binder;
     sp<ANativeWindow> anw;
     if (bufferProducer != 0) {
         binder = bufferProducer->asBinder();
-        anw = new Surface(bufferProducer);
+        anw = new Surface(bufferProducer, useAsync);
     }
 
     // TODO: remove w,h,f since we are ignoring them
@@ -360,6 +369,26 @@
 
         ALOGV("%s: Camera %d: Successfully created a new stream ID %d",
               __FUNCTION__, mCameraId, streamId);
+
+        /**
+         * Set the stream transform flags to automatically
+         * rotate the camera stream for preview use cases.
+         */
+        int32_t transform = 0;
+        res = getRotationTransformLocked(&transform);
+
+        if (res != OK) {
+            // Error logged by getRotationTransformLocked.
+            return res;
+        }
+
+        res = mDevice->setStreamTransform(streamId, transform);
+        if (res != OK) {
+            ALOGE("%s: Failed to set stream transform (stream id %d)",
+                  __FUNCTION__, streamId);
+            return res;
+        }
+
         return streamId;
     }
 
@@ -436,6 +465,20 @@
     return res;
 }
 
+status_t CameraDeviceClient::flush() {
+    ATRACE_CALL();
+    ALOGV("%s", __FUNCTION__);
+
+    status_t res = OK;
+    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
+
+    Mutex::Autolock icl(mBinderSerializationLock);
+
+    if (!mDevice.get()) return DEAD_OBJECT;
+
+    return mDevice->flush();
+}
+
 status_t CameraDeviceClient::dump(int fd, const Vector<String16>& args) {
     String8 result;
     result.appendFormat("CameraDeviceClient[%d] (%p) PID: %d, dump:\n",
@@ -450,6 +493,34 @@
     return dumpDevice(fd, args);
 }
 
+
+void CameraDeviceClient::notifyError() {
+    // Thread safe. Don't bother locking.
+    sp<ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+
+    if (remoteCb != 0) {
+        remoteCb->onDeviceError(ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE);
+    }
+}
+
+void CameraDeviceClient::notifyIdle() {
+    // Thread safe. Don't bother locking.
+    sp<ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+
+    if (remoteCb != 0) {
+        remoteCb->onDeviceIdle();
+    }
+}
+
+void CameraDeviceClient::notifyShutter(int requestId,
+        nsecs_t timestamp) {
+    // Thread safe. Don't bother locking.
+    sp<ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+    if (remoteCb != 0) {
+        remoteCb->onCaptureStarted(requestId, timestamp);
+    }
+}
+
 // TODO: refactor the code below this with IProCameraUser.
 // it's 100% copy-pasted, so lets not change it right now to make it easier.
 
@@ -481,19 +552,17 @@
 }
 
 /** Device-related methods */
-void CameraDeviceClient::onFrameAvailable(int32_t frameId,
-                                        const CameraMetadata& frame) {
+void CameraDeviceClient::onFrameAvailable(int32_t requestId,
+        const CameraMetadata& frame) {
     ATRACE_CALL();
     ALOGV("%s", __FUNCTION__);
 
-    Mutex::Autolock icl(mBinderSerializationLock);
-    SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
-
-    if (mRemoteCallback != NULL) {
+    // Thread-safe. No lock necessary.
+    sp<ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
+    if (remoteCb != NULL) {
         ALOGV("%s: frame = %p ", __FUNCTION__, &frame);
-        mRemoteCallback->onResultReceived(frameId, frame);
+        remoteCb->onResultReceived(requestId, frame);
     }
-
 }
 
 // TODO: move to Camera2ClientBase
@@ -548,4 +617,64 @@
     return true;
 }
 
+status_t CameraDeviceClient::getRotationTransformLocked(int32_t* transform) {
+    ALOGV("%s: begin", __FUNCTION__);
+
+    if (transform == NULL) {
+        ALOGW("%s: null transform", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    *transform = 0;
+
+    const CameraMetadata& staticInfo = mDevice->info();
+    camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_SENSOR_ORIENTATION);
+    if (entry.count == 0) {
+        ALOGE("%s: Camera %d: Can't find android.sensor.orientation in "
+                "static metadata!", __FUNCTION__, mCameraId);
+        return INVALID_OPERATION;
+    }
+
+    int32_t& flags = *transform;
+
+    int orientation = entry.data.i32[0];
+    switch (orientation) {
+        case 0:
+            flags = 0;
+            break;
+        case 90:
+            flags = NATIVE_WINDOW_TRANSFORM_ROT_90;
+            break;
+        case 180:
+            flags = NATIVE_WINDOW_TRANSFORM_ROT_180;
+            break;
+        case 270:
+            flags = NATIVE_WINDOW_TRANSFORM_ROT_270;
+            break;
+        default:
+            ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
+                  __FUNCTION__, orientation);
+            return INVALID_OPERATION;
+    }
+
+    /**
+     * This magic flag makes surfaceflinger un-rotate the buffers
+     * to counter the extra global device UI rotation whenever the user
+     * physically rotates the device.
+     *
+     * By doing this, the camera buffer always ends up aligned
+     * with the physical camera for a "see through" effect.
+     *
+     * In essence, the buffer only gets rotated during preview use-cases.
+     * The user is still responsible to re-create streams of the proper
+     * aspect ratio, or the preview will end up looking non-uniformly
+     * stretched.
+     */
+    flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
+
+    ALOGV("%s: final transform = 0x%x", __FUNCTION__, flags);
+
+    return OK;
+}
+
 } // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 21d633c..b9c16aa 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -45,8 +45,6 @@
             uid_t clientUid,
             int servicePid);
 
-    virtual void notifyError();
-
     sp<ICameraDeviceCallbacks> mRemoteCallback;
 };
 
@@ -89,6 +87,10 @@
 
     // Wait until all the submitted requests have finished processing
     virtual status_t      waitUntilIdle();
+
+    // Flush all active and pending requests as fast as possible
+    virtual status_t      flush();
+
     /**
      * Interface used by CameraService
      */
@@ -108,14 +110,25 @@
     virtual status_t      dump(int fd, const Vector<String16>& args);
 
     /**
+     * Device listener interface
+     */
+
+    virtual void notifyIdle();
+    virtual void notifyError();
+    virtual void notifyShutter(int requestId, nsecs_t timestamp);
+
+    /**
      * Interface used by independent components of CameraDeviceClient.
      */
 protected:
     /** FilteredListener implementation **/
-    virtual void          onFrameAvailable(int32_t frameId,
+    virtual void          onFrameAvailable(int32_t requestId,
                                            const CameraMetadata& frame);
     virtual void          detachDevice();
 
+    // Calculate the ANativeWindow transform from android.sensor.orientation
+    status_t              getRotationTransformLocked(/*out*/int32_t* transform);
+
 private:
     /** ICameraDeviceUser interface-related private members */
 
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
index 2b583e5..1a7a7a7 100644
--- a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
@@ -374,7 +374,7 @@
 }
 
 /** Device-related methods */
-void ProCamera2Client::onFrameAvailable(int32_t frameId,
+void ProCamera2Client::onFrameAvailable(int32_t requestId,
                                         const CameraMetadata& frame) {
     ATRACE_CALL();
     ALOGV("%s", __FUNCTION__);
@@ -386,7 +386,7 @@
         CameraMetadata tmp(frame);
         camera_metadata_t* meta = tmp.release();
         ALOGV("%s: meta = %p ", __FUNCTION__, meta);
-        mRemoteCallback->onResultReceived(frameId, meta);
+        mRemoteCallback->onResultReceived(requestId, meta);
         tmp.acquire(meta);
     }
 
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.h b/services/camera/libcameraservice/api_pro/ProCamera2Client.h
index 0bf6784..8a0f547 100644
--- a/services/camera/libcameraservice/api_pro/ProCamera2Client.h
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.h
@@ -97,7 +97,7 @@
 
 protected:
     /** FilteredListener implementation **/
-    virtual void          onFrameAvailable(int32_t frameId,
+    virtual void          onFrameAvailable(int32_t requestId,
                                            const CameraMetadata& frame);
     virtual void          detachDevice();
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 060e2a2..2d1253f 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -95,7 +95,7 @@
     if (res != OK) {
         ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
                 __FUNCTION__, TClientBase::mCameraId, strerror(-res), res);
-        return NO_INIT;
+        return res;
     }
 
     res = mDevice->setNotifyCallback(this);
@@ -226,13 +226,18 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyShutter(int frameNumber,
+void Camera2ClientBase<TClientBase>::notifyIdle() {
+    ALOGV("Camera device is now idle");
+}
+
+template <typename TClientBase>
+void Camera2ClientBase<TClientBase>::notifyShutter(int requestId,
                                                    nsecs_t timestamp) {
-    (void)frameNumber;
+    (void)requestId;
     (void)timestamp;
 
-    ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__,
-          frameNumber, timestamp);
+    ALOGV("%s: Shutter notification for request id %d at time %lld",
+            __FUNCTION__, requestId, timestamp);
 }
 
 template <typename TClientBase>
@@ -244,13 +249,6 @@
     ALOGV("%s: Autofocus state now %d, last trigger %d",
           __FUNCTION__, newState, triggerId);
 
-    typename SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
-    if (l.mRemoteCallback != 0) {
-        l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, 1, 0);
-    }
-    if (l.mRemoteCallback != 0) {
-        l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, 1, 0);
-    }
 }
 
 template <typename TClientBase>
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index d23197c..61e44f0 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -62,7 +62,8 @@
      */
 
     virtual void          notifyError(int errorCode, int arg1, int arg2);
-    virtual void          notifyShutter(int frameNumber, nsecs_t timestamp);
+    virtual void          notifyIdle();
+    virtual void          notifyShutter(int requestId, nsecs_t timestamp);
     virtual void          notifyAutoFocus(uint8_t newState, int triggerId);
     virtual void          notifyAutoExposure(uint8_t newState, int triggerId);
     virtual void          notifyAutoWhitebalance(uint8_t newState,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index aa92bec..e80abf1 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -138,9 +138,18 @@
      */
     class NotificationListener {
       public:
-        // Refer to the Camera2 HAL definition for notification definitions
+        // The set of notifications is a merge of the notifications required for
+        // API1 and API2.
+
+        // Required for API 1 and 2
         virtual void notifyError(int errorCode, int arg1, int arg2) = 0;
-        virtual void notifyShutter(int frameNumber, nsecs_t timestamp) = 0;
+
+        // Required only for API2
+        virtual void notifyIdle() = 0;
+        virtual void notifyShutter(int requestId,
+                nsecs_t timestamp) = 0;
+
+        // Required only for API1
         virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0;
         virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0;
         virtual void notifyAutoWhitebalance(uint8_t newState,
@@ -165,12 +174,14 @@
     /**
      * Wait for a new frame to be produced, with timeout in nanoseconds.
      * Returns TIMED_OUT when no frame produced within the specified duration
+     * May be called concurrently to most methods, except for getNextFrame
      */
     virtual status_t waitForNextFrame(nsecs_t timeout) = 0;
 
     /**
      * Get next metadata frame from the frame queue. Returns NULL if the queue
      * is empty; caller takes ownership of the metadata buffer.
+     * May be called concurrently to most methods, except for waitForNextFrame
      */
     virtual status_t getNextFrame(CameraMetadata *frame) = 0;
 
@@ -209,6 +220,13 @@
      */
     virtual status_t pushReprocessBuffer(int reprocessStreamId,
             buffer_handle_t *buffer, wp<BufferReleasedListener> listener) = 0;
+
+    /**
+     * Flush all pending and in-flight requests. Blocks until flush is
+     * complete.
+     */
+    virtual status_t flush() = 0;
+
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index e7b440a..f2064fb 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -37,11 +37,11 @@
 }
 
 status_t FrameProcessorBase::registerListener(int32_t minId,
-        int32_t maxId, wp<FilteredListener> listener) {
+        int32_t maxId, wp<FilteredListener> listener, bool quirkSendPartials) {
     Mutex::Autolock l(mInputMutex);
     ALOGV("%s: Registering listener for frame id range %d - %d",
             __FUNCTION__, minId, maxId);
-    RangeListener rListener = { minId, maxId, listener };
+    RangeListener rListener = { minId, maxId, listener, quirkSendPartials };
     mRangeListeners.push_back(rListener);
     return OK;
 }
@@ -66,7 +66,14 @@
 void FrameProcessorBase::dump(int fd, const Vector<String16>& /*args*/) {
     String8 result("    Latest received frame:\n");
     write(fd, result.string(), result.size());
-    mLastFrame.dump(fd, 2, 6);
+
+    CameraMetadata lastFrame;
+    {
+        // Don't race while dumping metadata
+        Mutex::Autolock al(mLastFrameMutex);
+        lastFrame = CameraMetadata(mLastFrame);
+    }
+    lastFrame.dump(fd, 2, 6);
 }
 
 bool FrameProcessorBase::threadLoop() {
@@ -113,6 +120,7 @@
         }
 
         if (!frame.isEmpty()) {
+            Mutex::Autolock al(mLastFrameMutex);
             mLastFrame.acquire(frame);
         }
     }
@@ -137,6 +145,16 @@
     ATRACE_CALL();
     camera_metadata_ro_entry_t entry;
 
+    // Quirks: Don't deliver partial results to listeners that don't want them
+    bool quirkIsPartial = false;
+    entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+    if (entry.count != 0 &&
+            entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+        ALOGV("%s: Camera %d: Not forwarding partial result to listeners",
+                __FUNCTION__, device->getId());
+        quirkIsPartial = true;
+    }
+
     entry = frame.find(ANDROID_REQUEST_ID);
     if (entry.count == 0) {
         ALOGE("%s: Camera %d: Error reading frame id",
@@ -152,7 +170,8 @@
         List<RangeListener>::iterator item = mRangeListeners.begin();
         while (item != mRangeListeners.end()) {
             if (requestId >= item->minId &&
-                    requestId < item->maxId) {
+                    requestId < item->maxId &&
+                    (!quirkIsPartial || item->quirkSendPartials) ) {
                 sp<FilteredListener> listener = item->listener.promote();
                 if (listener == 0) {
                     item = mRangeListeners.erase(item);
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.h b/services/camera/libcameraservice/common/FrameProcessorBase.h
index 1e46beb..89b608a 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.h
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.h
@@ -39,14 +39,16 @@
     virtual ~FrameProcessorBase();
 
     struct FilteredListener: virtual public RefBase {
-        virtual void onFrameAvailable(int32_t frameId,
+        virtual void onFrameAvailable(int32_t requestId,
                                       const CameraMetadata &frame) = 0;
     };
 
     // Register a listener for a range of IDs [minId, maxId). Multiple listeners
-    // can be listening to the same range
+    // can be listening to the same range.
+    // QUIRK: sendPartials controls whether partial results will be sent.
     status_t registerListener(int32_t minId, int32_t maxId,
-                              wp<FilteredListener> listener);
+                              wp<FilteredListener> listener,
+                              bool quirkSendPartials = true);
     status_t removeListener(int32_t minId, int32_t maxId,
                             wp<FilteredListener> listener);
 
@@ -58,11 +60,13 @@
     virtual bool threadLoop();
 
     Mutex mInputMutex;
+    Mutex mLastFrameMutex;
 
     struct RangeListener {
         int32_t minId;
         int32_t maxId;
         wp<FilteredListener> listener;
+        bool quirkSendPartials;
     };
     List<RangeListener> mRangeListeners;
 
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index 710d0e9..2bc1a8a 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -464,8 +464,10 @@
                 listener->notifyError(ext1, ext2, ext3);
                 break;
             case CAMERA2_MSG_SHUTTER: {
-                nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 );
-                listener->notifyShutter(ext1, timestamp);
+                // TODO: Only needed for camera2 API, which is unsupported
+                // by HAL2 directly.
+                // nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 );
+                // listener->notifyShutter(requestId, timestamp);
                 break;
             }
             case CAMERA2_MSG_AUTOFOCUS:
@@ -567,6 +569,13 @@
     return res;
 }
 
+status_t Camera2Device::flush() {
+    ATRACE_CALL();
+
+    mRequestQueue.clear();
+    return waitUntilDrained();
+}
+
 /**
  * Camera2Device::MetadataQueue
  */
@@ -591,9 +600,7 @@
 
 Camera2Device::MetadataQueue::~MetadataQueue() {
     ATRACE_CALL();
-    Mutex::Autolock l(mMutex);
-    freeBuffers(mEntries.begin(), mEntries.end());
-    freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
+    clear();
 }
 
 // Connect to camera2 HAL as consumer (input requests/reprocessing)
@@ -784,6 +791,23 @@
     return signalConsumerLocked();
 }
 
+status_t Camera2Device::MetadataQueue::clear()
+{
+    ATRACE_CALL();
+    ALOGV("%s: E", __FUNCTION__);
+
+    Mutex::Autolock l(mMutex);
+
+    // Clear streaming slot
+    freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
+    mStreamSlotCount = 0;
+
+    // Clear request queue
+    freeBuffers(mEntries.begin(), mEntries.end());
+    mCount = 0;
+    return OK;
+}
+
 status_t Camera2Device::MetadataQueue::dump(int fd,
         const Vector<String16>& /*args*/) {
     ATRACE_CALL();
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index 8945ec2..1f53c56 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -28,6 +28,10 @@
 
 /**
  * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_2_0
+ *
+ * TODO for camera2 API implementation:
+ * Does not produce notifyShutter / notifyIdle callbacks to NotificationListener
+ * Use waitUntilDrained for idle.
  */
 class Camera2Device: public CameraDeviceBase {
   public:
@@ -67,6 +71,8 @@
     virtual status_t triggerPrecaptureMetering(uint32_t id);
     virtual status_t pushReprocessBuffer(int reprocessStreamId,
             buffer_handle_t *buffer, wp<BufferReleasedListener> listener);
+    // Flush implemented as just a wait
+    virtual status_t flush();
   private:
     const int mId;
     camera2_device_t *mHal2Device;
@@ -113,6 +119,9 @@
         status_t setStreamSlot(camera_metadata_t *buf);
         status_t setStreamSlot(const List<camera_metadata_t*> &bufs);
 
+        // Clear the request queue and the streaming slot
+        status_t clear();
+
         status_t dump(int fd, const Vector<String16>& args);
 
       private:
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 0a4a24c..3dbc1b0 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -41,6 +41,7 @@
 #include <utils/Trace.h>
 #include <utils/Timers.h>
 
+#include "utils/CameraTraces.h"
 #include "device3/Camera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3InputStream.h"
@@ -54,6 +55,7 @@
         mId(id),
         mHal3Device(NULL),
         mStatus(STATUS_UNINITIALIZED),
+        mUsePartialResultQuirk(false),
         mNextResultFrameNumber(0),
         mNextShutterFrameNumber(0),
         mListener(NULL)
@@ -82,6 +84,7 @@
 status_t Camera3Device::initialize(camera_module_t *module)
 {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId);
@@ -159,9 +162,20 @@
         }
     }
 
+    /** Start up status tracker thread */
+    mStatusTracker = new StatusTracker(this);
+    res = mStatusTracker->run(String8::format("C3Dev-%d-Status", mId).string());
+    if (res != OK) {
+        SET_ERR_L("Unable to start status tracking thread: %s (%d)",
+                strerror(-res), res);
+        device->common.close(&device->common);
+        mStatusTracker.clear();
+        return res;
+    }
+
     /** Start up request queue thread */
 
-    mRequestThread = new RequestThread(this, device);
+    mRequestThread = new RequestThread(this, mStatusTracker, device);
     res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string());
     if (res != OK) {
         SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -175,81 +189,139 @@
 
     mDeviceInfo = info.static_camera_characteristics;
     mHal3Device = device;
-    mStatus = STATUS_IDLE;
+    mStatus = STATUS_UNCONFIGURED;
     mNextStreamId = 0;
     mNeedConfig = true;
+    mPauseStateNotify = false;
+
+    /** Check for quirks */
+
+    // Will the HAL be sending in early partial result metadata?
+    camera_metadata_entry partialResultsQuirk =
+            mDeviceInfo.find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
+    if (partialResultsQuirk.count > 0 && partialResultsQuirk.data.u8[0] == 1) {
+        mUsePartialResultQuirk = true;
+    }
 
     return OK;
 }
 
 status_t Camera3Device::disconnect() {
     ATRACE_CALL();
-    Mutex::Autolock l(mLock);
+    Mutex::Autolock il(mInterfaceLock);
 
     ALOGV("%s: E", __FUNCTION__);
 
     status_t res = OK;
-    if (mStatus == STATUS_UNINITIALIZED) return res;
 
-    if (mStatus == STATUS_ACTIVE ||
-            (mStatus == STATUS_ERROR && mRequestThread != NULL)) {
-        res = mRequestThread->clearRepeatingRequests();
-        if (res != OK) {
-            SET_ERR_L("Can't stop streaming");
-            // Continue to close device even in case of error
-        } else {
-            res = waitUntilDrainedLocked();
+    {
+        Mutex::Autolock l(mLock);
+        if (mStatus == STATUS_UNINITIALIZED) return res;
+
+        if (mStatus == STATUS_ACTIVE ||
+                (mStatus == STATUS_ERROR && mRequestThread != NULL)) {
+            res = mRequestThread->clearRepeatingRequests();
             if (res != OK) {
-                SET_ERR_L("Timeout waiting for HAL to drain");
+                SET_ERR_L("Can't stop streaming");
                 // Continue to close device even in case of error
+            } else {
+                res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout);
+                if (res != OK) {
+                    SET_ERR_L("Timeout waiting for HAL to drain");
+                    // Continue to close device even in case of error
+                }
             }
         }
-    }
-    assert(mStatus == STATUS_IDLE || mStatus == STATUS_ERROR);
 
-    if (mStatus == STATUS_ERROR) {
-        CLOGE("Shutting down in an error state");
-    }
-
-    if (mRequestThread != NULL) {
-        mRequestThread->requestExit();
-    }
-
-    mOutputStreams.clear();
-    mInputStream.clear();
-
-    if (mRequestThread != NULL) {
-        if (mStatus != STATUS_ERROR) {
-            // HAL may be in a bad state, so waiting for request thread
-            // (which may be stuck in the HAL processCaptureRequest call)
-            // could be dangerous.
-            mRequestThread->join();
+        if (mStatus == STATUS_ERROR) {
+            CLOGE("Shutting down in an error state");
         }
+
+        if (mStatusTracker != NULL) {
+            mStatusTracker->requestExit();
+        }
+
+        if (mRequestThread != NULL) {
+            mRequestThread->requestExit();
+        }
+
+        mOutputStreams.clear();
+        mInputStream.clear();
+    }
+
+    // Joining done without holding mLock, otherwise deadlocks may ensue
+    // as the threads try to access parent state
+    if (mRequestThread != NULL && mStatus != STATUS_ERROR) {
+        // HAL may be in a bad state, so waiting for request thread
+        // (which may be stuck in the HAL processCaptureRequest call)
+        // could be dangerous.
+        mRequestThread->join();
+    }
+
+    if (mStatusTracker != NULL) {
+        mStatusTracker->join();
+    }
+
+    {
+        Mutex::Autolock l(mLock);
+
         mRequestThread.clear();
-    }
+        mStatusTracker.clear();
 
-    if (mHal3Device != NULL) {
-        mHal3Device->common.close(&mHal3Device->common);
-        mHal3Device = NULL;
-    }
+        if (mHal3Device != NULL) {
+            mHal3Device->common.close(&mHal3Device->common);
+            mHal3Device = NULL;
+        }
 
-    mStatus = STATUS_UNINITIALIZED;
+        mStatus = STATUS_UNINITIALIZED;
+    }
 
     ALOGV("%s: X", __FUNCTION__);
     return res;
 }
 
+// For dumping/debugging only -
+// try to acquire a lock a few times, eventually give up to proceed with
+// debug/dump operations
+bool Camera3Device::tryLockSpinRightRound(Mutex& lock) {
+    bool gotLock = false;
+    for (size_t i = 0; i < kDumpLockAttempts; ++i) {
+        if (lock.tryLock() == NO_ERROR) {
+            gotLock = true;
+            break;
+        } else {
+            usleep(kDumpSleepDuration);
+        }
+    }
+    return gotLock;
+}
+
 status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
     ATRACE_CALL();
     (void)args;
+
+    // Try to lock, but continue in case of failure (to avoid blocking in
+    // deadlocks)
+    bool gotInterfaceLock = tryLockSpinRightRound(mInterfaceLock);
+    bool gotLock = tryLockSpinRightRound(mLock);
+
+    ALOGW_IF(!gotInterfaceLock,
+            "Camera %d: %s: Unable to lock interface lock, proceeding anyway",
+            mId, __FUNCTION__);
+    ALOGW_IF(!gotLock,
+            "Camera %d: %s: Unable to lock main lock, proceeding anyway",
+            mId, __FUNCTION__);
+
     String8 lines;
 
     const char *status =
             mStatus == STATUS_ERROR         ? "ERROR" :
             mStatus == STATUS_UNINITIALIZED ? "UNINITIALIZED" :
-            mStatus == STATUS_IDLE          ? "IDLE" :
+            mStatus == STATUS_UNCONFIGURED  ? "UNCONFIGURED" :
+            mStatus == STATUS_CONFIGURED    ? "CONFIGURED" :
             mStatus == STATUS_ACTIVE        ? "ACTIVE" :
             "Unknown";
+
     lines.appendFormat("    Device status: %s\n", status);
     if (mStatus == STATUS_ERROR) {
         lines.appendFormat("    Error cause: %s\n", mErrorCause.string());
@@ -281,12 +353,23 @@
     }
     write(fd, lines.string(), lines.size());
 
+    {
+        lines = String8("    Last request sent:\n");
+        write(fd, lines.string(), lines.size());
+
+        CameraMetadata lastRequest = getLatestRequestLocked();
+        lastRequest.dump(fd, /*verbosity*/2, /*indentation*/6);
+    }
+
     if (mHal3Device != NULL) {
         lines = String8("    HAL device dump:\n");
         write(fd, lines.string(), lines.size());
         mHal3Device->ops->dump(mHal3Device, fd);
     }
 
+    if (gotLock) mLock.unlock();
+    if (gotInterfaceLock) mInterfaceLock.unlock();
+
     return OK;
 }
 
@@ -303,6 +386,8 @@
 
 status_t Camera3Device::capture(CameraMetadata &request) {
     ATRACE_CALL();
+    status_t res;
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     // TODO: take ownership of the request
@@ -314,7 +399,9 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+            // May be lazily configuring streams, will check during setup
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -329,12 +416,23 @@
         return BAD_VALUE;
     }
 
-    return mRequestThread->queueRequest(newRequest);
+    res = mRequestThread->queueRequest(newRequest);
+    if (res == OK) {
+        res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+        if (res != OK) {
+            SET_ERR_L("Can't transition to active in %f seconds!",
+                    kActiveTimeout/1e9);
+        }
+        ALOGV("Camera %d: Capture request enqueued", mId);
+    }
+    return res;
 }
 
 
 status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) {
     ATRACE_CALL();
+    status_t res;
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -344,7 +442,9 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+            // May be lazily configuring streams, will check during setup
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -362,7 +462,16 @@
     RequestList newRepeatingRequests;
     newRepeatingRequests.push_back(newRepeatingRequest);
 
-    return mRequestThread->setRepeatingRequests(newRepeatingRequests);
+    res = mRequestThread->setRepeatingRequests(newRepeatingRequests);
+    if (res == OK) {
+        res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+        if (res != OK) {
+            SET_ERR_L("Can't transition to active in %f seconds!",
+                    kActiveTimeout/1e9);
+        }
+        ALOGV("Camera %d: Repeating request set", mId);
+    }
+    return res;
 }
 
 
@@ -370,12 +479,16 @@
         const CameraMetadata &request) {
     status_t res;
 
-    if (mStatus == STATUS_IDLE) {
+    if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) {
         res = configureStreamsLocked();
         if (res != OK) {
             SET_ERR_L("Can't set up streams: %s (%d)", strerror(-res), res);
             return NULL;
         }
+        if (mStatus == STATUS_UNCONFIGURED) {
+            CLOGE("No streams configured");
+            return NULL;
+        }
     }
 
     sp<CaptureRequest> newRequest = createCaptureRequest(request);
@@ -384,6 +497,7 @@
 
 status_t Camera3Device::clearStreamingRequest() {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -393,7 +507,8 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -401,12 +516,13 @@
             SET_ERR_L("Unexpected status: %d", mStatus);
             return INVALID_OPERATION;
     }
-
+    ALOGV("Camera %d: Clearing repeating request", mId);
     return mRequestThread->clearRepeatingRequests();
 }
 
 status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
 
     return mRequestThread->waitUntilRequestProcessed(requestId, timeout);
 }
@@ -414,7 +530,10 @@
 status_t Camera3Device::createInputStream(
         uint32_t width, uint32_t height, int format, int *id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
+    ALOGV("Camera %d: Creating new input stream %d: %d x %d, format %d",
+            mId, mNextStreamId, width, height, format);
 
     status_t res;
     bool wasActive = false;
@@ -426,26 +545,24 @@
         case STATUS_UNINITIALIZED:
             ALOGE("%s: Device not initialized", __FUNCTION__);
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
             // OK
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            mRequestThread->setPaused(true);
-            res = waitUntilDrainedLocked();
+            res = internalPauseAndWaitLocked();
             if (res != OK) {
-                ALOGE("%s: Can't pause captures to reconfigure streams!",
-                        __FUNCTION__);
-                mStatus = STATUS_ERROR;
+                SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
             }
             wasActive = true;
             break;
         default:
-            ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus);
+            SET_ERR_L("Unexpected status: %d", mStatus);
             return INVALID_OPERATION;
     }
-    assert(mStatus == STATUS_IDLE);
+    assert(mStatus != STATUS_ACTIVE);
 
     if (mInputStream != 0) {
         ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__);
@@ -454,6 +571,7 @@
 
     sp<Camera3InputStream> newStream = new Camera3InputStream(mNextStreamId,
                 width, height, format);
+    newStream->setStatusTracker(mStatusTracker);
 
     mInputStream = newStream;
 
@@ -468,9 +586,10 @@
                     __FUNCTION__, mNextStreamId, strerror(-res), res);
             return res;
         }
-        mRequestThread->setPaused(false);
+        internalResumeLocked();
     }
 
+    ALOGV("Camera %d: Created input stream", mId);
     return OK;
 }
 
@@ -482,7 +601,10 @@
             int *id,
             sp<Camera3ZslStream>* zslStream) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
+    ALOGV("Camera %d: Creating ZSL stream %d: %d x %d, depth %d",
+            mId, mNextStreamId, width, height, depth);
 
     status_t res;
     bool wasActive = false;
@@ -494,26 +616,24 @@
         case STATUS_UNINITIALIZED:
             ALOGE("%s: Device not initialized", __FUNCTION__);
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
             // OK
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            mRequestThread->setPaused(true);
-            res = waitUntilDrainedLocked();
+            res = internalPauseAndWaitLocked();
             if (res != OK) {
-                ALOGE("%s: Can't pause captures to reconfigure streams!",
-                        __FUNCTION__);
-                mStatus = STATUS_ERROR;
+                SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
             }
             wasActive = true;
             break;
         default:
-            ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus);
+            SET_ERR_L("Unexpected status: %d", mStatus);
             return INVALID_OPERATION;
     }
-    assert(mStatus == STATUS_IDLE);
+    assert(mStatus != STATUS_ACTIVE);
 
     if (mInputStream != 0) {
         ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__);
@@ -522,6 +642,7 @@
 
     sp<Camera3ZslStream> newStream = new Camera3ZslStream(mNextStreamId,
                 width, height, depth);
+    newStream->setStatusTracker(mStatusTracker);
 
     res = mOutputStreams.add(mNextStreamId, newStream);
     if (res < 0) {
@@ -543,16 +664,20 @@
                     __FUNCTION__, mNextStreamId, strerror(-res), res);
             return res;
         }
-        mRequestThread->setPaused(false);
+        internalResumeLocked();
     }
 
+    ALOGV("Camera %d: Created ZSL stream", mId);
     return OK;
 }
 
 status_t Camera3Device::createStream(sp<ANativeWindow> consumer,
         uint32_t width, uint32_t height, int format, size_t size, int *id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
+    ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d, size %zu",
+            mId, mNextStreamId, width, height, format, size);
 
     status_t res;
     bool wasActive = false;
@@ -564,16 +689,15 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
             // OK
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            mRequestThread->setPaused(true);
-            res = waitUntilDrainedLocked();
+            res = internalPauseAndWaitLocked();
             if (res != OK) {
-                ALOGE("%s: Can't pause captures to reconfigure streams!",
-                        __FUNCTION__);
+                SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
             }
             wasActive = true;
@@ -582,7 +706,7 @@
             SET_ERR_L("Unexpected status: %d", mStatus);
             return INVALID_OPERATION;
     }
-    assert(mStatus == STATUS_IDLE);
+    assert(mStatus != STATUS_ACTIVE);
 
     sp<Camera3OutputStream> newStream;
     if (format == HAL_PIXEL_FORMAT_BLOB) {
@@ -592,6 +716,7 @@
         newStream = new Camera3OutputStream(mNextStreamId, consumer,
                 width, height, format);
     }
+    newStream->setStatusTracker(mStatusTracker);
 
     res = mOutputStreams.add(mNextStreamId, newStream);
     if (res < 0) {
@@ -611,9 +736,9 @@
                     mNextStreamId, strerror(-res), res);
             return res;
         }
-        mRequestThread->setPaused(false);
+        internalResumeLocked();
     }
-
+    ALOGV("Camera %d: Created new stream", mId);
     return OK;
 }
 
@@ -629,6 +754,7 @@
 status_t Camera3Device::getStreamInfo(int id,
         uint32_t *width, uint32_t *height, uint32_t *format) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -638,7 +764,8 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized!");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -663,6 +790,7 @@
 status_t Camera3Device::setStreamTransform(int id,
         int transform) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -672,7 +800,8 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device not initialized");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -693,6 +822,7 @@
 
 status_t Camera3Device::deleteStream(int id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
     status_t res;
 
@@ -700,7 +830,7 @@
 
     // CameraDevice semantics require device to already be idle before
     // deleteStream is called, unlike for createStream.
-    if (mStatus != STATUS_IDLE) {
+    if (mStatus == STATUS_ACTIVE) {
         ALOGV("%s: Camera %d: Device not idle", __FUNCTION__, mId);
         return -EBUSY;
     }
@@ -744,6 +874,7 @@
         CameraMetadata *request) {
     ATRACE_CALL();
     ALOGV("%s: for template %d", __FUNCTION__, templateId);
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
     switch (mStatus) {
@@ -753,7 +884,8 @@
         case STATUS_UNINITIALIZED:
             CLOGE("Device is not initialized!");
             return INVALID_OPERATION;
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
+        case STATUS_CONFIGURED:
         case STATUS_ACTIVE:
             // OK
             break;
@@ -779,61 +911,88 @@
 
 status_t Camera3Device::waitUntilDrained() {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
-    return waitUntilDrainedLocked();
-}
-
-status_t Camera3Device::waitUntilDrainedLocked() {
-    ATRACE_CALL();
-    status_t res;
-
     switch (mStatus) {
         case STATUS_UNINITIALIZED:
-        case STATUS_IDLE:
+        case STATUS_UNCONFIGURED:
             ALOGV("%s: Already idle", __FUNCTION__);
             return OK;
+        case STATUS_CONFIGURED:
+            // To avoid race conditions, check with tracker to be sure
         case STATUS_ERROR:
         case STATUS_ACTIVE:
-            // Need to shut down
+            // Need to verify shut down
             break;
         default:
             SET_ERR_L("Unexpected status: %d",mStatus);
             return INVALID_OPERATION;
     }
 
-    if (mRequestThread != NULL) {
-        res = mRequestThread->waitUntilPaused(kShutdownTimeout);
-        if (res != OK) {
-            SET_ERR_L("Can't stop request thread in %f seconds!",
-                    kShutdownTimeout/1e9);
-            return res;
-        }
-    }
-    if (mInputStream != NULL) {
-        res = mInputStream->waitUntilIdle(kShutdownTimeout);
-        if (res != OK) {
-            SET_ERR_L("Can't idle input stream %d in %f seconds!",
-                    mInputStream->getId(), kShutdownTimeout/1e9);
-            return res;
-        }
-    }
-    for (size_t i = 0; i < mOutputStreams.size(); i++) {
-        res = mOutputStreams.editValueAt(i)->waitUntilIdle(kShutdownTimeout);
-        if (res != OK) {
-            SET_ERR_L("Can't idle output stream %d in %f seconds!",
-                    mOutputStreams.keyAt(i), kShutdownTimeout/1e9);
-            return res;
-        }
+    ALOGV("%s: Camera %d: Waiting until idle", __FUNCTION__, mId);
+    status_t res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout);
+    return res;
+}
+
+// Pause to reconfigure
+status_t Camera3Device::internalPauseAndWaitLocked() {
+    mRequestThread->setPaused(true);
+    mPauseStateNotify = true;
+
+    ALOGV("%s: Camera %d: Internal wait until idle", __FUNCTION__, mId);
+    status_t res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout);
+    if (res != OK) {
+        SET_ERR_L("Can't idle device in %f seconds!",
+                kShutdownTimeout/1e9);
     }
 
-    if (mStatus != STATUS_ERROR) {
-        mStatus = STATUS_IDLE;
-    }
+    return res;
+}
 
+// Resume after internalPauseAndWaitLocked
+status_t Camera3Device::internalResumeLocked() {
+    status_t res;
+
+    mRequestThread->setPaused(false);
+
+    res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+    if (res != OK) {
+        SET_ERR_L("Can't transition to active in %f seconds!",
+                kActiveTimeout/1e9);
+    }
+    mPauseStateNotify = false;
     return OK;
 }
 
+status_t Camera3Device::waitUntilStateThenRelock(bool active,
+        nsecs_t timeout) {
+    status_t res = OK;
+    if (active == (mStatus == STATUS_ACTIVE)) {
+        // Desired state already reached
+        return res;
+    }
+
+    bool stateSeen = false;
+    do {
+        mRecentStatusUpdates.clear();
+
+        res = mStatusChanged.waitRelative(mLock, timeout);
+        if (res != OK) break;
+
+        // Check state change history during wait
+        for (size_t i = 0; i < mRecentStatusUpdates.size(); i++) {
+            if (active == (mRecentStatusUpdates[i] == STATUS_ACTIVE) ) {
+                stateSeen = true;
+                break;
+            }
+        }
+    } while (!stateSeen);
+
+    return res;
+}
+
+
 status_t Camera3Device::setNotifyCallback(NotificationListener *listener) {
     ATRACE_CALL();
     Mutex::Autolock l(mOutputLock);
@@ -851,7 +1010,6 @@
 }
 
 status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
-    ATRACE_CALL();
     status_t res;
     Mutex::Autolock l(mOutputLock);
 
@@ -885,6 +1043,7 @@
 
 status_t Camera3Device::triggerAutofocus(uint32_t id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
 
     ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id);
     // Mix-in this trigger into the next request and only the next request.
@@ -905,6 +1064,7 @@
 
 status_t Camera3Device::triggerCancelAutofocus(uint32_t id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
 
     ALOGV("%s: Triggering cancel autofocus, id %d", __FUNCTION__, id);
     // Mix-in this trigger into the next request and only the next request.
@@ -925,6 +1085,7 @@
 
 status_t Camera3Device::triggerPrecaptureMetering(uint32_t id) {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
 
     ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id);
     // Mix-in this trigger into the next request and only the next request.
@@ -952,6 +1113,51 @@
     return INVALID_OPERATION;
 }
 
+status_t Camera3Device::flush() {
+    ATRACE_CALL();
+    ALOGV("%s: Camera %d: Flushing all requests", __FUNCTION__, mId);
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    mRequestThread->clear();
+    return mHal3Device->ops->flush(mHal3Device);
+}
+
+/**
+ * Methods called by subclasses
+ */
+
+void Camera3Device::notifyStatus(bool idle) {
+    {
+        // Need mLock to safely update state and synchronize to current
+        // state of methods in flight.
+        Mutex::Autolock l(mLock);
+        // We can get various system-idle notices from the status tracker
+        // while starting up. Only care about them if we've actually sent
+        // in some requests recently.
+        if (mStatus != STATUS_ACTIVE && mStatus != STATUS_CONFIGURED) {
+            return;
+        }
+        ALOGV("%s: Camera %d: Now %s", __FUNCTION__, mId,
+                idle ? "idle" : "active");
+        mStatus = idle ? STATUS_CONFIGURED : STATUS_ACTIVE;
+        mRecentStatusUpdates.add(mStatus);
+        mStatusChanged.signal();
+
+        // Skip notifying listener if we're doing some user-transparent
+        // state changes
+        if (mPauseStateNotify) return;
+    }
+    NotificationListener *listener;
+    {
+        Mutex::Autolock l(mOutputLock);
+        listener = mListener;
+    }
+    if (idle && listener != NULL) {
+        listener->notifyIdle();
+    }
+}
+
 /**
  * Camera3Device private methods
  */
@@ -968,7 +1174,7 @@
             newRequest->mSettings.find(ANDROID_REQUEST_INPUT_STREAMS);
     if (inputStreams.count > 0) {
         if (mInputStream == NULL ||
-                mInputStream->getId() != inputStreams.data.u8[0]) {
+                mInputStream->getId() != inputStreams.data.i32[0]) {
             CLOGE("Request references unknown input stream %d",
                     inputStreams.data.u8[0]);
             return NULL;
@@ -997,7 +1203,7 @@
     }
 
     for (size_t i = 0; i < streams.count; i++) {
-        int idx = mOutputStreams.indexOfKey(streams.data.u8[i]);
+        int idx = mOutputStreams.indexOfKey(streams.data.i32[i]);
         if (idx == NAME_NOT_FOUND) {
             CLOGE("Request references unknown stream %d",
                     streams.data.u8[i]);
@@ -1028,18 +1234,18 @@
     ATRACE_CALL();
     status_t res;
 
-    if (mStatus != STATUS_IDLE) {
+    if (mStatus != STATUS_UNCONFIGURED && mStatus != STATUS_CONFIGURED) {
         CLOGE("Not idle");
         return INVALID_OPERATION;
     }
 
     if (!mNeedConfig) {
         ALOGV("%s: Skipping config, no stream changes", __FUNCTION__);
-        mStatus = STATUS_ACTIVE;
         return OK;
     }
 
     // Start configuring the streams
+    ALOGV("%s: Camera %d: Starting stream configuration", __FUNCTION__, mId);
 
     camera3_stream_configuration config;
 
@@ -1121,11 +1327,18 @@
     // across configure_streams() calls
     mRequestThread->configurationComplete();
 
-    // Finish configuring the streams lazily on first reference
+    // Update device state
 
-    mStatus = STATUS_ACTIVE;
     mNeedConfig = false;
 
+    if (config.num_streams > 0) {
+        mStatus = STATUS_CONFIGURED;
+    } else {
+        mStatus = STATUS_UNCONFIGURED;
+    }
+
+    ALOGV("%s: Camera %d: Stream configuration complete", __FUNCTION__, mId);
+
     return OK;
 }
 
@@ -1161,6 +1374,10 @@
     // But only do error state transition steps for the first error
     if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return;
 
+    // Save stack trace. View by dumping it later.
+    CameraTraces::saveTrace();
+    // TODO: consider adding errorCause and client pid/procname
+
     mErrorCause = errorCause;
 
     mRequestThread->setPaused(true);
@@ -1172,18 +1389,187 @@
  */
 
 status_t Camera3Device::registerInFlight(int32_t frameNumber,
-        int32_t numBuffers) {
+        int32_t requestId, int32_t numBuffers) {
     ATRACE_CALL();
     Mutex::Autolock l(mInFlightLock);
 
     ssize_t res;
-    res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers));
+    res = mInFlightMap.add(frameNumber, InFlightRequest(requestId, numBuffers));
     if (res < 0) return res;
 
     return OK;
 }
 
 /**
+ * QUIRK(partial results)
+ * Check if all 3A fields are ready, and send off a partial 3A-only result
+ * to the output frame queue
+ */
+bool Camera3Device::processPartial3AQuirk(
+        int32_t frameNumber, int32_t requestId,
+        const CameraMetadata& partial) {
+
+    // Check if all 3A states are present
+    // The full list of fields is
+    //   android.control.afMode
+    //   android.control.awbMode
+    //   android.control.aeState
+    //   android.control.awbState
+    //   android.control.afState
+    //   android.control.afTriggerID
+    //   android.control.aePrecaptureID
+    // TODO: Add android.control.aeMode
+
+    bool gotAllStates = true;
+
+    uint8_t afMode;
+    uint8_t awbMode;
+    uint8_t aeState;
+    uint8_t afState;
+    uint8_t awbState;
+    int32_t afTriggerId;
+    int32_t aeTriggerId;
+
+    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_MODE,
+        &afMode, frameNumber);
+
+    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_MODE,
+        &awbMode, frameNumber);
+
+    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AE_STATE,
+        &aeState, frameNumber);
+
+    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_STATE,
+        &afState, frameNumber);
+
+    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_STATE,
+        &awbState, frameNumber);
+
+    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_TRIGGER_ID,
+        &afTriggerId, frameNumber);
+
+    gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+        &aeTriggerId, frameNumber);
+
+    if (!gotAllStates) return false;
+
+    ALOGVV("%s: Camera %d: Frame %d, Request ID %d: AF mode %d, AWB mode %d, "
+        "AF state %d, AE state %d, AWB state %d, "
+        "AF trigger %d, AE precapture trigger %d",
+        __FUNCTION__, mId, frameNumber, requestId,
+        afMode, awbMode,
+        afState, aeState, awbState,
+        afTriggerId, aeTriggerId);
+
+    // Got all states, so construct a minimal result to send
+    // In addition to the above fields, this means adding in
+    //   android.request.frameCount
+    //   android.request.requestId
+    //   android.quirks.partialResult
+
+    const size_t kMinimal3AResultEntries = 10;
+
+    Mutex::Autolock l(mOutputLock);
+
+    CameraMetadata& min3AResult =
+            *mResultQueue.insert(
+                mResultQueue.end(),
+                CameraMetadata(kMinimal3AResultEntries, /*dataCapacity*/ 0));
+
+    if (!insert3AResult(min3AResult, ANDROID_REQUEST_FRAME_COUNT,
+            &frameNumber, frameNumber)) {
+        return false;
+    }
+
+    if (!insert3AResult(min3AResult, ANDROID_REQUEST_ID,
+            &requestId, frameNumber)) {
+        return false;
+    }
+
+    static const uint8_t partialResult = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
+    if (!insert3AResult(min3AResult, ANDROID_QUIRKS_PARTIAL_RESULT,
+            &partialResult, frameNumber)) {
+        return false;
+    }
+
+    if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_MODE,
+            &afMode, frameNumber)) {
+        return false;
+    }
+
+    if (!insert3AResult(min3AResult, ANDROID_CONTROL_AWB_MODE,
+            &awbMode, frameNumber)) {
+        return false;
+    }
+
+    if (!insert3AResult(min3AResult, ANDROID_CONTROL_AE_STATE,
+            &aeState, frameNumber)) {
+        return false;
+    }
+
+    if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_STATE,
+            &afState, frameNumber)) {
+        return false;
+    }
+
+    if (!insert3AResult(min3AResult, ANDROID_CONTROL_AWB_STATE,
+            &awbState, frameNumber)) {
+        return false;
+    }
+
+    if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_TRIGGER_ID,
+            &afTriggerId, frameNumber)) {
+        return false;
+    }
+
+    if (!insert3AResult(min3AResult, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+            &aeTriggerId, frameNumber)) {
+        return false;
+    }
+
+    mResultSignal.signal();
+
+    return true;
+}
+
+template<typename T>
+bool Camera3Device::get3AResult(const CameraMetadata& result, int32_t tag,
+        T* value, int32_t frameNumber) {
+    (void) frameNumber;
+
+    camera_metadata_ro_entry_t entry;
+
+    entry = result.find(tag);
+    if (entry.count == 0) {
+        ALOGVV("%s: Camera %d: Frame %d: No %s provided by HAL!", __FUNCTION__,
+            mId, frameNumber, get_camera_metadata_tag_name(tag));
+        return false;
+    }
+
+    if (sizeof(T) == sizeof(uint8_t)) {
+        *value = entry.data.u8[0];
+    } else if (sizeof(T) == sizeof(int32_t)) {
+        *value = entry.data.i32[0];
+    } else {
+        ALOGE("%s: Unexpected type", __FUNCTION__);
+        return false;
+    }
+    return true;
+}
+
+template<typename T>
+bool Camera3Device::insert3AResult(CameraMetadata& result, int32_t tag,
+        const T* value, int32_t frameNumber) {
+    if (result.update(tag, value, 1) != NO_ERROR) {
+        mResultQueue.erase(--mResultQueue.end(), mResultQueue.end());
+        SET_ERR("Frame %d: Failed to set %s in partial metadata",
+                frameNumber, get_camera_metadata_tag_name(tag));
+        return false;
+    }
+    return true;
+}
+
+/**
  * Camera HAL device callback methods
  */
 
@@ -1198,6 +1584,8 @@
                 frameNumber);
         return;
     }
+    bool partialResultQuirk = false;
+    CameraMetadata collectedQuirkResult;
 
     // Get capture timestamp from list of in-flight requests, where it was added
     // by the shutter notification for this frame. Then update the in-flight
@@ -1213,19 +1601,58 @@
             return;
         }
         InFlightRequest &request = mInFlightMap.editValueAt(idx);
+
+        // Check if this result carries only partial metadata
+        if (mUsePartialResultQuirk && result->result != NULL) {
+            camera_metadata_ro_entry_t partialResultEntry;
+            res = find_camera_metadata_ro_entry(result->result,
+                    ANDROID_QUIRKS_PARTIAL_RESULT, &partialResultEntry);
+            if (res != NAME_NOT_FOUND &&
+                    partialResultEntry.count > 0 &&
+                    partialResultEntry.data.u8[0] ==
+                    ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+                // A partial result. Flag this as such, and collect this
+                // set of metadata into the in-flight entry.
+                partialResultQuirk = true;
+                request.partialResultQuirk.collectedResult.append(
+                    result->result);
+                request.partialResultQuirk.collectedResult.erase(
+                    ANDROID_QUIRKS_PARTIAL_RESULT);
+                // Fire off a 3A-only result if possible
+                if (!request.partialResultQuirk.haveSent3A) {
+                    request.partialResultQuirk.haveSent3A =
+                            processPartial3AQuirk(frameNumber,
+                                    request.requestId,
+                                    request.partialResultQuirk.collectedResult);
+                }
+            }
+        }
+
         timestamp = request.captureTimestamp;
-        if (timestamp == 0) {
+        /**
+         * One of the following must happen before it's legal to call process_capture_result,
+         * unless partial metadata is being provided:
+         * - CAMERA3_MSG_SHUTTER (expected during normal operation)
+         * - CAMERA3_MSG_ERROR (expected during flush)
+         */
+        if (request.requestStatus == OK && timestamp == 0 && !partialResultQuirk) {
             SET_ERR("Called before shutter notify for frame %d",
                     frameNumber);
             return;
         }
 
-        if (result->result != NULL) {
+        // Did we get the (final) result metadata for this capture?
+        if (result->result != NULL && !partialResultQuirk) {
             if (request.haveResultMetadata) {
                 SET_ERR("Called multiple times with metadata for frame %d",
                         frameNumber);
                 return;
             }
+            if (mUsePartialResultQuirk &&
+                    !request.partialResultQuirk.collectedResult.isEmpty()) {
+                collectedQuirkResult.acquire(
+                    request.partialResultQuirk.collectedResult);
+            }
             request.haveResultMetadata = true;
         }
 
@@ -1237,6 +1664,7 @@
             return;
         }
 
+        // Check if everything has arrived for this result (buffers and metadata)
         if (request.haveResultMetadata && request.numBuffersLeft == 0) {
             ATRACE_ASYNC_END("frame capture", frameNumber);
             mInFlightMap.removeItemsAt(idx, 1);
@@ -1251,9 +1679,12 @@
     }
 
     // Process the result metadata, if provided
-    if (result->result != NULL) {
+    bool gotResult = false;
+    if (result->result != NULL && !partialResultQuirk) {
         Mutex::Autolock l(mOutputLock);
 
+        gotResult = true;
+
         if (frameNumber != mNextResultFrameNumber) {
             SET_ERR("Out-of-order capture result metadata submitted! "
                     "(got frame number %d, expecting %d)",
@@ -1262,19 +1693,26 @@
         }
         mNextResultFrameNumber++;
 
-        CameraMetadata &captureResult =
-                *mResultQueue.insert(mResultQueue.end(), CameraMetadata());
-
+        CameraMetadata captureResult;
         captureResult = result->result;
+
         if (captureResult.update(ANDROID_REQUEST_FRAME_COUNT,
                         (int32_t*)&frameNumber, 1) != OK) {
             SET_ERR("Failed to set frame# in metadata (%d)",
                     frameNumber);
+            gotResult = false;
         } else {
             ALOGVV("%s: Camera %d: Set frame# in metadata (%d)",
                     __FUNCTION__, mId, frameNumber);
         }
 
+        // Append any previous partials to form a complete result
+        if (mUsePartialResultQuirk && !collectedQuirkResult.isEmpty()) {
+            captureResult.append(collectedQuirkResult);
+        }
+
+        captureResult.sort();
+
         // Check that there's a timestamp in the result metadata
 
         camera_metadata_entry entry =
@@ -1282,10 +1720,19 @@
         if (entry.count == 0) {
             SET_ERR("No timestamp provided by HAL for frame %d!",
                     frameNumber);
+            gotResult = false;
         } else if (timestamp != entry.data.i64[0]) {
             SET_ERR("Timestamp mismatch between shutter notify and result"
                     " metadata for frame %d (%lld vs %lld respectively)",
                     frameNumber, timestamp, entry.data.i64[0]);
+            gotResult = false;
+        }
+
+        if (gotResult) {
+            // Valid result, insert into queue
+            CameraMetadata& queuedResult =
+                *mResultQueue.insert(mResultQueue.end(), CameraMetadata());
+            queuedResult.swap(captureResult);
         }
     } // scope for mOutputLock
 
@@ -1298,14 +1745,14 @@
         // Note: stream may be deallocated at this point, if this buffer was the
         // last reference to it.
         if (res != OK) {
-            SET_ERR("Can't return buffer %d for frame %d to its stream: "
+            ALOGE("Can't return buffer %d for frame %d to its stream: "
                     " %s (%d)", i, frameNumber, strerror(-res), res);
         }
     }
 
     // Finally, signal any waiters for new frames
 
-    if (result->result != NULL) {
+    if (gotResult) {
         mResultSignal.signal();
     }
 
@@ -1338,6 +1785,16 @@
             ALOGV("Camera %d: %s: HAL error, frame %d, stream %d: %d",
                     mId, __FUNCTION__, msg->message.error.frame_number,
                     streamId, msg->message.error.error_code);
+
+            // Set request error status for the request in the in-flight tracking
+            {
+                Mutex::Autolock l(mInFlightLock);
+                ssize_t idx = mInFlightMap.indexOfKey(msg->message.error.frame_number);
+                if (idx >= 0) {
+                    mInFlightMap.editValueAt(idx).requestStatus = msg->message.error.error_code;
+                }
+            }
+
             if (listener != NULL) {
                 listener->notifyError(msg->message.error.error_code,
                         msg->message.error.frame_number, streamId);
@@ -1360,12 +1817,17 @@
                 mNextShutterFrameNumber++;
             }
 
+            int32_t requestId = -1;
+
             // Set timestamp for the request in the in-flight tracking
+            // and get the request ID to send upstream
             {
                 Mutex::Autolock l(mInFlightLock);
                 idx = mInFlightMap.indexOfKey(frameNumber);
                 if (idx >= 0) {
-                    mInFlightMap.editValueAt(idx).captureTimestamp = timestamp;
+                    InFlightRequest &r = mInFlightMap.editValueAt(idx);
+                    r.captureTimestamp = timestamp;
+                    requestId = r.requestId;
                 }
             }
             if (idx < 0) {
@@ -1373,11 +1835,11 @@
                         frameNumber);
                 break;
             }
-            ALOGVV("Camera %d: %s: Shutter fired for frame %d at %lld",
-                    mId, __FUNCTION__, frameNumber, timestamp);
+            ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %lld",
+                    mId, __FUNCTION__, frameNumber, requestId, timestamp);
             // Call listener, if any
             if (listener != NULL) {
-                listener->notifyShutter(frameNumber, timestamp);
+                listener->notifyShutter(requestId, timestamp);
             }
             break;
         }
@@ -1387,14 +1849,28 @@
     }
 }
 
+CameraMetadata Camera3Device::getLatestRequestLocked() {
+    ALOGV("%s", __FUNCTION__);
+
+    CameraMetadata retVal;
+
+    if (mRequestThread != NULL) {
+        retVal = mRequestThread->getLatestRequest();
+    }
+
+    return retVal;
+}
+
 /**
  * RequestThread inner class methods
  */
 
 Camera3Device::RequestThread::RequestThread(wp<Camera3Device> parent,
+        sp<StatusTracker> statusTracker,
         camera3_device_t *hal3Device) :
         Thread(false),
         mParent(parent),
+        mStatusTracker(statusTracker),
         mHal3Device(hal3Device),
         mId(getId(parent)),
         mReconfigured(false),
@@ -1402,6 +1878,7 @@
         mPaused(true),
         mFrameNumber(0),
         mLatestRequestId(NAME_NOT_FOUND) {
+    mStatusId = statusTracker->addComponent();
 }
 
 void Camera3Device::RequestThread::configurationComplete() {
@@ -1414,6 +1891,8 @@
     Mutex::Autolock l(mRequestLock);
     mRequestQueue.push_back(request);
 
+    unpauseForNewRequests();
+
     return OK;
 }
 
@@ -1479,6 +1958,9 @@
     mRepeatingRequests.clear();
     mRepeatingRequests.insert(mRepeatingRequests.begin(),
             requests.begin(), requests.end());
+
+    unpauseForNewRequests();
+
     return OK;
 }
 
@@ -1488,25 +1970,20 @@
     return OK;
 }
 
+status_t Camera3Device::RequestThread::clear() {
+    Mutex::Autolock l(mRequestLock);
+    mRepeatingRequests.clear();
+    mRequestQueue.clear();
+    mTriggerMap.clear();
+    return OK;
+}
+
 void Camera3Device::RequestThread::setPaused(bool paused) {
     Mutex::Autolock l(mPauseLock);
     mDoPause = paused;
     mDoPauseSignal.signal();
 }
 
-status_t Camera3Device::RequestThread::waitUntilPaused(nsecs_t timeout) {
-    ATRACE_CALL();
-    status_t res;
-    Mutex::Autolock l(mPauseLock);
-    while (!mPaused) {
-        res = mPausedSignal.waitRelative(mPauseLock, timeout);
-        if (res == TIMED_OUT) {
-            return res;
-        }
-    }
-    return OK;
-}
-
 status_t Camera3Device::RequestThread::waitUntilRequestProcessed(
         int32_t requestId, nsecs_t timeout) {
     Mutex::Autolock l(mLatestRequestMutex);
@@ -1523,7 +2000,13 @@
     return OK;
 }
 
-
+void Camera3Device::RequestThread::requestExit() {
+    // Call parent to set up shutdown
+    Thread::requestExit();
+    // The exit from any possible waits
+    mDoPauseSignal.signal();
+    mRequestSignal.signal();
+}
 
 bool Camera3Device::RequestThread::threadLoop() {
 
@@ -1545,6 +2028,18 @@
     camera3_capture_request_t request = camera3_capture_request_t();
     Vector<camera3_stream_buffer_t> outputBuffers;
 
+    // Get the request ID, if any
+    int requestId;
+    camera_metadata_entry_t requestIdEntry =
+            nextRequest->mSettings.find(ANDROID_REQUEST_ID);
+    if (requestIdEntry.count > 0) {
+        requestId = requestIdEntry.data.i32[0];
+    } else {
+        ALOGW("%s: Did not have android.request.id set in the request",
+                __FUNCTION__);
+        requestId = NAME_NOT_FOUND;
+    }
+
     // Insert any queued triggers (before metadata is locked)
     int32_t triggerCount;
     res = insertTriggers(nextRequest);
@@ -1562,6 +2057,19 @@
     // If the request is the same as last, or we had triggers last time
     if (mPrevRequest != nextRequest || triggersMixedIn) {
         /**
+         * HAL workaround:
+         * Insert a dummy trigger ID if a trigger is set but no trigger ID is
+         */
+        res = addDummyTriggerIds(nextRequest);
+        if (res != OK) {
+            SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
+                    "(capture request %d, HAL device: %s (%d)",
+                    (mFrameNumber+1), strerror(-res), res);
+            cleanUpFailedRequest(request, nextRequest, outputBuffers);
+            return false;
+        }
+
+        /**
          * The request should be presorted so accesses in HAL
          *   are O(logn). Sidenote, sorting a sorted metadata is nop.
          */
@@ -1598,7 +2106,7 @@
         request.input_buffer = &inputBuffer;
         res = nextRequest->mInputStream->getInputBuffer(&inputBuffer);
         if (res != OK) {
-            SET_ERR("RequestThread: Can't get input buffer, skipping request:"
+            ALOGE("RequestThread: Can't get input buffer, skipping request:"
                     " %s (%d)", strerror(-res), res);
             cleanUpFailedRequest(request, nextRequest, outputBuffers);
             return true;
@@ -1614,8 +2122,8 @@
         res = nextRequest->mOutputStreams.editItemAt(i)->
                 getBuffer(&outputBuffers.editItemAt(i));
         if (res != OK) {
-            SET_ERR("RequestThread: Can't get output buffer, skipping request:"
-                    "%s (%d)", strerror(-res), res);
+            ALOGE("RequestThread: Can't get output buffer, skipping request:"
+                    " %s (%d)", strerror(-res), res);
             cleanUpFailedRequest(request, nextRequest, outputBuffers);
             return true;
         }
@@ -1632,7 +2140,7 @@
         return false;
     }
 
-    res = parent->registerInFlight(request.frame_number,
+    res = parent->registerInFlight(request.frame_number, requestId,
             request.num_output_buffers);
     if (res != OK) {
         SET_ERR("RequestThread: Unable to register new in-flight request:"
@@ -1641,6 +2149,14 @@
         return false;
     }
 
+    // Inform waitUntilRequestProcessed thread of a new request ID
+    {
+        Mutex::Autolock al(mLatestRequestMutex);
+
+        mLatestRequestId = requestId;
+        mLatestRequestSignal.signal();
+    }
+
     // Submit request and block until ready for next one
     ATRACE_ASYNC_BEGIN("frame capture", request.frame_number);
     ATRACE_BEGIN("camera3->process_capture_request");
@@ -1654,6 +2170,14 @@
         return false;
     }
 
+    // Update the latest request sent to HAL
+    if (request.settings != NULL) { // Don't update them if they were unchanged
+        Mutex::Autolock al(mLatestRequestMutex);
+
+        camera_metadata_t* cloned = clone_camera_metadata(request.settings);
+        mLatestRequest.acquire(cloned);
+    }
+
     if (request.settings != NULL) {
         nextRequest->mSettings.unlock(request.settings);
     }
@@ -1668,24 +2192,6 @@
     }
     mPrevTriggers = triggerCount;
 
-    // Read android.request.id from the request settings metadata
-    // - inform waitUntilRequestProcessed thread of a new request ID
-    {
-        Mutex::Autolock al(mLatestRequestMutex);
-
-        camera_metadata_entry_t requestIdEntry =
-                nextRequest->mSettings.find(ANDROID_REQUEST_ID);
-        if (requestIdEntry.count > 0) {
-            mLatestRequestId = requestIdEntry.data.i32[0];
-        } else {
-            ALOGW("%s: Did not have android.request.id set in the request",
-                  __FUNCTION__);
-            mLatestRequestId = NAME_NOT_FOUND;
-        }
-
-        mLatestRequestSignal.signal();
-    }
-
     // Return input buffer back to framework
     if (request.input_buffer != NULL) {
         Camera3Stream *stream =
@@ -1701,11 +2207,17 @@
         }
     }
 
-
-
     return true;
 }
 
+CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
+    Mutex::Autolock al(mLatestRequestMutex);
+
+    ALOGV("RequestThread::%s", __FUNCTION__);
+
+    return mLatestRequest;
+}
+
 void Camera3Device::RequestThread::cleanUpFailedRequest(
         camera3_capture_request_t &request,
         sp<CaptureRequest> &nextRequest,
@@ -1752,12 +2264,17 @@
 
         res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout);
 
-        if (res == TIMED_OUT) {
-            // Signal that we're paused by starvation
+        if ((mRequestQueue.empty() && mRepeatingRequests.empty()) ||
+                exitPending()) {
             Mutex::Autolock pl(mPauseLock);
             if (mPaused == false) {
+                ALOGV("%s: RequestThread: Going idle", __FUNCTION__);
                 mPaused = true;
-                mPausedSignal.signal();
+                // Let the tracker know
+                sp<StatusTracker> statusTracker = mStatusTracker.promote();
+                if (statusTracker != 0) {
+                    statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+                }
             }
             // Stop waiting for now and let thread management happen
             return NULL;
@@ -1773,8 +2290,17 @@
         mRequestQueue.erase(firstRequest);
     }
 
-    // Not paused
+    // In case we've been unpaused by setPaused clearing mDoPause, we need to
+    // update the internal pause state (capture/setRepeatingRequest unpause
+    // directly).
     Mutex::Autolock pl(mPauseLock);
+    if (mPaused) {
+        ALOGV("%s: RequestThread: Unpaused", __FUNCTION__);
+        sp<StatusTracker> statusTracker = mStatusTracker.promote();
+        if (statusTracker != 0) {
+            statusTracker->markComponentActive(mStatusId);
+        }
+    }
     mPaused = false;
 
     // Check if we've reconfigured since last time, and reset the preview
@@ -1791,13 +2317,18 @@
     status_t res;
     Mutex::Autolock l(mPauseLock);
     while (mDoPause) {
-        // Signal that we're paused by request
         if (mPaused == false) {
             mPaused = true;
-            mPausedSignal.signal();
+            ALOGV("%s: RequestThread: Paused", __FUNCTION__);
+            // Let the tracker know
+            sp<StatusTracker> statusTracker = mStatusTracker.promote();
+            if (statusTracker != 0) {
+                statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+            }
         }
+
         res = mDoPauseSignal.waitRelative(mPauseLock, kRequestTimeout);
-        if (res == TIMED_OUT) {
+        if (res == TIMED_OUT || exitPending()) {
             return true;
         }
     }
@@ -1806,6 +2337,24 @@
     return false;
 }
 
+void Camera3Device::RequestThread::unpauseForNewRequests() {
+    // With work to do, mark thread as unpaused.
+    // If paused by request (setPaused), don't resume, to avoid
+    // extra signaling/waiting overhead to waitUntilPaused
+    mRequestSignal.signal();
+    Mutex::Autolock p(mPauseLock);
+    if (!mDoPause) {
+        ALOGV("%s: RequestThread: Going active", __FUNCTION__);
+        if (mPaused) {
+            sp<StatusTracker> statusTracker = mStatusTracker.promote();
+            if (statusTracker != 0) {
+                statusTracker->markComponentActive(mStatusId);
+            }
+        }
+        mPaused = false;
+    }
+}
+
 void Camera3Device::RequestThread::setErrorState(const char *fmt, ...) {
     sp<Camera3Device> parent = mParent.promote();
     if (parent != NULL) {
@@ -1951,6 +2500,40 @@
     return OK;
 }
 
+status_t Camera3Device::RequestThread::addDummyTriggerIds(
+        const sp<CaptureRequest> &request) {
+    // Trigger ID 0 has special meaning in the HAL2 spec, so avoid it here
+    static const int32_t dummyTriggerId = 1;
+    status_t res;
+
+    CameraMetadata &metadata = request->mSettings;
+
+    // If AF trigger is active, insert a dummy AF trigger ID if none already
+    // exists
+    camera_metadata_entry afTrigger = metadata.find(ANDROID_CONTROL_AF_TRIGGER);
+    camera_metadata_entry afId = metadata.find(ANDROID_CONTROL_AF_TRIGGER_ID);
+    if (afTrigger.count > 0 &&
+            afTrigger.data.u8[0] != ANDROID_CONTROL_AF_TRIGGER_IDLE &&
+            afId.count == 0) {
+        res = metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &dummyTriggerId, 1);
+        if (res != OK) return res;
+    }
+
+    // If AE precapture trigger is active, insert a dummy precapture trigger ID
+    // if none already exists
+    camera_metadata_entry pcTrigger =
+            metadata.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
+    camera_metadata_entry pcId = metadata.find(ANDROID_CONTROL_AE_PRECAPTURE_ID);
+    if (pcTrigger.count > 0 &&
+            pcTrigger.data.u8[0] != ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE &&
+            pcId.count == 0) {
+        res = metadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
+                &dummyTriggerId, 1);
+        if (res != OK) return res;
+    }
+
+    return OK;
+}
 
 
 /**
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 76c08ae..468f641 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -26,6 +26,7 @@
 #include <hardware/camera3.h>
 
 #include "common/CameraDeviceBase.h"
+#include "device3/StatusTracker.h"
 
 /**
  * Function pointer types with C calling convention to
@@ -124,27 +125,49 @@
     virtual status_t pushReprocessBuffer(int reprocessStreamId,
             buffer_handle_t *buffer, wp<BufferReleasedListener> listener);
 
+    virtual status_t flush();
+
+    // Methods called by subclasses
+    void             notifyStatus(bool idle); // updates from StatusTracker
+
   private:
+    static const size_t        kDumpLockAttempts  = 10;
+    static const size_t        kDumpSleepDuration = 100000; // 0.10 sec
     static const size_t        kInFlightWarnLimit = 20;
     static const nsecs_t       kShutdownTimeout   = 5000000000; // 5 sec
+    static const nsecs_t       kActiveTimeout     = 500000000;  // 500 ms
     struct                     RequestTrigger;
 
+    // A lock to enforce serialization on the input/configure side
+    // of the public interface.
+    // Only locked by public methods inherited from CameraDeviceBase.
+    // Not locked by methods guarded by mOutputLock, since they may act
+    // concurrently to the input/configure side of the interface.
+    // Must be locked before mLock if both will be locked by a method
+    Mutex                      mInterfaceLock;
+
+    // The main lock on internal state
     Mutex                      mLock;
 
+    // Camera device ID
+    const int                  mId;
+
     /**** Scope for mLock ****/
 
-    const int                  mId;
     camera3_device_t          *mHal3Device;
 
     CameraMetadata             mDeviceInfo;
     vendor_tag_query_ops_t     mVendorTagOps;
 
-    enum {
+    enum Status {
         STATUS_ERROR,
         STATUS_UNINITIALIZED,
-        STATUS_IDLE,
+        STATUS_UNCONFIGURED,
+        STATUS_CONFIGURED,
         STATUS_ACTIVE
     }                          mStatus;
+    Vector<Status>             mRecentStatusUpdates;
+    Condition                  mStatusChanged;
 
     // Tracking cause of fatal errors when in STATUS_ERROR
     String8                    mErrorCause;
@@ -158,9 +181,16 @@
     int                        mNextStreamId;
     bool                       mNeedConfig;
 
+    // Whether to send state updates upstream
+    // Pause when doing transparent reconfiguration
+    bool                       mPauseStateNotify;
+
     // Need to hold on to stream references until configure completes.
     Vector<sp<camera3::Camera3StreamInterface> > mDeletedStreams;
 
+    // Whether quirk ANDROID_QUIRKS_USE_PARTIAL_RESULT is enabled
+    bool                       mUsePartialResultQuirk;
+
     /**** End scope for mLock ****/
 
     class CaptureRequest : public LightRefBase<CaptureRequest> {
@@ -173,10 +203,38 @@
     typedef List<sp<CaptureRequest> > RequestList;
 
     /**
-     * Lock-held version of waitUntilDrained. Will transition to IDLE on
-     * success.
+     * Get the last request submitted to the hal by the request thread.
+     *
+     * Takes mLock.
      */
-    status_t           waitUntilDrainedLocked();
+    virtual CameraMetadata getLatestRequestLocked();
+
+    /**
+     * Pause processing and flush everything, but don't tell the clients.
+     * This is for reconfiguring outputs transparently when according to the
+     * CameraDeviceBase interface we shouldn't need to.
+     * Must be called with mLock and mInterfaceLock both held.
+     */
+    status_t internalPauseAndWaitLocked();
+
+    /**
+     * Resume work after internalPauseAndWaitLocked()
+     * Must be called with mLock and mInterfaceLock both held.
+     */
+    status_t internalResumeLocked();
+
+    /**
+     * Wait until status tracker tells us we've transitioned to the target state
+     * set, which is either ACTIVE when active==true or IDLE (which is any
+     * non-ACTIVE state) when active==false.
+     *
+     * Needs to be called with mLock and mInterfaceLock held.  This means there
+     * can only ever be one waiter at a time.
+     *
+     * During the wait mLock is released.
+     *
+     */
+    status_t waitUntilStateThenRelock(bool active, nsecs_t timeout);
 
     /**
      * Do common work for setting up a streaming or single capture request.
@@ -206,6 +264,12 @@
     void               setErrorStateLocked(const char *fmt, ...);
     void               setErrorStateLockedV(const char *fmt, va_list args);
 
+    /**
+     * Debugging trylock/spin method
+     * Try to acquire a lock a few times with sleeps between before giving up.
+     */
+    bool               tryLockSpinRightRound(Mutex& lock);
+
     struct RequestTrigger {
         // Metadata tag number, e.g. android.control.aePrecaptureTrigger
         uint32_t metadataTag;
@@ -231,6 +295,7 @@
       public:
 
         RequestThread(wp<Camera3Device> parent,
+                sp<camera3::StatusTracker> statusTracker,
                 camera3_device_t *hal3Device);
 
         /**
@@ -249,6 +314,11 @@
         status_t queueRequest(sp<CaptureRequest> request);
 
         /**
+         * Remove all queued and repeating requests, and pending triggers
+         */
+        status_t clear();
+
+        /**
          * Queue a trigger to be dispatched with the next outgoing
          * process_capture_request. The settings for that request only
          * will be temporarily rewritten to add the trigger tag/value.
@@ -263,13 +333,6 @@
         void     setPaused(bool paused);
 
         /**
-         * Wait until thread is paused, either due to setPaused(true)
-         * or due to lack of input requests. Returns TIMED_OUT in case
-         * the thread does not pause within the timeout.
-         */
-        status_t waitUntilPaused(nsecs_t timeout);
-
-        /**
          * Wait until thread processes the capture request with settings'
          * android.request.id == requestId.
          *
@@ -278,6 +341,18 @@
          */
         status_t waitUntilRequestProcessed(int32_t requestId, nsecs_t timeout);
 
+        /**
+         * Shut down the thread. Shutdown is asynchronous, so thread may
+         * still be running once this method returns.
+         */
+        virtual void requestExit();
+
+        /**
+         * Get the latest request that was sent to the HAL
+         * with process_capture_request.
+         */
+        CameraMetadata getLatestRequest() const;
+
       protected:
 
         virtual bool threadLoop();
@@ -292,6 +367,10 @@
         //  restoring the old field values for those tags.
         status_t           removeTriggers(const sp<CaptureRequest> &request);
 
+        // HAL workaround: Make sure a trigger ID always exists if
+        // a trigger does
+        status_t          addDummyTriggerIds(const sp<CaptureRequest> &request);
+
         static const nsecs_t kRequestTimeout = 50e6; // 50 ms
 
         // Waits for a request, or returns NULL if times out.
@@ -307,14 +386,18 @@
 
         // Pause handling
         bool               waitIfPaused();
+        void               unpauseForNewRequests();
 
         // Relay error to parent device object setErrorState
         void               setErrorState(const char *fmt, ...);
 
         wp<Camera3Device>  mParent;
+        wp<camera3::StatusTracker>  mStatusTracker;
         camera3_device_t  *mHal3Device;
 
-        const int          mId;
+        const int          mId;       // The camera ID
+        int                mStatusId; // The RequestThread's component ID for
+                                      // status tracking
 
         Mutex              mRequestLock;
         Condition          mRequestSignal;
@@ -335,10 +418,11 @@
 
         uint32_t           mFrameNumber;
 
-        Mutex              mLatestRequestMutex;
+        mutable Mutex      mLatestRequestMutex;
         Condition          mLatestRequestSignal;
         // android.request.id for latest process_capture_request
         int32_t            mLatestRequestId;
+        CameraMetadata     mLatestRequest;
 
         typedef KeyedVector<uint32_t/*tag*/, RequestTrigger> TriggerMap;
         Mutex              mTriggerMutex;
@@ -353,22 +437,42 @@
      */
 
     struct InFlightRequest {
+        // android.request.id for the request
+        int     requestId;
         // Set by notify() SHUTTER call.
         nsecs_t captureTimestamp;
+        int     requestStatus;
         // Set by process_capture_result call with valid metadata
         bool    haveResultMetadata;
         // Decremented by calls to process_capture_result with valid output
         // buffers
         int     numBuffersLeft;
 
+        // Fields used by the partial result quirk only
+        struct PartialResultQuirkInFlight {
+            // Set by process_capture_result once 3A has been sent to clients
+            bool    haveSent3A;
+            // Result metadata collected so far, when partial results are in use
+            CameraMetadata collectedResult;
+
+            PartialResultQuirkInFlight():
+                    haveSent3A(false) {
+            }
+        } partialResultQuirk;
+
+        // Default constructor needed by KeyedVector
         InFlightRequest() :
+                requestId(0),
                 captureTimestamp(0),
+                requestStatus(OK),
                 haveResultMetadata(false),
                 numBuffersLeft(0) {
         }
 
-        explicit InFlightRequest(int numBuffers) :
+        InFlightRequest(int id, int numBuffers) :
+                requestId(id),
                 captureTimestamp(0),
+                requestStatus(OK),
                 haveResultMetadata(false),
                 numBuffersLeft(numBuffers) {
         }
@@ -379,7 +483,29 @@
     Mutex                  mInFlightLock; // Protects mInFlightMap
     InFlightMap            mInFlightMap;
 
-    status_t registerInFlight(int32_t frameNumber, int32_t numBuffers);
+    status_t registerInFlight(int32_t frameNumber, int32_t requestId,
+            int32_t numBuffers);
+
+    /**
+     * For the partial result quirk, check if all 3A state fields are available
+     * and if so, queue up 3A-only result to the client. Returns true if 3A
+     * is sent.
+     */
+    bool processPartial3AQuirk(int32_t frameNumber, int32_t requestId,
+            const CameraMetadata& partial);
+
+    // Helpers for reading and writing 3A metadata into to/from partial results
+    template<typename T>
+    bool get3AResult(const CameraMetadata& result, int32_t tag,
+            T* value, int32_t frameNumber);
+
+    template<typename T>
+    bool insert3AResult(CameraMetadata &result, int32_t tag, const T* value,
+            int32_t frameNumber);
+    /**
+     * Tracking for idle detection
+     */
+    sp<camera3::StatusTracker> mStatusTracker;
 
     /**
      * Output result queue and current HAL device 3A state
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 0850566..da51228 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -23,7 +23,8 @@
 
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include "Camera3IOStreamBase.h"
+#include "device3/Camera3IOStreamBase.h"
+#include "device3/StatusTracker.h"
 
 namespace android {
 
@@ -62,53 +63,6 @@
     return false;
 }
 
-status_t Camera3IOStreamBase::waitUntilIdle(nsecs_t timeout) {
-    status_t res;
-    {
-        Mutex::Autolock l(mLock);
-        while (mDequeuedBufferCount > 0) {
-            if (timeout != TIMEOUT_NEVER) {
-                nsecs_t startTime = systemTime();
-                res = mBufferReturnedSignal.waitRelative(mLock, timeout);
-                if (res == TIMED_OUT) {
-                    return res;
-                } else if (res != OK) {
-                    ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
-                            __FUNCTION__, strerror(-res), res);
-                    return res;
-                }
-                nsecs_t deltaTime = systemTime() - startTime;
-                if (timeout <= deltaTime) {
-                    timeout = 0;
-                } else {
-                    timeout -= deltaTime;
-                }
-            } else {
-                res = mBufferReturnedSignal.wait(mLock);
-                if (res != OK) {
-                    ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
-                            __FUNCTION__, strerror(-res), res);
-                    return res;
-                }
-            }
-        }
-    }
-
-    // No lock
-
-    unsigned int timeoutMs;
-    if (timeout == TIMEOUT_NEVER) {
-        timeoutMs = Fence::TIMEOUT_NEVER;
-    } else if (timeout == 0) {
-        timeoutMs = 0;
-    } else {
-        // Round up to wait at least 1 ms
-        timeoutMs = (timeout + 999999) / 1000000;
-    }
-
-    return mCombinedFence->wait(timeoutMs);
-}
-
 void Camera3IOStreamBase::dump(int fd, const Vector<String16> &args) const {
     (void) args;
     String8 lines;
@@ -190,6 +144,14 @@
     buffer.release_fence = releaseFence;
     buffer.status = status;
 
+    // Inform tracker about becoming busy
+    if (mDequeuedBufferCount == 0 && mState != STATE_IN_CONFIG &&
+            mState != STATE_IN_RECONFIG) {
+        sp<StatusTracker> statusTracker = mStatusTracker.promote();
+        if (statusTracker != 0) {
+            statusTracker->markComponentActive(mStatusId);
+        }
+    }
     mDequeuedBufferCount++;
 }
 
@@ -252,20 +214,32 @@
     sp<Fence> releaseFence;
     res = returnBufferCheckedLocked(buffer, timestamp, output,
                                     &releaseFence);
-    if (res != OK) {
-        return res;
+    // Res may be an error, but we still want to decrement our owned count
+    // to enable clean shutdown. So we'll just return the error but otherwise
+    // carry on
+
+    if (releaseFence != 0) {
+        mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
     }
 
-    mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
-
     mDequeuedBufferCount--;
+    if (mDequeuedBufferCount == 0 && mState != STATE_IN_CONFIG &&
+            mState != STATE_IN_RECONFIG) {
+        ALOGV("%s: Stream %d: All buffers returned; now idle", __FUNCTION__,
+                mId);
+        sp<StatusTracker> statusTracker = mStatusTracker.promote();
+        if (statusTracker != 0) {
+            statusTracker->markComponentIdle(mStatusId, mCombinedFence);
+        }
+    }
+
     mBufferReturnedSignal.signal();
 
     if (output) {
         mLastTimestamp = timestamp;
     }
 
-    return OK;
+    return res;
 }
 
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 9432a59..fcb9d04 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -43,7 +43,6 @@
      * Camera3Stream interface
      */
 
-    virtual status_t waitUntilIdle(nsecs_t timeout);
     virtual void     dump(int fd, const Vector<String16> &args) const;
 
   protected:
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index c80f512..5aa9a3e 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -115,7 +115,6 @@
                 bufferFound = true;
                 bufferItem = tmp;
                 mBuffersInFlight.erase(it);
-                mDequeuedBufferCount--;
             }
         }
     }
@@ -148,12 +147,11 @@
     if (res != OK) {
         ALOGE("%s: Stream %d: Error releasing buffer back to buffer queue:"
                 " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
-        return res;
     }
 
     *releaseFenceOut = releaseFence;
 
-    return OK;
+    return res;
 }
 
 status_t Camera3InputStream::returnInputBufferLocked(
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 35cb5ba..682755d 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -92,7 +92,22 @@
     ANativeWindowBuffer* anb;
     int fenceFd;
 
-    res = mConsumer->dequeueBuffer(mConsumer.get(), &anb, &fenceFd);
+    /**
+     * Release the lock briefly to avoid deadlock in the scenario below:
+     * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
+     * This thread holds the StreamingProcessor lock and tries to lock the
+     * Camera3Stream lock.
+     * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
+     * This thread holds the Camera3Stream lock and the bufferQueue lock, and tries
+     * to lock the StreamingProcessor lock.
+     * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream
+     * lock and tries to lock the bufferQueue lock.
+     * This creates a circular locking dependency.
+     */
+    sp<ANativeWindow> currentConsumer = mConsumer;
+    mLock.unlock();
+
+    res = currentConsumer->dequeueBuffer(currentConsumer.get(), &anb, &fenceFd);
+    mLock.lock();
     if (res != OK) {
         ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
                 __FUNCTION__, mId, strerror(-res), res);
@@ -198,12 +213,11 @@
     mLock.lock();
     if (res != OK) {
         close(anwReleaseFence);
-        return res;
     }
 
     *releaseFenceOut = releaseFence;
 
-    return OK;
+    return res;
 }
 
 void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index a6872aa..6d2cf94 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -20,13 +20,18 @@
 
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include "Camera3Stream.h"
+#include "device3/Camera3Stream.h"
+#include "device3/StatusTracker.h"
 
 namespace android {
 
 namespace camera3 {
 
 Camera3Stream::~Camera3Stream() {
+    sp<StatusTracker> statusTracker = mStatusTracker.promote();
+    if (statusTracker != 0 && mStatusId != StatusTracker::NO_STATUS_ID) {
+        statusTracker->removeComponent(mStatusId);
+    }
 }
 
 Camera3Stream* Camera3Stream::cast(camera3_stream *stream) {
@@ -44,7 +49,8 @@
     mId(id),
     mName(String8::format("Camera3Stream[%d]", id)),
     mMaxSize(maxSize),
-    mState(STATE_CONSTRUCTED) {
+    mState(STATE_CONSTRUCTED),
+    mStatusId(StatusTracker::NO_STATUS_ID) {
 
     camera3_stream::stream_type = type;
     camera3_stream::width = width;
@@ -119,6 +125,15 @@
         return NULL;
     }
 
+    // Stop tracking if currently doing so
+    if (mStatusId != StatusTracker::NO_STATUS_ID) {
+        sp<StatusTracker> statusTracker = mStatusTracker.promote();
+        if (statusTracker != 0) {
+            statusTracker->removeComponent(mStatusId);
+        }
+        mStatusId = StatusTracker::NO_STATUS_ID;
+    }
+
     if (mState == STATE_CONSTRUCTED) {
         mState = STATE_IN_CONFIG;
     } else { // mState == STATE_CONFIGURED
@@ -154,6 +169,12 @@
             return INVALID_OPERATION;
     }
 
+    // Register for idle tracking
+    sp<StatusTracker> statusTracker = mStatusTracker.promote();
+    if (statusTracker != 0) {
+        mStatusId = statusTracker->addComponent();
+    }
+
     // Check if the stream configuration is unchanged, and skip reallocation if
     // so. As documented in hardware/camera3.h:configure_streams().
     if (mState == STATE_IN_RECONFIG &&
@@ -265,6 +286,18 @@
     return hasOutstandingBuffersLocked();
 }
 
+status_t Camera3Stream::setStatusTracker(sp<StatusTracker> statusTracker) {
+    Mutex::Autolock l(mLock);
+    sp<StatusTracker> oldTracker = mStatusTracker.promote();
+    if (oldTracker != 0 && mStatusId != StatusTracker::NO_STATUS_ID) {
+        oldTracker->removeComponent(mStatusId);
+    }
+    mStatusId = StatusTracker::NO_STATUS_ID;
+    mStatusTracker = statusTracker;
+
+    return OK;
+}
+
 status_t Camera3Stream::disconnect() {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index b64fd86..6eeb721 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -190,12 +190,11 @@
     enum {
         TIMEOUT_NEVER = -1
     };
+
     /**
-     * Wait until the HAL is done with all of this stream's buffers, including
-     * signalling all release fences. Returns TIMED_OUT if the timeout is exceeded,
-     * OK on success. Pass in TIMEOUT_NEVER for timeout to indicate an indefinite wait.
+     * Set the status tracker to notify about idle transitions
      */
-    virtual status_t waitUntilIdle(nsecs_t timeout) = 0;
+    virtual status_t setStatusTracker(sp<StatusTracker> statusTracker);
 
     /**
      * Disconnect stream from its non-HAL endpoint. After this,
@@ -267,6 +266,11 @@
     // INVALID_OPERATION if they cannot be obtained.
     virtual status_t getEndpointUsage(uint32_t *usage) = 0;
 
+    // Tracking for idle state
+    wp<StatusTracker> mStatusTracker;
+    // Status tracker component ID
+    int mStatusId;
+
   private:
     uint32_t oldUsage;
     uint32_t oldMaxBuffers;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 4768536..c93ae15 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -26,6 +26,8 @@
 
 namespace camera3 {
 
+class StatusTracker;
+
 /**
  * An interface for managing a single stream of input and/or output data from
  * the camera device.
@@ -128,13 +130,11 @@
     enum {
         TIMEOUT_NEVER = -1
     };
+
     /**
-     * Wait until the HAL is done with all of this stream's buffers, including
-     * signalling all release fences. Returns TIMED_OUT if the timeout is
-     * exceeded, OK on success. Pass in TIMEOUT_NEVER for timeout to indicate
-     * an indefinite wait.
+     * Set the state tracker to use for signaling idle transitions.
      */
-    virtual status_t waitUntilIdle(nsecs_t timeout) = 0;
+    virtual status_t setStatusTracker(sp<StatusTracker> statusTracker) = 0;
 
     /**
      * Disconnect stream from its non-HAL endpoint. After this,
diff --git a/services/camera/libcameraservice/device3/StatusTracker.cpp b/services/camera/libcameraservice/device3/StatusTracker.cpp
new file mode 100644
index 0000000..ab5419f
--- /dev/null
+++ b/services/camera/libcameraservice/device3/StatusTracker.cpp
@@ -0,0 +1,219 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-Status"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+// This is needed for stdint.h to define INT64_MAX in C++
+#define __STDC_LIMIT_MACROS
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <ui/Fence.h>
+
+#include "device3/StatusTracker.h"
+#include "device3/Camera3Device.h"
+
+namespace android {
+
+namespace camera3 {
+
+StatusTracker::StatusTracker(wp<Camera3Device> parent) :
+        mComponentsChanged(false),
+        mParent(parent),
+        mNextComponentId(0),
+        mIdleFence(new Fence()),
+        mDeviceState(IDLE) {
+}
+
+StatusTracker::~StatusTracker() {
+}
+
+int StatusTracker::addComponent() {
+    int id;
+    ssize_t err;
+    {
+        Mutex::Autolock l(mLock);
+        id = mNextComponentId++;
+        ALOGV("%s: Adding new component %d", __FUNCTION__, id);
+
+        err = mStates.add(id, IDLE);
+        ALOGE_IF(err < 0, "%s: Can't add new component %d: %s (%d)",
+                __FUNCTION__, id, strerror(-err), err);
+    }
+
+    if (err >= 0) {
+        Mutex::Autolock pl(mPendingLock);
+        mComponentsChanged = true;
+        mPendingChangeSignal.signal();
+    }
+
+    return err < 0 ? err : id;
+}
+
+void StatusTracker::removeComponent(int id) {
+    ssize_t idx;
+    {
+        Mutex::Autolock l(mLock);
+        ALOGV("%s: Removing component %d", __FUNCTION__, id);
+        idx = mStates.removeItem(id);
+    }
+
+    if (idx >= 0) {
+        Mutex::Autolock pl(mPendingLock);
+        mComponentsChanged = true;
+        mPendingChangeSignal.signal();
+    }
+
+    return;
+}
+
+
+void StatusTracker::markComponentIdle(int id, const sp<Fence>& componentFence) {
+    markComponent(id, IDLE, componentFence);
+}
+
+void StatusTracker::markComponentActive(int id) {
+    markComponent(id, ACTIVE, Fence::NO_FENCE);
+}
+
+void StatusTracker::markComponent(int id, ComponentState state,
+        const sp<Fence>& componentFence) {
+    ALOGV("%s: Component %d is now %s", __FUNCTION__, id,
+            state == IDLE ? "idle" : "active");
+    Mutex::Autolock l(mPendingLock);
+
+    StateChange newState = {
+        id,
+        state,
+        componentFence
+    };
+
+    mPendingChangeQueue.add(newState);
+    mPendingChangeSignal.signal();
+}
+
+void StatusTracker::requestExit() {
+    // First mark thread dead
+    Thread::requestExit();
+    // Then exit any waits
+    mPendingChangeSignal.signal();
+}
+
+StatusTracker::ComponentState StatusTracker::getDeviceStateLocked() {
+    for (size_t i = 0; i < mStates.size(); i++) {
+        if (mStates.valueAt(i) == ACTIVE) {
+            ALOGV("%s: Component %d not idle", __FUNCTION__,
+                    mStates.keyAt(i));
+            return ACTIVE;
+        }
+    }
+    // - If not yet signaled, getSignalTime returns INT64_MAX
+    // - If invalid fence or error, returns -1
+    // - Otherwise returns time of signalling.
+    // Treat -1 as 'signalled', since the HAL may not be using fences, and we
+    // want to be able to idle in case of errors.
+    nsecs_t signalTime = mIdleFence->getSignalTime();
+    bool fencesDone = signalTime != INT64_MAX;
+
+    ALOGV_IF(!fencesDone, "%s: Fences still to wait on", __FUNCTION__);
+
+    return fencesDone ? IDLE : ACTIVE;
+}
+
+bool StatusTracker::threadLoop() {
+    status_t res;
+
+    // Wait for state updates
+    {
+        Mutex::Autolock pl(mPendingLock);
+        while (mPendingChangeQueue.size() == 0 && !mComponentsChanged) {
+            res = mPendingChangeSignal.waitRelative(mPendingLock,
+                    kWaitDuration);
+            if (exitPending()) return false;
+            if (res != OK) {
+                if (res != TIMED_OUT) {
+                    ALOGE("%s: Error waiting on state changes: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
+                }
+                // TIMED_OUT is expected
+                break;
+            }
+        }
+    }
+
+    // After new pending states appear, or timeout, check if we're idle.  Even
+    // with timeout, need to check to account for fences that may still be
+    // clearing out
+    sp<Camera3Device> parent;
+    {
+        Mutex::Autolock pl(mPendingLock);
+        Mutex::Autolock l(mLock);
+
+        // Collect all pending state updates and see if the device
+        // collectively transitions between idle and active for each one
+
+        // First pass for changed components or fence completions
+        ComponentState prevState = getDeviceStateLocked();
+        if (prevState != mDeviceState) {
+            // Only collect changes to overall device state
+            mStateTransitions.add(prevState);
+        }
+        // For each pending component state update, check if we've transitioned
+        // to a new overall device state
+        for (size_t i = 0; i < mPendingChangeQueue.size(); i++) {
+            const StateChange &newState = mPendingChangeQueue[i];
+            ssize_t idx = mStates.indexOfKey(newState.id);
+            // Ignore notices for unknown components
+            if (idx >= 0) {
+                // Update single component state
+                mStates.replaceValueAt(idx, newState.state);
+                mIdleFence = Fence::merge(String8("idleFence"),
+                        mIdleFence, newState.fence);
+                // .. and see if overall device state has changed
+                ComponentState newState = getDeviceStateLocked();
+                if (newState != prevState) {
+                    mStateTransitions.add(newState);
+                }
+                prevState = newState;
+            }
+        }
+        mPendingChangeQueue.clear();
+        mComponentsChanged = false;
+
+        // Store final state after all pending state changes are done with
+
+        mDeviceState = prevState;
+        parent = mParent.promote();
+    }
+
+    // Notify parent for all intermediate transitions
+    if (mStateTransitions.size() > 0 && parent.get()) {
+        for (size_t i = 0; i < mStateTransitions.size(); i++) {
+            bool idle = (mStateTransitions[i] == IDLE);
+            ALOGV("Camera device is now %s", idle ? "idle" : "active");
+            parent->notifyStatus(idle);
+        }
+    }
+    mStateTransitions.clear();
+
+    return true;
+}
+
+} // namespace camera3
+
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/StatusTracker.h b/services/camera/libcameraservice/device3/StatusTracker.h
new file mode 100644
index 0000000..49cecb3
--- /dev/null
+++ b/services/camera/libcameraservice/device3/StatusTracker.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
+#define ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
+
+#include <utils/Condition.h>
+#include <utils/Errors.h>
+#include <utils/List.h>
+#include <utils/Mutex.h>
+#include <utils/Thread.h>
+#include <utils/KeyedVector.h>
+#include <hardware/camera3.h>
+
+#include "common/CameraDeviceBase.h"
+
+namespace android {
+
+class Camera3Device;
+class Fence;
+
+namespace camera3 {
+
+/**
+ * State tracking for idle and other collective state transitions.
+ * Collects idle notifications from different sources and calls the
+ * parent when all of them become idle.
+ *
+ * The parent is responsible for synchronizing the status updates with its
+ * internal state correctly, which means the notifyStatus call to the parent may
+ * block for a while.
+ */
+class StatusTracker: public Thread {
+  public:
+    StatusTracker(wp<Camera3Device> parent);
+    ~StatusTracker();
+
+    // An always-invalid component ID
+    static const int NO_STATUS_ID = -1;
+
+    // Add a component to track; returns non-negative unique ID for the new
+    // component on success, negative error code on failure.
+    // New components start in the idle state.
+    int addComponent();
+
+    // Remove existing component from idle tracking. Ignores unknown IDs
+    void removeComponent(int id);
+
+    // Set the state of a tracked component to be idle. Ignores unknown IDs; can
+    // accept a fence to wait on to complete idle.  The fence is merged with any
+    // previous fences given, which means they all must signal before the
+    // component is considered idle.
+    void markComponentIdle(int id, const sp<Fence>& componentFence);
+
+    // Set the state of a tracked component to be active. Ignores unknown IDs.
+    void markComponentActive(int id);
+
+    virtual void requestExit(); // Overridden to wake threadLoop for prompt exit (assumed; confirm in .cpp)
+  protected:
+
+    virtual bool threadLoop();
+
+  private:
+    enum ComponentState {
+        IDLE,
+        ACTIVE
+    };
+
+    void markComponent(int id, ComponentState state,
+            const sp<Fence>& componentFence);
+
+    // Guards mPendingChangeQueue and mComponentsChanged
+    Mutex mPendingLock;
+
+    Condition mPendingChangeSignal; // Signalled when pending changes are queued (assumed; confirm in .cpp)
+
+    struct StateChange {
+        int id;
+        ComponentState state;
+        sp<Fence> fence;
+    };
+    // A queue of yet-to-be-processed state changes to components
+    Vector<StateChange> mPendingChangeQueue;
+    bool mComponentsChanged; // Cleared by threadLoop after each processing pass
+
+    wp<Camera3Device> mParent; // Receives notifyStatus() on overall idle/active transitions
+
+    // Guards rest of internals. Must be locked after mPendingLock if both used.
+    Mutex mLock;
+
+    int mNextComponentId; // ID handed out by the next addComponent call (assumed)
+
+    // Current component states
+    KeyedVector<int, ComponentState> mStates;
+    // Merged fence for all processed state changes
+    sp<Fence> mIdleFence;
+    // Current overall device state
+    ComponentState mDeviceState;
+
+    // Private to threadLoop
+
+    // Determine current overall device state
+    // We're IDLE iff
+    // - All components are currently IDLE
+    // - The merged fence for all component updates has signalled
+    ComponentState getDeviceStateLocked();
+
+    Vector<ComponentState> mStateTransitions; // Overall-state transitions to report to mParent
+
+    static const nsecs_t kWaitDuration = 250000000LL; // 250 ms
+};
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/utils/CameraTraces.cpp b/services/camera/libcameraservice/utils/CameraTraces.cpp
new file mode 100644
index 0000000..346e15f
--- /dev/null
+++ b/services/camera/libcameraservice/utils/CameraTraces.cpp
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraTraces"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include "utils/CameraTraces.h"
+#include <utils/ProcessCallStack.h>
+
+#include <utils/Mutex.h>
+#include <utils/List.h>
+
+#include <utils/Log.h>
+#include <cutils/trace.h>
+
+namespace android {
+namespace camera3 {
+
+struct CameraTracesImpl {
+    Mutex                    tracesLock; // Guards pcsList
+    List<ProcessCallStack>   pcsList;    // Saved traces, newest at the front
+}; // struct CameraTracesImpl
+
+static CameraTracesImpl gImpl;
+CameraTracesImpl& CameraTraces::sImpl = gImpl;
+
+void CameraTraces::saveTrace() {
+    ALOGV("%s: begin", __FUNCTION__);
+    ATRACE_BEGIN("CameraTraces::saveTrace");
+    Mutex::Autolock al(sImpl.tracesLock);
+
+    List<ProcessCallStack>& pcsList = sImpl.pcsList;
+
+    // Insert new ProcessCallStack, and immediately crawl all the threads
+    pcsList.push_front(ProcessCallStack());
+    ProcessCallStack& pcs = *pcsList.begin();
+    pcs.update(); // Capture the current stack of every thread in the process
+
+    if (pcsList.size() > MAX_TRACES) {
+        // Prune list periodically and discard oldest entry
+        pcsList.erase(--pcsList.end()); // push_front above => oldest entry is at the back
+    }
+
+    IF_ALOGV() {
+        pcs.log(LOG_TAG, ANDROID_LOG_VERBOSE);
+    }
+
+    ALOGD("Process trace saved. Use dumpsys media.camera to view.");
+
+    ATRACE_END();
+}
+
+status_t CameraTraces::dump(int fd, const Vector<String16> &args __attribute__((unused))) {
+    ALOGV("%s: fd = %d", __FUNCTION__, fd);
+    Mutex::Autolock al(sImpl.tracesLock);
+    List<ProcessCallStack>& pcsList = sImpl.pcsList;
+
+    if (fd < 0) {
+        ALOGW("%s: Negative FD (%d)", __FUNCTION__, fd);
+        return BAD_VALUE; // Invalid descriptor; nothing to write to
+    }
+
+    fdprintf(fd, "Camera traces (%zu):\n", pcsList.size());
+
+    if (pcsList.empty()) {
+        fdprintf(fd, "  No camera traces collected.\n");
+    }
+
+    // Print newest items first (saveTrace push_fronts, so begin() is newest)
+    List<ProcessCallStack>::iterator it, end;
+    for (it = pcsList.begin(), end = pcsList.end(); it != end; ++it) {
+        const ProcessCallStack& pcs = *it;
+        pcs.dump(fd, DUMP_INDENT); // Indent each line by DUMP_INDENT spaces
+    }
+
+    return OK;
+}
+
+}; // namespace camera3
+}; // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraTraces.h b/services/camera/libcameraservice/utils/CameraTraces.h
new file mode 100644
index 0000000..d10dbc9
--- /dev/null
+++ b/services/camera/libcameraservice/utils/CameraTraces.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_TRACES_H_
+#define ANDROID_SERVERS_CAMERA_TRACES_H_
+
+#include <utils/Errors.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+
+namespace android {
+namespace camera3 {
+
+struct CameraTracesImpl; // Matches the 'struct' definition in CameraTraces.cpp (avoids class/struct tag mismatch, MSVC C4099)
+
+// Collect a list of the process's stack traces
+class CameraTraces {
+public:
+    /**
+     * Save the current stack trace for each thread in the process. At most
+     * MAX_TRACES will be saved, after which the oldest traces will be discarded.
+     *
+     * <p>Use CameraTraces::dump to print out the traces.</p>
+     */
+    static void     saveTrace();
+
+    /**
+     * Prints all saved traces to the specified file descriptor.
+     *
+     * <p>Each line is indented by DUMP_INDENT spaces.</p>
+     */
+    static status_t dump(int fd, const Vector<String16>& args);
+
+private:
+    enum {
+        // Don't collect more than 100 traces. Discard oldest.
+        MAX_TRACES = 100,
+
+        // Insert 2 spaces when dumping the traces
+        DUMP_INDENT = 2,
+    };
+
+    CameraTraces(); // Static-only utility class; never instantiated
+    ~CameraTraces();
+    CameraTraces(CameraTraces& rhs); // Declared but not defined: non-copyable
+
+    static CameraTracesImpl& sImpl; // Hidden shared state; defined in CameraTraces.cpp
+}; // class CameraTraces
+
+}; // namespace camera3
+}; // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_TRACES_H_