Merge "Fix use of uninitialized value in libmediadrm" into oc-dev am: 5924e3db24 am: 444b037748 am: 74b97ba8f2
am: 256a84920c

Change-Id: I5e865beb3ae30a1e3305ed0d52e6ecc5e534992c
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 0771fc8..4ced08c 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -79,8 +79,9 @@
      * <p>
      * @param operatingMode The kind of session to create; either NORMAL_MODE or
      *     CONSTRAINED_HIGH_SPEED_MODE. Must be a non-negative value.
+     * @param sessionParams Session-wide camera parameters
      */
-    void endConfigure(int operatingMode);
+    void endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
 
     void deleteStream(int streamId);
 
@@ -140,5 +141,7 @@
 
     void prepare2(int maxCount, int streamId);
 
+    void updateOutputConfiguration(int streamId, in OutputConfiguration outputConfiguration);
+
     void finalizeOutputConfigurations(int streamId, in OutputConfiguration outputConfiguration);
 }
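
For context, the generated native client proxy gains matching methods; the call sites added later in this patch (CameraDevice::configureStreamsLocked and CameraDevice::updateOutputConfigurationLocked in camera/ndk/impl/ACameraDevice.cpp) invoke them roughly as in this minimal sketch, where mRemote is the binder proxy to the camera service and gbp, rotation, and streamId are placeholders:

    // Sketch of the new client-side calls (mirrors the NDK call sites later in this patch).
    CameraMetadata params;                       // session-wide parameters; may stay empty
    binder::Status remoteRet =
            mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params);

    OutputConfiguration outConfig(gbp, rotation,
            OutputConfiguration::INVALID_SET_ID, /*isShared*/ true);
    remoteRet = mRemote->updateOutputConfiguration(streamId, outConfig);
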
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index 0597950..983d29b 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -44,6 +44,8 @@
 
     mMetadata.clear();
     mSurfaceList.clear();
+    mStreamIdxList.clear();
+    mSurfaceIdxList.clear();
 
     status_t err = OK;
 
@@ -53,6 +55,13 @@
     }
     ALOGV("%s: Read metadata from parcel", __FUNCTION__);
 
+    int isReprocess = 0;
+    if ((err = parcel->readInt32(&isReprocess)) != OK) {
+        ALOGE("%s: Failed to read reprocessing from parcel", __FUNCTION__);
+        return err;
+    }
+    mIsReprocess = (isReprocess != 0);
+
     int32_t size;
     if ((err = parcel->readInt32(&size)) != OK) {
         ALOGE("%s: Failed to read surface list size from parcel", __FUNCTION__);
@@ -61,7 +70,7 @@
     ALOGV("%s: Read surface list size = %d", __FUNCTION__, size);
 
     // Do not distinguish null arrays from 0-sized arrays.
-    for (int i = 0; i < size; ++i) {
+    for (int32_t i = 0; i < size; ++i) {
         // Parcel.writeParcelableArray
         size_t len;
         const char16_t* className = parcel->readString16Inplace(&len);
@@ -88,12 +97,32 @@
         mSurfaceList.push_back(surface);
     }
 
-    int isReprocess = 0;
-    if ((err = parcel->readInt32(&isReprocess)) != OK) {
-        ALOGE("%s: Failed to read reprocessing from parcel", __FUNCTION__);
+    int32_t streamSurfaceSize;
+    if ((err = parcel->readInt32(&streamSurfaceSize)) != OK) {
+        ALOGE("%s: Failed to read streamSurfaceSize from parcel", __FUNCTION__);
         return err;
     }
-    mIsReprocess = (isReprocess != 0);
+
+    if (streamSurfaceSize < 0) {
+        ALOGE("%s: Bad streamSurfaceSize %d from parcel", __FUNCTION__, streamSurfaceSize);
+        return BAD_VALUE;
+    }
+
+    for (int32_t i = 0; i < streamSurfaceSize; ++i) {
+        int streamIdx;
+        if ((err = parcel->readInt32(&streamIdx)) != OK) {
+            ALOGE("%s: Failed to read stream index from parcel", __FUNCTION__);
+            return err;
+        }
+        mStreamIdxList.push_back(streamIdx);
+
+        int surfaceIdx;
+        if ((err = parcel->readInt32(&surfaceIdx)) != OK) {
+            ALOGE("%s: Failed to read surface index from parcel", __FUNCTION__);
+            return err;
+        }
+        mSurfaceIdxList.push_back(surfaceIdx);
+    }
 
     return OK;
 }
@@ -110,28 +139,43 @@
         return err;
     }
 
-    int32_t size = static_cast<int32_t>(mSurfaceList.size());
+    parcel->writeInt32(mIsReprocess ? 1 : 0);
 
-    // Send 0-sized arrays when it's empty. Do not send null arrays.
-    parcel->writeInt32(size);
+    if (mSurfaceConverted) {
+        parcel->writeInt32(0); // 0-sized array
+    } else {
+        int32_t size = static_cast<int32_t>(mSurfaceList.size());
 
-    for (int32_t i = 0; i < size; ++i) {
-        // not sure if readParcelableArray does this, hard to tell from source
-        parcel->writeString16(String16("android.view.Surface"));
+        // Send 0-sized arrays when it's empty. Do not send null arrays.
+        parcel->writeInt32(size);
 
-        // Surface.writeToParcel
-        view::Surface surfaceShim;
-        surfaceShim.name = String16("unknown_name");
-        surfaceShim.graphicBufferProducer = mSurfaceList[i]->getIGraphicBufferProducer();
-        if ((err = surfaceShim.writeToParcel(parcel)) != OK) {
-            ALOGE("%s: Failed to write output target Surface %d to parcel: %s (%d)",
-                    __FUNCTION__, i, strerror(-err), err);
-            return err;
+        for (int32_t i = 0; i < size; ++i) {
+            // not sure if readParcelableArray does this, hard to tell from source
+            parcel->writeString16(String16("android.view.Surface"));
+
+            // Surface.writeToParcel
+            view::Surface surfaceShim;
+            surfaceShim.name = String16("unknown_name");
+            surfaceShim.graphicBufferProducer = mSurfaceList[i]->getIGraphicBufferProducer();
+            if ((err = surfaceShim.writeToParcel(parcel)) != OK) {
+                ALOGE("%s: Failed to write output target Surface %d to parcel: %s (%d)",
+                        __FUNCTION__, i, strerror(-err), err);
+                return err;
+            }
         }
     }
 
-    parcel->writeInt32(mIsReprocess ? 1 : 0);
-
+    parcel->writeInt32(mStreamIdxList.size());
+    for (size_t i = 0; i < mStreamIdxList.size(); ++i) {
+        if ((err = parcel->writeInt32(mStreamIdxList[i])) != OK) {
+            ALOGE("%s: Failed to write stream index to parcel", __FUNCTION__);
+            return err;
+        }
+        if ((err = parcel->writeInt32(mSurfaceIdxList[i])) != OK) {
+            ALOGE("%s: Failed to write surface index to parcel", __FUNCTION__);
+            return err;
+        }
+    }
     return OK;
 }
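
For reference, a comment-only summary of the parcel layout that readFromParcel() and writeToParcel() now agree on (no new API; this only restates the code above):

    // CaptureRequest parcel layout after this change:
    //   CameraMetadata    mMetadata
    //   int32             isReprocess              (moved ahead of the surface list)
    //   int32             surfaceCount             (written as 0 when mSurfaceConverted is set)
    //   Surface[]         "android.view.Surface" class name + view::Surface, per entry
    //   int32             streamSurfaceCount
    //   (int32, int32)[]  (streamIdx, surfaceIdx) pairs into the configured outputs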
 
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 468a1eb..813d6c9 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -160,12 +160,12 @@
 }
 
 OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
-        int surfaceSetID) {
+        int surfaceSetID, bool isShared) {
     mGbps.push_back(gbp);
     mRotation = rotation;
     mSurfaceSetID = surfaceSetID;
     mIsDeferred = false;
-    mIsShared = false;
+    mIsShared = isShared;
 }
 
 status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
diff --git a/camera/include/camera/camera2/CaptureRequest.h b/camera/include/camera/camera2/CaptureRequest.h
index 0180183..c53799f 100644
--- a/camera/include/camera/camera2/CaptureRequest.h
+++ b/camera/include/camera/camera2/CaptureRequest.h
@@ -41,13 +41,30 @@
     virtual ~CaptureRequest();
 
     CameraMetadata          mMetadata;
+
+    // Used by NDK client to pass surfaces by stream/surface index.
+    bool                    mSurfaceConverted = false;
+
+    // Starting in Android O, creating a Surface from a Parcel takes one extra
+    // IPC call.
     Vector<sp<Surface> >    mSurfaceList;
+    // Optional way of passing the surface list, since passing Surfaces over binder
+    // is expensive. Use the stream/surface indices from the current output configuration
+    // to represent a configured output Surface. When stream/surface indices are used,
+    // set mSurfaceList to zero length to save unparceling time.
+    Vector<int>             mStreamIdxList;
+    Vector<int>             mSurfaceIdxList; // per stream surface list index
+
     bool                    mIsReprocess;
 
+    void*                   mContext; // arbitrary user context from NDK apps, null for java apps
+
     /**
      * Keep impl up-to-date with CaptureRequest.java in frameworks/base
      */
+    // used by cameraserver to receive CaptureRequest from java/NDK client
     status_t                readFromParcel(const android::Parcel* parcel) override;
+    // used by NDK client to send CaptureRequest to cameraserver
     status_t                writeToParcel(android::Parcel* parcel) const override;
 };
 
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 8e641c7..3599604 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -64,7 +64,7 @@
     OutputConfiguration(const android::Parcel& parcel);
 
     OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
-            int surfaceSetID = INVALID_SET_ID);
+            int surfaceSetID = INVALID_SET_ID, bool isShared = false);
 
     bool operator == (const OutputConfiguration& other) const {
         return ( mRotation == other.mRotation &&
@@ -110,6 +110,7 @@
 
     bool gbpsEqual(const OutputConfiguration& other) const;
     bool gbpsLessThan(const OutputConfiguration& other) const;
+    void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {mGbps.push_back(gbp);}
 private:
     std::vector<sp<IGraphicBufferProducer>> mGbps;
     int                        mRotation;
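
A minimal sketch of how the widened constructor and addGraphicProducer() combine for a shared stream, mirroring CameraDevice::updateOutputConfigurationLocked later in this patch; gbp and extraGbp stand for sp<IGraphicBufferProducer> handles already extracted from the primary and the added native windows:

    OutputConfiguration outConfig(gbp, /*rotation*/ 0,
            OutputConfiguration::INVALID_SET_ID, /*isShared*/ true);
    outConfig.addGraphicProducer(extraGbp);   // extra consumer attached to the same stream
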
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 2a6b182..fd95296 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -135,3 +135,19 @@
     }
     return session->abortCaptures();
 }
+
+EXPORT
+camera_status_t ACameraCaptureSession_updateSharedOutput(ACameraCaptureSession* session,
+        ACaptureSessionOutput* output) {
+    ATRACE_CALL();
+    if (session == nullptr) {
+        ALOGE("%s: Error: session is null", __FUNCTION__);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (session->isClosed()) {
+        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+    return session->updateOutputConfiguration(output);
+}
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index 9f6d1f7..812a312 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -103,11 +103,74 @@
                 __FUNCTION__, window, out);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    *out = new ACaptureSessionOutput(window);
+    *out = new ACaptureSessionOutput(window, false);
     return ACAMERA_OK;
 }
 
 EXPORT
+camera_status_t ACaptureSessionSharedOutput_create(
+        ANativeWindow* window, /*out*/ACaptureSessionOutput** out) {
+    ATRACE_CALL();
+    if (window == nullptr || out == nullptr) {
+        ALOGE("%s: Error: bad argument. window %p, out %p",
+                __FUNCTION__, window, out);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    *out = new ACaptureSessionOutput(window, true);
+    return ACAMERA_OK;
+}
+
+EXPORT
+camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *out,
+        ANativeWindow* window) {
+    ATRACE_CALL();
+    if ((window == nullptr) || (out == nullptr)) {
+        ALOGE("%s: Error: bad argument. window %p, out %p",
+                __FUNCTION__, window, out);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    if (!out->mIsShared) {
+        ALOGE("%s: Error trying to insert a new window in non-shared output configuration",
+                __FUNCTION__);
+        return ACAMERA_ERROR_INVALID_OPERATION;
+    }
+    if (out->mWindow == window) {
+        ALOGE("%s: Error trying to add the same window associated with the output configuration",
+                __FUNCTION__);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    auto insert = out->mSharedWindows.insert(window);
+    camera_status_t ret = (insert.second) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
+    return ret;
+}
+
+EXPORT
+camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *out,
+        ANativeWindow* window) {
+    ATRACE_CALL();
+    if ((window == nullptr) || (out == nullptr)) {
+        ALOGE("%s: Error: bad argument. window %p, out %p",
+                __FUNCTION__, window, out);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    if (!out->mIsShared) {
+        ALOGE("%s: Error trying to remove a  window in non-shared output configuration",
+                __FUNCTION__);
+        return ACAMERA_ERROR_INVALID_OPERATION;
+    }
+    if (out->mWindow == window) {
+        ALOGE("%s: Error trying to remove the same window associated with the output configuration",
+                __FUNCTION__);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    auto remove = out->mSharedWindows.erase(window);
+    camera_status_t ret = (remove) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
+    return ret;
+}
+
+EXPORT
 void ACaptureSessionOutput_free(ACaptureSessionOutput* output) {
     ATRACE_CALL();
     if (output != nullptr) {
@@ -157,5 +220,21 @@
                 __FUNCTION__, device, outputs, callbacks, session);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    return device->createCaptureSession(outputs, callbacks, session);
+    return device->createCaptureSession(outputs, nullptr, callbacks, session);
+}
+
+EXPORT
+camera_status_t ACameraDevice_createCaptureSessionWithSessionParameters(
+        ACameraDevice* device,
+        const ACaptureSessionOutputContainer*       outputs,
+        const ACaptureRequest* sessionParameters,
+        const ACameraCaptureSession_stateCallbacks* callbacks,
+        /*out*/ACameraCaptureSession** session) {
+    ATRACE_CALL();
+    if (device == nullptr || outputs == nullptr || callbacks == nullptr || session == nullptr) {
+        ALOGE("%s: Error: invalid input: device %p, outputs %p, callbacks %p, session %p",
+                __FUNCTION__, device, outputs, callbacks, session);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return device->createCaptureSession(outputs, sessionParameters, callbacks, session);
 }
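
A short sketch of the validation rules implemented above, assuming win and otherWin are distinct, valid ANativeWindow pointers:

    ACaptureSessionOutput* shared = nullptr;
    ACaptureSessionSharedOutput_create(win, &shared);

    camera_status_t st;
    st = ACaptureSessionSharedOutput_add(shared, otherWin);   // ACAMERA_OK
    st = ACaptureSessionSharedOutput_add(shared, win);        // INVALID_PARAMETER (primary window)
    st = ACaptureSessionSharedOutput_add(shared, otherWin);   // INVALID_PARAMETER (duplicate)

    ACaptureSessionOutput* plain = nullptr;
    ACaptureSessionOutput_create(win, &plain);
    st = ACaptureSessionSharedOutput_remove(plain, otherWin); // INVALID_OPERATION (not shared)
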
diff --git a/camera/ndk/NdkCaptureRequest.cpp b/camera/ndk/NdkCaptureRequest.cpp
index 5b4c180..ac1856b 100644
--- a/camera/ndk/NdkCaptureRequest.cpp
+++ b/camera/ndk/NdkCaptureRequest.cpp
@@ -142,3 +142,40 @@
     delete request;
     return;
 }
+
+EXPORT
+camera_status_t ACaptureRequest_setUserContext(
+        ACaptureRequest* request, void* context) {
+    if (request == nullptr) {
+        ALOGE("%s: invalid argument! request is NULL", __FUNCTION__);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return request->setContext(context);
+}
+
+EXPORT
+camera_status_t ACaptureRequest_getUserContext(
+        const ACaptureRequest* request, /*out*/void** context) {
+    if (request == nullptr || context == nullptr) {
+        ALOGE("%s: invalid argument! request %p, context %p",
+                __FUNCTION__, request, context);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return request->getContext(context);
+}
+
+EXPORT
+ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src) {
+    ATRACE_CALL();
+    if (src == nullptr) {
+        ALOGE("%s: src is null!", __FUNCTION__);
+        return nullptr;
+    }
+
+    ACaptureRequest* pRequest = new ACaptureRequest();
+    pRequest->settings = new ACameraMetadata(*(src->settings));
+    pRequest->targets  = new ACameraOutputTargets();
+    *(pRequest->targets)  = *(src->targets);
+    pRequest->context = src->context;
+    return pRequest;
+}
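
A minimal usage sketch for the new user-context and copy entry points; MyRequestState is a hypothetical application-side type:

    struct MyRequestState { int64_t id; };
    static MyRequestState sState = { 42 };

    void tagAndClone(ACaptureRequest* request) {
        ACaptureRequest_setUserContext(request, &sState);

        ACaptureRequest* clone = ACaptureRequest_copy(request); // copies settings, targets, context
        void* ctx = nullptr;
        ACaptureRequest_getUserContext(clone, &ctx);            // ctx == &sState
        ACaptureRequest_free(clone);
    }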
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index b9c159d..f60e5fd 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -148,6 +148,23 @@
     return ret;
 }
 
+camera_status_t ACameraCaptureSession::updateOutputConfiguration(ACaptureSessionOutput *output) {
+    sp<CameraDevice> dev = getDeviceSp();
+    if (dev == nullptr) {
+        ALOGE("Error: Device associated with session %p has been closed!", this);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+
+    camera_status_t ret;
+    dev->lockDeviceForSessionOps();
+    {
+        Mutex::Autolock _l(mSessionLock);
+        ret = dev->updateOutputConfigurationLocked(output);
+    }
+    dev->unlockDevice();
+    return ret;
+}
+
 ACameraDevice*
 ACameraCaptureSession::getDevice() {
     Mutex::Autolock _l(mSessionLock);
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 339c665..a2068e7 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -24,7 +24,8 @@
 using namespace android;
 
 struct ACaptureSessionOutput {
-    explicit ACaptureSessionOutput(ANativeWindow* window) : mWindow(window) {};
+    explicit ACaptureSessionOutput(ANativeWindow* window, bool isShared = false) :
+            mWindow(window), mIsShared(isShared) {};
 
     bool operator == (const ACaptureSessionOutput& other) const {
         return mWindow == other.mWindow;
@@ -40,6 +41,8 @@
     }
 
     ANativeWindow* mWindow;
+    std::set<ANativeWindow *> mSharedWindows;
+    bool           mIsShared;
     int            mRotation = CAMERA3_STREAM_ROTATION_0;
 };
 
@@ -89,6 +92,8 @@
             int numRequests, ACaptureRequest** requests,
             /*optional*/int* captureSequenceId);
 
+    camera_status_t updateOutputConfiguration(ACaptureSessionOutput *output);
+
     ACameraDevice* getDevice();
 
   private:
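
The comparison operators above key ACaptureSessionOutput purely on its primary window, so the new mIsShared and mSharedWindows fields do not affect identity; a minimal illustration, assuming win is a valid ANativeWindow*:

    ACaptureSessionOutput shared(win, /*isShared*/ true);
    ACaptureSessionOutput plain(win);          // isShared defaults to false
    assert(shared == plain);                   // equal: identity is mWindow only
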
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 907802c..f7cea4f 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -157,6 +157,7 @@
 camera_status_t
 CameraDevice::createCaptureSession(
         const ACaptureSessionOutputContainer*       outputs,
+        const ACaptureRequest* sessionParameters,
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) {
     sp<ACameraCaptureSession> currentSession = mCurrentSession.promote();
@@ -172,7 +173,7 @@
     }
 
     // Create new session
-    ret = configureStreamsLocked(outputs);
+    ret = configureStreamsLocked(outputs, sessionParameters);
     if (ret != ACAMERA_OK) {
         ALOGE("Fail to create new session. cannot configure streams");
         return ret;
@@ -289,6 +290,83 @@
     return ACAMERA_OK;
 }
 
+camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOutput *output) {
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        return ret;
+    }
+
+    if (output == nullptr) {
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (!output->mIsShared) {
+        ALOGE("Error output configuration is not shared");
+        return ACAMERA_ERROR_INVALID_OPERATION;
+    }
+
+    int32_t streamId = -1;
+    for (auto& kvPair : mConfiguredOutputs) {
+        if (kvPair.second.first == output->mWindow) {
+            streamId = kvPair.first;
+            break;
+        }
+    }
+    if (streamId < 0) {
+        ALOGE("Error: Invalid output configuration");
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    sp<IGraphicBufferProducer> iGBP(nullptr);
+    ret = getIGBPfromAnw(output->mWindow, iGBP);
+    if (ret != ACAMERA_OK) {
+        ALOGE("Camera device %s failed to extract graphic producer from native window",
+                getId());
+        return ret;
+    }
+
+    OutputConfiguration outConfig(iGBP, output->mRotation, OutputConfiguration::INVALID_SET_ID,
+            true);
+
+    for (auto& anw : output->mSharedWindows) {
+        ret = getIGBPfromAnw(anw, iGBP);
+        if (ret != ACAMERA_OK) {
+            ALOGE("Camera device %s failed to extract graphic producer from native window",
+                    getId());
+            return ret;
+        }
+        outConfig.addGraphicProducer(iGBP);
+    }
+
+    auto remoteRet = mRemote->updateOutputConfiguration(streamId, outConfig);
+    if (!remoteRet.isOk()) {
+        switch (remoteRet.serviceSpecificErrorCode()) {
+            case hardware::ICameraService::ERROR_INVALID_OPERATION:
+                ALOGE("Camera device %s invalid operation: %s", getId(),
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_INVALID_OPERATION;
+                break;
+            case hardware::ICameraService::ERROR_ALREADY_EXISTS:
+                ALOGE("Camera device %s output surface already exists: %s", getId(),
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_INVALID_PARAMETER;
+                break;
+            case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
+                ALOGE("Camera device %s invalid input argument: %s", getId(),
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_INVALID_PARAMETER;
+                break;
+            default:
+                ALOGE("Camera device %s failed to add shared output: %s", getId(),
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_UNKNOWN;
+        }
+    }
+    mConfiguredOutputs[streamId] = std::make_pair(output->mWindow, outConfig);
+
+    return ACAMERA_OK;
+}
+
 camera_status_t
 CameraDevice::allocateCaptureRequest(
         const ACaptureRequest* request, /*out*/sp<CaptureRequest>& outReq) {
@@ -296,6 +374,8 @@
     sp<CaptureRequest> req(new CaptureRequest());
     req->mMetadata = request->settings->getInternalData();
     req->mIsReprocess = false; // NDK does not support reprocessing yet
+    req->mContext = request->context;
+    req->mSurfaceConverted = true; // set to true, and fill in stream/surface idx to speed up IPC
 
     for (auto outputTarget : request->targets->mOutputs) {
         ANativeWindow* anw = outputTarget.mWindow;
@@ -306,7 +386,31 @@
             return ret;
         }
         req->mSurfaceList.push_back(surface);
+
+        bool found = false;
+        // lookup stream/surface ID
+        for (const auto& kvPair : mConfiguredOutputs) {
+            int streamId = kvPair.first;
+            const OutputConfiguration& outConfig = kvPair.second.second;
+            const auto& gbps = outConfig.getGraphicBufferProducers();
+            for (int surfaceId = 0; surfaceId < (int) gbps.size(); surfaceId++) {
+                if (gbps[surfaceId] == surface->getIGraphicBufferProducer()) {
+                    found = true;
+                    req->mStreamIdxList.push_back(streamId);
+                    req->mSurfaceIdxList.push_back(surfaceId);
+                    break;
+                }
+            }
+            if (found) {
+                break;
+            }
+        }
+        if (!found) {
+            ALOGE("Unconfigured output target %p in capture request!", anw);
+            return ret;
+        }
     }
+
     outReq = req;
     return ACAMERA_OK;
 }
@@ -322,6 +426,7 @@
         ACameraOutputTarget outputTarget(anw);
         pRequest->targets->mOutputs.insert(outputTarget);
     }
+    pRequest->context = req->mContext;
     return pRequest;
 }
 
@@ -356,7 +461,7 @@
     }
 
     // No new session, unconfigure now
-    camera_status_t ret = configureStreamsLocked(nullptr);
+    camera_status_t ret = configureStreamsLocked(nullptr, nullptr);
     if (ret != ACAMERA_OK) {
         ALOGE("Unconfigure stream failed. Device might still be configured! ret %d", ret);
     }
@@ -486,17 +591,11 @@
 CameraDevice::getIGBPfromAnw(
         ANativeWindow* anw,
         sp<IGraphicBufferProducer>& out) {
-    if (anw == nullptr) {
-        ALOGE("Error: output ANativeWindow is null");
-        return ACAMERA_ERROR_INVALID_PARAMETER;
+    sp<Surface> surface;
+    camera_status_t ret = getSurfaceFromANativeWindow(anw, surface);
+    if (ret != ACAMERA_OK) {
+        return ret;
     }
-    int value;
-    int err = (*anw->query)(anw, NATIVE_WINDOW_CONCRETE_TYPE, &value);
-    if (err != OK || value != NATIVE_WINDOW_SURFACE) {
-        ALOGE("Error: ANativeWindow is not backed by Surface!");
-        return ACAMERA_ERROR_INVALID_PARAMETER;
-    }
-    const sp<Surface> surface(static_cast<Surface*>(anw));
     out = surface->getIGraphicBufferProducer();
     return ACAMERA_OK;
 }
@@ -520,7 +619,8 @@
 }
 
 camera_status_t
-CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs) {
+CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
+        const ACaptureRequest* sessionParameters) {
     ACaptureSessionOutputContainer emptyOutput;
     if (outputs == nullptr) {
         outputs = &emptyOutput;
@@ -540,7 +640,8 @@
             return ret;
         }
         outputSet.insert(std::make_pair(
-                anw, OutputConfiguration(iGBP, outConfig.mRotation)));
+                anw, OutputConfiguration(iGBP, outConfig.mRotation,
+                        OutputConfiguration::INVALID_SET_ID, outConfig.mIsShared)));
     }
     auto addSet = outputSet;
     std::vector<int> deleteList;
@@ -615,7 +716,11 @@
         mConfiguredOutputs.insert(std::make_pair(streamId, outputPair));
     }
 
-    remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false);
+    CameraMetadata params;
+    if ((sessionParameters != nullptr) && (sessionParameters->settings != nullptr)) {
+        params.append(sessionParameters->settings->getInternalData());
+    }
+    remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params);
     if (remoteRet.serviceSpecificErrorCode() == hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
         ALOGE("Camera device %s cannnot support app output configuration: %s", getId(),
                 remoteRet.toString8().string());
@@ -730,19 +835,26 @@
             setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
             return;
         }
-        ANativeWindow* anw = outputPairIt->second.first;
 
-        ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
-                getId(), anw, frameNumber);
+        const auto& gbps = outputPairIt->second.second.getGraphicBufferProducers();
+        for (const auto& outGbp : gbps) {
+            for (auto surface : request->mSurfaceList) {
+                if (surface->getIGraphicBufferProducer() == outGbp) {
+                    ANativeWindow* anw = static_cast<ANativeWindow*>(surface.get());
+                    ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
+                            getId(), anw, frameNumber);
 
-        sp<AMessage> msg = new AMessage(kWhatCaptureBufferLost, mHandler);
-        msg->setPointer(kContextKey, cbh.mCallbacks.context);
-        msg->setObject(kSessionSpKey, session);
-        msg->setPointer(kCallbackFpKey, (void*) onBufferLost);
-        msg->setObject(kCaptureRequestKey, request);
-        msg->setPointer(kAnwKey, (void*) anw);
-        msg->setInt64(kFrameNumberKey, frameNumber);
-        postSessionMsgAndCleanup(msg);
+                    sp<AMessage> msg = new AMessage(kWhatCaptureBufferLost, mHandler);
+                    msg->setPointer(kContextKey, cbh.mCallbacks.context);
+                    msg->setObject(kSessionSpKey, session);
+                    msg->setPointer(kCallbackFpKey, (void*) onBufferLost);
+                    msg->setObject(kCaptureRequestKey, request);
+                    msg->setPointer(kAnwKey, (void*) anw);
+                    msg->setInt64(kFrameNumberKey, frameNumber);
+                    postSessionMsgAndCleanup(msg);
+                }
+            }
+        }
     } else { // Handle other capture failures
         // Fire capture failure callback if there is one registered
         ACameraCaptureSession_captureCallback_failed onError = cbh.mCallbacks.onCaptureFailed;
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 6ed3881..1db3dfb 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -36,7 +36,8 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/CaptureRequest.h>
 
-#include <camera/NdkCameraDevice.h>
+#include <camera/NdkCameraManager.h>
+#include <camera/NdkCameraCaptureSession.h>
 #include "ACameraMetadata.h"
 
 namespace android {
@@ -59,6 +60,7 @@
 
     camera_status_t createCaptureSession(
             const ACaptureSessionOutputContainer*       outputs,
+            const ACaptureRequest* sessionParameters,
             const ACameraCaptureSession_stateCallbacks* callbacks,
             /*out*/ACameraCaptureSession** session);
 
@@ -122,7 +124,9 @@
             /*out*/int* captureSequenceId,
             bool isRepeating);
 
-    static camera_status_t allocateCaptureRequest(
+    camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
+
+    camera_status_t allocateCaptureRequest(
             const ACaptureRequest* request, sp<CaptureRequest>& outReq);
 
     static ACaptureRequest* allocateACaptureRequest(sp<CaptureRequest>& req);
@@ -136,7 +140,8 @@
     // For capture session to notify its end of life
     void notifySessionEndOfLifeLocked(ACameraCaptureSession* session);
 
-    camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs);
+    camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
+           const ACaptureRequest* sessionParameters);
 
     // Input message will be posted and cleared after this returns
     void postSessionMsgAndCleanup(sp<AMessage>& msg);
@@ -306,9 +311,10 @@
 
     camera_status_t createCaptureSession(
             const ACaptureSessionOutputContainer*       outputs,
+            const ACaptureRequest* sessionParameters,
             const ACameraCaptureSession_stateCallbacks* callbacks,
             /*out*/ACameraCaptureSession** session) {
-        return mDevice->createCaptureSession(outputs, callbacks, session);
+        return mDevice->createCaptureSession(outputs, sessionParameters, callbacks, session);
     }
 
     /***********************
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 7b33c32..29ad09b 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -235,7 +235,7 @@
 }
 
 const CameraMetadata&
-ACameraMetadata::getInternalData() {
+ACameraMetadata::getInternalData() const {
     return mData;
 }
 
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 143efc7..0fd7efa 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -64,7 +64,7 @@
     void filterUnsupportedFeatures(); // Hide features not yet supported by NDK
     void filterStreamConfigurations(); // Hide input streams, translate hal format to NDK formats
 
-    const CameraMetadata& getInternalData();
+    const CameraMetadata& getInternalData() const;
 
     template<typename INTERNAL_T, typename NDK_T>
     camera_status_t updateImpl(uint32_t tag, uint32_t count, const NDK_T* data) {
diff --git a/camera/ndk/impl/ACaptureRequest.h b/camera/ndk/impl/ACaptureRequest.h
index e5b453e..06b2cc3 100644
--- a/camera/ndk/impl/ACaptureRequest.h
+++ b/camera/ndk/impl/ACaptureRequest.h
@@ -45,8 +45,19 @@
 };
 
 struct ACaptureRequest {
+    camera_status_t setContext(void* ctx) {
+        context = ctx;
+        return ACAMERA_OK;
+    }
+
+    camera_status_t getContext(void** ctx) const {
+        *ctx = context;
+        return ACAMERA_OK;
+    }
+
     ACameraMetadata*      settings;
     ACameraOutputTargets* targets;
+    void*                 context;
 };
 
 #endif // _ACAPTURE_REQUEST_H
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index d96f538..67003c1 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -591,6 +591,54 @@
 
 #endif /* __ANDROID_API__ >= 24 */
 
+#if __ANDROID_API__ >= 28
+
+typedef struct ACaptureSessionOutput ACaptureSessionOutput;
+
+/**
+ * Update shared ACaptureSessionOutput.
+ *
+ * <p>A shared ACaptureSessionOutput (see {@link ACaptureSessionSharedOutput_create}) that
+ * was modified via calls to {@link ACaptureSessionSharedOutput_add} or
+ * {@link ACaptureSessionSharedOutput_remove} must be updated by calling this method before its
+ * changes take effect. After the update call returns {@link ACAMERA_OK}, any newly added
+ * native windows can be used as targets in subsequent capture requests.</p>
+ *
+ * <p>Native windows that get removed must not be part of any active repeating or single/burst
+ * request or have any pending results. Consider updating repeating requests via
+ * {@link ACameraCaptureSession_setRepeatingRequest} and then waiting for the last frame number
+ * reported when the sequence completes in
+ * {@link ACameraCaptureSession_captureCallback#onCaptureSequenceCompleted}.</p>
+ *
+ * <p>Native windows that get added must not be part of any other registered ACaptureSessionOutput
+ * and must be compatible. Compatible windows must have matching format, rotation and
+ * consumer usage.</p>
+ *
+ * <p>A shared ACaptureSessionOutput can support up to 4 additional native windows.</p>
+ *
+ * @param session the capture session of interest
+ * @param output the modified output configuration
+ *
+ * @return <ul><li>
+ *             {@link ACAMERA_OK} if the method succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or output is NULL; or output
+ *             contains invalid native windows; or if an attempt was made to add
+ *             a native window to a different output configuration; or new native window is not
+ *             compatible; or any removed native window still has pending requests;</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_OPERATION} if output configuration is not shared (see
+ *             {@link ACaptureSessionSharedOutput_create}); or the number of additional
+ *             native windows goes beyond the supported limit.</li>
+ *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal
+ *             error</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
+ */
+camera_status_t ACameraCaptureSession_updateSharedOutput(ACameraCaptureSession* session,
+        ACaptureSessionOutput* output);
+#endif /* __ANDROID_API__ >= 28 */
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
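
A sketch of the intended call sequence, assuming session was created from an output container holding sharedOutput (itself created with ACaptureSessionSharedOutput_create on previewWindow) and that extraWindow is compatible in format, rotation and usage:

    camera_status_t st = ACaptureSessionSharedOutput_add(sharedOutput, extraWindow);
    if (st == ACAMERA_OK) {
        st = ACameraCaptureSession_updateSharedOutput(session, sharedOutput);
    }
    // After ACAMERA_OK, extraWindow may be used as a target in new capture requests.

    // Later, once no repeating or pending request targets extraWindow any more:
    if (ACaptureSessionSharedOutput_remove(sharedOutput, extraWindow) == ACAMERA_OK) {
        st = ACameraCaptureSession_updateSharedOutput(session, sharedOutput);
    }
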
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 6c9e85a..c0eb5c1 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -251,6 +251,36 @@
      * @see ACameraDevice_createCaptureRequest
      */
     TEMPLATE_MANUAL = 6,
+
+    /**
+     * A template for selecting camera parameters that match TEMPLATE_PREVIEW as closely as
+     * possible while improving the camera output for motion tracking use cases.
+     *
+     * <p>This template is best used by applications that are frequently switching between motion
+     * tracking use cases and regular still capture use cases, to minimize the IQ changes
+     * when swapping use cases.</p>
+     *
+     * <p>This template is guaranteed to be supported on camera devices that support the
+     * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING} capability.</p>
+     *
+     * @see ACameraDevice_createCaptureRequest
+     */
+    TEMPLATE_MOTION_TRACKING_PREVIEW = 7,
+
+    /**
+     * A template for selecting camera parameters that maximize the quality of camera output for
+     * motion tracking use cases.
+     *
+     * <p>This template is best used by applications dedicated to motion tracking,
+     * which aren't concerned about fast switches between motion tracking and other use cases.</p>
+     *
+     * <p>This template is guaranteed to be supported on camera devices that support the
+     * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING} capability.</p>
+     *
+     * @see ACameraDevice_createCaptureRequest
+     */
+    TEMPLATE_MOTION_TRACKING_BEST = 8,
+
 } ACameraDevice_request_template;
 
 /**
@@ -661,9 +691,102 @@
 
 #endif /* __ANDROID_API__ >= 24 */
 
+#if __ANDROID_API__ >= 28
+
+/**
+ * Create a shared ACaptureSessionOutput object.
+ *
+ * <p>The ACaptureSessionOutput is used in {@link ACaptureSessionOutputContainer_add} method to add
+ * an output {@link ANativeWindow} to ACaptureSessionOutputContainer. Use
+ * {@link ACaptureSessionOutput_free} to free the object and its memory after application no longer
+ * needs the {@link ACaptureSessionOutput}. A shared ACaptureSessionOutput can be further modified
+ * via {@link ACaptureSessionSharedOutput_add} or {@link ACaptureSessionSharedOutput_remove} and
+ * must be updated via {@link ACameraCaptureSession_updateSharedOutput}.</p>
+ *
+ * @param anw the {@link ANativeWindow} to be associated with the {@link ACaptureSessionOutput}
+ * @param output the output {@link ACaptureSessionOutput} will be stored here if the
+ *                  method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds. The created output will be
+ *                                filled in the output argument.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if anw or output is NULL.</li></ul>
+ *
+ * @see ACaptureSessionOutputContainer_add
+ */
+camera_status_t ACaptureSessionSharedOutput_create(
+        ANativeWindow* anw, /*out*/ACaptureSessionOutput** output);
+
+/**
+ * Add a native window to shared ACaptureSessionOutput.
+ *
+ * The ACaptureSessionOutput must be created via {@link ACaptureSessionSharedOutput_create}.
+ *
+ * @param output the shared ACaptureSessionOutput to be extended.
+ * @param anw The new native window.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if anw or output is NULL; or output is not
+ *             shared (see {@link ACaptureSessionSharedOutput_create}); or anw matches the native
+ *             window associated with ACaptureSessionOutput; or anw is already present inside
+ *             ACaptureSessionOutput.</li></ul>
+ */
+camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *output, ANativeWindow *anw);
+
+/**
+ * Remove a native window from shared ACaptureSessionOutput.
+ *
+ * @param output the {@link ACaptureSessionOutput} to be modified.
+ * @param anw The native window to be removed.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if anw or output is NULL; or output is not
+ *             shared (see {@link ACaptureSessionSharedOutput_create}); or anw matches the native
+ *             window associated with ACaptureSessionOutput; or anw is not present inside
+ *             ACaptureSessionOutput.</li></ul>
+ */
+camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *output,
+        ANativeWindow* anw);
+
+/**
+ * Create a new camera capture session similar to {@link ACameraDevice_createCaptureSession}. This
+ * function allows clients to pass additional session parameters during session initialization. For
+ * further information about session parameters see {@link ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS}.
+ *
+ * @param device the camera device of interest.
+ * @param outputs the {@link ACaptureSessionOutputContainer} describes all output streams.
+ * @param sessionParameters An optional capture request that contains the initial values of session
+ *                          parameters advertised in
+ *                          {@link ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS}.
+ * @param callbacks the {@link ACameraCaptureSession_stateCallbacks}
+ *                  capture session state callbacks.
+ * @param session the created {@link ACameraCaptureSession} will be filled here if the method call
+ *                succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds. The created capture session will be
+ *                                filled in the session argument.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if any of device, outputs, callbacks or
+ *                                session is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed.</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error.</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error.
+ *         </li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ */
+camera_status_t ACameraDevice_createCaptureSessionWithSessionParameters(
+        ACameraDevice* device,
+        const ACaptureSessionOutputContainer* outputs,
+        const ACaptureRequest* sessionParameters,
+        const ACameraCaptureSession_stateCallbacks* callbacks,
+        /*out*/ACameraCaptureSession** session);
+
+#endif /* __ANDROID_API__ >= 28 */
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_DEVICE_H */
 
 /** @} */
-
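
A minimal creation sketch, assuming device is an opened ACameraDevice, previewWindow a valid ANativeWindow, and stateCallbacks an already populated ACameraCaptureSession_stateCallbacks; only keys advertised in ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS take effect as session parameters:

    ACaptureRequest* sessionParams = nullptr;
    ACameraDevice_createCaptureRequest(device, TEMPLATE_PREVIEW, &sessionParams);
    // ... set session-parameter keys on sessionParams via ACaptureRequest_setEntry_* ...

    ACaptureSessionOutputContainer* outputs = nullptr;
    ACaptureSessionOutputContainer_create(&outputs);
    ACaptureSessionOutput* output = nullptr;
    ACaptureSessionOutput_create(previewWindow, &output);
    ACaptureSessionOutputContainer_add(outputs, output);

    ACameraCaptureSession* session = nullptr;
    camera_status_t st = ACameraDevice_createCaptureSessionWithSessionParameters(
            device, outputs, sessionParams, &stateCallbacks, &session);

    ACaptureRequest_free(sessionParams);  // the parameters are copied during configuration
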
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 629d75a..2c144b7 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -471,10 +471,6 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of regions supported by the device is determined by the value
      * of android.control.maxRegionsAe.</p>
-     * <p>The data representation is int[5 * area_count].
-     * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
-     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
-     * ymax.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -495,6 +491,10 @@
      * region and output only the intersection rectangle as the metering region in the result
      * metadata.  If the region is entirely outside the crop region, it will be ignored and
      * not reported in the result metadata.</p>
+     * <p>The data representation is <code>int[5 * area_count]</code>.
+     * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
+     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+     * ymax.</p>
      *
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -633,10 +633,6 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of focus areas supported by the device is determined by the value
      * of android.control.maxRegionsAf.</p>
-     * <p>The data representation is int[5 * area_count].
-     * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
-     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
-     * ymax.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -657,6 +653,10 @@
      * region and output only the intersection rectangle as the metering region in the result
      * metadata. If the region is entirely outside the crop region, it will be ignored and
      * not reported in the result metadata.</p>
+     * <p>The data representation is <code>int[5 * area_count]</code>.
+     * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
+     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+     * ymax.</p>
      *
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -791,10 +791,6 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of regions supported by the device is determined by the value
      * of android.control.maxRegionsAwb.</p>
-     * <p>The data representation is int[5 * area_count].
-     * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
-     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
-     * ymax.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -815,6 +811,10 @@
      * region and output only the intersection rectangle as the metering region in the result
      * metadata.  If the region is entirely outside the crop region, it will be ignored and
      * not reported in the result metadata.</p>
+     * <p>The data representation is <code>int[5 * area_count]</code>.
+     * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
+     * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+     * ymax.</p>
      *
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -837,10 +837,13 @@
      *
      * <p>This control (except for MANUAL) is only effective if
      * <code>ACAMERA_CONTROL_MODE != OFF</code> and any 3A routine is active.</p>
-     * <p>ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
-     * contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
-     * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains MANUAL_SENSOR. Other intent values are
-     * always supported.</p>
+     * <p>All intents are supported by all devices, except that:
+     *   * ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+     * PRIVATE_REPROCESSING or YUV_REPROCESSING.
+     *   * MANUAL will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+     * MANUAL_SENSOR.
+     *   * MOTION_TRACKING will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+     * MOTION_TRACKING.</p>
      *
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -885,11 +888,10 @@
      * <p>When set to AUTO, the individual algorithm controls in
      * ACAMERA_CONTROL_* are in effect, such as ACAMERA_CONTROL_AF_MODE.</p>
      * <p>When set to USE_SCENE_MODE, the individual controls in
-     * ACAMERA_CONTROL_* are mostly disabled, and the camera device implements
-     * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
-     * as it wishes. The camera device scene mode 3A settings are provided by
-     * capture results {@link ACameraMetadata} from
-     * {@link ACameraCaptureSession_captureCallback_result}.</p>
+     * ACAMERA_CONTROL_* are mostly disabled, and the camera device
+     * implements one of the scene mode settings (such as ACTION,
+     * SUNSET, or PARTY) as it wishes. The camera device scene mode
+     * 3A settings are provided by {@link ACameraCaptureSession_captureCallback_result capture results}.</p>
      * <p>When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
      * is that this frame will not be used by camera device background 3A statistics
      * update, as if this frame is never captured. This mode can be used in the scenario
@@ -1043,20 +1045,18 @@
      * <p>For constant-framerate recording, for each normal
      * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>, that is, a
      * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a> that has
-     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#quality">quality</a>
-     * in the range [
-     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_LOW">QUALITY_LOW</a>,
-     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_2160P">QUALITY_2160P</a>],
-     * if the profile is supported by the device and has
-     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a>
-     * <code>x</code>, this list will always include (<code>x</code>,<code>x</code>).</p>
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#quality">quality</a> in
+     * the range [<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_LOW">QUALITY_LOW</a>,
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_2160P">QUALITY_2160P</a>], if the profile is
+     * supported by the device and has
+     * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a> <code>x</code>, this list will
+     * always include (<code>x</code>,<code>x</code>).</p>
      * </li>
      * <li>
      * <p>Also, a camera device must either not support any
      * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>,
      * or support at least one
-     * normal <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>
-     * that has
+     * normal <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a> that has
      * <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a> <code>x</code> &gt;= 24.</p>
      * </li>
      * </ul>
@@ -1619,13 +1619,13 @@
      * compared to previous regular requests. enableZsl does not affect requests with other
      * capture intents.</p>
      * <p>For example, when requests are submitted in the following order:
-     *   Request A: enableZsl is <code>true</code>, ACAMERA_CONTROL_CAPTURE_INTENT is PREVIEW
-     *   Request B: enableZsl is <code>true</code>, ACAMERA_CONTROL_CAPTURE_INTENT is STILL_CAPTURE</p>
+     *   Request A: enableZsl is ON, ACAMERA_CONTROL_CAPTURE_INTENT is PREVIEW
+     *   Request B: enableZsl is ON, ACAMERA_CONTROL_CAPTURE_INTENT is STILL_CAPTURE</p>
      * <p>The output images for request B may have contents captured before the output images for
      * request A, and the result metadata for request B may be older than the result metadata for
      * request A.</p>
-     * <p>Note that when enableZsl is <code>true</code>, it is not guaranteed to get output images captured in the
-     * past for requests with STILL_CAPTURE capture intent.</p>
+     * <p>Note that when enableZsl is <code>true</code>, it is not guaranteed to get output images captured in
+     * the past for requests with STILL_CAPTURE capture intent.</p>
      * <p>For applications targeting SDK versions O and newer, the value of enableZsl in
      * TEMPLATE_STILL_CAPTURE template may be <code>true</code>. The value in other templates is always
      * <code>false</code> if present.</p>
@@ -1638,6 +1638,28 @@
      */
     ACAMERA_CONTROL_ENABLE_ZSL =                                // byte (acamera_metadata_enum_android_control_enable_zsl_t)
             ACAMERA_CONTROL_START + 41,
+    /**
+     * <p>Whether a significant scene change is detected within the currently-set AF
+     * region(s).</p>
+     *
+     * <p>Type: int32 (acamera_metadata_enum_android_control_af_scene_change_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     * </ul></p>
+     *
+     * <p>When the camera focus routine detects a change in the scene it is looking at,
+     * such as a large shift in camera viewpoint, significant motion in the scene, or a
+     * significant illumination change, this value will be set to DETECTED for a single capture
+     * result. Otherwise the value will be NOT_DETECTED. The threshold for detection is similar
+     * to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes.</p>
+     * <p>afSceneChange may be DETECTED only if afMode is AF_MODE_CONTINUOUS_VIDEO or
+     * AF_MODE_CONTINUOUS_PICTURE. In other AF modes, afSceneChange must be NOT_DETECTED.</p>
+     * <p>This key will be available if the camera device advertises this key via {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.</p>
+     */
+    ACAMERA_CONTROL_AF_SCENE_CHANGE =                           // int32 (acamera_metadata_enum_android_control_af_scene_change_t)
+            ACAMERA_CONTROL_START + 42,
     ACAMERA_CONTROL_END,
 
     /**
@@ -1959,14 +1981,14 @@
      * <p>When an ACAMERA_JPEG_ORIENTATION of non-zero degree is requested,
      * the camera device will handle thumbnail rotation in one of the following ways:</p>
      * <ul>
-     * <li>Set the
-     *   <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>
+     * <li>Set the <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>
      *   and keep jpeg and thumbnail image data unrotated.</li>
      * <li>Rotate the jpeg and thumbnail image data and not set
-     *   <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>.
-     *   In this case, LIMITED or FULL hardware level devices will report rotated thumnail size
-     *   in capture result, so the width and height will be interchanged if 90 or 270 degree
-     *   orientation is requested. LEGACY device will always report unrotated thumbnail size.</li>
+     *   <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>. In this
+     *   case, LIMITED or FULL hardware level devices will report rotated thumbnail size in
+     *   capture result, so the width and height will be interchanged if 90 or 270 degree
+     *   orientation is requested. LEGACY device will always report unrotated thumbnail
+     *   size.</li>
      * </ul>
      *
      * @see ACAMERA_JPEG_ORIENTATION
@@ -2216,35 +2238,31 @@
      * </ul></p>
      *
      * <p>The position of the camera device's lens optical center,
-     * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
-     * optical center of the largest camera device facing in the
-     * same direction as this camera, in the
-     * <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate axes</a>.
-     * Note that only the axis definitions are shared with
-     * the sensor coordinate system, but not the origin.</p>
-     * <p>If this device is the largest or only camera device with a
-     * given facing, then this position will be <code>(0, 0, 0)</code>; a
-     * camera device with a lens optical center located 3 cm from
-     * the main sensor along the +X axis (to the right from the
-     * user's perspective) will report <code>(0.03, 0, 0)</code>.</p>
-     * <p>To transform a pixel coordinates between two cameras
-     * facing the same direction, first the source camera
-     * ACAMERA_LENS_RADIAL_DISTORTION must be corrected for.  Then
-     * the source camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs
-     * to be applied, followed by the ACAMERA_LENS_POSE_ROTATION
-     * of the source camera, the translation of the source camera
-     * relative to the destination camera, the
-     * ACAMERA_LENS_POSE_ROTATION of the destination camera, and
-     * finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION
-     * of the destination camera. This obtains a
-     * radial-distortion-free coordinate in the destination
-     * camera pixel coordinates.</p>
-     * <p>To compare this against a real image from the destination
-     * camera, the destination camera image then needs to be
-     * corrected for radial distortion before comparison or
-     * sampling.</p>
+     * as a three-dimensional vector <code>(x,y,z)</code>.</p>
+     * <p>Prior to Android P, or when ACAMERA_LENS_POSE_REFERENCE is PRIMARY_CAMERA, this position
+     * is relative to the optical center of the largest camera device facing in the same
+     * direction as this camera, in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor
+     * coordinate axes</a>. Note that only the axis definitions are shared with the sensor
+     * coordinate system, but not the origin.</p>
+     * <p>If this device is the largest or only camera device with a given facing, then this
+     * position will be <code>(0, 0, 0)</code>; a camera device with a lens optical center located 3 cm
+     * from the main sensor along the +X axis (to the right from the user's perspective) will
+     * report <code>(0.03, 0, 0)</code>.</p>
+     * <p>To transform pixel coordinates between two cameras facing the same direction, first
+     * the source camera ACAMERA_LENS_RADIAL_DISTORTION must be corrected for.  Then the source
+     * camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs to be applied, followed by the
+     * ACAMERA_LENS_POSE_ROTATION of the source camera, the translation of the source camera
+     * relative to the destination camera, the ACAMERA_LENS_POSE_ROTATION of the destination
+     * camera, and finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION of the destination
+     * camera. This obtains a radial-distortion-free coordinate in the destination camera pixel
+     * coordinates.</p>
+     * <p>To compare this against a real image from the destination camera, the destination camera
+     * image then needs to be corrected for radial distortion before comparison or sampling.</p>
+     * <p>When ACAMERA_LENS_POSE_REFERENCE is GYROSCOPE, then this position is relative to
+     * the center of the primary gyroscope on the device.</p>
      *
      * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+     * @see ACAMERA_LENS_POSE_REFERENCE
      * @see ACAMERA_LENS_POSE_ROTATION
      * @see ACAMERA_LENS_RADIAL_DISTORTION
      */
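
A rough sketch (not from this patch) of reading these tags through the NDK metadata API; `chars` is assumed to be an ACameraMetadata obtained from ACameraManager_getCameraCharacteristics, and the helper name is illustrative only.

// Sketch: query the lens pose translation and, on devices that report it, the
// pose reference origin.
#include <cstdio>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

static void printLensPose(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry translation;
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_POSE_TRANSLATION,
            &translation) == ACAMERA_OK && translation.count == 3) {
        // (x, y, z) in meters, using the Android sensor coordinate axis directions.
        printf("pose translation: (%f, %f, %f)\n",
                translation.data.f[0], translation.data.f[1], translation.data.f[2]);
    }
    ACameraMetadata_const_entry reference;
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_POSE_REFERENCE,
            &reference) == ACAMERA_OK && reference.count == 1) {
        bool gyro = (reference.data.u8[0] == ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE);
        printf("pose origin: %s\n", gyro ? "primary gyroscope" : "primary camera");
    }
    // If ACAMERA_LENS_POSE_REFERENCE is absent (pre-P device), PRIMARY_CAMERA applies.
}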
@@ -2415,6 +2433,26 @@
      */
     ACAMERA_LENS_RADIAL_DISTORTION =                            // float[6]
             ACAMERA_LENS_START + 11,
+    /**
+     * <p>The origin for ACAMERA_LENS_POSE_TRANSLATION.</p>
+     *
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_lens_pose_reference_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Different calibration methods and use cases can produce better or worse results
+     * depending on the selected coordinate origin.</p>
+     * <p>For devices designed to support the MOTION_TRACKING capability, the GYROSCOPE origin
+     * makes device calibration and later usage by applications combining camera and gyroscope
+     * information together simpler.</p>
+     */
+    ACAMERA_LENS_POSE_REFERENCE =                               // byte (acamera_metadata_enum_android_lens_pose_reference_t)
+            ACAMERA_LENS_START + 12,
     ACAMERA_LENS_END,
 
     /**
@@ -2662,11 +2700,12 @@
      * into the 3 stream types as below:</p>
      * <ul>
      * <li>Processed (but stalling): any non-RAW format with a stallDurations &gt; 0.
-     *   Typically {@link AIMAGE_FORMAT_JPEG} format.</li>
-     * <li>Raw formats: {@link AIMAGE_FORMAT_RAW16}, {@link AIMAGE_FORMAT_RAW10}, or
-     *   {@link AIMAGE_FORMAT_RAW12}.</li>
-     * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
-     *   Typically {@link AIMAGE_FORMAT_YUV_420_888}.</li>
+     *   Typically {@link AIMAGE_FORMAT_JPEG JPEG format}.</li>
+     * <li>Raw formats: {@link AIMAGE_FORMAT_RAW16 RAW_SENSOR}, {@link AIMAGE_FORMAT_RAW10 RAW10}, or
+     *   {@link AIMAGE_FORMAT_RAW12 RAW12}.</li>
+     * <li>Processed (but not-stalling): any non-RAW format without a stall duration.  Typically
+     *   {@link AIMAGE_FORMAT_YUV_420_888 YUV_420_888},
+     *   <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#NV21">NV21</a>, or <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12">YV12</a>.</li>
      * </ul>
      *
      * @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
@@ -2787,7 +2826,7 @@
             ACAMERA_REQUEST_START + 12,
     /**
      * <p>A list of all keys that the camera device has available
-     * to use with {@link ACaptureRequest}.</p>
+     * to use with {@link ACaptureRequest }.</p>
      *
      * <p>Type: int32[n]</p>
      *
@@ -2809,9 +2848,7 @@
     ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS =                    // int32[n]
             ACAMERA_REQUEST_START + 13,
     /**
-     * <p>A list of all keys that the camera device has available
-     * to query with {@link ACameraMetadata} from
-     * {@link ACameraCaptureSession_captureCallback_result}.</p>
+     * <p>A list of all keys that the camera device has available to use with {@link ACameraCaptureSession_captureCallback_result }.</p>
      *
      * <p>Type: int32[n]</p>
      *
@@ -2842,9 +2879,7 @@
     ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS =                     // int32[n]
             ACAMERA_REQUEST_START + 14,
     /**
-     * <p>A list of all keys that the camera device has available
-     * to query with {@link ACameraMetadata} from
-     * {@link ACameraManager_getCameraCharacteristics}.</p>
+     * <p>A list of all keys that the camera device has available to use with {@link ACameraManager_getCameraCharacteristics }.</p>
      *
      * <p>Type: int32[n]</p>
      *
@@ -2862,6 +2897,59 @@
      */
     ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS =            // int32[n]
             ACAMERA_REQUEST_START + 15,
+    /**
+     * <p>A subset of the available request keys that the camera device
+     * can pass as part of the capture session initialization.</p>
+     *
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This is a subset of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS which
+     * contains a list of keys that are difficult to apply per-frame and
+     * can result in unexpected delays when modified during the capture session
+     * lifetime. Typical examples include parameters that require a
+     * time-consuming hardware re-configuration or internal camera pipeline
+     * change. For performance reasons we advise clients to pass their initial
+     * values as part of
+     * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+     * Once the camera capture session is enabled it is also recommended to avoid
+     * changing them from their initial values set in
+     * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+     * Control over session parameters can still be exerted in capture requests
+     * but clients should be aware of and expect delays when they are applied.
+     * An example usage scenario could look like this:</p>
+     * <ul>
+     * <li>The camera client starts by querying the session parameter key list via
+     *   {@link ACameraManager_getCameraCharacteristics }.</li>
+     * <li>Before triggering the capture session create sequence, a capture request
+     *   must be built via
+     *   {@link ACameraDevice_createCaptureRequest }
+     *   using an appropriate template matching the particular use case.</li>
+     * <li>The client should go over the list of session parameters and check
+     *   whether any of the listed keys match the parameters that
+     *   they intend to modify as part of the first capture request.</li>
+     * <li>If there is no such match, the capture request can be passed
+     *   unmodified to
+     *   {@link ACameraDevice_createCaptureSessionWithSessionParameters }.</li>
+     * <li>If matches do exist, the client should update the respective values
+     *   and pass the request to
+     *   {@link ACameraDevice_createCaptureSessionWithSessionParameters }.</li>
+     * <li>After the capture session initialization completes, the session parameter
+     *   key list can continue to serve as a reference when posting or updating
+     *   further requests. As mentioned above, further changes to session
+     *   parameters should ideally be avoided; if updates are necessary,
+     *   however, clients could expect a delay/glitch during the
+     *   parameter switch.</li>
+     * </ul>
+     *
+     * @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
+     */
+    ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS =                    // int32[n]
+            ACAMERA_REQUEST_START + 16,
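
A rough sketch of the workflow described above (illustrative, not from this patch); `device`, `chars`, `outputs`, and `cb` are assumed to already exist, and the ACameraDevice_createCaptureSessionWithSessionParameters signature is assumed to match the NDK declaration that accompanies this tag.

// Illustrative flow for ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS.
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

static camera_status_t createSessionWithInitialParams(
        ACameraDevice* device, const ACameraMetadata* chars,
        const ACaptureSessionOutputContainer* outputs,
        const ACameraCaptureSession_stateCallbacks* cb,
        ACameraCaptureSession** sessionOut) {
    // 1. Query which request keys are session parameters.
    ACameraMetadata_const_entry sessionKeys = {};
    ACameraMetadata_getConstEntry(chars, ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS,
            &sessionKeys);

    // 2. Build a request from a template that matches the use case.
    ACaptureRequest* request = nullptr;
    camera_status_t ret = ACameraDevice_createCaptureRequest(
            device, TEMPLATE_PREVIEW, &request);
    if (ret != ACAMERA_OK) return ret;

    // 3. For every session parameter the client intends to change, set its
    //    initial value on "request" (e.g. via ACaptureRequest_setEntry_*)
    //    before the session is created; sessionKeys.data.i32[i] lists the tags.
    for (uint32_t i = 0; i < sessionKeys.count; i++) { /* inspect the tag */ }

    // 4. Hand the request over as the initial session parameters.
    ret = ACameraDevice_createCaptureSessionWithSessionParameters(
            device, outputs, request, cb, sessionOut);
    ACaptureRequest_free(request);
    return ret;
}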
     ACAMERA_REQUEST_END,
 
     /**
@@ -2876,7 +2964,6 @@
      * </ul></p>
      *
      * <p>This control can be used to implement digital zoom.</p>
-     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The crop region coordinate system is based off
      * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being the
      * top-left corner of the sensor active array.</p>
@@ -2906,6 +2993,7 @@
      * for rounding and other hardware requirements; the final
      * crop region used will be included in the output capture
      * result.</p>
+     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      *
      * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -3061,13 +3149,14 @@
      * ignored).</p>
      * <p>The following formats may always have a stall duration:</p>
      * <ul>
-     * <li>{@link AIMAGE_FORMAT_JPEG}</li>
-     * <li>{@link AIMAGE_FORMAT_RAW16}</li>
+     * <li>{@link AIMAGE_FORMAT_JPEG }</li>
+     * <li>{@link AIMAGE_FORMAT_RAW16 }</li>
      * </ul>
      * <p>The following formats will never have a stall duration:</p>
      * <ul>
-     * <li>{@link AIMAGE_FORMAT_YUV_420_888}</li>
-     * <li>{@link AIMAGE_FORMAT_RAW10}</li>
+     * <li>{@link AIMAGE_FORMAT_YUV_420_888 }</li>
+     * <li>{@link AIMAGE_FORMAT_RAW10 }</li>
+     * <li>{@link AIMAGE_FORMAT_RAW12 }</li>
      * </ul>
      * <p>All other formats may or may not have an allowed stall duration on
      * a per-capability basis; refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -3177,39 +3266,29 @@
      * can run concurrently to the rest of the camera pipeline, but
      * cannot process more than 1 capture at a time.</li>
      * </ul>
-     * <p>The necessary information for the application, given the model above,
-     * is provided via
-     * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
-     * These are used to determine the maximum frame rate / minimum frame
-     * duration that is possible for a given stream configuration.</p>
+     * <p>The necessary information for the application, given the model above, is provided via
+     * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }.
+     * These are used to determine the maximum frame rate / minimum frame duration that is
+     * possible for a given stream configuration.</p>
      * <p>Specifically, the application can use the following rules to
      * determine the minimum frame duration it can request from the camera
      * device:</p>
      * <ol>
-     * <li>Let the set of currently configured input/output streams
-     * be called <code>S</code>.</li>
-     * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking
-     * it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
-     * (with its respective size/format). Let this set of frame durations be
-     * called <code>F</code>.</li>
-     * <li>For any given request <code>R</code>, the minimum frame duration allowed
-     * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams
-     * used in <code>R</code> be called <code>S_r</code>.</li>
+     * <li>Let the set of currently configured input/output streams be called <code>S</code>.</li>
+     * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }
+     * (with its respective size/format). Let this set of frame durations be called <code>F</code>.</li>
+     * <li>For any given request <code>R</code>, the minimum frame duration allowed for <code>R</code> is the maximum
+     * out of all values in <code>F</code>. Let the streams used in <code>R</code> be called <code>S_r</code>.</li>
      * </ol>
-     * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link
-     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
-     * using its respective size/format), then the frame duration in <code>F</code>
-     * determines the steady state frame rate that the application will get
-     * if it uses <code>R</code> as a repeating request. Let this special kind of
-     * request be called <code>Rsimple</code>.</p>
-     * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
-     * by a single capture of a new request <code>Rstall</code> (which has at least
-     * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
-     * same minimum frame duration this will not cause a frame rate loss
-     * if all buffers from the previous <code>Rstall</code> have already been
-     * delivered.</p>
-     * <p>For more details about stalling, see
-     * {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.</p>
+     * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS }
+     * using its respective size/format), then the frame duration in <code>F</code> determines the steady
+     * state frame rate that the application will get if it uses <code>R</code> as a repeating request. Let
+     * this special kind of request be called <code>Rsimple</code>.</p>
+     * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved by a single capture of a
+     * new request <code>Rstall</code> (which has at least one in-use stream with a non-0 stall time) and if
+     * <code>Rstall</code> has the same minimum frame duration this will not cause a frame rate loss if all
+     * buffers from the previous <code>Rstall</code> have already been delivered.</p>
+     * <p>For more details about stalling, see {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS }.</p>
      * <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
      *
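
A small illustrative helper for the rules above: the minimum frame duration for a request is the maximum of the per-stream minimum durations; the lookup from ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS is assumed to have happened elsewhere.

// Given set F (per-stream minimum frame durations for the streams used by a
// request R), the minimum frame duration allowed for R is max(F).
#include <algorithm>
#include <cstdint>
#include <vector>

int64_t minFrameDurationForRequest(const std::vector<int64_t>& streamMinDurationsNs) {
    int64_t result = 0;
    for (int64_t d : streamMinDurationsNs) {
        result = std::max(result, d);  // the slowest stream gates the request
    }
    return result;  // 0 means no configured streams
}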
@@ -3567,14 +3646,12 @@
      * timestamps for other captures from the same camera device, but are
      * not guaranteed to be comparable to any other time source.</p>
      * <p>When ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE <code>==</code> REALTIME, the
-     * timestamps measure time in the same timebase as
-     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">elapsedRealtimeNanos</a>
-     * (or CLOCK_BOOTTIME), and they can
+     * timestamps measure time in the same timebase as <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>, and they can
      * be compared to other timestamps from other subsystems that
      * are using that base.</p>
      * <p>For reprocessing, the timestamp will match the start of exposure of
-     * the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
-     * timestamp} in the TotalCaptureResult that was used to create the
+     * the input image, i.e. <a href="https://developer.android.com/reference/CaptureResult.html#SENSOR_TIMESTAMP">the
+     * timestamp</a> in the TotalCaptureResult that was used to create the
      * reprocess capture request.</p>
      *
      * @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
@@ -3775,7 +3852,6 @@
      * optically shielded pixel areas. By blocking light, these pixels
      * provides a reliable black reference for black level compensation
      * in active array region.</p>
-     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>This key provides a list of disjoint rectangles specifying the
      * regions of optically shielded (with metal shield) black pixel
      * regions if the camera device is capable of reading out these black
@@ -3785,6 +3861,7 @@
      * black level of each captured raw images.</p>
      * <p>When this key is reported, the ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL and
      * ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL will also be reported.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
      * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
      * @see ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL
@@ -3825,9 +3902,8 @@
      * layout key (see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT), i.e. the
      * nth value given corresponds to the black level offset for the nth
      * color channel listed in the CFA.</p>
-     * <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
-     * available or the camera device advertises this key via
-     * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.</p>
+     * <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is available or the
+     * camera device advertises this key via {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.</p>
      *
      * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
      * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
@@ -3853,7 +3929,7 @@
      * estimated white level for each frame.</p>
      * <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
      * available or the camera device advertises this key via
-     * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.</p>
+     * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.</p>
      *
      * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
      * @see ACAMERA_SENSOR_INFO_WHITE_LEVEL
@@ -3882,13 +3958,13 @@
      * <p>This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
      * the full pixel array, and the size of the full pixel array is given by
      * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
-     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The coordinate system for most other keys that list pixel coordinates, including
      * ACAMERA_SCALER_CROP_REGION, is defined relative to the active array rectangle given in
      * this field, with <code>(0, 0)</code> being the top-left of this rectangle.</p>
      * <p>The active array may be smaller than the full pixel array, since the full array may
      * include black calibration pixels or other inactive regions, and geometric correction
      * resulting in scaling or cropping may have been applied.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
@@ -3960,8 +4036,7 @@
      * <p>Attempting to use frame durations beyond the maximum will result in the frame
      * duration being clipped to the maximum. See that control for a full definition of frame
      * durations.</p>
-     * <p>Refer to {@link
-     * ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
+     * <p>Refer to {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }
      * for the minimum frame duration values.</p>
      */
     ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION =                    // int64
@@ -4000,9 +4075,9 @@
      * the raw buffers produced by this sensor.</p>
      * <p>If a camera device supports raw sensor formats, either this or
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE is the maximum dimensions for the raw
-     * output formats listed in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS (this depends on
-     * whether or not the image sensor returns buffers containing pixels that are not
-     * part of the active array region for blacklevel calibration or other purposes).</p>
+     * output formats listed in {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS }
+     * (this depends on whether or not the image sensor returns buffers containing pixels that
+     * are not part of the active array region for blacklevel calibration or other purposes).</p>
      * <p>Some parts of the full pixel array may not receive light from the scene,
      * or be otherwise inactive.  The ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE key
      * defines the rectangle of active pixels that will be included in processed image
@@ -4092,7 +4167,6 @@
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      * </ul></p>
      *
-     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
      * the region that actually receives light from the scene) before any geometric correction
      * has been applied, and should be treated as the active region rectangle for any of the
@@ -4143,6 +4217,7 @@
      * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
      * <p>The pre-correction active array may be smaller than the full pixel array, since the
      * full array may include black calibration pixels or other inactive regions.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
      * @see ACAMERA_LENS_RADIAL_DISTORTION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -4302,10 +4377,10 @@
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      * </ul></p>
      *
-     * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
-     * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF</p>
+     * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF.
+     * The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
@@ -4832,7 +4907,7 @@
      * <p>See the individual level enums for full descriptions of the supported capabilities.  The
      * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES entry describes the device's capabilities at a
      * finer-grain level, if needed. In addition, many controls have their available settings or
-     * ranges defined in individual metadata tag entries in this document.</p>
+     * ranges defined in individual entries from {@link ACameraManager_getCameraCharacteristics }.</p>
      * <p>Some features are not part of any particular hardware level or capability and must be
      * queried separately. These include:</p>
      * <ul>
@@ -4853,6 +4928,23 @@
      */
     ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL =                     // byte (acamera_metadata_enum_android_info_supported_hardware_level_t)
             ACAMERA_INFO_START,
+    /**
+     * <p>A short string for manufacturer version information about the camera device, such as
+     * ISP hardware, sensors, etc.</p>
+     *
+     * <p>Type: byte</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This can be used in <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_IMAGE_DESCRIPTION">TAG_IMAGE_DESCRIPTION</a>
+     * in JPEG EXIF. This key may be absent if no version information is available on the
+     * device.</p>
+     */
+    ACAMERA_INFO_VERSION =                                      // byte
+            ACAMERA_INFO_START + 1,
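
A short illustrative reader for this optional key; `chars` is assumed to come from ACameraManager_getCameraCharacteristics, and whether the byte count includes a trailing NUL is treated as an assumption.

// Sketch: read the optional ACAMERA_INFO_VERSION string.
#include <string>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

static std::string getInfoVersion(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry entry;
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_INFO_VERSION, &entry) != ACAMERA_OK) {
        return "";  // absent when the device has no version information
    }
    // The entry is a byte array holding a short string.
    std::string version(reinterpret_cast<const char*>(entry.data.u8), entry.count);
    while (!version.empty() && version.back() == '\0') {
        version.pop_back();  // drop a trailing NUL if the count includes one
    }
    return version;
}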
     ACAMERA_INFO_END,
 
     /**
@@ -5645,6 +5737,15 @@
      */
     ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL                            = 6,
 
+    /**
+     * <p>This request is for a motion tracking use case, where
+     * the application will use camera and inertial sensor data to
+     * locate and track objects in the world.</p>
+     * <p>The camera device auto-exposure routine will limit the exposure time
+     * of the camera to no more than 20 milliseconds, to minimize motion blur.</p>
+     */
+    ACAMERA_CONTROL_CAPTURE_INTENT_MOTION_TRACKING                   = 7,
+
 } acamera_metadata_enum_android_control_capture_intent_t;
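
An illustrative snippet for requesting this intent on an existing ACaptureRequest; the request is assumed to have been created with ACameraDevice_createCaptureRequest, and error handling is omitted.

// Sketch: ask for the MOTION_TRACKING intent so auto-exposure caps exposure
// time at 20 ms.
#include <camera/NdkCameraMetadataTags.h>
#include <camera/NdkCaptureRequest.h>

static void useMotionTrackingIntent(ACaptureRequest* request) {
    uint8_t intent = ACAMERA_CONTROL_CAPTURE_INTENT_MOTION_TRACKING;
    ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_CAPTURE_INTENT,
            /*count*/ 1, &intent);
}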
 
 // ACAMERA_CONTROL_EFFECT_MODE
@@ -6135,6 +6236,20 @@
 
 } acamera_metadata_enum_android_control_enable_zsl_t;
 
+// ACAMERA_CONTROL_AF_SCENE_CHANGE
+typedef enum acamera_metadata_enum_acamera_control_af_scene_change {
+    /**
+     * <p>Scene change is not detected within the AF region(s).</p>
+     */
+    ACAMERA_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED                     = 0,
+
+    /**
+     * <p>Scene change is detected within the AF region(s).</p>
+     */
+    ACAMERA_CONTROL_AF_SCENE_CHANGE_DETECTED                         = 1,
+
+} acamera_metadata_enum_android_control_af_scene_change_t;
+
 
 
 // ACAMERA_EDGE_MODE
@@ -6157,13 +6272,13 @@
     ACAMERA_EDGE_MODE_HIGH_QUALITY                                   = 2,
 
     /**
-     * <p>Edge enhancement is applied at different levels for different output streams,
-     * based on resolution. Streams at maximum recording resolution (see {@link
-     * ACameraDevice_createCaptureSession}) or below have
-     * edge enhancement applied, while higher-resolution streams have no edge enhancement
-     * applied. The level of edge enhancement for low-resolution streams is tuned so that
-     * frame rate is not impacted, and the quality is equal to or better than FAST (since it
-     * is only applied to lower-resolution outputs, quality may improve from FAST).</p>
+     * <p>Edge enhancement is applied at different
+     * levels for different output streams, based on resolution. Streams at maximum recording
+     * resolution (see {@link ACameraDevice_createCaptureSession })
+     * or below have edge enhancement applied, while higher-resolution streams have no edge
+     * enhancement applied. The level of edge enhancement for low-resolution streams is tuned
+     * so that frame rate is not impacted, and the quality is equal to or better than FAST
+     * (since it is only applied to lower-resolution outputs, quality may improve from FAST).</p>
      * <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
      * with YUV or PRIVATE reprocessing, where the application continuously captures
      * high-resolution intermediate buffers into a circular buffer, from which a final image is
@@ -6342,6 +6457,28 @@
 
 } acamera_metadata_enum_android_lens_state_t;
 
+// ACAMERA_LENS_POSE_REFERENCE
+typedef enum acamera_metadata_enum_acamera_lens_pose_reference {
+    /**
+     * <p>The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the optical center of
+     * the largest camera device facing the same direction as this camera.</p>
+     * <p>This is the default value for API levels before Android P.</p>
+     *
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_LENS_POSE_REFERENCE_PRIMARY_CAMERA                       = 0,
+
+    /**
+     * <p>The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the position of the
+     * primary gyroscope of this Android device.</p>
+     * <p>This is the value reported by all devices that support the MOTION_TRACKING capability.</p>
+     *
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE                            = 1,
+
+} acamera_metadata_enum_android_lens_pose_reference_t;
+
 
 // ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
 typedef enum acamera_metadata_enum_acamera_lens_info_focus_distance_calibration {
@@ -6412,13 +6549,12 @@
 
     /**
      * <p>Noise reduction is applied at different levels for different output streams,
-     * based on resolution. Streams at maximum recording resolution (see {@link
-     * ACameraDevice_createCaptureSession}) or below have noise
-     * reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
-     * noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
-     * for low-resolution streams is tuned so that frame rate is not impacted, and the quality
-     * is equal to or better than FAST (since it is only applied to lower-resolution outputs,
-     * quality may improve from FAST).</p>
+     * based on resolution. Streams at maximum recording resolution (see {@link ACameraDevice_createCaptureSession })
+     * or below have noise reduction applied, while higher-resolution streams have MINIMAL (if
+     * supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of
+     * noise reduction for low-resolution streams is tuned so that frame rate is not impacted,
+     * and the quality is equal to or better than FAST (since it is only applied to
+     * lower-resolution outputs, quality may improve from FAST).</p>
      * <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
      * with YUV or PRIVATE reprocessing, where the application continuously captures
      * high-resolution intermediate buffers into a circular buffer, from which a final image is
@@ -6635,18 +6771,16 @@
      * to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
      * per second.  Here, 'high resolution' means at least 8 megapixels, or the maximum
      * resolution of the device, whichever is smaller.</p>
-     * <p>More specifically, this means that at least one output {@link
-     * AIMAGE_FORMAT_YUV_420_888} size listed in
-     * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} is larger or equal to the
-     * 'high resolution' defined above, and can be captured at at least 20 fps.
-     * For the largest {@link AIMAGE_FORMAT_YUV_420_888} size listed in
-     * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, camera device can capture this
-     * size for at least 10 frames per second.
-     * Also the ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry lists at least one FPS range
-     * where the minimum FPS is &gt;= 1 / minimumFrameDuration for the largest YUV_420_888 size.</p>
-     * <p>If the device supports the {@link AIMAGE_FORMAT_RAW10}, {@link
-     * AIMAGE_FORMAT_RAW12}, then those can also be captured at the same rate
-     * as the maximum-size YUV_420_888 resolution is.</p>
+     * <p>More specifically, this means that at least one output {@link AIMAGE_FORMAT_YUV_420_888 } size listed in
+     * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS }
+     * is larger or equal to the 'high resolution' defined above, and can be captured at at
+     * least 20 fps.  For the largest {@link AIMAGE_FORMAT_YUV_420_888 } size listed in
+     * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS },
+     * the camera device can capture this size for at least 10 frames per second.  Also the
+     * ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry lists at least one FPS range where
+     * the minimum FPS is &gt;= 1 / minimumFrameDuration for the largest YUV_420_888 size.</p>
+     * <p>If the device supports the {@link AIMAGE_FORMAT_RAW10 }, {@link AIMAGE_FORMAT_RAW12 }, then those can also be
+     * captured at the same rate as the maximum-size YUV_420_888 resolution is.</p>
      * <p>In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranted to have a value between 0
      * and 4, inclusive. ACAMERA_CONTROL_AE_LOCK_AVAILABLE and ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
      * are also guaranteed to be <code>true</code> so burst capture with these two locks ON yields
@@ -6663,13 +6797,13 @@
      * <p>The camera device can produce depth measurements from its field of view.</p>
      * <p>This capability requires the camera device to support the following:</p>
      * <ul>
-     * <li>{@link AIMAGE_FORMAT_DEPTH16} is supported as an output format.</li>
-     * <li>{@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is optionally supported as an
-     *   output format.</li>
-     * <li>This camera device, and all camera devices with the same ACAMERA_LENS_FACING,
-     *   will list the following calibration entries in {@link ACameraMetadata} from both
-     *   {@link ACameraManager_getCameraCharacteristics} and
-     *   {@link ACameraCaptureSession_captureCallback_result}:<ul>
+     * <li>{@link AIMAGE_FORMAT_DEPTH16 } is supported as
+     *   an output format.</li>
+     * <li>{@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD } is
+     *   optionally supported as an output format.</li>
+     * <li>This camera device, and all camera devices with the same ACAMERA_LENS_FACING, will
+     *   list the following calibration metadata entries in both {@link ACameraManager_getCameraCharacteristics }
+     *   and {@link ACameraCaptureSession_captureCallback_result }:<ul>
      * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
      * <li>ACAMERA_LENS_POSE_ROTATION</li>
      * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
@@ -6677,14 +6811,14 @@
      * </ul>
      * </li>
      * <li>The ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE entry is listed by this device.</li>
+     * <li>As of Android P, the ACAMERA_LENS_POSE_REFERENCE entry is listed by this device.</li>
      * <li>A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
      *   normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
      *   format.</li>
      * </ul>
      * <p>Generally, depth output operates at a slower frame rate than standard color capture,
      * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
-     * should be accounted for (see
-     * {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}).
+     * should be accounted for (see {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS }).
      * On a device that supports both depth and color-based output, to enable smooth preview,
      * using a repeating burst is recommended, where a depth-output target is only included
      * once every N frames, where N is the ratio between preview output rate and depth output
@@ -6693,12 +6827,57 @@
      * @see ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE
      * @see ACAMERA_LENS_FACING
      * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+     * @see ACAMERA_LENS_POSE_REFERENCE
      * @see ACAMERA_LENS_POSE_ROTATION
      * @see ACAMERA_LENS_POSE_TRANSLATION
      * @see ACAMERA_LENS_RADIAL_DISTORTION
      */
     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT              = 8,
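
A rough sketch of the repeating-burst pattern suggested above, where only one request in N also targets the depth output; `session`, `previewReq`, and `depthReq` are assumed to exist, N=4 is arbitrary, and reusing the same preview request pointer within the burst is an assumption.

// Sketch: repeating burst with a depth-output target once every 4 frames.
#include <camera/NdkCameraCaptureSession.h>
#include <camera/NdkCaptureRequest.h>

static camera_status_t startDepthBurst(ACameraCaptureSession* session,
        ACaptureRequest* previewReq, ACaptureRequest* depthReq) {
    // depthReq targets both the preview and depth outputs; previewReq only preview.
    ACaptureRequest* burst[4] = { depthReq, previewReq, previewReq, previewReq };
    int sequenceId = 0;
    return ACameraCaptureSession_setRepeatingRequest(
            session, /*callbacks*/ nullptr, /*numRequests*/ 4, burst, &sequenceId);
}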
 
+    /**
+     * <p>The device supports controls and metadata required for accurate motion tracking for
+     * use cases such as augmented reality, electronic image stabilization, and so on.</p>
+     * <p>This means this camera device has accurate optical calibration and timestamps relative
+     * to the inertial sensors.</p>
+     * <p>This capability requires the camera device to support the following:</p>
+     * <ul>
+     * <li>Capture request templates <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#TEMPLATE_MOTION_TRACKING_PREVIEW">CameraDevice#TEMPLATE_MOTION_TRACKING_PREVIEW</a> and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#TEMPLATE_MOTION_TRACKING_BEST">CameraDevice#TEMPLATE_MOTION_TRACKING_BEST</a> are defined.</li>
+     * <li>The stream configurations listed in <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for MOTION_TRACKING are
+     *   supported, either at 30 or 60fps maximum frame rate.</li>
+     * <li>The following camera characteristics and capture result metadata are provided:<ul>
+     * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
+     * <li>ACAMERA_LENS_RADIAL_DISTORTION</li>
+     * <li>ACAMERA_LENS_POSE_ROTATION</li>
+     * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+     * <li>ACAMERA_LENS_POSE_REFERENCE with value GYROSCOPE</li>
+     * </ul>
+     * </li>
+     * <li>The ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE field has value <code>REALTIME</code>. When compared to
+     *   timestamps from the device's gyroscopes, the clock difference for events occurring at
+     *   the same actual time instant will be less than 1 ms.</li>
+     * <li>The value of the ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW field is accurate to within 1 ms.</li>
+     * <li>The value of ACAMERA_SENSOR_EXPOSURE_TIME is guaranteed to be available in the
+     *   capture result.</li>
+     * <li>The ACAMERA_CONTROL_CAPTURE_INTENT control supports MOTION_TRACKING to limit maximum
+     *   exposure to 20 milliseconds.</li>
+     * <li>The stream configurations required for MOTION_TRACKING (listed at <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) can operate at least at
+     *   30fps; optionally, they can operate at 60fps, and '[60, 60]' is listed in
+     *   ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES.</li>
+     * </ul>
+     *
+     * @see ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
+     * @see ACAMERA_CONTROL_CAPTURE_INTENT
+     * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+     * @see ACAMERA_LENS_POSE_REFERENCE
+     * @see ACAMERA_LENS_POSE_ROTATION
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     * @see ACAMERA_LENS_RADIAL_DISTORTION
+     * @see ACAMERA_SENSOR_EXPOSURE_TIME
+     * @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
+     * @see ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW
+     */
+    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING           = 10,
+
 } acamera_metadata_enum_android_request_available_capabilities_t;
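
An illustrative capability check; `chars` is assumed to come from ACameraManager_getCameraCharacteristics.

// Sketch: check whether a camera advertises the MOTION_TRACKING capability.
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

static bool supportsMotionTracking(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry caps;
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &caps) != ACAMERA_OK) {
        return false;
    }
    for (uint32_t i = 0; i < caps.count; i++) {
        if (caps.data.u8[i] == ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING) {
            return true;
        }
    }
    return false;
}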
 
 
@@ -6918,8 +7097,8 @@
 
     /**
      * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
-     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">elapsedRealtimeNanos</a>
-     * (or CLOCK_BOOTTIME), and they can be compared to other timestamps using that base.</p>
+     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,
+     * and they can be compared to other timestamps using that base.</p>
      *
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
@@ -7104,7 +7283,7 @@
      * <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
      * better.</p>
      * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
      * <p>All <code>LIMITED</code> devices support the <code>BACKWARDS_COMPATIBLE</code> capability, indicating basic
      * support for color image capture. The only exception is that the device may
      * alternatively support only the <code>DEPTH_OUTPUT</code> capability, if it can only output depth
@@ -7130,7 +7309,7 @@
     /**
      * <p>This camera device is capable of supporting advanced imaging applications.</p>
      * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
      * <p>A <code>FULL</code> device will support below capabilities:</p>
      * <ul>
      * <li><code>BURST_CAPTURE</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -7157,8 +7336,7 @@
 
     /**
      * <p>This camera device is running in backward compatibility mode.</p>
-     * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link
-     * ACameraDevice_createCaptureSession} documentation are supported.</p>
+     * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are supported.</p>
      * <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
      * post-processing, arbitrary cropping regions, and has relaxed performance constraints.
      * No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
@@ -7179,9 +7357,7 @@
      * <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
      * FULL-level capabilities.</p>
      * <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
-     * <code>LIMITED</code> tables in the {@link
-     * ACameraDevice_createCaptureSession}
-     * documentation are guaranteed to be supported.</p>
+     * <code>LIMITED</code> tables in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
      * <p>The following additional capabilities are guaranteed to be supported:</p>
      * <ul>
      * <li><code>YUV_REPROCESSING</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index c62ba2c..4961ce3 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -305,6 +305,58 @@
 
 #endif /* __ANDROID_API__ >= 24 */
 
+#if __ANDROID_API__ >= 28
+
+/**
+ * Associate an arbitrary user context pointer to the {@link ACaptureRequest}
+ *
+ * This method is useful for the user to identify the capture request in capture session callbacks.
+ * The context is NULL for a newly created request.
+ * {@link ACameraOutputTarget_free} will not free the context. Also, calling this method twice
+ * will not cause the previous context to be freed.
+ * Also note that calling this method after the request has been sent to the capture session will
+ * not change the context pointer in the capture callbacks.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param context the user context pointer to be associated with this capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL.</li></ul>
+ */
+camera_status_t ACaptureRequest_setUserContext(
+        ACaptureRequest* request, void* context);
+
+/**
+ * Get the user context pointer of the {@link ACaptureRequest}
+ *
+ * This method is useful for the user to identify the capture request in capture session callbacks.
+ * The context is NULL for a newly created request.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param context the user context pointer of this capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL.</li></ul>
+ */
+camera_status_t ACaptureRequest_getUserContext(
+        const ACaptureRequest* request, /*out*/void** context);
+
+/**
+ * Create a copy of input {@link ACaptureRequest}.
+ *
+ * <p>The returned ACaptureRequest must be freed by the application with {@link ACaptureRequest_free}
+ * after the application is done using it.</p>
+ *
+ * @param src the input {@link ACaptureRequest} to be copied.
+ *
+ * @return a valid ACaptureRequest pointer or NULL if the input request cannot be copied.
+ */
+ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src);
+
+#endif /* __ANDROID_API__ >= 28 */
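
A short usage sketch for the three declarations above; the request is assumed to be valid, MyTag is a hypothetical client-side type, and whether a copy inherits the user context is not assumed here.

// Sketch: tag a request with client data, read it back, and duplicate the request.
#include <camera/NdkCaptureRequest.h>

struct MyTag { int shotId; };

static void tagAndDuplicate(ACaptureRequest* request) {
    static MyTag tag = { 42 };  // must outlive the request; never freed by the framework
    ACaptureRequest_setUserContext(request, &tag);

    void* ctx = nullptr;
    ACaptureRequest_getUserContext(request, &ctx);  // ctx now points at "tag"

    ACaptureRequest* clone = ACaptureRequest_copy(request);
    if (clone != nullptr) {
        ACaptureRequest_free(clone);  // the application owns and must free the copy
    }
}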
+
 __END_DECLS
 
 #endif /* _NDK_CAPTURE_REQUEST_H */
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 41bb22b..d179aa0 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -6,9 +6,11 @@
     ACameraCaptureSession_getDevice;
     ACameraCaptureSession_setRepeatingRequest;
     ACameraCaptureSession_stopRepeating;
+    ACameraCaptureSession_updateSharedOutput;
     ACameraDevice_close;
     ACameraDevice_createCaptureRequest;
     ACameraDevice_createCaptureSession;
+    ACameraDevice_createCaptureSessionWithSessionParameters;
     ACameraDevice_getId;
     ACameraManager_create;
     ACameraManager_delete;
@@ -25,9 +27,11 @@
     ACameraOutputTarget_create;
     ACameraOutputTarget_free;
     ACaptureRequest_addTarget;
+    ACaptureRequest_copy;
     ACaptureRequest_free;
     ACaptureRequest_getAllTags;
     ACaptureRequest_getConstEntry;
+    ACaptureRequest_getUserContext;
     ACaptureRequest_removeTarget;
     ACaptureRequest_setEntry_double;
     ACaptureRequest_setEntry_float;
@@ -35,11 +39,15 @@
     ACaptureRequest_setEntry_i64;
     ACaptureRequest_setEntry_rational;
     ACaptureRequest_setEntry_u8;
+    ACaptureRequest_setUserContext;
     ACaptureSessionOutputContainer_add;
     ACaptureSessionOutputContainer_create;
     ACaptureSessionOutputContainer_free;
     ACaptureSessionOutputContainer_remove;
     ACaptureSessionOutput_create;
+    ACaptureSessionSharedOutput_create;
+    ACaptureSessionSharedOutput_add;
+    ACaptureSessionSharedOutput_remove;
     ACaptureSessionOutput_free;
   local:
     *;
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 51d9214..8fe9a86 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -476,7 +476,8 @@
         res = device->createStream(output, &streamId);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_LE(0, streamId);
-        res = device->endConfigure(/*isConstrainedHighSpeed*/ false);
+        CameraMetadata sessionParams;
+        res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(callbacks->hadError());
 
@@ -574,7 +575,7 @@
         EXPECT_TRUE(res.isOk()) << res;
         res = device->deleteStream(streamId);
         EXPECT_TRUE(res.isOk()) << res;
-        res = device->endConfigure(/*isConstrainedHighSpeed*/ false);
+        res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
         EXPECT_TRUE(res.isOk()) << res;
 
         sleep(/*second*/1); // allow some time for errors to show up, if any
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index ecca354..02c6e2a 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -256,10 +256,10 @@
         ASSERT_TRUE(nullptr != surfaceControl.get());
         ASSERT_TRUE(surfaceControl->isValid());
 
-        SurfaceComposerClient::openGlobalTransaction();
-        ASSERT_EQ(NO_ERROR, surfaceControl->setLayer(0x7fffffff));
-        ASSERT_EQ(NO_ERROR, surfaceControl->show());
-        SurfaceComposerClient::closeGlobalTransaction();
+        SurfaceComposerClient::Transaction{}
+                .setLayer(surfaceControl, 0x7fffffff)
+                .show(surfaceControl)
+                .apply();
 
         previewSurface = surfaceControl->getSurface();
         ASSERT_TRUE(previewSurface != NULL);
diff --git a/cmds/screenrecord/Android.mk b/cmds/screenrecord/Android.mk
index 7aa684a..5e83ed6 100644
--- a/cmds/screenrecord/Android.mk
+++ b/cmds/screenrecord/Android.mk
@@ -25,8 +25,8 @@
 	Program.cpp
 
 LOCAL_SHARED_LIBRARIES := \
-	libstagefright libmedia libutils libbinder libstagefright_foundation \
-	libjpeg libgui libcutils liblog libEGL libGLESv2
+	libstagefright libmedia libmedia_omx libutils libbinder libstagefright_foundation \
+	libjpeg libui libgui libcutils liblog libEGL libGLESv2
 
 LOCAL_C_INCLUDES := \
 	frameworks/av/media/libstagefright \
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index bc32bbe..f9e4639 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -213,7 +213,9 @@
  * Sets the display projection, based on the display dimensions, video size,
  * and device orientation.
  */
-static status_t setDisplayProjection(const sp<IBinder>& dpy,
+static status_t setDisplayProjection(
+        SurfaceComposerClient::Transaction& t,
+        const sp<IBinder>& dpy,
         const DisplayInfo& mainDpyInfo) {
 
     // Set the region of the layer stack we're interested in, which in our
@@ -279,7 +281,7 @@
         }
     }
 
-    SurfaceComposerClient::setDisplayProjection(dpy,
+    t.setDisplayProjection(dpy,
             gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
             layerStackRect, displayRect);
     return NO_ERROR;
@@ -295,11 +297,11 @@
     sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
             String8("ScreenRecorder"), false /*secure*/);
 
-    SurfaceComposerClient::openGlobalTransaction();
-    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
-    setDisplayProjection(dpy, mainDpyInfo);
-    SurfaceComposerClient::setDisplayLayerStack(dpy, 0);    // default stack
-    SurfaceComposerClient::closeGlobalTransaction();
+    SurfaceComposerClient::Transaction t;
+    t.setDisplaySurface(dpy, bufferProducer);
+    setDisplayProjection(t, dpy, mainDpyInfo);
+    t.setDisplayLayerStack(dpy, 0);    // default stack
+    t.apply();
 
     *pDisplayHandle = dpy;
 
@@ -379,9 +381,9 @@
                         ALOGW("getDisplayInfo(main) failed: %d", err);
                     } else if (orientation != mainDpyInfo.orientation) {
                         ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
-                        SurfaceComposerClient::openGlobalTransaction();
-                        setDisplayProjection(virtualDpy, mainDpyInfo);
-                        SurfaceComposerClient::closeGlobalTransaction();
+                        SurfaceComposerClient::Transaction t;
+                        setDisplayProjection(t, virtualDpy, mainDpyInfo);
+                        t.apply();
                         orientation = mainDpyInfo.orientation;
                     }
                 }
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index f647ffd..c62833d 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -8,9 +8,9 @@
         SineSource.cpp
 
 LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia libutils libbinder libstagefright_foundation \
-        libjpeg libgui libcutils liblog \
-        libhidlmemory \
+        libstagefright libmedia libmedia_omx libmediaextractor libutils libbinder \
+        libstagefright_foundation libjpeg libui libgui libcutils liblog \
+        libhidlbase \
         android.hardware.media.omx@1.0 \
 
 LOCAL_C_INCLUDES:= \
@@ -36,7 +36,8 @@
         record.cpp
 
 LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia liblog libutils libbinder libstagefright_foundation
+        libstagefright libmedia libmediaextractor liblog libutils libbinder \
+        libstagefright_foundation
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
@@ -60,7 +61,8 @@
         recordvideo.cpp
 
 LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia liblog libutils libbinder libstagefright_foundation
+        libstagefright libmedia libmediaextractor liblog libutils libbinder \
+        libstagefright_foundation
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
@@ -85,7 +87,8 @@
         audioloop.cpp
 
 LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia liblog libutils libbinder libstagefright_foundation
+        libstagefright libmedia libmediaextractor liblog libutils libbinder \
+        libstagefright_foundation
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
@@ -107,7 +110,7 @@
         stream.cpp    \
 
 LOCAL_SHARED_LIBRARIES := \
-        libstagefright liblog libutils libbinder libgui \
+        libstagefright liblog libutils libbinder libui libgui \
         libstagefright_foundation libmedia libcutils
 
 LOCAL_C_INCLUDES:= \
@@ -132,7 +135,7 @@
 
 LOCAL_SHARED_LIBRARIES := \
         libstagefright liblog libutils libbinder libstagefright_foundation \
-        libmedia libaudioclient libgui libcutils
+        libmedia libmedia_omx libaudioclient libui libgui libcutils
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
@@ -163,6 +166,8 @@
         libbinder \
         libstagefright_foundation \
         libmedia \
+        libmedia_omx \
+        libui \
         libgui \
         libcutils \
         libRScpp \
diff --git a/cmds/stagefright/SineSource.h b/cmds/stagefright/SineSource.h
index be05661..f1fb96d 100644
--- a/cmds/stagefright/SineSource.h
+++ b/cmds/stagefright/SineSource.h
@@ -2,7 +2,7 @@
 
 #define SINE_SOURCE_H_
 
-#include <media/stagefright/MediaSource.h>
+#include <media/MediaSource.h>
 #include <utils/Compat.h>
 
 namespace android {
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index ed44b4d..67017eb 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -112,7 +112,7 @@
     looper->setName("audioloop");
     looper->start();
 
-    sp<IMediaSource> encoder = MediaCodecSource::Create(looper, meta, source);
+    sp<MediaSource> encoder = MediaCodecSource::Create(looper, meta, source);
 
     if (fileOut != NULL) {
         // target file specified, write encoded AMR output
@@ -128,7 +128,7 @@
         writer->stop();
     } else {
         // otherwise decode to speaker
-        sp<IMediaSource> decoder = SimpleDecodingSource::Create(encoder);
+        sp<MediaSource> decoder = SimpleDecodingSource::Create(encoder);
 
         if (playToSpeaker) {
             AudioPlayer *player = new AudioPlayer(NULL);
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index 3108a67..6a58467 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -430,10 +430,10 @@
         CHECK(control != NULL);
         CHECK(control->isValid());
 
-        SurfaceComposerClient::openGlobalTransaction();
-        CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
-        CHECK_EQ(control->show(), (status_t)OK);
-        SurfaceComposerClient::closeGlobalTransaction();
+        SurfaceComposerClient::Transaction{}
+                 .setLayer(control, INT_MAX)
+                 .show(control)
+                 .apply();
 
         surface = control->getSurface();
         CHECK(surface != NULL);
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
index f219e69..f24d2dd 100644
--- a/cmds/stagefright/mediafilter.cpp
+++ b/cmds/stagefright/mediafilter.cpp
@@ -764,10 +764,10 @@
         CHECK(control != NULL);
         CHECK(control->isValid());
 
-        SurfaceComposerClient::openGlobalTransaction();
-        CHECK_EQ((status_t)OK, control->setLayer(INT_MAX));
-        CHECK_EQ((status_t)OK, control->show());
-        SurfaceComposerClient::closeGlobalTransaction();
+        SurfaceComposerClient::Transaction{}
+                .setLayer(control, INT_MAX)
+                .show(control)
+                .apply();
 
         surface = control->getSurface();
         CHECK(surface != NULL);
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 94c2e96..073ee6b 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -17,6 +17,7 @@
 #include "SineSource.h"
 
 #include <binder/ProcessState.h>
+#include <media/MediaExtractor.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -27,7 +28,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaCodecSource.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/SimpleDecodingSource.h>
 #include <media/MediaPlayerInterface.h>
@@ -120,7 +121,7 @@
     sp<MediaSource> source;
 
     sp<MediaExtractor> extractor =
-        MediaExtractor::Create(new FileSource(filename));
+        MediaExtractorFactory::Create(new FileSource(filename));
     if (extractor == NULL) {
         return NULL;
     }
@@ -320,7 +321,7 @@
     looper->setName("record");
     looper->start();
 
-    sp<IMediaSource> encoder =
+    sp<MediaSource> encoder =
         MediaCodecSource::Create(looper, encMeta, audioSource);
 
     encoder->start();
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp
index 7a3c842..af39d46 100644
--- a/cmds/stagefright/recordvideo.cpp
+++ b/cmds/stagefright/recordvideo.cpp
@@ -303,7 +303,7 @@
     looper->setName("recordvideo");
     looper->start();
 
-    sp<IMediaSource> encoder =
+    sp<MediaSource> encoder =
         MediaCodecSource::Create(
                 looper, enc_meta, source, NULL /* consumer */,
                 preferSoftwareCodec ? MediaCodecSource::FLAG_PREFER_SOFTWARE_CODEC : 0);
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index d70282b..bb517aa 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -31,9 +31,11 @@
 
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
+#include <media/DataSource.h>
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
 #include <media/ICrypto.h>
 #include <media/IMediaHTTPService.h>
-#include <media/IMediaCodecService.h>
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ALooper.h>
@@ -41,14 +43,14 @@
 #include <media/stagefright/foundation/AUtils.h>
 #include "include/NuCachedSource2.h"
 #include <media/stagefright/AudioPlayer.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/DataSourceFactory.h>
 #include <media/stagefright/JPEGSource.h>
+#include <media/stagefright/InterfaceUtils.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/SimpleDecodingSource.h>
 #include <media/stagefright/Utils.h>
@@ -65,7 +67,6 @@
 #include <gui/SurfaceComposerClient.h>
 
 #include <android/hardware/media/omx/1.0/IOmx.h>
-#include <media/omx/1.0/WOmx.h>
 
 using namespace android;
 
@@ -79,6 +80,7 @@
 static bool gDisplayHistogram;
 static bool showProgress = true;
 static String8 gWriteMP4Filename;
+static String8 gComponentNameOverride;
 
 static sp<ANativeWindow> gSurface;
 
@@ -141,7 +143,7 @@
     }
 }
 
-static void dumpSource(const sp<IMediaSource> &source, const String8 &filename) {
+static void dumpSource(const sp<MediaSource> &source, const String8 &filename) {
     FILE *out = fopen(filename.string(), "wb");
 
     CHECK_EQ((status_t)OK, source->start());
@@ -174,13 +176,13 @@
     out = NULL;
 }
 
-static void playSource(sp<IMediaSource> &source) {
+static void playSource(sp<MediaSource> &source) {
     sp<MetaData> meta = source->getFormat();
 
     const char *mime;
     CHECK(meta->findCString(kKeyMIMEType, &mime));
 
-    sp<IMediaSource> rawSource;
+    sp<MediaSource> rawSource;
     if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
         rawSource = source;
     } else {
@@ -192,7 +194,10 @@
             CHECK(!gPreferSoftwareCodec);
             flags |= MediaCodecList::kHardwareCodecsOnly;
         }
-        rawSource = SimpleDecodingSource::Create(source, flags, gSurface);
+        rawSource = SimpleDecodingSource::Create(
+                source, flags, gSurface,
+                gComponentNameOverride.isEmpty() ? nullptr : gComponentNameOverride.c_str(),
+                !gComponentNameOverride.isEmpty());
         if (rawSource == NULL) {
             return;
         }
@@ -404,7 +409,7 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 struct DetectSyncSource : public MediaSource {
-    explicit DetectSyncSource(const sp<IMediaSource> &source);
+    explicit DetectSyncSource(const sp<MediaSource> &source);
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
@@ -421,14 +426,14 @@
         OTHER,
     };
 
-    sp<IMediaSource> mSource;
+    sp<MediaSource> mSource;
     StreamType mStreamType;
     bool mSawFirstIDRFrame;
 
     DISALLOW_EVIL_CONSTRUCTORS(DetectSyncSource);
 };
 
-DetectSyncSource::DetectSyncSource(const sp<IMediaSource> &source)
+DetectSyncSource::DetectSyncSource(const sp<MediaSource> &source)
     : mSource(source),
       mStreamType(OTHER),
       mSawFirstIDRFrame(false) {
@@ -510,7 +515,7 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 static void writeSourcesToMP4(
-        Vector<sp<IMediaSource> > &sources, bool syncInfoPresent) {
+        Vector<sp<MediaSource> > &sources, bool syncInfoPresent) {
 #if 0
     sp<MPEG4Writer> writer =
         new MPEG4Writer(gWriteMP4Filename.string());
@@ -528,7 +533,7 @@
     writer->setMaxFileDuration(60000000ll);
 
     for (size_t i = 0; i < sources.size(); ++i) {
-        sp<IMediaSource> source = sources.editItemAt(i);
+        sp<MediaSource> source = sources.editItemAt(i);
 
         CHECK_EQ(writer->addSource(
                     syncInfoPresent ? source : new DetectSyncSource(source)),
@@ -545,7 +550,7 @@
     writer->stop();
 }
 
-static void performSeekTest(const sp<IMediaSource> &source) {
+static void performSeekTest(const sp<MediaSource> &source) {
     CHECK_EQ((status_t)OK, source->start());
 
     int64_t durationUs;
@@ -617,6 +622,7 @@
     fprintf(stderr, "       -o playback audio\n");
     fprintf(stderr, "       -w(rite) filename (write to .mp4 file)\n");
     fprintf(stderr, "       -k seek test\n");
+    fprintf(stderr, "       -N(ame) of the component\n");
     fprintf(stderr, "       -x display a histogram of decoding times/fps "
                     "(video only)\n");
     fprintf(stderr, "       -q don't show progress indicator\n");
@@ -702,7 +708,7 @@
     sp<ALooper> looper;
 
     int res;
-    while ((res = getopt(argc, argv, "haqn:lm:b:ptsrow:kxSTd:D:")) >= 0) {
+    while ((res = getopt(argc, argv, "haqn:lm:b:ptsrow:kN:xSTd:D:")) >= 0) {
         switch (res) {
             case 'a':
             {
@@ -731,6 +737,12 @@
                 break;
             }
 
+            case 'N':
+            {
+                gComponentNameOverride.setTo(optarg);
+                break;
+            }
+
             case 'l':
             {
                 listComponents = true;
@@ -909,37 +921,24 @@
     }
 
     if (listComponents) {
-        sp<IOMX> omx;
-        if (property_get_bool("persist.media.treble_omx", true)) {
-            using namespace ::android::hardware::media::omx::V1_0;
-            sp<IOmx> tOmx = IOmx::getService();
+        using ::android::hardware::hidl_vec;
+        using ::android::hardware::hidl_string;
+        using namespace ::android::hardware::media::omx::V1_0;
+        sp<IOmx> omx = IOmx::getService();
+        CHECK(omx.get() != nullptr);
 
-            CHECK(tOmx.get() != NULL);
-
-            omx = new utils::LWOmx(tOmx);
-        } else {
-            sp<IServiceManager> sm = defaultServiceManager();
-            sp<IBinder> binder = sm->getService(String16("media.codec"));
-            sp<IMediaCodecService> service = interface_cast<IMediaCodecService>(binder);
-
-            CHECK(service.get() != NULL);
-
-            omx = service->getOMX();
-        }
-        CHECK(omx.get() != NULL);
-
-        List<IOMX::ComponentInfo> list;
-        omx->listNodes(&list);
-
-        for (List<IOMX::ComponentInfo>::iterator it = list.begin();
-             it != list.end(); ++it) {
-            printf("%s\t Roles: ", (*it).mName.string());
-            for (List<String8>::iterator itRoles = (*it).mRoles.begin() ;
-                    itRoles != (*it).mRoles.end() ; ++itRoles) {
-                printf("%s\t", (*itRoles).string());
-            }
-            printf("\n");
-        }
+        hidl_vec<IOmx::ComponentInfo> nodeList;
+        auto transStatus = omx->listNodes([](
+                const auto& status, const auto& nodeList) {
+                    CHECK(status == Status::OK);
+                    for (const auto& info : nodeList) {
+                        printf("%s\t Roles: ", info.mName.c_str());
+                        for (const auto& role : info.mRoles) {
+                            printf("%s\t", role.c_str());
+                        }
+                    }
+                });
+        CHECK(transStatus.isOk());
     }
 
     sp<SurfaceComposerClient> composerClient;
@@ -960,10 +959,10 @@
             CHECK(control != NULL);
             CHECK(control->isValid());
 
-            SurfaceComposerClient::openGlobalTransaction();
-            CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
-            CHECK_EQ(control->show(), (status_t)OK);
-            SurfaceComposerClient::closeGlobalTransaction();
+            SurfaceComposerClient::Transaction{}
+                    .setLayer(control, INT_MAX)
+                    .show(control)
+                    .apply();
 
             gSurface = control->getSurface();
             CHECK(gSurface != NULL);
@@ -988,7 +987,7 @@
         const char *filename = argv[k];
 
         sp<DataSource> dataSource =
-            DataSource::CreateFromURI(NULL /* httpService */, filename);
+            DataSourceFactory::CreateFromURI(NULL /* httpService */, filename);
 
         if (strncasecmp(filename, "sine:", 5) && dataSource == NULL) {
             fprintf(stderr, "Unable to create data source.\n");
@@ -1002,8 +1001,8 @@
             isJPEG = true;
         }
 
-        Vector<sp<IMediaSource> > mediaSources;
-        sp<IMediaSource> mediaSource;
+        Vector<sp<MediaSource> > mediaSources;
+        sp<MediaSource> mediaSource;
 
         if (isJPEG) {
             mediaSource = new JPEGSource(dataSource);
@@ -1022,7 +1021,7 @@
                 mediaSources.push(mediaSource);
             }
         } else {
-            sp<IMediaExtractor> extractor = MediaExtractor::Create(dataSource);
+            sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(dataSource);
 
             if (extractor == NULL) {
                 fprintf(stderr, "could not create extractor.\n");
@@ -1049,7 +1048,8 @@
                 bool haveAudio = false;
                 bool haveVideo = false;
                 for (size_t i = 0; i < numTracks; ++i) {
-                    sp<IMediaSource> source = extractor->getTrack(i);
+                    sp<MediaSource> source = CreateMediaSourceFromIMediaSource(
+                            extractor->getTrack(i));
                     if (source == nullptr) {
                         fprintf(stderr, "skip NULL track %zu, track count %zu.\n", i, numTracks);
                         continue;
@@ -1084,7 +1084,7 @@
                             i, MediaExtractor::kIncludeExtensiveMetaData);
 
                     if (meta == NULL) {
-                        break;
+                        continue;
                     }
                     const char *mime;
                     meta->findCString(kKeyMIMEType, &mime);
@@ -1115,7 +1115,7 @@
                            thumbTimeUs, thumbTimeUs / 1E6);
                 }
 
-                mediaSource = extractor->getTrack(i);
+                mediaSource = CreateMediaSourceFromIMediaSource(extractor->getTrack(i));
                 if (mediaSource == nullptr) {
                     fprintf(stderr, "skip NULL track %zu, total tracks %zu.\n", i, numTracks);
                     return -1;
@@ -1128,7 +1128,7 @@
         } else if (dumpStream) {
             dumpSource(mediaSource, dumpStreamFilename);
         } else if (dumpPCMStream) {
-            sp<IMediaSource> decSource = SimpleDecodingSource::Create(mediaSource);
+            sp<MediaSource> decSource = SimpleDecodingSource::Create(mediaSource);
             dumpSource(decSource, dumpStreamFilename);
         } else if (seekTest) {
             performSeekTest(mediaSource);
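Note: the listComponents path now talks to the HIDL IOmx 1.0 service directly instead of going through IMediaCodecService. HIDL methods with output parameters deliver their results through a callback lambda, while the Return<> object only reports transport status. (The same file also moves local track handling from the binder IMediaSource type to the in-process MediaSource type, wrapping extractor tracks with CreateMediaSourceFromIMediaSource from InterfaceUtils.h.) A sketch of the callback idiom, under the same assumption as the hunk above that the IOmx service is registered:

    using ::android::hardware::media::omx::V1_0::IOmx;
    using ::android::hardware::media::omx::V1_0::Status;

    sp<IOmx> omx = IOmx::getService();
    CHECK(omx.get() != nullptr);
    auto ret = omx->listNodes([](const auto& status, const auto& nodes) {
        CHECK(status == Status::OK);            // API status, delivered inside the callback
        for (const auto& info : nodes) {
            printf("%s\n", info.mName.c_str());
        }
    });
    CHECK(ret.isOk());                          // transport status, read off the Return<> object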
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 2e1d240..b0199d8 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -21,15 +21,18 @@
 #include <binder/ProcessState.h>
 #include <cutils/properties.h> // for property_get
 
+#include <media/DataSource.h>
 #include <media/IMediaHTTPService.h>
 #include <media/IStreamSource.h>
+#include <media/MediaExtractor.h>
 #include <media/mediaplayer.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/DataSourceFactory.h>
+#include <media/stagefright/InterfaceUtils.h>
 #include <media/stagefright/MPEG2TSWriter.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MetaData.h>
 
 #include <binder/IServiceManager.h>
@@ -161,11 +164,11 @@
     : mCurrentBufferIndex(-1),
       mCurrentBufferOffset(0) {
     sp<DataSource> dataSource =
-        DataSource::CreateFromURI(NULL /* httpService */, filename);
+        DataSourceFactory::CreateFromURI(NULL /* httpService */, filename);
 
     CHECK(dataSource != NULL);
 
-    sp<IMediaExtractor> extractor = MediaExtractor::Create(dataSource);
+    sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(dataSource);
     CHECK(extractor != NULL);
 
     mWriter = new MPEG2TSWriter(
@@ -182,7 +185,7 @@
             continue;
         }
 
-        sp<IMediaSource> track = extractor->getTrack(i);
+        sp<MediaSource> track = CreateMediaSourceFromIMediaSource(extractor->getTrack(i));
         if (track == nullptr) {
             fprintf(stderr, "skip NULL track %zu, total tracks %zu\n", i, numTracks);
             continue;
@@ -335,10 +338,10 @@
     CHECK(control != NULL);
     CHECK(control->isValid());
 
-    SurfaceComposerClient::openGlobalTransaction();
-    CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
-    CHECK_EQ(control->show(), (status_t)OK);
-    SurfaceComposerClient::closeGlobalTransaction();
+    SurfaceComposerClient::Transaction{}
+            .setLayer(control, INT_MAX)
+            .show(control)
+            .apply();
 
     sp<Surface> surface = control->getSurface();
     CHECK(surface != NULL);
diff --git a/drm/libmediadrm/PluginMetricsReporting.cpp b/drm/libmediadrm/PluginMetricsReporting.cpp
index 57ff5b8..877cbd4 100644
--- a/drm/libmediadrm/PluginMetricsReporting.cpp
+++ b/drm/libmediadrm/PluginMetricsReporting.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "PluginMetricsReporting"
 #include <utils/Log.h>
+#include <inttypes.h>
 
 #include <media/PluginMetricsReporting.h>
 
@@ -81,10 +82,7 @@
 
     analyticsItem.setFinalized(true);
     if (!analyticsItem.selfrecord()) {
-      // Note the cast to int is because we build on 32 and 64 bit.
-      // The cast prevents a peculiar printf problem where one format cannot
-      // satisfy both.
-      ALOGE("selfrecord() returned false. sessioId %d", (int) sessionId);
+      ALOGE("selfrecord() returned false. sessioId %" PRId64, sessionId);
     }
 
     for (int i = 0; i < metricsGroup.metric_sub_group_size(); ++i) {
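Note: the logging fix above uses the <inttypes.h> format macros instead of casting the 64-bit session id down to int. A self-contained illustration (not part of the patch):

    #include <cinttypes>
    #include <cstdio>

    int main() {
        int64_t sessionId = 42;
        // PRId64 expands to the correct conversion specifier for int64_t on both
        // 32-bit and 64-bit builds, so no narrowing cast is needed.
        printf("sessionId %" PRId64 "\n", sessionId);
        return 0;
    }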
diff --git a/drm/mediadrm/plugins/clearkey/Android.bp b/drm/mediadrm/plugins/clearkey/Android.bp
index 385815c..4b7a63c 100644
--- a/drm/mediadrm/plugins/clearkey/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/Android.bp
@@ -53,6 +53,10 @@
 
     export_include_dirs: ["."],
     export_static_lib_headers: ["libjsmn"],
+
+    sanitize: {
+        integer_overflow: true,
+    },
 }
 
 //########################################################################
diff --git a/drm/mediadrm/plugins/clearkey/ClearKeyDrmProperties.h b/drm/mediadrm/plugins/clearkey/ClearKeyDrmProperties.h
new file mode 100644
index 0000000..a99e174
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/ClearKeyDrmProperties.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CLEARKEY_DRM_PROPERTIES_H_
+#define CLEARKEY_DRM_PROPERTIES_H_
+
+#include <utils/String8.h>
+
+namespace clearkeydrm {
+
+static const android::String8 kVendorKey("vendor");
+static const android::String8 kVendorValue("Google");
+static const android::String8 kVersionKey("version");
+static const android::String8 kVersionValue("1.0");
+static const android::String8 kPluginDescriptionKey("description");
+static const android::String8 kPluginDescriptionValue("ClearKey CDM");
+static const android::String8 kAlgorithmsKey("algorithms");
+static const android::String8 kAlgorithmsValue("");
+static const android::String8 kListenerTestSupportKey("listenerTestSupport");
+static const android::String8 kListenerTestSupportValue("true");
+
+static const android::String8 kDeviceIdKey("deviceId");
+static const uint8_t kTestDeviceIdData[] =
+        {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7,
+         0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf};
+} // namespace clearkeydrm
+
+#endif // CLEARKEY_DRM_PROPERTIES_H_
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
index ec07d87..944002d 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
@@ -22,7 +22,7 @@
 #include <utils/StrongPointer.h>
 
 #include "DrmPlugin.h"
-
+#include "ClearKeyDrmProperties.h"
 #include "Session.h"
 
 namespace {
@@ -44,7 +44,22 @@
 
 DrmPlugin::DrmPlugin(SessionLibrary* sessionLibrary)
         : mSessionLibrary(sessionLibrary) {
+
     mPlayPolicy.clear();
+    initProperties();
+}
+
+void DrmPlugin::initProperties() {
+    mStringProperties.clear();
+    mStringProperties.add(kVendorKey, kVendorValue);
+    mStringProperties.add(kVersionKey, kVersionValue);
+    mStringProperties.add(kPluginDescriptionKey, kPluginDescriptionValue);
+    mStringProperties.add(kAlgorithmsKey, kAlgorithmsValue);
+    mStringProperties.add(kListenerTestSupportKey, kListenerTestSupportValue);
+
+    Vector<uint8_t> testDeviceId;
+    testDeviceId.appendArray(kTestDeviceIdData, sizeof(kTestDeviceIdData) / sizeof(uint8_t));
+    mByteArrayProperties.add(kDeviceIdKey, testDeviceId);
 }
 
 status_t DrmPlugin::openSession(Vector<uint8_t>& sessionId) {
@@ -122,22 +137,63 @@
     return res;
 }
 
+status_t DrmPlugin::getPropertyByteArray(
+        const String8& name, Vector<uint8_t>& value) const {
+    ssize_t index = mByteArrayProperties.indexOfKey(name);
+    if (index < 0) {
+        ALOGE("App requested unknown property: %s", name.string());
+        return android::ERROR_DRM_CANNOT_HANDLE;
+    }
+    value = mByteArrayProperties.valueAt(index);
+    return android::OK;
+}
+
+status_t DrmPlugin::setPropertyByteArray(
+        const String8& name, const Vector<uint8_t>& value)
+{
+    UNUSED(value);
+    if (0 == name.compare(kDeviceIdKey)) {
+        ALOGD("Cannot set immutable property: %s", name.string());
+        return android::ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    // Setting of undefined properties is not supported
+    ALOGE("Failed to set property byte array, key=%s", name.string());
+    return android::ERROR_DRM_CANNOT_HANDLE;
+}
+
 status_t DrmPlugin::getPropertyString(
         const String8& name, String8& value) const {
-    if (name == "vendor") {
-        value = "Google";
-    } else if (name == "version") {
-        value = "1.0";
-    } else if (name == "description") {
-        value = "ClearKey CDM";
-    } else if (name == "algorithms") {
-        value = "";
-    } else if (name == "listenerTestSupport") {
-        value = "true";
-    } else {
-        ALOGE("App requested unknown string property %s", name.string());
+    ssize_t index = mStringProperties.indexOfKey(name);
+    if (index < 0) {
+        ALOGE("App requested unknown property: %s", name.string());
         return android::ERROR_DRM_CANNOT_HANDLE;
     }
+    value = mStringProperties.valueAt(index);
+    return android::OK;
+}
+
+status_t DrmPlugin::setPropertyString(
+        const String8& name, const String8& value) {
+    String8 immutableKeys;
+    immutableKeys.appendFormat("%s,%s,%s,%s",
+            kAlgorithmsKey.string(), kPluginDescriptionKey.string(),
+            kVendorKey.string(), kVersionKey.string());
+    if (immutableKeys.contains(name.string())) {
+        ALOGD("Cannot set immutable property: %s", name.string());
+        return android::ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    ssize_t index = mStringProperties.indexOfKey(name);
+    if (index < 0) {
+        ALOGE("Cannot set undefined property string, key=%s", name.string());
+        return android::ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    if (mStringProperties.add(name, value) < 0) {
+        ALOGE("Failed to set property string, key=%s", name.string());
+        return android::ERROR_DRM_UNKNOWN;
+    }
     return android::OK;
 }
 
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/DrmPlugin.h
index f37a706..62bc86f 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.h
@@ -137,25 +137,13 @@
             const String8& name, String8& value) const;
 
     virtual status_t getPropertyByteArray(
-            const String8& name, Vector<uint8_t>& value) const {
-        UNUSED(name);
-        UNUSED(value);
-        return android::ERROR_DRM_CANNOT_HANDLE;
-    }
+            const String8& name, Vector<uint8_t>& value) const;
 
     virtual status_t setPropertyString(
-            const String8& name, const String8& value) {
-        UNUSED(name);
-        UNUSED(value);
-        return android::ERROR_DRM_CANNOT_HANDLE;
-    }
+            const String8& name, const String8& value);
 
     virtual status_t setPropertyByteArray(
-            const String8& name, const Vector<uint8_t>& value) {
-        UNUSED(name);
-        UNUSED(value);
-        return android::ERROR_DRM_CANNOT_HANDLE;
-    }
+            const String8& name, const Vector<uint8_t>& value);
 
     virtual status_t setCipherAlgorithm(
             const Vector<uint8_t>& sessionId, const String8& algorithm) {
@@ -242,9 +230,13 @@
     }
 
 private:
+    void initProperties();
     void setPlayPolicy();
 
-    android::KeyedVector<android::String8, android::String8> mPlayPolicy;
+    android::KeyedVector<String8, String8> mPlayPolicy;
+    android::KeyedVector<String8, String8> mStringProperties;
+    android::KeyedVector<String8, Vector<uint8_t>> mByteArrayProperties;
+
     SessionLibrary* mSessionLibrary;
 
     DISALLOW_EVIL_CONSTRUCTORS(DrmPlugin);
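Note: DrmPlugin now keeps its string and byte-array properties in KeyedVector tables seeded by initProperties(), rather than hard-coding them in getPropertyString(), and the header drops the stubbed-out setters accordingly. A usage sketch, assuming a constructed DrmPlugin named `plugin` (construction via SessionLibrary is unchanged and not shown):

    String8 vendor;
    CHECK(plugin.getPropertyString(String8("vendor"), vendor) == android::OK);   // "Google"

    Vector<uint8_t> deviceId;
    CHECK(plugin.getPropertyByteArray(String8("deviceId"), deviceId) == android::OK);

    // Immutable keys ("vendor", "version", "description", "algorithms") and
    // undefined keys are rejected with ERROR_DRM_CANNOT_HANDLE.
    CHECK(plugin.setPropertyString(String8("vendor"), String8("x"))
            == android::ERROR_DRM_CANNOT_HANDLE);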
diff --git a/include/OWNERS b/include/OWNERS
index 3cb6d9c..d6bd998 100644
--- a/include/OWNERS
+++ b/include/OWNERS
@@ -1,5 +1,5 @@
 elaurent@google.com
-gkasten@android.com
+gkasten@google.com
 hunga@google.com
 jtinker@google.com
 lajos@google.com
diff --git a/include/common_time/OWNERS b/include/common_time/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/include/common_time/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/include/media/AudioClient.h b/include/media/AudioClient.h
new file mode 120000
index 0000000..a0530e4
--- /dev/null
+++ b/include/media/AudioClient.h
@@ -0,0 +1 @@
+../../media/libaudioclient/include/media/AudioClient.h
\ No newline at end of file
diff --git a/include/media/DataSource.h b/include/media/DataSource.h
new file mode 120000
index 0000000..905bec1
--- /dev/null
+++ b/include/media/DataSource.h
@@ -0,0 +1 @@
+../../media/libmediaextractor/include/media/DataSource.h
\ No newline at end of file
diff --git a/include/media/IAudioRecord.h b/include/media/IAudioRecord.h
deleted file mode 120000
index 7fbf8f2..0000000
--- a/include/media/IAudioRecord.h
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libaudioclient/include/media/IAudioRecord.h
\ No newline at end of file
diff --git a/include/media/IHDCP.h b/include/media/IHDCP.h
deleted file mode 120000
index 9d4568e..0000000
--- a/include/media/IHDCP.h
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libmedia/include/media/IHDCP.h
\ No newline at end of file
diff --git a/include/media/IMediaCodecService.h b/include/media/IMediaCodecService.h
deleted file mode 120000
index 37f6822..0000000
--- a/include/media/IMediaCodecService.h
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libmedia/include/media/IMediaCodecService.h
\ No newline at end of file
diff --git a/include/media/MediaDefs.h b/include/media/MediaDefs.h
deleted file mode 120000
index 9850603..0000000
--- a/include/media/MediaDefs.h
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libmedia/include/media/MediaDefs.h
\ No newline at end of file
diff --git a/include/media/MediaExtractor.h b/include/media/MediaExtractor.h
new file mode 120000
index 0000000..4b35fe1
--- /dev/null
+++ b/include/media/MediaExtractor.h
@@ -0,0 +1 @@
+../../media/libmediaextractor/include/media/MediaExtractor.h
\ No newline at end of file
diff --git a/include/media/MediaSource.h b/include/media/MediaSource.h
new file mode 120000
index 0000000..2e147c4
--- /dev/null
+++ b/include/media/MediaSource.h
@@ -0,0 +1 @@
+../../media/libmediaextractor/include/media/MediaSource.h
\ No newline at end of file
diff --git a/include/media/VolumeShaper.h b/include/media/VolumeShaper.h
index 302641f..a3aaece 100644
--- a/include/media/VolumeShaper.h
+++ b/include/media/VolumeShaper.h
@@ -37,6 +37,8 @@
 
 namespace android {
 
+namespace media {
+
 // The native VolumeShaper class mirrors the java VolumeShaper class;
 // in addition, the native class contains implementation for actual operation.
 //
@@ -101,7 +103,7 @@
      * See "frameworks/base/media/java/android/media/VolumeShaper.java" for
      * details on the Java implementation.
      */
-    class Configuration : public Interpolator<S, T>, public RefBase {
+    class Configuration : public Interpolator<S, T>, public RefBase, public Parcelable {
     public:
         // Must match with VolumeShaper.java in frameworks/base.
         enum Type : int32_t {
@@ -283,7 +285,7 @@
         }
 
         // The parcel layout must match VolumeShaper.java
-        status_t writeToParcel(Parcel *parcel) const {
+        status_t writeToParcel(Parcel *parcel) const override {
             if (parcel == nullptr) return BAD_VALUE;
             return parcel->writeInt32((int32_t)mType)
                     ?: parcel->writeInt32(mId)
@@ -294,17 +296,17 @@
                             ?: Interpolator<S, T>::writeToParcel(parcel);
         }
 
-        status_t readFromParcel(const Parcel &parcel) {
+        status_t readFromParcel(const Parcel *parcel) override {
             int32_t type, optionFlags;
-            return parcel.readInt32(&type)
+            return parcel->readInt32(&type)
                     ?: setType((Type)type)
-                    ?: parcel.readInt32(&mId)
+                    ?: parcel->readInt32(&mId)
                     ?: mType == TYPE_ID
                         ? NO_ERROR
-                        : parcel.readInt32(&optionFlags)
+                        : parcel->readInt32(&optionFlags)
                             ?: setOptionFlags((OptionFlag)optionFlags)
-                            ?: parcel.readDouble(&mDurationMs)
-                            ?: Interpolator<S, T>::readFromParcel(parcel)
+                            ?: parcel->readDouble(&mDurationMs)
+                            ?: Interpolator<S, T>::readFromParcel(*parcel)
                             ?: checkCurve();
         }
 
@@ -336,7 +338,7 @@
      * See "frameworks/base/media/java/android/media/VolumeShaper.java" for
      * details on the Java implementation.
      */
-    class Operation : public RefBase {
+    class Operation : public RefBase, public Parcelable {
     public:
         // Must match with VolumeShaper.java.
         enum Flag : int32_t {
@@ -418,18 +420,18 @@
             return NO_ERROR;
         }
 
-        status_t writeToParcel(Parcel *parcel) const {
+        status_t writeToParcel(Parcel *parcel) const override {
             if (parcel == nullptr) return BAD_VALUE;
             return parcel->writeInt32((int32_t)mFlags)
                     ?: parcel->writeInt32(mReplaceId)
                     ?: parcel->writeFloat(mXOffset);
         }
 
-        status_t readFromParcel(const Parcel &parcel) {
+        status_t readFromParcel(const Parcel *parcel) override {
             int32_t flags;
-            return parcel.readInt32(&flags)
-                    ?: parcel.readInt32(&mReplaceId)
-                    ?: parcel.readFloat(&mXOffset)
+            return parcel->readInt32(&flags)
+                    ?: parcel->readInt32(&mReplaceId)
+                    ?: parcel->readFloat(&mXOffset)
                     ?: setFlags((Flag)flags);
         }
 
@@ -455,7 +457,7 @@
      * See "frameworks/base/media/java/android/media/VolumeShaper.java" for
      * details on the Java implementation.
      */
-    class State : public RefBase {
+    class State : public RefBase, public Parcelable {
     public:
         State(T volume, S xOffset)
             : mVolume(volume)
@@ -481,15 +483,15 @@
             mXOffset = xOffset;
         }
 
-        status_t writeToParcel(Parcel *parcel) const {
+        status_t writeToParcel(Parcel *parcel) const override {
             if (parcel == nullptr) return BAD_VALUE;
             return parcel->writeFloat(mVolume)
                     ?: parcel->writeFloat(mXOffset);
         }
 
-        status_t readFromParcel(const Parcel &parcel) {
-            return parcel.readFloat(&mVolume)
-                     ?: parcel.readFloat(&mXOffset);
+        status_t readFromParcel(const Parcel *parcel) override {
+            return parcel->readFloat(&mVolume)
+                     ?: parcel->readFloat(&mXOffset);
         }
 
         std::string toString() const {
@@ -1020,6 +1022,8 @@
     std::list<VolumeShaper> mVolumeShapers; // list provides stable iterators on erase
 }; // VolumeHandler
 
+} // namespace media
+
 } // namespace android
 
 #pragma pop_macro("LOG_TAG")
diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h
deleted file mode 120000
index c35401e..0000000
--- a/include/media/nbaio/NBLog.h
+++ /dev/null
@@ -1 +0,0 @@
-../../../media/libnbaio/include/media/nbaio/NBLog.h
\ No newline at end of file
diff --git a/include/media/nbaio/PerformanceAnalysis.h b/include/media/nbaio/PerformanceAnalysis.h
deleted file mode 120000
index 7acfc90..0000000
--- a/include/media/nbaio/PerformanceAnalysis.h
+++ /dev/null
@@ -1 +0,0 @@
-../../../media/libnbaio/include/media/nbaio/PerformanceAnalysis.h
\ No newline at end of file
diff --git a/include/media/nblog/NBLog.h b/include/media/nblog/NBLog.h
new file mode 120000
index 0000000..3cc366c
--- /dev/null
+++ b/include/media/nblog/NBLog.h
@@ -0,0 +1 @@
+../../../media/libnblog/include/media/nblog/NBLog.h
\ No newline at end of file
diff --git a/include/media/nblog/PerformanceAnalysis.h b/include/media/nblog/PerformanceAnalysis.h
new file mode 120000
index 0000000..6ead3bc
--- /dev/null
+++ b/include/media/nblog/PerformanceAnalysis.h
@@ -0,0 +1 @@
+../../../media/libnblog/include/media/nblog/PerformanceAnalysis.h
\ No newline at end of file
diff --git a/include/media/nblog/ReportPerformance.h b/include/media/nblog/ReportPerformance.h
new file mode 120000
index 0000000..e9b8e80
--- /dev/null
+++ b/include/media/nblog/ReportPerformance.h
@@ -0,0 +1 @@
+../../../media/libnblog/include/media/nblog/ReportPerformance.h
\ No newline at end of file
diff --git a/include/private/media/OWNERS b/include/private/media/OWNERS
new file mode 100644
index 0000000..21723ba
--- /dev/null
+++ b/include/private/media/OWNERS
@@ -0,0 +1,3 @@
+elaurent@google.com
+gkasten@google.com
+hunga@google.com
diff --git a/include/soundtrigger/OWNERS b/include/soundtrigger/OWNERS
new file mode 100644
index 0000000..e83f6b9
--- /dev/null
+++ b/include/soundtrigger/OWNERS
@@ -0,0 +1,2 @@
+elaurent@google.com
+thorntonc@google.com
diff --git a/media/OWNERS b/media/OWNERS
index 1605efd..d49eb8d 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -2,6 +2,7 @@
 dwkang@google.com
 elaurent@google.com
 essick@google.com
+hkuang@google.com
 hunga@google.com
 jmtrivi@google.com
 krocard@google.com
diff --git a/media/audioserver/Android.mk b/media/audioserver/Android.mk
index 3ee7494..70c281a 100644
--- a/media/audioserver/Android.mk
+++ b/media/audioserver/Android.mk
@@ -3,7 +3,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_SRC_FILES := \
-	main_audioserver.cpp
+	main_audioserver.cpp \
+	../libaudioclient/aidl/android/media/IAudioRecord.aidl
 
 LOCAL_SHARED_LIBRARIES := \
 	libaaudioservice \
@@ -12,11 +13,13 @@
 	libbinder \
 	libcutils \
 	liblog \
+	libhidltransport \
+	libhwbinder \
+	libmedia \
 	libmedialogservice \
 	libnbaio \
 	libsoundtriggerservice \
-	libutils \
-	libhwbinder
+	libutils
 
 # TODO oboeservice is the old folder name for aaudioservice. It will be changed.
 LOCAL_C_INCLUDES := \
@@ -33,9 +36,13 @@
 	frameworks/av/media/libaaudio/include \
 	frameworks/av/media/libaaudio/src \
 	frameworks/av/media/libaaudio/src/binding \
+	frameworks/av/media/libmedia \
 	$(call include-path-for, audio-utils) \
 	external/sonic \
 
+LOCAL_AIDL_INCLUDES := \
+        frameworks/av/media/libaudioclient/aidl
+
 # If AUDIOSERVER_MULTILIB in device.mk is non-empty then it is used to control
 # the LOCAL_MULTILIB for all audioserver exclusive libraries.
 # This is relevant for 64 bit architectures where either or both
diff --git a/media/audioserver/OWNERS b/media/audioserver/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/media/audioserver/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index 9d42bce..75675a9 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -1,10 +1,13 @@
 service audioserver /system/bin/audioserver
-    class main
+    class core
     user audioserver
     # media gid needed for /dev/fm (radio) and for /data/misc/media (tee)
     group audio camera drmrpc inet media mediadrm net_bt net_bt_admin net_bw_acct
     ioprio rt 4
     writepid /dev/cpuset/foreground/tasks /dev/stune/foreground/tasks
+    onrestart restart vendor.audio-hal-2-0
+    # Keep the original service name for backward compatibility when upgrading
+    # O-MR1 devices with a framework-only update.
     onrestart restart audio-hal-2-0
 
 on property:vts.native_server.on=1
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index 474ef97..db57248 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -25,12 +25,9 @@
 #include <binder/IPCThreadState.h>
 #include <binder/ProcessState.h>
 #include <binder/IServiceManager.h>
+#include <hidl/HidlTransportSupport.h>
 #include <utils/Log.h>
 
-// FIXME: remove when BUG 31748996 is fixed
-#include <hwbinder/IPCThreadState.h>
-#include <hwbinder/ProcessState.h>
-
 // from LOCAL_C_INCLUDES
 #include "aaudio/AAudioTesting.h"
 #include "AudioFlinger.h"
@@ -38,12 +35,19 @@
 #include "AAudioService.h"
 #include "utility/AAudioUtilities.h"
 #include "MediaLogService.h"
+#include "MediaUtils.h"
 #include "SoundTriggerHwService.h"
 
 using namespace android;
 
 int main(int argc __unused, char **argv)
 {
+    // TODO: update with refined parameters
+    limitProcessMemory(
+        "audio.maxmem", /* "ro.audio.maxmem", property that defines limit */
+        (size_t)512 * (1 << 20), /* SIZE_MAX, upper limit in bytes */
+        20 /* upper limit as percentage of physical RAM */);
+
     signal(SIGPIPE, SIG_IGN);
 
     bool doLog = (bool) property_get_bool("ro.test_harness", 0);
@@ -128,6 +132,7 @@
             prctl(PR_SET_PDEATHSIG, SIGKILL);   // if parent media.log dies before me, kill me also
             setpgid(0, 0);                      // but if I die first, don't kill my parent
         }
+        android::hardware::configureRpcThreadpool(4, false /*callerWillJoin*/);
         sp<ProcessState> proc(ProcessState::self());
         sp<IServiceManager> sm = defaultServiceManager();
         ALOGI("ServiceManager: %p", sm.get());
@@ -145,10 +150,6 @@
 
         SoundTriggerHwService::instantiate();
         ProcessState::self()->startThreadPool();
-
-// FIXME: remove when BUG 31748996 is fixed
-        android::hardware::ProcessState::self()->startThreadPool();
-
         IPCThreadState::self()->joinThreadPool();
     }
 }
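Note: audioserver now sizes its own HIDL RPC threadpool via configureRpcThreadpool() instead of spinning up the hwbinder ProcessState threadpool directly, and caps its address space with limitProcessMemory() before anything else runs. A condensed sketch of the resulting startup order, with the values taken from the hunks above (service instantiation elided):

    limitProcessMemory("audio.maxmem", (size_t)512 * (1 << 20), 20);  // property, byte cap, % of RAM
    android::hardware::configureRpcThreadpool(4, false /* callerWillJoin */);
    // ... AudioFlinger and the other audio services are instantiated here ...
    ProcessState::self()->startThreadPool();       // binder threadpool
    IPCThreadState::self()->joinThreadPool();      // main thread joins binder; the HIDL pool runs detached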
diff --git a/media/common_time/OWNERS b/media/common_time/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/media/common_time/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/media/extractors/Android.bp b/media/extractors/Android.bp
new file mode 100644
index 0000000..e8176cf
--- /dev/null
+++ b/media/extractors/Android.bp
@@ -0,0 +1,3 @@
+subdirs = [
+    "*",
+]
diff --git a/media/libstagefright/AACExtractor.cpp b/media/extractors/aac/AACExtractor.cpp
similarity index 91%
rename from media/libstagefright/AACExtractor.cpp
rename to media/extractors/aac/AACExtractor.cpp
index 3ba1858..dfb54e2 100644
--- a/media/libstagefright/AACExtractor.cpp
+++ b/media/extractors/aac/AACExtractor.cpp
@@ -18,17 +18,16 @@
 #define LOG_TAG "AACExtractor"
 #include <utils/Log.h>
 
-#include "include/AACExtractor.h"
-#include "include/avc_utils.h"
-
+#include "AACExtractor.h"
+#include <media/DataSource.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <utils/String8.h>
 
@@ -140,13 +139,8 @@
     sp<AMessage> meta = _meta;
 
     if (meta == NULL) {
-        String8 mimeType;
-        float confidence;
-        sp<AMessage> _meta;
-
-        if (!SniffAAC(mDataSource, &mimeType, &confidence, &meta)) {
-            return;
-        }
+        ALOGE("no metadata specified");
+        return;
     }
 
     int64_t offset;
@@ -213,7 +207,7 @@
     return mInitCheck == OK ? 1 : 0;
 }
 
-sp<IMediaSource> AACExtractor::getTrack(size_t index) {
+sp<MediaSource> AACExtractor::getTrack(size_t index) {
     if (mInitCheck != OK || index != 0) {
         return NULL;
     }
@@ -337,7 +331,13 @@
 
 ////////////////////////////////////////////////////////////////////////////////
 
-bool SniffAAC(
+static MediaExtractor* CreateExtractor(
+        const sp<DataSource> &source,
+        const sp<AMessage>& meta) {
+    return new AACExtractor(source, meta);
+}
+
+static MediaExtractor::CreatorFunc Sniff(
         const sp<DataSource> &source, String8 *mimeType, float *confidence,
         sp<AMessage> *meta) {
     off64_t pos = 0;
@@ -346,7 +346,7 @@
         uint8_t id3header[10];
         if (source->readAt(pos, id3header, sizeof(id3header))
                 < (ssize_t)sizeof(id3header)) {
-            return false;
+            return NULL;
         }
 
         if (memcmp("ID3", id3header, 3)) {
@@ -372,7 +372,7 @@
     uint8_t header[2];
 
     if (source->readAt(pos, &header, 2) != 2) {
-        return false;
+        return NULL;
     }
 
     // ADTS syncword
@@ -383,10 +383,26 @@
         *meta = new AMessage;
         (*meta)->setInt64("offset", pos);
 
-        return true;
+        return CreateExtractor;
     }
 
-    return false;
+    return NULL;
 }
 
-}  // namespace android
+
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+        UUID("4fd80eae-03d2-4d72-9eb9-48fa6bb54613"),
+        1, // version
+        "AAC Extractor",
+        Sniff
+    };
+}
+
+} // extern "C"
+
+} // namespace android
diff --git a/media/libstagefright/include/AACExtractor.h b/media/extractors/aac/AACExtractor.h
similarity index 93%
rename from media/libstagefright/include/AACExtractor.h
rename to media/extractors/aac/AACExtractor.h
index bd4c41c..aede185 100644
--- a/media/libstagefright/include/AACExtractor.h
+++ b/media/extractors/aac/AACExtractor.h
@@ -18,7 +18,7 @@
 
 #define AAC_EXTRACTOR_H_
 
-#include <media/stagefright/MediaExtractor.h>
+#include <media/MediaExtractor.h>
 
 #include <utils/Vector.h>
 
@@ -32,7 +32,7 @@
     AACExtractor(const sp<DataSource> &source, const sp<AMessage> &meta);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
diff --git a/media/extractors/aac/Android.bp b/media/extractors/aac/Android.bp
new file mode 100644
index 0000000..7937a29
--- /dev/null
+++ b/media/extractors/aac/Android.bp
@@ -0,0 +1,39 @@
+cc_library_shared {
+
+    srcs: ["AACExtractor.cpp"],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    name: "libaacextractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/extractors/aac/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
copy to media/extractors/aac/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/matroska/NOTICE b/media/extractors/aac/NOTICE
similarity index 100%
copy from media/libstagefright/matroska/NOTICE
copy to media/extractors/aac/NOTICE
diff --git a/media/extractors/aac/exports.lds b/media/extractors/aac/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/aac/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
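Note: the AAC extractor (and, below, AMR, FLAC and MIDI) becomes a standalone plugin: each builds as its own cc_library_shared installed under extractors/, hides every symbol except GETEXTRACTORDEF via exports.lds, and hands the framework a sniff function through MediaExtractor::ExtractorDef. The sketch below is purely illustrative of how a loader could resolve that one exported symbol; the real discovery logic lives in MediaExtractorFactory, which is not part of the hunks shown here, and the install path is only implied by relative_install_path.

    #include <dlfcn.h>

    typedef MediaExtractor::ExtractorDef (*GetExtractorDef)();

    void *lib = dlopen("/system/lib64/extractors/libaacextractor.so", RTLD_NOW);
    if (lib != nullptr) {
        auto getDef = (GetExtractorDef)dlsym(lib, "GETEXTRACTORDEF");
        if (getDef != nullptr) {
            MediaExtractor::ExtractorDef def = getDef();
            // def carries the interface version, plugin UUID, plugin version,
            // a human-readable name and the Sniff function registered above;
            // sniffing a DataSource yields a CreatorFunc that builds the extractor.
        }
    }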
diff --git a/media/libstagefright/AMRExtractor.cpp b/media/extractors/amr/AMRExtractor.cpp
similarity index 89%
rename from media/libstagefright/AMRExtractor.cpp
rename to media/extractors/amr/AMRExtractor.cpp
index 2892520..b8967bd 100644
--- a/media/libstagefright/AMRExtractor.cpp
+++ b/media/extractors/amr/AMRExtractor.cpp
@@ -18,14 +18,14 @@
 #define LOG_TAG "AMRExtractor"
 #include <utils/Log.h>
 
-#include "include/AMRExtractor.h"
+#include "AMRExtractor.h"
 
+#include <media/DataSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <utils/String8.h>
 
@@ -186,7 +186,7 @@
     return mInitCheck == OK ? 1 : 0;
 }
 
-sp<IMediaSource> AMRExtractor::getTrack(size_t index) {
+sp<MediaSource> AMRExtractor::getTrack(size_t index) {
     if (mInitCheck != OK || index != 0) {
         return NULL;
     }
@@ -362,4 +362,31 @@
     return false;
 }
 
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+        UUID("c86639c9-2f31-40ac-a715-fa01b4493aaf"),
+        1,
+        "AMR Extractor",
+        [](
+                const sp<DataSource> &source,
+                String8 *mimeType,
+                float *confidence,
+                sp<AMessage> *meta __unused) -> MediaExtractor::CreatorFunc {
+            if (SniffAMR(source, mimeType, confidence, meta)) {
+                return [](
+                        const sp<DataSource> &source,
+                        const sp<AMessage>& meta __unused) -> MediaExtractor* {
+                    return new AMRExtractor(source);};
+            }
+            return NULL;
+        }
+    };
+}
+
+} // extern "C"
+
 }  // namespace android
diff --git a/media/libstagefright/include/AMRExtractor.h b/media/extractors/amr/AMRExtractor.h
similarity index 93%
rename from media/libstagefright/include/AMRExtractor.h
rename to media/extractors/amr/AMRExtractor.h
index 8abcb12..79b22d6 100644
--- a/media/libstagefright/include/AMRExtractor.h
+++ b/media/extractors/amr/AMRExtractor.h
@@ -19,7 +19,7 @@
 #define AMR_EXTRACTOR_H_
 
 #include <utils/Errors.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/MediaExtractor.h>
 
 namespace android {
 
@@ -32,7 +32,7 @@
     explicit AMRExtractor(const sp<DataSource> &source);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
diff --git a/media/extractors/amr/Android.bp b/media/extractors/amr/Android.bp
new file mode 100644
index 0000000..e5bbe31
--- /dev/null
+++ b/media/extractors/amr/Android.bp
@@ -0,0 +1,39 @@
+cc_library_shared {
+
+    srcs: ["AMRExtractor.cpp"],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    name: "libamrextractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/extractors/amr/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
copy to media/extractors/amr/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/matroska/NOTICE b/media/extractors/amr/NOTICE
similarity index 100%
copy from media/libstagefright/matroska/NOTICE
copy to media/extractors/amr/NOTICE
diff --git a/media/extractors/amr/exports.lds b/media/extractors/amr/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/amr/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
new file mode 100644
index 0000000..84ba6d3
--- /dev/null
+++ b/media/extractors/flac/Android.bp
@@ -0,0 +1,44 @@
+cc_library_shared {
+
+    srcs: ["FLACExtractor.cpp"],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/include",
+        "external/flac/include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libFLAC",
+    ],
+
+    name: "libflacextractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libstagefright/FLACExtractor.cpp b/media/extractors/flac/FLACExtractor.cpp
similarity index 84%
rename from media/libstagefright/FLACExtractor.cpp
rename to media/extractors/flac/FLACExtractor.cpp
index 1b88e5d..0c88246 100644
--- a/media/libstagefright/FLACExtractor.cpp
+++ b/media/extractors/flac/FLACExtractor.cpp
@@ -18,22 +18,147 @@
 #define LOG_TAG "FLACExtractor"
 #include <utils/Log.h>
 
-#include "include/FLACExtractor.h"
-// Vorbis comments
-#include "include/OggExtractor.h"
+#include "FLACExtractor.h"
 // libFLAC parser
 #include "FLAC/stream_decoder.h"
 
+#include <media/DataSource.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/foundation/base64.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 
 namespace android {
 
+// also exists in OggExtractor, candidate for moving to utility/support library?
+static void extractAlbumArt(
+        const sp<MetaData> &fileMeta, const void *data, size_t size) {
+    ALOGV("extractAlbumArt from '%s'", (const char *)data);
+
+    sp<ABuffer> flacBuffer = decodeBase64(AString((const char *)data, size));
+    if (flacBuffer == NULL) {
+        ALOGE("malformed base64 encoded data.");
+        return;
+    }
+
+    size_t flacSize = flacBuffer->size();
+    uint8_t *flac = flacBuffer->data();
+    ALOGV("got flac of size %zu", flacSize);
+
+    uint32_t picType;
+    uint32_t typeLen;
+    uint32_t descLen;
+    uint32_t dataLen;
+    char type[128];
+
+    if (flacSize < 8) {
+        return;
+    }
+
+    picType = U32_AT(flac);
+
+    if (picType != 3) {
+        // This is not a front cover.
+        return;
+    }
+
+    typeLen = U32_AT(&flac[4]);
+    if (typeLen > sizeof(type) - 1) {
+        return;
+    }
+
+    // we've already checked above that flacSize >= 8
+    if (flacSize - 8 < typeLen) {
+        return;
+    }
+
+    memcpy(type, &flac[8], typeLen);
+    type[typeLen] = '\0';
+
+    ALOGV("picType = %d, type = '%s'", picType, type);
+
+    if (!strcmp(type, "-->")) {
+        // This is not inline cover art, but an external url instead.
+        return;
+    }
+
+    if (flacSize < 32 || flacSize - 32 < typeLen) {
+        return;
+    }
+
+    descLen = U32_AT(&flac[8 + typeLen]);
+    if (flacSize - 32 - typeLen < descLen) {
+        return;
+    }
+
+    dataLen = U32_AT(&flac[8 + typeLen + 4 + descLen + 16]);
+
+    // we've already checked above that (flacSize - 32 - typeLen - descLen) >= 0
+    if (flacSize - 32 - typeLen - descLen < dataLen) {
+        return;
+    }
+
+    ALOGV("got image data, %zu trailing bytes",
+         flacSize - 32 - typeLen - descLen - dataLen);
+
+    fileMeta->setData(
+            kKeyAlbumArt, 0, &flac[8 + typeLen + 4 + descLen + 20], dataLen);
+
+    fileMeta->setCString(kKeyAlbumArtMIME, type);
+}
+
+// also exists in OggExtractor, candidate for moving to utility/support library?
+static void parseVorbisComment(
+        const sp<MetaData> &fileMeta, const char *comment, size_t commentLength)
+{
+    struct {
+        const char *const mTag;
+        uint32_t mKey;
+    } kMap[] = {
+        { "TITLE", kKeyTitle },
+        { "ARTIST", kKeyArtist },
+        { "ALBUMARTIST", kKeyAlbumArtist },
+        { "ALBUM ARTIST", kKeyAlbumArtist },
+        { "COMPILATION", kKeyCompilation },
+        { "ALBUM", kKeyAlbum },
+        { "COMPOSER", kKeyComposer },
+        { "GENRE", kKeyGenre },
+        { "AUTHOR", kKeyAuthor },
+        { "TRACKNUMBER", kKeyCDTrackNumber },
+        { "DISCNUMBER", kKeyDiscNumber },
+        { "DATE", kKeyDate },
+        { "YEAR", kKeyYear },
+        { "LYRICIST", kKeyWriter },
+        { "METADATA_BLOCK_PICTURE", kKeyAlbumArt },
+        { "ANDROID_LOOP", kKeyAutoLoop },
+    };
+
+    for (size_t j = 0; j < sizeof(kMap) / sizeof(kMap[0]); ++j) {
+        size_t tagLen = strlen(kMap[j].mTag);
+        if (!strncasecmp(kMap[j].mTag, comment, tagLen)
+                && comment[tagLen] == '=') {
+            if (kMap[j].mKey == kKeyAlbumArt) {
+                extractAlbumArt(
+                        fileMeta,
+                        &comment[tagLen + 1],
+                        commentLength - tagLen - 1);
+            } else if (kMap[j].mKey == kKeyAutoLoop) {
+                if (!strcasecmp(&comment[tagLen + 1], "true")) {
+                    fileMeta->setInt32(kKeyAutoLoop, true);
+                }
+            } else {
+                fileMeta->setCString(kMap[j].mKey, &comment[tagLen + 1]);
+            }
+        }
+    }
+
+}
+
 class FLACParser;
 
 class FLACSource : public MediaSource {
@@ -811,7 +936,7 @@
     return mInitCheck == OK ? 1 : 0;
 }
 
-sp<IMediaSource> FLACExtractor::getTrack(size_t index)
+sp<MediaSource> FLACExtractor::getTrack(size_t index)
 {
     if (mInitCheck != OK || index > 0) {
         return NULL;
@@ -864,4 +989,32 @@
     return true;
 }
 
+
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+            UUID("1364b048-cc45-4fda-9934-327d0ebf9829"),
+            1,
+            "FLAC Extractor",
+            [](
+                    const sp<DataSource> &source,
+                    String8 *mimeType,
+                    float *confidence,
+                    sp<AMessage> *meta __unused) -> MediaExtractor::CreatorFunc {
+                if (SniffFLAC(source, mimeType, confidence, meta)) {
+                    return [](
+                            const sp<DataSource> &source,
+                            const sp<AMessage>& meta __unused) -> MediaExtractor* {
+                        return new FLACExtractor(source);};
+                }
+                return NULL;
+            }
+     };
+}
+
+} // extern "C"
+
 }  // namespace android
diff --git a/media/libstagefright/include/FLACExtractor.h b/media/extractors/flac/FLACExtractor.h
similarity index 91%
rename from media/libstagefright/include/FLACExtractor.h
rename to media/extractors/flac/FLACExtractor.h
index 51bc139..6907ceb 100644
--- a/media/libstagefright/include/FLACExtractor.h
+++ b/media/extractors/flac/FLACExtractor.h
@@ -17,8 +17,8 @@
 #ifndef FLAC_EXTRACTOR_H_
 #define FLAC_EXTRACTOR_H_
 
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/DataSource.h>
+#include <media/MediaExtractor.h>
 #include <utils/String8.h>
 
 namespace android {
@@ -32,7 +32,7 @@
     explicit FLACExtractor(const sp<DataSource> &source);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/extractors/flac/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
copy to media/extractors/flac/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/matroska/NOTICE b/media/extractors/flac/NOTICE
similarity index 100%
copy from media/libstagefright/matroska/NOTICE
copy to media/extractors/flac/NOTICE
diff --git a/media/extractors/flac/exports.lds b/media/extractors/flac/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/flac/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
new file mode 100644
index 0000000..9af128e
--- /dev/null
+++ b/media/extractors/midi/Android.bp
@@ -0,0 +1,43 @@
+cc_library_shared {
+
+    srcs: ["MidiExtractor.cpp"],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libmedia_midiiowrapper",
+        "libsonivox",
+    ],
+    name: "libmidiextractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/extractors/midi/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
copy to media/extractors/midi/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/MidiExtractor.cpp b/media/extractors/midi/MidiExtractor.cpp
similarity index 89%
rename from media/libstagefright/MidiExtractor.cpp
rename to media/extractors/midi/MidiExtractor.cpp
index 7930bbb..a8509fc 100644
--- a/media/libstagefright/MidiExtractor.cpp
+++ b/media/extractors/midi/MidiExtractor.cpp
@@ -18,14 +18,14 @@
 #define LOG_TAG "MidiExtractor"
 #include <utils/Log.h>
 
-#include "include/MidiExtractor.h"
+#include "MidiExtractor.h"
 
 #include <media/MidiIoWrapper.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/MediaSource.h>
 #include <libsonivox/eas_reverb.h>
 
 namespace android {
@@ -282,7 +282,7 @@
     return mInitCheck == OK ? 1 : 0;
 }
 
-sp<IMediaSource> MidiExtractor::getTrack(size_t index)
+sp<MediaSource> MidiExtractor::getTrack(size_t index)
 {
     if (mInitCheck != OK || index > 0) {
         return NULL;
@@ -323,4 +323,31 @@
 
 }
 
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+        UUID("ef6cca0a-f8a2-43e6-ba5f-dfcd7c9a7ef2"),
+        1,
+        "MIDI Extractor",
+        [](
+                const sp<DataSource> &source,
+                String8 *mimeType,
+                float *confidence,
+                sp<AMessage> *meta __unused) -> MediaExtractor::CreatorFunc {
+            if (SniffMidi(source, mimeType, confidence, meta)) {
+                return [](
+                        const sp<DataSource> &source,
+                        const sp<AMessage>& meta __unused) -> MediaExtractor* {
+                    return new MidiExtractor(source);};
+            }
+            return NULL;
+        }
+    };
+}
+
+} // extern "C"
+
 }  // namespace android
diff --git a/media/libstagefright/include/MidiExtractor.h b/media/extractors/midi/MidiExtractor.h
similarity index 94%
rename from media/libstagefright/include/MidiExtractor.h
rename to media/extractors/midi/MidiExtractor.h
index 94d2d08..0fae94a 100644
--- a/media/libstagefright/include/MidiExtractor.h
+++ b/media/extractors/midi/MidiExtractor.h
@@ -17,8 +17,8 @@
 #ifndef MIDI_EXTRACTOR_H_
 #define MIDI_EXTRACTOR_H_
 
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/DataSource.h>
+#include <media/MediaExtractor.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/MidiIoWrapper.h>
@@ -56,7 +56,7 @@
     explicit MidiExtractor(const sp<DataSource> &source);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
diff --git a/media/libstagefright/matroska/NOTICE b/media/extractors/midi/NOTICE
similarity index 100%
copy from media/libstagefright/matroska/NOTICE
copy to media/extractors/midi/NOTICE
diff --git a/media/extractors/midi/exports.lds b/media/extractors/midi/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/midi/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
new file mode 100644
index 0000000..0301ffa
--- /dev/null
+++ b/media/extractors/mkv/Android.bp
@@ -0,0 +1,47 @@
+cc_library_shared {
+
+    srcs: ["MatroskaExtractor.cpp"],
+
+    include_dirs: [
+        "external/flac/include",
+        "external/libvpx/libwebm",
+        "frameworks/av/media/libstagefright/flac/dec",
+        "frameworks/av/media/libstagefright/include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libstagefright_flacdec",
+        "libwebm",
+    ],
+
+    name: "libmkvextractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/extractors/mkv/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
copy to media/extractors/mkv/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
similarity index 94%
rename from media/libstagefright/matroska/MatroskaExtractor.cpp
rename to media/extractors/mkv/MatroskaExtractor.cpp
index 462eff6..e199f03 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -20,20 +20,20 @@
 
 #include "FLACDecoder.h"
 #include "MatroskaExtractor.h"
-#include "avc_utils.h"
 
+#include <media/DataSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 #include <utils/String8.h>
 
 #include <inttypes.h>
@@ -542,6 +542,31 @@
     return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
 }
 
+static AString uriDebugString(const AString &uri) {
+    // find scheme
+    AString scheme;
+    const char *chars = uri.c_str();
+    for (size_t i = 0; i < uri.size(); i++) {
+        const char c = chars[i];
+        if (!isascii(c)) {
+            break;
+        } else if (isalpha(c)) {
+            continue;
+        } else if (i == 0) {
+            // first character must be a letter
+            break;
+        } else if (isdigit(c) || c == '+' || c == '.' || c =='-') {
+            continue;
+        } else if (c != ':') {
+            break;
+        }
+        scheme = AString(uri, 0, i);
+        scheme.append("://<suppressed>");
+        return scheme;
+    }
+    return AString("<no-scheme URI suppressed>");
+}
+
 void MatroskaSource::clearPendingFrames() {
     while (!mPendingFrames.empty()) {
         MediaBuffer *frame = *mPendingFrames.begin();
@@ -678,18 +703,22 @@
 
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
-    if (options && options->getSeekTo(&seekTimeUs, &mode)
-            && !mExtractor->isLiveStreaming()) {
-        clearPendingFrames();
+    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+        if (mode == ReadOptions::SEEK_FRAME_INDEX) {
+            return ERROR_UNSUPPORTED;
+        }
 
-        // The audio we want is located by using the Cues to seek the video
-        // stream to find the target Cluster then iterating to finalize for
-        // audio.
-        int64_t actualFrameTimeUs;
-        mBlockIter.seek(seekTimeUs, mIsAudio, &actualFrameTimeUs);
+        if (!mExtractor->isLiveStreaming()) {
+            clearPendingFrames();
 
-        if (mode == ReadOptions::SEEK_CLOSEST) {
-            targetSampleTimeUs = actualFrameTimeUs;
+            // The audio we want is located by using the Cues to seek the video
+            // stream to find the target Cluster then iterating to finalize for
+            // audio.
+            int64_t actualFrameTimeUs;
+            mBlockIter.seek(seekTimeUs, mIsAudio, &actualFrameTimeUs);
+            if (mode == ReadOptions::SEEK_CLOSEST) {
+                targetSampleTimeUs = actualFrameTimeUs;
+            }
         }
     }
 
@@ -899,7 +928,7 @@
     return mTracks.size();
 }
 
-sp<IMediaSource> MatroskaExtractor::getTrack(size_t index) {
+sp<MediaSource> MatroskaExtractor::getTrack(size_t index) {
     if (index >= mTracks.size()) {
         return NULL;
     }
@@ -1535,4 +1564,32 @@
     return true;
 }
 
+
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+        UUID("abbedd92-38c4-4904-a4c1-b3f45f899980"),
+        1,
+        "Matroska Extractor",
+        [](
+                const sp<DataSource> &source,
+                String8 *mimeType,
+                float *confidence,
+                sp<AMessage> *meta __unused) -> MediaExtractor::CreatorFunc {
+            if (SniffMatroska(source, mimeType, confidence, meta)) {
+                return [](
+                        const sp<DataSource> &source,
+                        const sp<AMessage>& meta __unused) -> MediaExtractor* {
+                    return new MatroskaExtractor(source);};
+            }
+            return NULL;
+        }
+    };
+}
+
+} // extern "C"
+
 }  // namespace android
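The seek handling above now rejects ReadOptions::SEEK_FRAME_INDEX for Matroska, while the MP4 source later in this patch maps it to SampleTable::kFlagFrameIndex. A small hypothetical caller sketch, not part of this patch, assuming the seek value carries a frame index in this mode:

// Hypothetical caller sketch, NOT part of this patch: request a frame by
// index from a track's MediaSource. The MP4 source changed later in this
// patch honors SEEK_FRAME_INDEX; the Matroska source returns
// ERROR_UNSUPPORTED as shown above. That the seek value carries a frame
// index in this mode is an assumption of this sketch.
#include <media/MediaSource.h>
#include <media/stagefright/MediaBuffer.h>

namespace android {

static status_t readFrameByIndex(
        const sp<MediaSource> &track, int64_t frameIndex, MediaBuffer **out) {
    MediaSource::ReadOptions options;
    options.setSeekTo(frameIndex, MediaSource::ReadOptions::SEEK_FRAME_INDEX);
    return track->read(out, &options);
}

}  // namespace android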
diff --git a/media/libstagefright/matroska/MatroskaExtractor.h b/media/extractors/mkv/MatroskaExtractor.h
similarity index 96%
rename from media/libstagefright/matroska/MatroskaExtractor.h
rename to media/extractors/mkv/MatroskaExtractor.h
index 19775ce..26f8d19 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.h
+++ b/media/extractors/mkv/MatroskaExtractor.h
@@ -20,7 +20,7 @@
 
 #include "mkvparser/mkvparser.h"
 
-#include <media/stagefright/MediaExtractor.h>
+#include <media/MediaExtractor.h>
 #include <utils/Vector.h>
 #include <utils/threads.h>
 
@@ -38,7 +38,7 @@
 
     virtual size_t countTracks();
 
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
 
     virtual sp<MetaData> getTrackMetaData(
             size_t index, uint32_t flags);
diff --git a/media/libstagefright/matroska/NOTICE b/media/extractors/mkv/NOTICE
similarity index 100%
copy from media/libstagefright/matroska/NOTICE
copy to media/extractors/mkv/NOTICE
diff --git a/media/extractors/mkv/exports.lds b/media/extractors/mkv/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/mkv/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
diff --git a/media/extractors/mp3/Android.bp b/media/extractors/mp3/Android.bp
new file mode 100644
index 0000000..d93562c
--- /dev/null
+++ b/media/extractors/mp3/Android.bp
@@ -0,0 +1,47 @@
+cc_library_shared {
+
+    srcs: [
+            "MP3Extractor.cpp",
+            "VBRISeeker.cpp",
+            "XINGSeeker.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libstagefright_id3",
+    ],
+
+    name: "libmp3extractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/extractors/mp3/MP3Extractor.cpp
similarity index 94%
rename from media/libstagefright/MP3Extractor.cpp
rename to media/extractors/mp3/MP3Extractor.cpp
index 13f9928..f26ed25 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/extractors/mp3/MP3Extractor.cpp
@@ -18,23 +18,23 @@
 #define LOG_TAG "MP3Extractor"
 #include <utils/Log.h>
 
-#include "include/MP3Extractor.h"
+#include "MP3Extractor.h"
 
-#include "include/avc_utils.h"
-#include "include/ID3.h"
-#include "include/VBRISeeker.h"
-#include "include/XINGSeeker.h"
+#include "ID3.h"
+#include "VBRISeeker.h"
+#include "XINGSeeker.h"
 
+#include <media/DataSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 #include <utils/String8.h>
 
 namespace android {
@@ -407,7 +407,7 @@
     return mInitCheck != OK ? 0 : 1;
 }
 
-sp<IMediaSource> MP3Extractor::getTrack(size_t index) {
+sp<MediaSource> MP3Extractor::getTrack(size_t index) {
     if (mInitCheck != OK || index != 0) {
         return NULL;
     }
@@ -666,7 +666,13 @@
     return meta;
 }
 
-bool SniffMP3(
+static MediaExtractor* CreateExtractor(
+        const sp<DataSource> &source,
+        const sp<AMessage>& meta) {
+    return new MP3Extractor(source, meta);
+}
+
+static MediaExtractor::CreatorFunc Sniff(
         const sp<DataSource> &source, String8 *mimeType,
         float *confidence, sp<AMessage> *meta) {
     off64_t pos = 0;
@@ -674,15 +680,15 @@
     uint32_t header;
     uint8_t mpeg_header[5];
     if (source->readAt(0, mpeg_header, sizeof(mpeg_header)) < (ssize_t)sizeof(mpeg_header)) {
-        return false;
+        return NULL;
     }
 
     if (!memcmp("\x00\x00\x01\xba", mpeg_header, 4) && (mpeg_header[4] >> 4) == 2) {
         ALOGV("MPEG1PS container is not supported!");
-        return false;
+        return NULL;
     }
     if (!Resync(source, 0, &pos, &post_id3_pos, &header)) {
-        return false;
+        return NULL;
     }
 
     *meta = new AMessage;
@@ -693,7 +699,22 @@
     *mimeType = MEDIA_MIMETYPE_AUDIO_MPEG;
     *confidence = 0.2f;
 
-    return true;
+    return CreateExtractor;
 }
 
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+        UUID("812a3f6c-c8cf-46de-b529-3774b14103d4"),
+        1, // version
+        "MP3 Extractor",
+        Sniff
+    };
+}
+
+} // extern "C"
+
 }  // namespace android
diff --git a/media/libstagefright/include/MP3Extractor.h b/media/extractors/mp3/MP3Extractor.h
similarity index 93%
rename from media/libstagefright/include/MP3Extractor.h
rename to media/extractors/mp3/MP3Extractor.h
index 2fd04f2..f0ab6b0 100644
--- a/media/libstagefright/include/MP3Extractor.h
+++ b/media/extractors/mp3/MP3Extractor.h
@@ -19,7 +19,7 @@
 #define MP3_EXTRACTOR_H_
 
 #include <utils/Errors.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/MediaExtractor.h>
 
 namespace android {
 
@@ -34,7 +34,7 @@
     MP3Extractor(const sp<DataSource> &source, const sp<AMessage> &meta);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
diff --git a/media/libstagefright/include/MP3Seeker.h b/media/extractors/mp3/MP3Seeker.h
similarity index 100%
rename from media/libstagefright/include/MP3Seeker.h
rename to media/extractors/mp3/MP3Seeker.h
diff --git a/media/libstagefright/VBRISeeker.cpp b/media/extractors/mp3/VBRISeeker.cpp
similarity index 96%
rename from media/libstagefright/VBRISeeker.cpp
rename to media/extractors/mp3/VBRISeeker.cpp
index 5b8f23a..e7db6fd 100644
--- a/media/libstagefright/VBRISeeker.cpp
+++ b/media/extractors/mp3/VBRISeeker.cpp
@@ -21,14 +21,13 @@
 
 #include <utils/Log.h>
 
-#include "include/VBRISeeker.h"
+#include "VBRISeeker.h"
 
-#include "include/avc_utils.h"
-#include "include/MP3Extractor.h"
+#include <media/stagefright/foundation/avc_utils.h>
 
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/DataSource.h>
 
 namespace android {
 
diff --git a/media/libstagefright/include/VBRISeeker.h b/media/extractors/mp3/VBRISeeker.h
similarity index 97%
rename from media/libstagefright/include/VBRISeeker.h
rename to media/extractors/mp3/VBRISeeker.h
index c57d571..87258b0 100644
--- a/media/libstagefright/include/VBRISeeker.h
+++ b/media/extractors/mp3/VBRISeeker.h
@@ -18,7 +18,7 @@
 
 #define VBRI_SEEKER_H_
 
-#include "include/MP3Seeker.h"
+#include "MP3Seeker.h"
 
 #include <utils/Vector.h>
 
diff --git a/media/libstagefright/XINGSeeker.cpp b/media/extractors/mp3/XINGSeeker.cpp
similarity index 96%
rename from media/libstagefright/XINGSeeker.cpp
rename to media/extractors/mp3/XINGSeeker.cpp
index 81ed9c6..fa59701 100644
--- a/media/libstagefright/XINGSeeker.cpp
+++ b/media/extractors/mp3/XINGSeeker.cpp
@@ -17,11 +17,11 @@
 #define LOG_TAG "XINGSEEKER"
 #include <utils/Log.h>
 
-#include "include/XINGSeeker.h"
-#include "include/avc_utils.h"
+#include "XINGSeeker.h"
+#include <media/stagefright/foundation/avc_utils.h>
 
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/DataSource.h>
 
 namespace android {
 
diff --git a/media/libstagefright/include/XINGSeeker.h b/media/extractors/mp3/XINGSeeker.h
similarity index 97%
rename from media/libstagefright/include/XINGSeeker.h
rename to media/extractors/mp3/XINGSeeker.h
index cce04f0..37077c4 100644
--- a/media/libstagefright/include/XINGSeeker.h
+++ b/media/extractors/mp3/XINGSeeker.h
@@ -18,7 +18,7 @@
 
 #define XING_SEEKER_H_
 
-#include "include/MP3Seeker.h"
+#include "MP3Seeker.h"
 
 namespace android {
 
diff --git a/media/extractors/mp3/exports.lds b/media/extractors/mp3/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/mp3/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
diff --git a/media/extractors/mp4/Android.bp b/media/extractors/mp4/Android.bp
new file mode 100644
index 0000000..fce8dd6
--- /dev/null
+++ b/media/extractors/mp4/Android.bp
@@ -0,0 +1,49 @@
+cc_library_shared {
+
+    srcs: [
+        "ItemTable.cpp",
+        "MPEG4Extractor.cpp",
+        "SampleIterator.cpp",
+        "SampleTable.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libstagefright_esds",
+        "libstagefright_id3",
+    ],
+
+    name: "libmp4extractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libstagefright/ItemTable.cpp b/media/extractors/mp4/ItemTable.cpp
similarity index 83%
rename from media/libstagefright/ItemTable.cpp
rename to media/extractors/mp4/ItemTable.cpp
index 7bc4f3c..85c66b2 100644
--- a/media/libstagefright/ItemTable.cpp
+++ b/media/extractors/mp4/ItemTable.cpp
@@ -17,14 +17,14 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ItemTable"
 
-#include <include/ItemTable.h>
-#include <media/MediaDefs.h>
-#include <media/stagefright/DataSource.h>
+#include <ItemTable.h>
+#include <media/DataSource.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
 #include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 #include <utils/Log.h>
 
 namespace android {
@@ -40,8 +40,9 @@
     friend struct ItemReference;
     friend struct ItemProperty;
 
-    ImageItem() : ImageItem(0) {}
-    ImageItem(uint32_t _type) : type(_type),
+    ImageItem() : ImageItem(0, 0, false) {}
+    ImageItem(uint32_t _type, uint32_t _id, bool _hidden) :
+            type(_type), itemId(_id), hidden(_hidden),
             rows(0), columns(0), width(0), height(0), rotation(0),
             offset(0), size(0), nextTileIndex(0) {}
 
@@ -61,6 +62,8 @@
     }
 
     uint32_t type;
+    uint32_t itemId;
+    bool hidden;
     int32_t rows;
     int32_t columns;
     int32_t width;
@@ -470,41 +473,7 @@
 
     uint32_t itemId() { return mItemId; }
 
-    void apply(KeyedVector<uint32_t, ImageItem> &itemIdToImageMap) const {
-        ssize_t imageIndex = itemIdToImageMap.indexOfKey(mItemId);
-
-        // ignore non-image items
-        if (imageIndex < 0) {
-            return;
-        }
-
-        ALOGV("attach reference type 0x%x to item id %d)", type(), mItemId);
-
-        if (type() == FOURCC('d', 'i', 'm', 'g')) {
-            ImageItem &image = itemIdToImageMap.editValueAt(imageIndex);
-            if (!image.dimgRefs.empty()) {
-                ALOGW("dimgRefs if not clean!");
-            }
-            image.dimgRefs.appendVector(mRefs);
-        } else if (type() == FOURCC('t', 'h', 'm', 'b')) {
-            for (size_t i = 0; i < mRefs.size(); i++) {
-                imageIndex = itemIdToImageMap.indexOfKey(mRefs[i]);
-
-                // ignore non-image items
-                if (imageIndex < 0) {
-                    continue;
-                }
-                ALOGV("Image item id %d uses thumbnail item id %d", mRefs[i], mItemId);
-                ImageItem &image = itemIdToImageMap.editValueAt(imageIndex);
-                if (!image.thumbnails.empty()) {
-                    ALOGW("already has thumbnails!");
-                }
-                image.thumbnails.push_back(mItemId);
-            }
-        } else {
-            ALOGW("ignoring unsupported ref type 0x%x", type());
-        }
-    }
+    void apply(KeyedVector<uint32_t, ImageItem> &itemIdToItemMap) const;
 
 private:
     uint32_t mItemId;
@@ -514,6 +483,64 @@
     DISALLOW_EVIL_CONSTRUCTORS(ItemReference);
 };
 
+void ItemReference::apply(KeyedVector<uint32_t, ImageItem> &itemIdToItemMap) const {
+    ssize_t itemIndex = itemIdToItemMap.indexOfKey(mItemId);
+
+    // ignore non-image items
+    if (itemIndex < 0) {
+        return;
+    }
+
+    ALOGV("attach reference type 0x%x to item id %d)", type(), mItemId);
+
+    if (type() == FOURCC('d', 'i', 'm', 'g')) {
+        ImageItem &derivedImage = itemIdToItemMap.editValueAt(itemIndex);
+        if (!derivedImage.dimgRefs.empty()) {
+            ALOGW("dimgRefs if not clean!");
+        }
+        derivedImage.dimgRefs.appendVector(mRefs);
+
+        for (size_t i = 0; i < mRefs.size(); i++) {
+            itemIndex = itemIdToItemMap.indexOfKey(mRefs[i]);
+
+            // ignore non-image items
+            if (itemIndex < 0) {
+                continue;
+            }
+            ImageItem &sourceImage = itemIdToItemMap.editValueAt(itemIndex);
+
+            // mark the source image of the derivation as hidden
+            sourceImage.hidden = true;
+        }
+    } else if (type() == FOURCC('t', 'h', 'm', 'b')) {
+        // mark thumbnail image as hidden; thumbnails can still be retrieved if the
+        // client requests them explicitly, but won't be exposed as displayables.
+        ImageItem &thumbImage = itemIdToItemMap.editValueAt(itemIndex);
+        thumbImage.hidden = true;
+
+        for (size_t i = 0; i < mRefs.size(); i++) {
+            itemIndex = itemIdToItemMap.indexOfKey(mRefs[i]);
+
+            // ignore non-image items
+            if (itemIndex < 0) {
+                continue;
+            }
+            ALOGV("Image item id %d uses thumbnail item id %d", mRefs[i], mItemId);
+            ImageItem &masterImage = itemIdToItemMap.editValueAt(itemIndex);
+            if (!masterImage.thumbnails.empty()) {
+                ALOGW("already has thumbnails!");
+            }
+            masterImage.thumbnails.push_back(mItemId);
+        }
+    } else if (type() == FOURCC('a', 'u', 'x', 'l')) {
+        // mark auxiliary image as hidden
+        ImageItem &auxImage = itemIdToItemMap.editValueAt(itemIndex);
+        auxImage.hidden = true;
+    } else {
+        ALOGW("ignoring unsupported ref type 0x%x", type());
+    }
+}
+
 status_t ItemReference::parse(off64_t offset, size_t size) {
     if (size < mRefIdSize + 2) {
         return ERROR_MALFORMED;
@@ -940,6 +967,7 @@
 struct ItemInfo {
     uint32_t itemId;
     uint32_t itemType;
+    bool hidden;
 };
 
 struct InfeBox : public FullBox {
@@ -985,45 +1013,7 @@
     }
 
     if (version() == 0 || version() == 1) {
-        if (size < 4) {
-            return ERROR_MALFORMED;
-        }
-        uint16_t item_id;
-        if (!source()->getUInt16(offset, &item_id)) {
-            return ERROR_IO;
-        }
-        ALOGV("item_id %d", item_id);
-        uint16_t item_protection_index;
-        if (!source()->getUInt16(offset + 2, &item_protection_index)) {
-            return ERROR_IO;
-        }
-        offset += 4;
-        size -= 4;
-
-        String8 item_name;
-        if (!parseNullTerminatedString(&offset, &size, &item_name)) {
-            return ERROR_MALFORMED;
-        }
-
-        String8 content_type;
-        if (!parseNullTerminatedString(&offset, &size, &content_type)) {
-            return ERROR_MALFORMED;
-        }
-
-        String8 content_encoding;
-        if (!parseNullTerminatedString(&offset, &size, &content_encoding)) {
-            return ERROR_MALFORMED;
-        }
-
-        if (version() == 1) {
-            uint32_t extension_type;
-            if (!source()->getUInt32(offset, &extension_type)) {
-                return ERROR_IO;
-            }
-            offset++;
-            size--;
-            // TODO: handle this case
-        }
+        return ERROR_UNSUPPORTED;
     } else { // version >= 2
         uint32_t item_id;
         size_t itemIdSize = (version() == 2) ? 2 : 4;
@@ -1048,6 +1038,9 @@
 
         itemInfo->itemId = item_id;
         itemInfo->itemType = item_type;
+        // According to HEIF spec, (flags & 1) indicates the image is hidden
+        // and not supposed to be displayed.
+        itemInfo->hidden = (flags() & 1);
 
         char itemTypeString[5];
         MakeFourCCString(item_type, itemTypeString);
@@ -1140,12 +1133,13 @@
     InfeBox infeBox(source());
     ItemInfo itemInfo;
     status_t err = infeBox.parse(offset, size, &itemInfo);
-    if (err != OK) {
-        return err;
+    if (err == OK) {
+        mItemInfos->push_back(itemInfo);
+        mHasGrids |= (itemInfo.itemType == FOURCC('g', 'r', 'i', 'd'));
     }
-    mItemInfos->push_back(itemInfo);
-    mHasGrids |= (itemInfo.itemType == FOURCC('g', 'r', 'i', 'd'));
-    return OK;
+    // InfeBox parse returns ERROR_UNSUPPORTED if the box is an unsupported
+    // version. Ignore this error as it's not fatal.
+    return (err == ERROR_UNSUPPORTED) ? OK : err;
 }
 
 //////////////////////////////////////////////////////////////////
@@ -1156,7 +1150,7 @@
       mIdatOffset(0),
       mIdatSize(0),
       mImageItemsValid(false),
-      mCurrentImageIndex(0) {
+      mCurrentItemIndex(0) {
     mRequiredBoxes.insert('iprp');
     mRequiredBoxes.insert('iloc');
     mRequiredBoxes.insert('pitm');
@@ -1311,8 +1305,8 @@
             continue;
         }
 
-        ssize_t imageIndex = mItemIdToImageMap.indexOfKey(info.itemId);
-        if (imageIndex >= 0) {
+        ssize_t itemIndex = mItemIdToItemMap.indexOfKey(info.itemId);
+        if (itemIndex >= 0) {
             ALOGW("ignoring duplicate image item id %d", info.itemId);
             continue;
         }
@@ -1330,7 +1324,7 @@
             return ERROR_MALFORMED;
         }
 
-        ImageItem image(info.itemType);
+        ImageItem image(info.itemType, info.itemId, info.hidden);
 
         ALOGV("adding %s: itemId %d", image.isGrid() ? "grid" : "image", info.itemId);
 
@@ -1351,7 +1345,7 @@
             image.offset = offset;
             image.size = size;
         }
-        mItemIdToImageMap.add(info.itemId, image);
+        mItemIdToItemMap.add(info.itemId, image);
     }
 
     for (size_t i = 0; i < mAssociations.size(); i++) {
@@ -1359,7 +1353,30 @@
     }
 
     for (size_t i = 0; i < mItemReferences.size(); i++) {
-        mItemReferences[i]->apply(mItemIdToImageMap);
+        mItemReferences[i]->apply(mItemIdToItemMap);
+    }
+
+    bool foundPrimary = false;
+    for (size_t i = 0; i < mItemIdToItemMap.size(); i++) {
+        // add all non-hidden images, also add the primary even if it's marked
+        // hidden, in case the primary is set to a thumbnail
+        bool isPrimary = (mItemIdToItemMap[i].itemId == mPrimaryItemId);
+        if (!mItemIdToItemMap[i].hidden || isPrimary) {
+            mDisplayables.push_back(i);
+        }
+        foundPrimary |= isPrimary;
+    }
+
+    ALOGV("found %zu displayables", mDisplayables.size());
+
+    // fail if no displayables are found
+    if (mDisplayables.empty()) {
+        return ERROR_MALFORMED;
+    }
+
+    // if the primary item id is invalid, set primary to the first displayable
+    if (!foundPrimary) {
+        mPrimaryItemId = mItemIdToItemMap[mDisplayables[0]].itemId;
     }
 
     mImageItemsValid = true;
@@ -1367,10 +1384,10 @@
 }
 
 void ItemTable::attachProperty(const AssociationEntry &association) {
-    ssize_t imageIndex = mItemIdToImageMap.indexOfKey(association.itemId);
+    ssize_t itemIndex = mItemIdToItemMap.indexOfKey(association.itemId);
 
     // ignore non-image items
-    if (imageIndex < 0) {
+    if (itemIndex < 0) {
         return;
     }
 
@@ -1383,29 +1400,36 @@
     ALOGV("attach property %d to item id %d)",
             propertyIndex, association.itemId);
 
-    mItemProperties[propertyIndex]->attachTo(
-            mItemIdToImageMap.editValueAt(imageIndex));
+    mItemProperties[propertyIndex]->attachTo(mItemIdToItemMap.editValueAt(itemIndex));
 }
 
-sp<MetaData> ItemTable::getImageMeta() {
+uint32_t ItemTable::countImages() const {
+    return mImageItemsValid ? mDisplayables.size() : 0;
+}
+
+sp<MetaData> ItemTable::getImageMeta(const uint32_t imageIndex) {
     if (!mImageItemsValid) {
         return NULL;
     }
 
-    ssize_t imageIndex = mItemIdToImageMap.indexOfKey(mPrimaryItemId);
-    if (imageIndex < 0) {
-        ALOGE("Primary item id %d not found!", mPrimaryItemId);
+    if (imageIndex >= mDisplayables.size()) {
+        ALOGE("%s: invalid image index %u", __FUNCTION__, imageIndex);
         return NULL;
     }
+    const uint32_t itemIndex = mDisplayables[imageIndex];
+    ALOGV("image[%u]: item index %u", imageIndex, itemIndex);
 
-    ALOGV("primary image index %zu", imageIndex);
-
-    const ImageItem *image = &mItemIdToImageMap[imageIndex];
+    const ImageItem *image = &mItemIdToItemMap[itemIndex];
 
     sp<MetaData> meta = new MetaData;
-    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
 
-    ALOGV("setting image size %dx%d", image->width, image->height);
+    if (image->itemId == mPrimaryItemId) {
+        meta->setInt32(kKeyTrackIsDefault, 1);
+    }
+
+    ALOGV("image[%u]: size %dx%d", imageIndex, image->width, image->height);
+
     meta->setInt32(kKeyWidth, image->width);
     meta->setInt32(kKeyHeight, image->height);
     if (image->rotation != 0) {
@@ -1421,43 +1445,38 @@
     meta->setInt32(kKeyMaxInputSize, image->width * image->height * 1.5);
 
     if (!image->thumbnails.empty()) {
-        ssize_t thumbnailIndex = mItemIdToImageMap.indexOfKey(image->thumbnails[0]);
-        if (thumbnailIndex >= 0) {
-            const ImageItem &thumbnail = mItemIdToImageMap[thumbnailIndex];
+        ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(image->thumbnails[0]);
+        if (thumbItemIndex >= 0) {
+            const ImageItem &thumbnail = mItemIdToItemMap[thumbItemIndex];
 
             meta->setInt32(kKeyThumbnailWidth, thumbnail.width);
             meta->setInt32(kKeyThumbnailHeight, thumbnail.height);
             meta->setData(kKeyThumbnailHVCC, kTypeHVCC,
                     thumbnail.hvcc->data(), thumbnail.hvcc->size());
-            ALOGV("thumbnail meta: %dx%d, index %zd",
-                    thumbnail.width, thumbnail.height, thumbnailIndex);
+            ALOGV("image[%u]: thumbnail: size %dx%d, item index %zd",
+                    imageIndex, thumbnail.width, thumbnail.height, thumbItemIndex);
         } else {
-            ALOGW("Referenced thumbnail does not exist!");
+            ALOGW("%s: Referenced thumbnail does not exist!", __FUNCTION__);
         }
     }
 
     if (image->isGrid()) {
-        ssize_t tileIndex = mItemIdToImageMap.indexOfKey(image->dimgRefs[0]);
-        if (tileIndex < 0) {
+        ssize_t tileItemIndex = mItemIdToItemMap.indexOfKey(image->dimgRefs[0]);
+        if (tileItemIndex < 0) {
             return NULL;
         }
-        // when there are tiles, (kKeyWidth, kKeyHeight) is the full tiled area,
-        // and (kKeyDisplayWidth, kKeyDisplayHeight) may be smaller than that.
-        meta->setInt32(kKeyDisplayWidth, image->width);
-        meta->setInt32(kKeyDisplayHeight, image->height);
-        int32_t gridRows = image->rows, gridCols = image->columns;
+        meta->setInt32(kKeyGridRows, image->rows);
+        meta->setInt32(kKeyGridCols, image->columns);
 
         // point image to the first tile for grid size and HVCC
-        image = &mItemIdToImageMap.editValueAt(tileIndex);
-        meta->setInt32(kKeyWidth, image->width * gridCols);
-        meta->setInt32(kKeyHeight, image->height * gridRows);
+        image = &mItemIdToItemMap.editValueAt(tileItemIndex);
         meta->setInt32(kKeyGridWidth, image->width);
         meta->setInt32(kKeyGridHeight, image->height);
         meta->setInt32(kKeyMaxInputSize, image->width * image->height * 1.5);
     }
 
     if (image->hvcc == NULL) {
-        ALOGE("hvcc is missing!");
+        ALOGE("%s: hvcc is missing for image[%u]!", __FUNCTION__, imageIndex);
         return NULL;
     }
     meta->setData(kKeyHVCC, kTypeHVCC, image->hvcc->data(), image->hvcc->size());
@@ -1468,88 +1487,87 @@
     return meta;
 }
 
-uint32_t ItemTable::countImages() const {
-    return mImageItemsValid ? mItemIdToImageMap.size() : 0;
-}
-
-status_t ItemTable::findPrimaryImage(uint32_t *imageIndex) {
+status_t ItemTable::findImageItem(const uint32_t imageIndex, uint32_t *itemIndex) {
     if (!mImageItemsValid) {
         return INVALID_OPERATION;
     }
 
-    ssize_t index = mItemIdToImageMap.indexOfKey(mPrimaryItemId);
-    if (index < 0) {
-        return ERROR_MALFORMED;
+    if (imageIndex >= mDisplayables.size()) {
+        ALOGE("%s: invalid image index %d", __FUNCTION__, imageIndex);
+        return BAD_VALUE;
     }
 
-    *imageIndex = index;
+    *itemIndex = mDisplayables[imageIndex];
+
+    ALOGV("image[%u]: item index %u", imageIndex, *itemIndex);
     return OK;
 }
 
-status_t ItemTable::findThumbnail(uint32_t *imageIndex) {
+status_t ItemTable::findThumbnailItem(const uint32_t imageIndex, uint32_t *itemIndex) {
     if (!mImageItemsValid) {
         return INVALID_OPERATION;
     }
 
-    ssize_t primaryIndex = mItemIdToImageMap.indexOfKey(mPrimaryItemId);
-    if (primaryIndex < 0) {
-        ALOGE("Primary item id %d not found!", mPrimaryItemId);
-        return ERROR_MALFORMED;
+    if (imageIndex >= mDisplayables.size()) {
+        ALOGE("%s: invalid image index %d", __FUNCTION__, imageIndex);
+        return BAD_VALUE;
     }
 
-    const ImageItem &primaryImage = mItemIdToImageMap[primaryIndex];
-    if (primaryImage.thumbnails.empty()) {
-        ALOGW("Using primary in place of thumbnail.");
-        *imageIndex = primaryIndex;
+    uint32_t masterItemIndex = mDisplayables[imageIndex];
+
+    const ImageItem &masterImage = mItemIdToItemMap[masterItemIndex];
+    if (masterImage.thumbnails.empty()) {
+        *itemIndex = masterItemIndex;
         return OK;
     }
 
-    ssize_t thumbnailIndex = mItemIdToImageMap.indexOfKey(
-            primaryImage.thumbnails[0]);
-    if (thumbnailIndex < 0) {
-        ALOGE("Thumbnail item id %d not found!", primaryImage.thumbnails[0]);
-        return ERROR_MALFORMED;
+    ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(masterImage.thumbnails[0]);
+    if (thumbItemIndex < 0) {
+        ALOGW("%s: Thumbnail item id %d not found, use master instead",
+                __FUNCTION__, masterImage.thumbnails[0]);
+        *itemIndex = masterItemIndex;
+        return OK;
     }
 
-    *imageIndex = thumbnailIndex;
+    *itemIndex = thumbItemIndex;
     return OK;
 }
 
 status_t ItemTable::getImageOffsetAndSize(
-        uint32_t *imageIndex, off64_t *offset, size_t *size) {
+        uint32_t *itemIndex, off64_t *offset, size_t *size) {
     if (!mImageItemsValid) {
         return INVALID_OPERATION;
     }
 
-    if (imageIndex != NULL) {
-        if (*imageIndex >= mItemIdToImageMap.size()) {
-            ALOGE("Bad image index!");
+    if (itemIndex != NULL) {
+        if (*itemIndex >= mItemIdToItemMap.size()) {
+            ALOGE("%s: Bad item index!", __FUNCTION__);
             return BAD_VALUE;
         }
-        mCurrentImageIndex = *imageIndex;
+        mCurrentItemIndex = *itemIndex;
     }
 
-    ImageItem &image = mItemIdToImageMap.editValueAt(mCurrentImageIndex);
+    ImageItem &image = mItemIdToItemMap.editValueAt(mCurrentItemIndex);
     if (image.isGrid()) {
         uint32_t tileItemId;
-        status_t err = image.getNextTileItemId(&tileItemId, imageIndex != NULL);
+        status_t err = image.getNextTileItemId(&tileItemId, itemIndex != NULL);
         if (err != OK) {
             return err;
         }
-        ssize_t tileImageIndex = mItemIdToImageMap.indexOfKey(tileItemId);
-        if (tileImageIndex < 0) {
+        ssize_t tileItemIndex = mItemIdToItemMap.indexOfKey(tileItemId);
+        if (tileItemIndex < 0) {
             return ERROR_END_OF_STREAM;
         }
-        *offset = mItemIdToImageMap[tileImageIndex].offset;
-        *size = mItemIdToImageMap[tileImageIndex].size;
+        *offset = mItemIdToItemMap[tileItemIndex].offset;
+        *size = mItemIdToItemMap[tileItemIndex].size;
     } else {
-        if (imageIndex == NULL) {
+        if (itemIndex == NULL) {
             // For single images, we only allow it to be read once, after that
-            // it's EOS.  New image index must be requested each time.
+            // it's EOS.  New item index must be requested each time.
             return ERROR_END_OF_STREAM;
         }
-        *offset = mItemIdToImageMap[mCurrentImageIndex].offset;
-        *size = mItemIdToImageMap[mCurrentImageIndex].size;
+        *offset = mItemIdToItemMap[mCurrentItemIndex].offset;
+        *size = mItemIdToItemMap[mCurrentItemIndex].size;
     }
 
     return OK;
diff --git a/media/libstagefright/include/ItemTable.h b/media/extractors/mp4/ItemTable.h
similarity index 85%
rename from media/libstagefright/include/ItemTable.h
rename to media/extractors/mp4/ItemTable.h
index 5a6af5e..3d2e2ae 100644
--- a/media/libstagefright/include/ItemTable.h
+++ b/media/extractors/mp4/ItemTable.h
@@ -49,12 +49,12 @@
     status_t parse(uint32_t type, off64_t offset, size_t size);
 
     bool isValid() { return mImageItemsValid; }
-    sp<MetaData> getImageMeta();
     uint32_t countImages() const;
-    status_t findPrimaryImage(uint32_t *imageIndex);
-    status_t findThumbnail(uint32_t *thumbnailIndex);
+    sp<MetaData> getImageMeta(const uint32_t imageIndex);
+    status_t findImageItem(const uint32_t imageIndex, uint32_t *itemIndex);
+    status_t findThumbnailItem(const uint32_t imageIndex, uint32_t *itemIndex);
     status_t getImageOffsetAndSize(
-            uint32_t *imageIndex, off64_t *offset, size_t *size);
+            uint32_t *itemIndex, off64_t *offset, size_t *size);
 
 protected:
     ~ItemTable();
@@ -76,8 +76,9 @@
     std::set<uint32_t> mBoxesSeen;
 
     bool mImageItemsValid;
-    uint32_t mCurrentImageIndex;
-    KeyedVector<uint32_t, ImageItem> mItemIdToImageMap;
+    uint32_t mCurrentItemIndex;
+    KeyedVector<uint32_t, ImageItem> mItemIdToItemMap;
+    Vector<uint32_t> mDisplayables;
 
     status_t parseIlocBox(off64_t offset, size_t size);
     status_t parseIinfBox(off64_t offset, size_t size);
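To make the reworked ItemTable interface above concrete, here is a small hypothetical caller sketch, not part of this patch, that enumerates the displayable images and locates each one's thumbnail payload using only the methods declared in the header. It assumes "table" is an ItemTable that has already parsed the relevant boxes, as MPEG4Extractor arranges.

// Hypothetical caller sketch, NOT part of this patch. "table" is assumed to
// be a fully parsed, valid ItemTable (isValid() == true).
#define LOG_TAG "ItemTableSketch"
#include <utils/Log.h>

#include <media/stagefright/MetaData.h>
#include <utils/Errors.h>
#include "ItemTable.h"

namespace android {

static void dumpDisplayables(const sp<ItemTable> &table) {
    for (uint32_t imageIndex = 0; imageIndex < table->countImages(); ++imageIndex) {
        sp<MetaData> meta = table->getImageMeta(imageIndex);
        if (meta == NULL) {
            continue;  // e.g. the image item has no hvcc
        }

        // Map the display index to a thumbnail item; this falls back to the
        // master image item if no thumbnail reference exists.
        uint32_t itemIndex;
        if (table->findThumbnailItem(imageIndex, &itemIndex) != OK) {
            continue;
        }

        // Select the item (non-NULL index), then read its payload location
        // (for grid images this yields the first tile).
        off64_t offset;
        size_t size;
        if (table->getImageOffsetAndSize(&itemIndex, &offset, &size) == OK) {
            ALOGV("image[%u]: thumbnail item %u at offset %lld, size %zu",
                    imageIndex, itemIndex, (long long)offset, size);
        }
    }
}

}  // namespace android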
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/extractors/mp4/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
copy to media/extractors/mp4/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
similarity index 96%
rename from media/libstagefright/MPEG4Extractor.cpp
rename to media/extractors/mp4/MPEG4Extractor.cpp
index a8b6614..938bd5d 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -26,28 +26,29 @@
 
 #include <utils/Log.h>
 
-#include "include/MPEG4Extractor.h"
-#include "include/SampleTable.h"
-#include "include/ItemTable.h"
+#include "MPEG4Extractor.h"
+#include "SampleTable.h"
+#include "ItemTable.h"
 #include "include/ESDS.h"
 
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <utils/String8.h>
 
 #include <byteswap.h>
 #include "include/ID3.h"
-#include "include/avc_utils.h"
 
 #ifndef UINT32_MAX
 #define UINT32_MAX       (4294967295U)
@@ -137,7 +138,7 @@
 
     uint8_t *mSrcBuffer;
 
-    bool mIsHEIF;
+    bool mIsHeif;
     sp<ItemTable> mItemTable;
 
     size_t parseNALSize(const uint8_t *data) const;
@@ -337,7 +338,7 @@
     return false;
 }
 
-MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source)
+MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source, const char *mime)
     : mMoofOffset(0),
       mMoofFound(false),
       mMdatFound(false),
@@ -345,12 +346,15 @@
       mInitCheck(NO_INIT),
       mHeaderTimescale(0),
       mIsQT(false),
-      mIsHEIF(false),
+      mIsHeif(false),
+      mHasMoovBox(false),
+      mPreferHeif(mime != NULL && !strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_HEIF)),
       mFirstTrack(NULL),
       mLastTrack(NULL),
       mFileMetaData(new MetaData),
       mFirstSINF(NULL),
       mIsDrm(false) {
+    ALOGV("mime=%s, mPreferHeif=%d", mime, mPreferHeif);
 }
 
 MPEG4Extractor::~MPEG4Extractor() {
@@ -575,8 +579,9 @@
     status_t err;
     bool sawMoovOrSidx = false;
 
-    while (!((sawMoovOrSidx && (mMdatFound || mMoofFound)) ||
-            (mIsHEIF && (mItemTable != NULL) && mItemTable->isValid()))) {
+    while (!((mHasMoovBox && sawMoovOrSidx && (mMdatFound || mMoofFound)) ||
+             (mIsHeif && (mPreferHeif || !mHasMoovBox) &&
+                     (mItemTable != NULL) && mItemTable->isValid()))) {
         off64_t orig_offset = offset;
         err = parseChunk(&offset, 0);
 
@@ -593,12 +598,43 @@
         }
     }
 
+    if (mIsHeif && (mItemTable != NULL) && (mItemTable->countImages() > 0)) {
+        for (uint32_t imageIndex = 0;
+                imageIndex < mItemTable->countImages(); imageIndex++) {
+            sp<MetaData> meta = mItemTable->getImageMeta(imageIndex);
+            if (meta == NULL) {
+                ALOGE("heif image %u has no meta!", imageIndex);
+                continue;
+            }
+
+            ALOGV("adding HEIF image track %u", imageIndex);
+            Track *track = new Track;
+            track->next = NULL;
+            if (mLastTrack != NULL) {
+                mLastTrack->next = track;
+            } else {
+                mFirstTrack = track;
+            }
+            mLastTrack = track;
+
+            track->meta = meta;
+            track->meta->setInt32(kKeyTrackID, imageIndex);
+            track->includes_expensive_metadata = false;
+            track->skipTrack = false;
+            track->timescale = 0;
+        }
+    }
+
     if (mInitCheck == OK) {
         if (findTrackByMimePrefix("video/") != NULL) {
             mFileMetaData->setCString(
                     kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_MPEG4);
         } else if (findTrackByMimePrefix("audio/") != NULL) {
             mFileMetaData->setCString(kKeyMIMEType, "audio/mp4");
+        } else if (findTrackByMimePrefix(
+                MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) != NULL) {
+            mFileMetaData->setCString(
+                    kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_HEIF);
         } else {
             mFileMetaData->setCString(kKeyMIMEType, "application/octet-stream");
         }
@@ -629,28 +665,6 @@
         free(buf);
     }
 
-    if (mIsHEIF) {
-        sp<MetaData> meta = mItemTable->getImageMeta();
-        if (meta == NULL) {
-            return ERROR_MALFORMED;
-        }
-
-        Track *track = mLastTrack;
-        if (track != NULL) {
-            ALOGW("track is set before metadata is fully processed");
-        } else {
-            track = new Track;
-            track->next = NULL;
-            mFirstTrack = mLastTrack = track;
-        }
-
-        track->meta = meta;
-        track->meta->setInt32(kKeyTrackID, 0);
-        track->includes_expensive_metadata = false;
-        track->skipTrack = false;
-        track->timescale = 0;
-    }
-
     return mInitCheck;
 }
 
@@ -1052,6 +1066,7 @@
                 }
                 isTrack = true;
 
+                ALOGV("adding new track");
                 Track *track = new Track;
                 track->next = NULL;
                 if (mLastTrack) {
@@ -1099,6 +1114,7 @@
                 }
 
                 if (mLastTrack->skipTrack) {
+                    ALOGV("skipping this track...");
                     Track *cur = mFirstTrack;
 
                     if (cur == mLastTrack) {
@@ -1275,6 +1291,25 @@
             break;
         }
 
+        case FOURCC('t', 'r', 'e', 'f'):
+        {
+            *offset += chunk_size;
+
+            if (mLastTrack == NULL) {
+                return ERROR_MALFORMED;
+            }
+
+            // Skip thumbnail track for now since we don't have an
+            // API to retrieve it yet.
+            // The thumbnail track can't be accessed by negative index or time,
+            // because each timed sample has its own corresponding thumbnail
+            // in the thumbnail track. We'll need a dedicated API to retrieve
+            // the thumbnail at a given time instead.
+            mLastTrack->skipTrack = true;
+
+            break;
+        }
+
         case FOURCC('p', 's', 's', 'h'):
         {
             *offset += chunk_size;
@@ -1773,6 +1808,8 @@
                             mLastTrack->meta->setInt32(kKeyFrameRate, frameRate);
                         }
                     }
+                    ALOGV("setting frame count %zu", nSamples);
+                    mLastTrack->meta->setInt32(kKeyFrameCount, nSamples);
                 }
             }
 
@@ -2104,7 +2141,7 @@
         case FOURCC('i', 'r', 'e', 'f'):
         case FOURCC('i', 'p', 'r', 'o'):
         {
-            if (mIsHEIF) {
+            if (mIsHeif) {
                 if (mItemTable == NULL) {
                     mItemTable = new ItemTable(mDataSource);
                 }
@@ -2484,10 +2521,22 @@
 
             if (brandSet.count(FOURCC('q', 't', ' ', ' ')) > 0) {
                 mIsQT = true;
-            } else if (brandSet.count(FOURCC('m', 'i', 'f', '1')) > 0
-                    && brandSet.count(FOURCC('h', 'e', 'i', 'c')) > 0) {
-                mIsHEIF = true;
-                ALOGV("identified HEIF image");
+            } else {
+                if (brandSet.count(FOURCC('m', 'i', 'f', '1')) > 0
+                 && brandSet.count(FOURCC('h', 'e', 'i', 'c')) > 0) {
+                    ALOGV("identified HEIF image");
+
+                    mIsHeif = true;
+                    brandSet.erase(FOURCC('m', 'i', 'f', '1'));
+                    brandSet.erase(FOURCC('h', 'e', 'i', 'c'));
+                }
+
+                if (!brandSet.empty()) {
+                    // This means that the file should have a moov box.
+                    // It could be any ISO file (mp4, heif, etc.).
+                    mHasMoovBox = true;
+                    ALOGV("identified HEIF image with other tracks");
+                }
             }
 
             *offset = stop_offset;
@@ -3363,7 +3412,7 @@
     }
 }
 
-sp<IMediaSource> MPEG4Extractor::getTrack(size_t index) {
+sp<MediaSource> MPEG4Extractor::getTrack(size_t index) {
     status_t err;
     if ((err = readMetaData()) != OK) {
         return NULL;
@@ -3406,6 +3455,7 @@
         return NULL;
     }
 
+    sp<ItemTable> itemTable;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         uint32_t type;
         const void *data;
@@ -3419,7 +3469,8 @@
         if (size < 7 || ptr[0] != 1) {  // configurationVersion == 1
             return NULL;
         }
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)
+            || !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         uint32_t type;
         const void *data;
         size_t size;
@@ -3432,11 +3483,14 @@
         if (size < 22 || ptr[0] != 1) {  // configurationVersion == 1
             return NULL;
         }
+        if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
+            itemTable = mItemTable;
+        }
     }
 
     sp<MPEG4Source> source =  new MPEG4Source(this,
             track->meta, mDataSource, track->timescale, track->sampleTable,
-            mSidxEntries, trex, mMoofOffset, mItemTable);
+            mSidxEntries, trex, mMoofOffset, itemTable);
     if (source->init() != OK) {
         return NULL;
     }
@@ -3864,7 +3918,7 @@
       mBuffer(NULL),
       mWantsNALFragments(false),
       mSrcBuffer(NULL),
-      mIsHEIF(itemTable != NULL),
+      mIsHeif(itemTable != NULL),
       mItemTable(itemTable) {
 
     memset(&mTrackFragmentHeaderInfo, 0, sizeof(mTrackFragmentHeaderInfo));
@@ -3886,7 +3940,8 @@
     CHECK(success);
 
     mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
-    mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+    mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
+              !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
 
     if (mIsAVC) {
         uint32_t type;
@@ -4640,15 +4695,19 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
-        if (mIsHEIF) {
+        if (mIsHeif) {
             CHECK(mSampleTable == NULL);
             CHECK(mItemTable != NULL);
+            int32_t imageIndex;
+            if (!mFormat->findInt32(kKeyTrackID, &imageIndex)) {
+                return ERROR_MALFORMED;
+            }
 
             status_t err;
             if (seekTimeUs >= 0) {
-                err = mItemTable->findPrimaryImage(&mCurrentSampleIndex);
+                err = mItemTable->findImageItem(imageIndex, &mCurrentSampleIndex);
             } else {
-                err = mItemTable->findThumbnail(&mCurrentSampleIndex);
+                err = mItemTable->findThumbnailItem(imageIndex, &mCurrentSampleIndex);
             }
             if (err != OK) {
                 return err;
@@ -4666,6 +4725,9 @@
                 case ReadOptions::SEEK_CLOSEST:
                     findFlags = SampleTable::kFlagClosest;
                     break;
+                case ReadOptions::SEEK_FRAME_INDEX:
+                    findFlags = SampleTable::kFlagFrameIndex;
+                    break;
                 default:
                     CHECK(!"Should not be here.");
                     break;
@@ -4676,7 +4738,8 @@
                     seekTimeUs, 1000000, mTimescale,
                     &sampleIndex, findFlags);
 
-            if (mode == ReadOptions::SEEK_CLOSEST) {
+            if (mode == ReadOptions::SEEK_CLOSEST
+                    || mode == ReadOptions::SEEK_FRAME_INDEX) {
                 // We found the closest sample already, now we want the sync
                 // sample preceding it (or the sample itself of course), even
                 // if the subsequent sync sample is closer.
@@ -4708,7 +4771,8 @@
                 return err;
             }
 
-            if (mode == ReadOptions::SEEK_CLOSEST) {
+            if (mode == ReadOptions::SEEK_CLOSEST
+                || mode == ReadOptions::SEEK_FRAME_INDEX) {
                 targetSampleTimeUs = (sampleTime * 1000000ll) / mTimescale;
             }
 
@@ -4744,7 +4808,7 @@
         newBuffer = true;
 
         status_t err;
-        if (!mIsHEIF) {
+        if (!mIsHeif) {
             err = mSampleTable->getMetaDataForSample(
                     mCurrentSampleIndex, &offset, &size, &cts, &isSyncSample, &stts);
         } else {
@@ -5311,11 +5375,6 @@
     return NULL;
 }
 
-void MPEG4Extractor::populateMetrics() {
-    ALOGV("MPEG4Extractor::populateMetrics");
-    // write into mAnalyticsItem
-}
-
 static bool LegacySniffMPEG4(
         const sp<DataSource> &source, String8 *mimeType, float *confidence) {
     uint8_t header[8];
@@ -5331,7 +5390,8 @@
         || !memcmp(header, "ftypisom", 8) || !memcmp(header, "ftypM4V ", 8)
         || !memcmp(header, "ftypM4A ", 8) || !memcmp(header, "ftypf4v ", 8)
         || !memcmp(header, "ftypkddi", 8) || !memcmp(header, "ftypM4VP", 8)
-        || !memcmp(header, "ftypmif1", 8) || !memcmp(header, "ftypheic", 8)) {
+        || !memcmp(header, "ftypmif1", 8) || !memcmp(header, "ftypheic", 8)
+        || !memcmp(header, "ftypmsf1", 8) || !memcmp(header, "ftyphevc", 8)) {
         *mimeType = MEDIA_MIMETYPE_CONTAINER_MPEG4;
         *confidence = 0.4;
 
@@ -5362,6 +5422,8 @@
         FOURCC('3', 'g', '2', 'b'),
         FOURCC('m', 'i', 'f', '1'),  // HEIF image
         FOURCC('h', 'e', 'i', 'c'),  // HEIF image
+        FOURCC('m', 's', 'f', '1'),  // HEIF image sequence
+        FOURCC('h', 'e', 'v', 'c'),  // HEIF image sequence
     };
 
     for (size_t i = 0;
@@ -5497,19 +5559,42 @@
     return true;
 }
 
-bool SniffMPEG4(
-        const sp<DataSource> &source, String8 *mimeType, float *confidence,
+static MediaExtractor* CreateExtractor(
+        const sp<DataSource> &source,
+        const sp<AMessage>& meta __unused) {
+    return new MPEG4Extractor(source);
+}
+
+static MediaExtractor::CreatorFunc Sniff(
+        const sp<DataSource> &source,
+        String8 *mimeType,
+        float *confidence,
         sp<AMessage> *meta) {
     if (BetterSniffMPEG4(source, mimeType, confidence, meta)) {
-        return true;
+        return CreateExtractor;
     }
 
     if (LegacySniffMPEG4(source, mimeType, confidence)) {
         ALOGW("Identified supported mpeg4 through LegacySniffMPEG4.");
-        return true;
+        return CreateExtractor;
     }
 
-    return false;
+    return NULL;
 }
 
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+        UUID("27575c67-4417-4c54-8d3d-8e626985a164"),
+        1, // version
+        "MP4 Extractor",
+        Sniff
+    };
+}
+
+} // extern "C"
+
 }  // namespace android
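The GETEXTRACTORDEF symbol above, together with the exports.lds version script added further below, turns each extractor into a self-describing plugin: a host only has to dlopen the library and resolve that single symbol to learn the plugin's UUID, name, version and sniffer. The following stand-alone sketch illustrates that flow with a simplified, hypothetical stand-in for MediaExtractor::ExtractorDef; it is not ABI-compatible with the real library, and the library path is illustrative only.

    // Hypothetical loader sketch. The real types (MediaExtractor::ExtractorDef,
    // CreatorFunc, ...) live in libmediaextractor; FakeExtractorDef below is a
    // stand-in used purely to show the dlopen/dlsym flow and is NOT
    // ABI-compatible with the actual libraries.
    #include <dlfcn.h>
    #include <cstdio>

    struct FakeExtractorDef {            // stand-in for MediaExtractor::ExtractorDef
        unsigned int  def_version;
        unsigned char uuid[16];
        unsigned int  extractor_version;
        const char   *extractor_name;
        void         *sniff;             // CreatorFunc-returning sniffer in the real API
    };

    int main() {
        // Illustrative path; the modules above install under .../extractors/.
        void *lib = dlopen("libmp4extractor.so", RTLD_NOW | RTLD_LOCAL);
        if (lib == nullptr) {
            fprintf(stderr, "dlopen failed: %s\n", dlerror());
            return 1;
        }
        // GETEXTRACTORDEF is the only global symbol kept by exports.lds.
        typedef FakeExtractorDef (*GetDefFn)();
        GetDefFn getDef = reinterpret_cast<GetDefFn>(dlsym(lib, "GETEXTRACTORDEF"));
        if (getDef != nullptr) {
            FakeExtractorDef def = getDef();
            printf("loaded extractor: %s (v%u)\n", def.extractor_name, def.extractor_version);
        }
        dlclose(lib);
        return 0;
    }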
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
similarity index 92%
rename from media/libstagefright/include/MPEG4Extractor.h
rename to media/extractors/mp4/MPEG4Extractor.h
index 214a3de..8bfecaa 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -20,9 +20,8 @@
 
 #include <arpa/inet.h>
 
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/Utils.h>
+#include <media/DataSource.h>
+#include <media/MediaExtractor.h>
 #include <utils/List.h>
 #include <utils/Vector.h>
 #include <utils/String8.h>
@@ -53,10 +52,10 @@
 class MPEG4Extractor : public MediaExtractor {
 public:
     // Extractor assumes ownership of "source".
-    explicit MPEG4Extractor(const sp<DataSource> &source);
+    explicit MPEG4Extractor(const sp<DataSource> &source, const char *mime = NULL);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
@@ -70,8 +69,6 @@
 protected:
     virtual ~MPEG4Extractor();
 
-    virtual void populateMetrics();
-
 private:
 
     struct PsshInfo {
@@ -104,7 +101,9 @@
     status_t mInitCheck;
     uint32_t mHeaderTimescale;
     bool mIsQT;
-    bool mIsHEIF;
+    bool mIsHeif;
+    bool mHasMoovBox;
+    bool mPreferHeif;
 
     Track *mFirstTrack, *mLastTrack;
 
diff --git a/media/libstagefright/matroska/NOTICE b/media/extractors/mp4/NOTICE
similarity index 100%
copy from media/libstagefright/matroska/NOTICE
copy to media/extractors/mp4/NOTICE
diff --git a/media/libstagefright/SampleIterator.cpp b/media/extractors/mp4/SampleIterator.cpp
similarity index 98%
rename from media/libstagefright/SampleIterator.cpp
rename to media/extractors/mp4/SampleIterator.cpp
index 75f744d..78cc691 100644
--- a/media/libstagefright/SampleIterator.cpp
+++ b/media/extractors/mp4/SampleIterator.cpp
@@ -18,15 +18,15 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
-#include "include/SampleIterator.h"
+#include "SampleIterator.h"
 
 #include <arpa/inet.h>
 
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 
-#include "include/SampleTable.h"
+#include "SampleTable.h"
 
 namespace android {
 
diff --git a/media/libstagefright/include/SampleIterator.h b/media/extractors/mp4/SampleIterator.h
similarity index 100%
rename from media/libstagefright/include/SampleIterator.h
rename to media/extractors/mp4/SampleIterator.h
diff --git a/media/libstagefright/SampleTable.cpp b/media/extractors/mp4/SampleTable.cpp
similarity index 98%
rename from media/libstagefright/SampleTable.cpp
rename to media/extractors/mp4/SampleTable.cpp
index 1d2a931..378d63a 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/extractors/mp4/SampleTable.cpp
@@ -20,14 +20,14 @@
 
 #include <limits>
 
-#include "include/SampleTable.h"
-#include "include/SampleIterator.h"
+#include "SampleTable.h"
+#include "SampleIterator.h"
 
 #include <arpa/inet.h>
 
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 
 /* TODO: remove after being merged into other branches */
 #ifndef UINT32_MAX
@@ -724,6 +724,14 @@
         return ERROR_OUT_OF_RANGE;
     }
 
+    if (flags == kFlagFrameIndex) {
+        if (req_time >= mNumSampleSizes) {
+            return ERROR_OUT_OF_RANGE;
+        }
+        *sample_index = mSampleTimeEntries[req_time].mSampleIndex;
+        return OK;
+    }
+
     uint32_t left = 0;
     uint32_t right_plus_one = mNumSampleSizes;
     while (left < right_plus_one) {
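The kFlagFrameIndex branch above lets a caller address samples by position rather than by time: the request value is taken as an index into the time-sorted sample entries and mapped straight to a sample index, skipping the binary search. A caller-side sketch, assuming the SEEK_FRAME_INDEX read option added elsewhere in this change and the post-rename <media/MediaSource.h> header, might look like:

    // Sketch only: SEEK_FRAME_INDEX reinterprets the seek "time" argument as a
    // 0-based frame index instead of microseconds.
    #include <media/MediaSource.h>
    #include <media/stagefright/MediaBuffer.h>

    using namespace android;

    static status_t readFrameByIndex(const sp<MediaSource> &track, int64_t frameIndex) {
        status_t err = track->start();
        if (err != OK) {
            return err;
        }

        MediaSource::ReadOptions options;
        options.setSeekTo(frameIndex, MediaSource::ReadOptions::SEEK_FRAME_INDEX);

        MediaBuffer *buffer = nullptr;
        // As with SEEK_CLOSEST, the extractor lands on the preceding sync sample
        // and reports the exact target via kKeyTargetTime so a decoder can drop
        // frames up to it.
        err = track->read(&buffer, &options);
        if (err == OK && buffer != nullptr) {
            buffer->release();
        }
        track->stop();
        return err;
    }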
diff --git a/media/libstagefright/include/SampleTable.h b/media/extractors/mp4/SampleTable.h
similarity index 98%
rename from media/libstagefright/include/SampleTable.h
rename to media/extractors/mp4/SampleTable.h
index eb1a674..466e26b 100644
--- a/media/libstagefright/include/SampleTable.h
+++ b/media/extractors/mp4/SampleTable.h
@@ -72,7 +72,8 @@
     enum {
         kFlagBefore,
         kFlagAfter,
-        kFlagClosest
+        kFlagClosest,
+        kFlagFrameIndex,
     };
     status_t findSampleAtTime(
             uint64_t req_time, uint64_t scale_num, uint64_t scale_den,
diff --git a/media/extractors/mp4/exports.lds b/media/extractors/mp4/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/mp4/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
new file mode 100644
index 0000000..818f9b0
--- /dev/null
+++ b/media/extractors/mpeg2/Android.bp
@@ -0,0 +1,55 @@
+cc_library_shared {
+
+    srcs: [
+        "ExtractorBundle.cpp",
+        "MPEG2PSExtractor.cpp",
+        "MPEG2TSExtractor.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+        "frameworks/av/media/libstagefright/include",
+    ],
+
+    shared_libs: [
+        "android.hardware.cas@1.0",
+        "android.hardware.cas.native@1.0",
+        "android.hidl.token@1.0-utils",
+        "libbinder",
+        "libcrypto",
+        "libcutils",
+        "libhidlbase",
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libstagefright_mpeg2support",
+    ],
+
+    name: "libmpeg2extractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/extractors/mpeg2/ExtractorBundle.cpp b/media/extractors/mpeg2/ExtractorBundle.cpp
new file mode 100644
index 0000000..d5682e9
--- /dev/null
+++ b/media/extractors/mpeg2/ExtractorBundle.cpp
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MPEG2ExtractorBundle"
+#include <utils/Log.h>
+
+#include <media/MediaExtractor.h>
+#include "MPEG2PSExtractor.h"
+#include "MPEG2TSExtractor.h"
+
+namespace android {
+
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+        UUID("3d1dcfeb-e40a-436d-a574-c2438a555e5f"),
+        1,
+        "MPEG2-PS/TS Extractor",
+        [](
+                const sp<DataSource> &source,
+                String8 *mimeType,
+                float *confidence,
+                sp<AMessage> *meta __unused) -> MediaExtractor::CreatorFunc {
+            if (SniffMPEG2TS(source, mimeType, confidence, meta)) {
+                return [](
+                        const sp<DataSource> &source,
+                        const sp<AMessage>& meta __unused) -> MediaExtractor* {
+                    return new MPEG2TSExtractor(source);};
+            } else if (SniffMPEG2PS(source, mimeType, confidence, meta)) {
+                        return [](
+                                const sp<DataSource> &source,
+                                const sp<AMessage>& meta __unused) -> MediaExtractor* {
+                            return new MPEG2PSExtractor(source);};
+            }
+            return NULL;
+        }
+    };
+}
+
+} // extern "C"
+
+} // namespace android
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/extractors/mpeg2/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
copy to media/extractors/mpeg2/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
similarity index 98%
rename from media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp
rename to media/extractors/mpeg2/MPEG2PSExtractor.cpp
index 078a5f0..c519caf 100644
--- a/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
@@ -18,22 +18,22 @@
 #define LOG_TAG "MPEG2PSExtractor"
 #include <utils/Log.h>
 
-#include "include/MPEG2PSExtractor.h"
+#include "MPEG2PSExtractor.h"
 
-#include "AnotherPacketSource.h"
-#include "ESQueue.h"
+#include "mpeg2ts/AnotherPacketSource.h"
+#include "mpeg2ts/ESQueue.h"
 
+#include <media/DataSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 #include <utils/String8.h>
 
 #include <inttypes.h>
@@ -125,7 +125,7 @@
     return mTracks.size();
 }
 
-sp<IMediaSource> MPEG2PSExtractor::getTrack(size_t index) {
+sp<MediaSource> MPEG2PSExtractor::getTrack(size_t index) {
     if (index >= mTracks.size()) {
         return NULL;
     }
diff --git a/media/libstagefright/include/MPEG2PSExtractor.h b/media/extractors/mpeg2/MPEG2PSExtractor.h
similarity index 94%
rename from media/libstagefright/include/MPEG2PSExtractor.h
rename to media/extractors/mpeg2/MPEG2PSExtractor.h
index f5471b3..ab3ab05 100644
--- a/media/libstagefright/include/MPEG2PSExtractor.h
+++ b/media/extractors/mpeg2/MPEG2PSExtractor.h
@@ -19,7 +19,7 @@
 #define MPEG2_PS_EXTRACTOR_H_
 
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/MediaExtractor.h>
 #include <utils/threads.h>
 #include <utils/KeyedVector.h>
 
@@ -34,7 +34,7 @@
     explicit MPEG2PSExtractor(const sp<DataSource> &source);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
diff --git a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
similarity index 97%
rename from media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
rename to media/extractors/mpeg2/MPEG2TSExtractor.cpp
index 9d684e0..fef4d30 100644
--- a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -20,23 +20,23 @@
 #include <inttypes.h>
 #include <utils/Log.h>
 
-#include "include/MPEG2TSExtractor.h"
-#include "include/NuCachedSource2.h"
+#include "MPEG2TSExtractor.h"
 
+#include <media/DataSource.h>
+#include <media/IStreamSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/foundation/MediaKeys.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
-#include <media/IStreamSource.h>
 #include <utils/String8.h>
 
-#include "AnotherPacketSource.h"
-#include "ATSParser.h"
+#include "mpeg2ts/AnotherPacketSource.h"
+#include "mpeg2ts/ATSParser.h"
 
 #include <hidl/HybridInterface.h>
 #include <android/hardware/cas/1.0/ICas.h>
@@ -129,7 +129,7 @@
     return mSourceImpls.size();
 }
 
-sp<IMediaSource> MPEG2TSExtractor::getTrack(size_t index) {
+sp<MediaSource> MPEG2TSExtractor::getTrack(size_t index) {
     if (index >= mSourceImpls.size()) {
         return NULL;
     }
@@ -513,6 +513,8 @@
                 --index;
             }
             break;
+        default:
+            return ERROR_UNSUPPORTED;
     }
     if (!shouldSeekBeyond || mOffset <= mSeekSyncPoints->valueAt(index)) {
         int64_t actualSeekTimeUs = mSeekSyncPoints->keyAt(index);
@@ -559,7 +561,7 @@
 status_t MPEG2TSExtractor::queueDiscontinuityForSeek(int64_t actualSeekTimeUs) {
     // Signal discontinuity
     sp<AMessage> extra(new AMessage);
-    extra->setInt64(IStreamListener::kKeyMediaTimeUs, actualSeekTimeUs);
+    extra->setInt64(kATSParserKeyMediaTimeUs, actualSeekTimeUs);
     mParser->signalDiscontinuity(ATSParser::DISCONTINUITY_TIME, extra);
 
     // After discontinuity, impl should only have discontinuities
diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/extractors/mpeg2/MPEG2TSExtractor.h
similarity index 95%
rename from media/libstagefright/include/MPEG2TSExtractor.h
rename to media/extractors/mpeg2/MPEG2TSExtractor.h
index ac93b5e..55356bf 100644
--- a/media/libstagefright/include/MPEG2TSExtractor.h
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.h
@@ -19,8 +19,8 @@
 #define MPEG2_TS_EXTRACTOR_H_
 
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
 #include <utils/threads.h>
 #include <utils/KeyedVector.h>
 #include <utils/Vector.h>
@@ -40,7 +40,7 @@
     explicit MPEG2TSExtractor(const sp<DataSource> &source);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
diff --git a/media/libstagefright/matroska/NOTICE b/media/extractors/mpeg2/NOTICE
similarity index 100%
copy from media/libstagefright/matroska/NOTICE
copy to media/extractors/mpeg2/NOTICE
diff --git a/media/extractors/mpeg2/exports.lds b/media/extractors/mpeg2/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/mpeg2/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
diff --git a/media/extractors/ogg/Android.bp b/media/extractors/ogg/Android.bp
new file mode 100644
index 0000000..2c09a5f
--- /dev/null
+++ b/media/extractors/ogg/Android.bp
@@ -0,0 +1,44 @@
+cc_library_shared {
+
+    srcs: ["OggExtractor.cpp"],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/include",
+        "external/tremolo",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libvorbisidec",
+    ],
+
+    name: "liboggextractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/extractors/ogg/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
copy to media/extractors/ogg/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/matroska/NOTICE b/media/extractors/ogg/NOTICE
similarity index 100%
copy from media/libstagefright/matroska/NOTICE
copy to media/extractors/ogg/NOTICE
diff --git a/media/libstagefright/OggExtractor.cpp b/media/extractors/ogg/OggExtractor.cpp
similarity index 96%
rename from media/libstagefright/OggExtractor.cpp
rename to media/extractors/ogg/OggExtractor.cpp
index 766230a..f42a6a8 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/extractors/ogg/OggExtractor.cpp
@@ -18,20 +18,20 @@
 #define LOG_TAG "OggExtractor"
 #include <utils/Log.h>
 
-#include "include/OggExtractor.h"
+#include "OggExtractor.h"
 
 #include <cutils/properties.h>
+#include <media/DataSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/base64.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 #include <utils/String8.h>
 
 extern "C" {
@@ -1176,19 +1176,8 @@
     return (mVi.bitrate_lower + mVi.bitrate_upper) / 2;
 }
 
-void MyOggExtractor::parseFileMetaData() {
-    mFileMeta = new MetaData;
-    mFileMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_OGG);
-
-    for (int i = 0; i < mVc.comments; ++i) {
-        const char *comment = mVc.user_comments[i];
-        size_t commentLength = mVc.comment_lengths[i];
-        parseVorbisComment(mFileMeta, comment, commentLength);
-        //ALOGI("comment #%d: '%s'", i + 1, mVc.user_comments[i]);
-    }
-}
-
-void parseVorbisComment(
+// also exists in FLACExtractor, candidate for moving to utility/support library?
+static void parseVorbisComment(
         const sp<MetaData> &fileMeta, const char *comment, size_t commentLength)
 {
     struct {
@@ -1234,6 +1223,7 @@
 
 }
 
+// also exists in FLACExtractor, candidate for moving to utility/support library?
 static void extractAlbumArt(
         const sp<MetaData> &fileMeta, const void *data, size_t size) {
     ALOGV("extractAlbumArt from '%s'", (const char *)data);
@@ -1310,6 +1300,19 @@
     fileMeta->setCString(kKeyAlbumArtMIME, type);
 }
 
+void MyOggExtractor::parseFileMetaData() {
+    mFileMeta = new MetaData;
+    mFileMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_OGG);
+
+    for (int i = 0; i < mVc.comments; ++i) {
+        const char *comment = mVc.user_comments[i];
+        size_t commentLength = mVc.comment_lengths[i];
+        parseVorbisComment(mFileMeta, comment, commentLength);
+        //ALOGI("comment #%d: '%s'", i + 1, mVc.user_comments[i]);
+    }
+}
+
+
 ////////////////////////////////////////////////////////////////////////////////
 
 OggExtractor::OggExtractor(const sp<DataSource> &source)
@@ -1345,7 +1348,7 @@
     return mInitCheck != OK ? 0 : 1;
 }
 
-sp<IMediaSource> OggExtractor::getTrack(size_t index) {
+sp<MediaSource> OggExtractor::getTrack(size_t index) {
     if (index >= 1) {
         return NULL;
     }
@@ -1366,18 +1369,41 @@
     return mImpl->getFileMetaData();
 }
 
-bool SniffOgg(
-        const sp<DataSource> &source, String8 *mimeType, float *confidence,
+static MediaExtractor* CreateExtractor(
+        const sp<DataSource> &source,
+        const sp<AMessage>& meta __unused) {
+    return new OggExtractor(source);
+}
+
+static MediaExtractor::CreatorFunc Sniff(
+        const sp<DataSource> &source,
+        String8 *mimeType,
+        float *confidence,
         sp<AMessage> *) {
     char tmp[4];
     if (source->readAt(0, tmp, 4) < 4 || memcmp(tmp, "OggS", 4)) {
-        return false;
+        return NULL;
     }
 
     mimeType->setTo(MEDIA_MIMETYPE_CONTAINER_OGG);
     *confidence = 0.2f;
 
-    return true;
+    return CreateExtractor;
 }
 
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+        UUID("8cc5cd06-f772-495e-8a62-cba9649374e9"),
+        1, // version
+        "Ogg Extractor",
+        Sniff
+    };
+}
+
+} // extern "C"
+
 }  // namespace android
diff --git a/media/libstagefright/include/OggExtractor.h b/media/extractors/ogg/OggExtractor.h
similarity index 87%
rename from media/libstagefright/include/OggExtractor.h
rename to media/extractors/ogg/OggExtractor.h
index 55aafed..0f7fe5f 100644
--- a/media/libstagefright/include/OggExtractor.h
+++ b/media/extractors/ogg/OggExtractor.h
@@ -19,7 +19,7 @@
 #define OGG_EXTRACTOR_H_
 
 #include <utils/Errors.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/MediaExtractor.h>
 
 namespace android {
 
@@ -34,7 +34,7 @@
     explicit OggExtractor(const sp<DataSource> &source);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
@@ -59,9 +59,6 @@
         const sp<DataSource> &source, String8 *mimeType, float *confidence,
         sp<AMessage> *);
 
-void parseVorbisComment(
-        const sp<MetaData> &fileMeta, const char *comment, size_t commentLength);
-
 }  // namespace android
 
 #endif  // OGG_EXTRACTOR_H_
diff --git a/media/extractors/ogg/exports.lds b/media/extractors/ogg/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/ogg/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
new file mode 100644
index 0000000..65c71ef
--- /dev/null
+++ b/media/extractors/wav/Android.bp
@@ -0,0 +1,43 @@
+cc_library_shared {
+
+    srcs: ["WAVExtractor.cpp"],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/include",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libfifo",
+    ],
+
+    name: "libwavextractor",
+    relative_install_path: "extractors",
+
+    compile_multilib: "first",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+    ],
+    version_script: "exports.lds",
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/extractors/wav/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
copy to media/extractors/wav/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/matroska/NOTICE b/media/extractors/wav/NOTICE
similarity index 100%
copy from media/libstagefright/matroska/NOTICE
copy to media/extractors/wav/NOTICE
diff --git a/media/libstagefright/WAVExtractor.cpp b/media/extractors/wav/WAVExtractor.cpp
similarity index 94%
rename from media/libstagefright/WAVExtractor.cpp
rename to media/extractors/wav/WAVExtractor.cpp
index 780b746..6c5f893 100644
--- a/media/libstagefright/WAVExtractor.cpp
+++ b/media/extractors/wav/WAVExtractor.cpp
@@ -18,15 +18,15 @@
 #define LOG_TAG "WAVExtractor"
 #include <utils/Log.h>
 
-#include "include/WAVExtractor.h"
+#include "WAVExtractor.h"
 
 #include <audio_utils/primitives.h>
+#include <media/DataSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <utils/String8.h>
 #include <cutils/bitops.h>
@@ -120,7 +120,7 @@
     return mInitCheck == OK ? 1 : 0;
 }
 
-sp<IMediaSource> WAVExtractor::getTrack(size_t index) {
+sp<MediaSource> WAVExtractor::getTrack(size_t index) {
     if (mInitCheck != OK || index > 0) {
         return NULL;
     }
@@ -544,27 +544,50 @@
 
 ////////////////////////////////////////////////////////////////////////////////
 
-bool SniffWAV(
-        const sp<DataSource> &source, String8 *mimeType, float *confidence,
+static MediaExtractor* CreateExtractor(
+        const sp<DataSource> &source,
+        const sp<AMessage>& meta __unused) {
+    return new WAVExtractor(source);
+}
+
+static MediaExtractor::CreatorFunc Sniff(
+        const sp<DataSource> &source,
+        String8 *mimeType,
+        float *confidence,
         sp<AMessage> *) {
     char header[12];
     if (source->readAt(0, header, sizeof(header)) < (ssize_t)sizeof(header)) {
-        return false;
+        return NULL;
     }
 
     if (memcmp(header, "RIFF", 4) || memcmp(&header[8], "WAVE", 4)) {
-        return false;
+        return NULL;
     }
 
     sp<MediaExtractor> extractor = new WAVExtractor(source);
     if (extractor->countTracks() == 0) {
-        return false;
+        return NULL;
     }
 
     *mimeType = MEDIA_MIMETYPE_CONTAINER_WAV;
     *confidence = 0.3f;
 
-    return true;
+    return CreateExtractor;
 }
 
-}  // namespace android
+extern "C" {
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+    return {
+        MediaExtractor::EXTRACTORDEF_VERSION,
+        UUID("7d613858-5837-4a38-84c5-332d1cddee27"),
+        1, // version
+        "WAV Extractor",
+        Sniff
+    };
+}
+
+} // extern "C"
+
+} // namespace android
diff --git a/media/libstagefright/include/WAVExtractor.h b/media/extractors/wav/WAVExtractor.h
similarity index 88%
rename from media/libstagefright/include/WAVExtractor.h
rename to media/extractors/wav/WAVExtractor.h
index 12ad441..98a2dfa 100644
--- a/media/libstagefright/include/WAVExtractor.h
+++ b/media/extractors/wav/WAVExtractor.h
@@ -19,7 +19,7 @@
 #define WAV_EXTRACTOR_H_
 
 #include <utils/Errors.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/MediaExtractor.h>
 
 namespace android {
 
@@ -33,7 +33,7 @@
     explicit WAVExtractor(const sp<DataSource> &source);
 
     virtual size_t countTracks();
-    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MediaSource> getTrack(size_t index);
     virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
 
     virtual sp<MetaData> getMetaData();
@@ -61,10 +61,6 @@
     WAVExtractor &operator=(const WAVExtractor &);
 };
 
-bool SniffWAV(
-        const sp<DataSource> &source, String8 *mimeType, float *confidence,
-        sp<AMessage> *);
-
 }  // namespace android
 
 #endif  // WAV_EXTRACTOR_H_
diff --git a/media/extractors/wav/exports.lds b/media/extractors/wav/exports.lds
new file mode 100644
index 0000000..b1309ee
--- /dev/null
+++ b/media/extractors/wav/exports.lds
@@ -0,0 +1 @@
+{ global: GETEXTRACTORDEF; local: *; };
diff --git a/media/libaaudio/examples/Android.bp b/media/libaaudio/examples/Android.bp
index f2e00a7..639fab2 100644
--- a/media/libaaudio/examples/Android.bp
+++ b/media/libaaudio/examples/Android.bp
@@ -1,4 +1,6 @@
+subdirs = ["*"]
+
 cc_library_headers {
     name: "libaaudio_example_utils",
-    export_include_dirs: ["."],
+    export_include_dirs: ["utils"],
 }
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
new file mode 100644
index 0000000..d8c5843
--- /dev/null
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -0,0 +1,17 @@
+cc_test {
+    name: "input_monitor",
+    gtest: false,
+    srcs: ["src/input_monitor.cpp"],
+    cflags: ["-Wall", "-Werror"],
+    shared_libs: ["libaaudio"],
+    header_libs: ["libaaudio_example_utils"],
+}
+
+cc_test {
+    name: "input_monitor_callback",
+    gtest: false,
+    srcs: ["src/input_monitor_callback.cpp"],
+    cflags: ["-Wall", "-Werror"],
+    shared_libs: ["libaaudio"],
+    header_libs: ["libaaudio_example_utils"],
+}
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
new file mode 100644
index 0000000..5b7d956
--- /dev/null
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -0,0 +1,12 @@
+cc_test {
+    name: "aaudio_loopback",
+    gtest: false,
+    srcs: ["src/loopback.cpp"],
+    cflags: ["-Wall", "-Werror"],
+    static_libs: ["libsndfile"],
+    shared_libs: [
+        "libaaudio",
+        "libaudioutils",
+        ],
+    header_libs: ["libaaudio_example_utils"],
+}
diff --git a/media/libaaudio/examples/loopback/jni/Android.mk b/media/libaaudio/examples/loopback/jni/Android.mk
index 1fe3def..aebe877 100644
--- a/media/libaaudio/examples/loopback/jni/Android.mk
+++ b/media/libaaudio/examples/loopback/jni/Android.mk
@@ -10,6 +10,7 @@
 # NDK recommends using this kind of relative path instead of an absolute path.
 LOCAL_SRC_FILES:= ../src/loopback.cpp
 LOCAL_CFLAGS := -Wall -Werror
-LOCAL_SHARED_LIBRARIES := libaaudio
+LOCAL_STATIC_LIBRARIES := libsndfile
+LOCAL_SHARED_LIBRARIES := libaaudio libaudioutils
 LOCAL_MODULE := aaudio_loopback
 include $(BUILD_EXECUTABLE)
diff --git a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
index 276b45f..b83851a 100644
--- a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
+++ b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
@@ -30,6 +30,8 @@
 #include <stdlib.h>
 #include <unistd.h>
 
+#include <audio_utils/sndfile.h>
+
 // Tag for machine readable results as property = value pairs
 #define LOOPBACK_RESULT_TAG      "RESULT: "
 #define LOOPBACK_SAMPLE_RATE     48000
@@ -37,6 +39,7 @@
 #define MILLIS_PER_SECOND        1000
 
 #define MAX_ZEROTH_PARTIAL_BINS  40
+constexpr double MAX_ECHO_GAIN = 10.0; // based on experiments, otherwise autocorrelation too noisy
 
 static const float s_Impulse[] = {
         0.0f, 0.0f, 0.0f, 0.0f, 0.2f, // silence on each side of the impulse
@@ -156,6 +159,8 @@
                             const float *needle, int needleSize,
                             LatencyReport *report) {
     const double threshold = 0.1;
+    printf("measureLatencyFromEchos: haystackSize = %d, needleSize = %d\n",
+           haystackSize, needleSize);
 
     // Find first peak
     int first = (int) (findFirstMatch(haystack,
@@ -173,7 +178,7 @@
                                       needleSize,
                                       threshold) + 0.5);
 
-    printf("first = %d, again at %d\n", first, again);
+    printf("measureLatencyFromEchos: first = %d, again at %d\n", first, again);
     first = again;
 
     // Allocate results array
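The latency figures reported above come from locating the injected impulse and its first recirculated echo in the recording and converting their sample distance to milliseconds. A minimal, self-contained sketch of that idea using plain normalized cross-correlation (not the project's findFirstMatch(), which additionally derives a confidence value from several echo periods) could look like:

    // Slide the known impulse ("needle") over the recording ("haystack"), take
    // the first two correlation peaks above a threshold, and convert their
    // distance from samples to milliseconds.
    #include <cmath>
    #include <vector>

    static double correlationAt(const float *haystack, const float *needle,
                                int needleSize, int offset) {
        double dot = 0.0, energy = 1e-9;
        for (int i = 0; i < needleSize; ++i) {
            dot += haystack[offset + i] * needle[i];
            energy += haystack[offset + i] * haystack[offset + i];
        }
        return dot / std::sqrt(energy);   // crude normalization, sketch only
    }

    // Returns latency in milliseconds, or a negative value if no echo was found.
    static double estimateLatencyMillis(const std::vector<float> &haystack,
                                        const std::vector<float> &needle,
                                        double sampleRate, double threshold = 0.1) {
        int first = -1, again = -1;
        int limit = (int) haystack.size() - (int) needle.size();
        for (int i = 0; i < limit; ++i) {
            double c = correlationAt(haystack.data(), needle.data(),
                                     (int) needle.size(), i);
            if (c > threshold) {
                if (first < 0) {
                    first = i;
                    i += (int) needle.size();   // skip past the first pulse
                } else {
                    again = i;
                    break;
                }
            }
        }
        if (first < 0 || again < 0) return -1.0;
        return 1000.0 * (again - first) / sampleRate;
    }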
@@ -270,37 +275,60 @@
         return mData;
     }
 
+    void setSampleRate(int32_t sampleRate) {
+        mSampleRate = sampleRate;
+    }
+
+    int32_t getSampleRate() {
+        return mSampleRate;
+    }
+
     int save(const char *fileName, bool writeShorts = true) {
+        SNDFILE *sndFile = nullptr;
         int written = 0;
-        const int chunkSize = 64;
-        FILE *fid = fopen(fileName, "wb");
-        if (fid == NULL) {
+        SF_INFO info = {
+                .frames = mFrameCounter,
+                .samplerate = mSampleRate,
+                .channels = 1,
+                .format = SF_FORMAT_WAV | (writeShorts ? SF_FORMAT_PCM_16 : SF_FORMAT_FLOAT)
+        };
+
+        sndFile = sf_open(fileName, SFM_WRITE, &info);
+        if (sndFile == nullptr) {
+            printf("AudioRecording::save(%s) failed to open file\n", fileName);
             return -errno;
         }
 
-        if (writeShorts) {
-            int16_t buffer[chunkSize];
-            int32_t framesLeft = mFrameCounter;
-            int32_t cursor = 0;
-            while (framesLeft) {
-                int32_t framesToWrite = framesLeft < chunkSize ? framesLeft : chunkSize;
-                for (int i = 0; i < framesToWrite; i++) {
-                    buffer[i] = (int16_t) (mData[cursor++] * 32767);
-                }
-                written += fwrite(buffer, sizeof(int16_t), framesToWrite, fid);
-                framesLeft -= framesToWrite;
-            }
-        } else {
-            written = (int) fwrite(mData, sizeof(float), mFrameCounter, fid);
-        }
-        fclose(fid);
+        written = sf_writef_float(sndFile, mData, mFrameCounter);
+
+        sf_close(sndFile);
         return written;
     }
 
+    int load(const char *fileName) {
+        SNDFILE *sndFile = nullptr;
+        SF_INFO info;
+
+        sndFile = sf_open(fileName, SFM_READ, &info);
+        if (sndFile == nullptr) {
+            printf("AudioRecording::load(%s) failed to open file\n", fileName);
+            return -errno;
+        }
+
+        assert(info.channels == 1);
+
+        allocate(info.frames);
+        mFrameCounter = sf_readf_float(sndFile, mData, info.frames);
+
+        sf_close(sndFile);
+        return mFrameCounter;
+    }
+
 private:
     float  *mData = nullptr;
     int32_t mFrameCounter = 0;
     int32_t mMaxFrames = 0;
+    int32_t mSampleRate = 48000; // common default
 };
 
 // ====================================================================================
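Switching from the hand-rolled raw writer to libsndfile means the captures land on the device as proper WAV files whose headers carry the sample rate and sample format. A stand-alone round trip using the same libsndfile calls as the save()/load() methods above, with an illustrative path and buffer, might look like:

    // Write one second of mono float samples to a WAV file, then read it back.
    #include <sndfile.h>
    #include <cstdio>
    #include <vector>

    int main() {
        const int sampleRate = 48000;
        std::vector<float> data(sampleRate, 0.0f);        // one second of silence

        SF_INFO info = {};                                 // zero all fields first
        info.samplerate = sampleRate;
        info.channels = 1;
        info.format = SF_FORMAT_WAV | SF_FORMAT_FLOAT;     // float WAV, as in save(..., false)

        SNDFILE *out = sf_open("/tmp/example.wav", SFM_WRITE, &info);
        if (out == nullptr) return 1;
        sf_count_t written = sf_writef_float(out, data.data(), (sf_count_t) data.size());
        sf_close(out);

        SF_INFO inInfo = {};                               // sf_open fills this on read
        SNDFILE *in = sf_open("/tmp/example.wav", SFM_READ, &inInfo);
        if (in == nullptr) return 1;
        std::vector<float> loaded((size_t) inInfo.frames);
        sf_count_t read = sf_readf_float(in, loaded.data(), inInfo.frames);
        sf_close(in);

        printf("wrote %lld, read %lld frames at %d Hz\n",
               (long long) written, (long long) read, inInfo.samplerate);
        return 0;
    }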
@@ -320,11 +348,25 @@
 
     virtual void printStatus() {};
 
+    virtual int getResult() {
+        return -1;
+    }
+
     virtual bool isDone() {
         return false;
     }
 
-    void setSampleRate(int32_t sampleRate) {
+    virtual int save(const char *fileName) {
+        (void) fileName;
+        return AAUDIO_ERROR_UNIMPLEMENTED;
+    }
+
+    virtual int load(const char *fileName) {
+        (void) fileName;
+        return AAUDIO_ERROR_UNIMPLEMENTED;
+    }
+
+    virtual void setSampleRate(int32_t sampleRate) {
         mSampleRate = sampleRate;
     }
 
@@ -395,7 +437,13 @@
 public:
 
     EchoAnalyzer() : LoopbackProcessor() {
-        audioRecorder.allocate(2 * LOOPBACK_SAMPLE_RATE);
+        mAudioRecording.allocate(2 * getSampleRate());
+        mAudioRecording.setSampleRate(getSampleRate());
+    }
+
+    void setSampleRate(int32_t sampleRate) override {
+        LoopbackProcessor::setSampleRate(sampleRate);
+        mAudioRecording.setSampleRate(sampleRate);
     }
 
     void reset() override {
@@ -406,8 +454,12 @@
         mState = STATE_INITIAL_SILENCE;
     }
 
+    virtual int getResult() {
+        return mState == STATE_DONE ? 0 : -1;
+    }
+
     virtual bool isDone() {
-        return mState == STATE_DONE;
+        return mState == STATE_DONE || mState == STATE_FAILED;
     }
 
     void setGain(float gain) {
@@ -423,31 +475,24 @@
         printf("EchoAnalyzer ---------------\n");
         printf(LOOPBACK_RESULT_TAG "measured.gain          = %f\n", mMeasuredLoopGain);
         printf(LOOPBACK_RESULT_TAG "echo.gain              = %f\n", mEchoGain);
-        printf(LOOPBACK_RESULT_TAG "frame.count            = %d\n", mFrameCounter);
         printf(LOOPBACK_RESULT_TAG "test.state             = %d\n", mState);
         if (mMeasuredLoopGain >= 0.9999) {
             printf("   ERROR - clipping, turn down volume slightly\n");
         } else {
             const float *needle = s_Impulse;
             int needleSize = (int) (sizeof(s_Impulse) / sizeof(float));
-            float *haystack = audioRecorder.getData();
-            int haystackSize = audioRecorder.size();
-            measureLatencyFromEchos(haystack, haystackSize, needle, needleSize, &latencyReport);
-            if (latencyReport.confidence < 0.01) {
-                printf("   ERROR - confidence too low = %f\n", latencyReport.confidence);
+            float *haystack = mAudioRecording.getData();
+            int haystackSize = mAudioRecording.size();
+            measureLatencyFromEchos(haystack, haystackSize, needle, needleSize, &mLatencyReport);
+            if (mLatencyReport.confidence < 0.01) {
+                printf("   ERROR - confidence too low = %f\n", mLatencyReport.confidence);
             } else {
-                double latencyMillis = 1000.0 * latencyReport.latencyInFrames / getSampleRate();
-                printf(LOOPBACK_RESULT_TAG "latency.frames        = %8.2f\n", latencyReport.latencyInFrames);
+                double latencyMillis = 1000.0 * mLatencyReport.latencyInFrames / getSampleRate();
+                printf(LOOPBACK_RESULT_TAG "latency.frames        = %8.2f\n", mLatencyReport.latencyInFrames);
                 printf(LOOPBACK_RESULT_TAG "latency.msec          = %8.2f\n", latencyMillis);
-                printf(LOOPBACK_RESULT_TAG "latency.confidence    = %8.6f\n", latencyReport.confidence);
+                printf(LOOPBACK_RESULT_TAG "latency.confidence    = %8.6f\n", mLatencyReport.confidence);
             }
         }
-
-        {
-#define ECHO_FILENAME "/data/oboe_echo.raw"
-            int written = audioRecorder.save(ECHO_FILENAME);
-            printf("Echo wrote %d mono samples to %s on Android device\n", written, ECHO_FILENAME);
-        }
     }
 
     void printStatus() override {
@@ -491,13 +536,18 @@
                 // If we get several in a row then go to next state.
                 if (peak > mPulseThreshold) {
                     if (mDownCounter-- <= 0) {
-                        nextState = STATE_WAITING_FOR_SILENCE;
                         //printf("%5d: switch to STATE_WAITING_FOR_SILENCE, measured peak = %f\n",
                         //       mLoopCounter, peak);
                         mDownCounter = 8;
                         mMeasuredLoopGain = peak;  // assumes original pulse amplitude is one
                         // Calculate gain that will give us a nice decaying echo.
                         mEchoGain = mDesiredEchoGain / mMeasuredLoopGain;
+                        if (mEchoGain > MAX_ECHO_GAIN) {
+                            printf("ERROR - loop gain too low. Increase the volume.\n");
+                            nextState = STATE_FAILED;
+                        } else {
+                            nextState = STATE_WAITING_FOR_SILENCE;
+                        }
                     }
                 } else {
                     mDownCounter = 8;
@@ -524,14 +574,14 @@
                 break;
 
             case STATE_SENDING_PULSE:
-                audioRecorder.write(inputData, inputChannelCount, numFrames);
+                mAudioRecording.write(inputData, inputChannelCount, numFrames);
                 sendImpulse(outputData, outputChannelCount);
                 nextState = STATE_GATHERING_ECHOS;
                 //printf("%5d: switch to STATE_GATHERING_ECHOS\n", mLoopCounter);
                 break;
 
             case STATE_GATHERING_ECHOS:
-                numWritten = audioRecorder.write(inputData, inputChannelCount, numFrames);
+                numWritten = mAudioRecording.write(inputData, inputChannelCount, numFrames);
                 peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
                 if (peak > mMeasuredLoopGain) {
                     mMeasuredLoopGain = peak;  // AGC might be raising gain so adjust it on the fly.
@@ -565,6 +615,14 @@
         mLoopCounter++;
     }
 
+    int save(const char *fileName) override {
+        return mAudioRecording.save(fileName);
+    }
+
+    int load(const char *fileName) override {
+        return mAudioRecording.load(fileName);
+    }
+
 private:
 
     enum echo_state_t {
@@ -573,22 +631,22 @@
         STATE_WAITING_FOR_SILENCE,
         STATE_SENDING_PULSE,
         STATE_GATHERING_ECHOS,
-        STATE_DONE
+        STATE_DONE,
+        STATE_FAILED
     };
 
-    int           mDownCounter = 500;
-    int           mLoopCounter = 0;
-    float         mPulseThreshold = 0.02f;
-    float         mSilenceThreshold = 0.002f;
-    float         mMeasuredLoopGain = 0.0f;
-    float         mDesiredEchoGain = 0.95f;
-    float         mEchoGain = 1.0f;
-    echo_state_t  mState = STATE_INITIAL_SILENCE;
-    int32_t       mFrameCounter = 0;
+    int             mDownCounter = 500;
+    int             mLoopCounter = 0;
+    float           mPulseThreshold = 0.02f;
+    float           mSilenceThreshold = 0.002f;
+    float           mMeasuredLoopGain = 0.0f;
+    float           mDesiredEchoGain = 0.95f;
+    float           mEchoGain = 1.0f;
+    echo_state_t    mState = STATE_INITIAL_SILENCE;
 
-    AudioRecording     audioRecorder;
-    LatencyReport      latencyReport;
-    PeakDetector       mPeakDetector;
+    AudioRecording  mAudioRecording; // contains only the input after the gain detection burst
+    LatencyReport   mLatencyReport;
+    // PeakDetector    mPeakDetector;
 };
 
 
@@ -602,6 +660,10 @@
 class SineAnalyzer : public LoopbackProcessor {
 public:
 
+    virtual int getResult() {
+        return mState == STATE_LOCKED ? 0 : -1;
+    }
+
     void report() override {
         printf("SineAnalyzer ------------------\n");
         printf(LOOPBACK_RESULT_TAG "peak.amplitude     = %7.5f\n", mPeakAmplitude);
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index b678d8a..d23d907 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -37,10 +37,10 @@
 
 // Tag for machine readable results as property = value pairs
 #define RESULT_TAG              "RESULT: "
-#define SAMPLE_RATE             48000
 #define NUM_SECONDS             5
 #define NUM_INPUT_CHANNELS      1
-#define FILENAME                "/data/oboe_input.raw"
+#define FILENAME_ALL            "/data/loopback_all.wav"
+#define FILENAME_ECHOS          "/data/loopback_echos.wav"
 #define APP_VERSION             "0.1.22"
 
 struct LoopbackData {
@@ -61,7 +61,7 @@
 
     SineAnalyzer       sineAnalyzer;
     EchoAnalyzer       echoAnalyzer;
-    AudioRecording     audioRecorder;
+    AudioRecording     audioRecording;
     LoopbackProcessor *loopbackProcessor;
 };
 
@@ -126,7 +126,7 @@
             result = AAUDIO_CALLBACK_RESULT_STOP;
         } else if (framesRead > 0) {
 
-            myData->audioRecorder.write(myData->inputData,
+            myData->audioRecording.write(myData->inputData,
                                         myData->actualInputChannelCount,
                                         numFrames);
 
@@ -159,21 +159,28 @@
 }
 
 static void usage() {
-    printf("loopback: -n{numBursts} -p{outPerf} -P{inPerf} -t{test} -g{gain} -f{freq}\n");
-    printf("          -c{inputChannels}\n");
-    printf("          -f{freq}  sine frequency\n");
-    printf("          -g{gain}  recirculating loopback gain\n");
-    printf("          -m enable MMAP mode\n");
-    printf("          -n{numBursts} buffer size, for example 2 for double buffered\n");
-    printf("          -p{outPerf}  set output AAUDIO_PERFORMANCE_MODE*\n");
-    printf("          -P{inPerf}   set input AAUDIO_PERFORMANCE_MODE*\n");
+    printf("Usage: aaudio_loopback [OPTION]...\n\n");
+    printf("          -c{channels}      number of output channels\n");
+    printf("          -C{channels}      number of input channels\n");
+    printf("          -g{gain}          recirculating loopback gain\n");
+    printf("          -m{0|1|2|3}       set MMAP policy\n");
+    printf("              0 = _UNSPECIFIED\n");
+    printf("              1 = _NEVER\n");
+    printf("              2 = _AUTO, also if -m is used with no number\n");
+    printf("              3 = _ALWAYS\n");
+    printf("          -n{numBursts}     buffer size, for example 2 for double buffered\n");
+    printf("          -p{outPerf}       set output AAUDIO_PERFORMANCE_MODE*\n");
+    printf("          -P{inPerf}        set input AAUDIO_PERFORMANCE_MODE*\n");
     printf("              n for _NONE\n");
     printf("              l for _LATENCY\n");
-    printf("              p for _POWER_SAVING;\n");
-    printf("          -t{test}   select test mode\n");
+    printf("              p for _POWER_SAVING\n");
+    printf("          -t{test}          select test mode\n");
     printf("              m for sine magnitude\n");
     printf("              e for echo latency (default)\n");
-    printf("For example:  loopback -b2 -pl -Pn\n");
+    printf("              f for file latency, analyzes %s\n\n", FILENAME_ECHOS);
+    printf("          -x                use EXCLUSIVE mode for output\n");
+    printf("          -X                use EXCLUSIVE mode for input\n");
+    printf("Example:  aaudio_loopback -n2 -pl -Pl -x\n");
 }
 
 static aaudio_performance_mode_t parsePerformanceMode(char c) {
@@ -199,6 +206,7 @@
 enum {
     TEST_SINE_MAGNITUDE = 0,
     TEST_ECHO_LATENCY,
+    TEST_FILE_LATENCY,
 };
 
 static int parseTestMode(char c) {
@@ -211,6 +219,9 @@
         case 'e':
             testMode = TEST_ECHO_LATENCY;
             break;
+        case 'f':
+            testMode = TEST_FILE_LATENCY;
+            break;
         default:
             printf("ERROR in value test mode %c\n", c);
             break;
@@ -248,13 +259,13 @@
 int main(int argc, const char **argv)
 {
 
-    AAudioArgsParser     argParser;
-    AAudioSimplePlayer   player;
-    AAudioSimpleRecorder recorder;
-    LoopbackData         loopbackData;
-    AAudioStream        *outputStream = nullptr;
+    AAudioArgsParser      argParser;
+    AAudioSimplePlayer    player;
+    AAudioSimpleRecorder  recorder;
+    LoopbackData          loopbackData;
+    AAudioStream         *outputStream = nullptr;
 
-    aaudio_result_t      result = AAUDIO_OK;
+    aaudio_result_t       result = AAUDIO_OK;
     aaudio_sharing_mode_t requestedInputSharingMode     = AAUDIO_SHARING_MODE_SHARED;
     int                   requestedInputChannelCount = NUM_INPUT_CHANNELS;
     const aaudio_format_t requestedInputFormat = AAUDIO_FORMAT_PCM_I16;
@@ -262,6 +273,7 @@
     aaudio_format_t       actualInputFormat;
     aaudio_format_t       actualOutputFormat;
     aaudio_performance_mode_t inputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
+    int32_t               actualSampleRate = 0;
 
     int testMode = TEST_ECHO_LATENCY;
     double gain = 1.0;
@@ -318,7 +330,6 @@
 
     int32_t requestedDuration = argParser.getDurationSeconds();
     int32_t recordingDuration = std::min(60, requestedDuration);
-    loopbackData.audioRecorder.allocate(recordingDuration * SAMPLE_RATE);
 
     switch(testMode) {
         case TEST_SINE_MAGNITUDE:
@@ -328,6 +339,16 @@
             loopbackData.echoAnalyzer.setGain(gain);
             loopbackData.loopbackProcessor = &loopbackData.echoAnalyzer;
             break;
+        case TEST_FILE_LATENCY: {
+            loopbackData.echoAnalyzer.setGain(gain);
+
+            loopbackData.loopbackProcessor = &loopbackData.echoAnalyzer;
+            int read = loopbackData.loopbackProcessor->load(FILENAME_ECHOS);
+            printf("main() read %d mono samples from %s on Android device\n", read, FILENAME_ECHOS);
+            loopbackData.loopbackProcessor->report();
+            return 0;
+        }
+            break;
         default:
             exit(1);
             break;
@@ -338,7 +359,7 @@
     result = player.open(argParser, MyDataCallbackProc, MyErrorCallbackProc, &loopbackData);
     if (result != AAUDIO_OK) {
         fprintf(stderr, "ERROR -  player.open() returned %d\n", result);
-        goto finish;
+        exit(1);
     }
     outputStream = player.getStream();
     argParser.compareWithStream(outputStream);
@@ -346,6 +367,10 @@
     actualOutputFormat = AAudioStream_getFormat(outputStream);
     assert(actualOutputFormat == AAUDIO_FORMAT_PCM_FLOAT);
 
+    actualSampleRate = AAudioStream_getSampleRate(outputStream);
+    loopbackData.audioRecording.allocate(recordingDuration * actualSampleRate);
+    loopbackData.audioRecording.setSampleRate(actualSampleRate);
+
     printf("INPUT stream ----------------------------------------\n");
     // Use different parameters for the input.
     argParser.setNumberOfBursts(AAUDIO_UNSPECIFIED);
@@ -374,7 +399,7 @@
 
     // Allocate a buffer for the audio data.
     loopbackData.inputFramesMaximum = 32 * framesPerBurst;
-    loopbackData.inputBuffersToDiscard = 100;
+    loopbackData.inputBuffersToDiscard = 200;
 
     loopbackData.inputData = new int16_t[loopbackData.inputFramesMaximum
                                          * loopbackData.actualInputChannelCount];
@@ -430,25 +455,31 @@
         }
     }
 
-    printf("input error = %d = %s\n",
-                loopbackData.inputError, AAudio_convertResultToText(loopbackData.inputError));
+    if (loopbackData.loopbackProcessor->getResult() < 0) {
+        printf("Test failed!\n");
+    } else {
+        printf("input error = %d = %s\n",
+               loopbackData.inputError, AAudio_convertResultToText(loopbackData.inputError));
 
-    printf("AAudioStream_getXRunCount %d\n", AAudioStream_getXRunCount(outputStream));
-    printf("framesRead    = %8d\n", (int) AAudioStream_getFramesRead(outputStream));
-    printf("framesWritten = %8d\n", (int) AAudioStream_getFramesWritten(outputStream));
-    printf("min numFrames = %8d\n", (int) loopbackData.minNumFrames);
-    printf("max numFrames = %8d\n", (int) loopbackData.maxNumFrames);
+        printf("AAudioStream_getXRunCount %d\n", AAudioStream_getXRunCount(outputStream));
+        printf("framesRead    = %8d\n", (int) AAudioStream_getFramesRead(outputStream));
+        printf("framesWritten = %8d\n", (int) AAudioStream_getFramesWritten(outputStream));
+        printf("min numFrames = %8d\n", (int) loopbackData.minNumFrames);
+        printf("max numFrames = %8d\n", (int) loopbackData.maxNumFrames);
 
-    if (loopbackData.inputError == AAUDIO_OK) {
-        if (testMode == TEST_SINE_MAGNITUDE) {
-            printAudioGraph(loopbackData.audioRecorder, 200);
+        if (loopbackData.inputError == AAUDIO_OK) {
+            if (testMode == TEST_SINE_MAGNITUDE) {
+                printAudioGraph(loopbackData.audioRecording, 200);
+            }
+            loopbackData.loopbackProcessor->report();
         }
-        loopbackData.loopbackProcessor->report();
-    }
 
-    {
-        int written = loopbackData.audioRecorder.save(FILENAME);
-        printf("main() wrote %d mono samples to %s on Android device\n", written, FILENAME);
+        int written = loopbackData.loopbackProcessor->save(FILENAME_ECHOS);
+        printf("main() wrote %d mono samples to %s on Android device\n", written,
+               FILENAME_ECHOS);
+        printf("main() loopbackData.audioRecording.getSampleRate() = %d\n", loopbackData.audioRecording.getSampleRate());
+        written = loopbackData.audioRecording.save(FILENAME_ALL);
+        printf("main() wrote %d mono samples to %s on Android device\n", written, FILENAME_ALL);
     }
 
 finish:
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index ada37e2..142b295 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -17,6 +17,8 @@
 #ifndef AAUDIO_EXAMPLE_ARGS_PARSER_H
 #define AAUDIO_EXAMPLE_ARGS_PARSER_H
 
+#define MAX_CHANNELS                     8
+
 #include <cctype>
 #include <unistd.h>
 #include <stdio.h>
@@ -39,6 +41,10 @@
     }
 
     void setChannelCount(int32_t channelCount) {
+        if (channelCount > MAX_CHANNELS) {
+            printf("Sorry, MAX of %d channels!\n", MAX_CHANNELS);
+            channelCount = MAX_CHANNELS;
+        }
         mChannelCount = channelCount;
     }
 
diff --git a/media/libaaudio/examples/utils/AAudioSimplePlayer.h b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
index 606c4ba..54b77ba 100644
--- a/media/libaaudio/examples/utils/AAudioSimplePlayer.h
+++ b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
@@ -19,11 +19,10 @@
 #ifndef AAUDIO_SIMPLE_PLAYER_H
 #define AAUDIO_SIMPLE_PLAYER_H
 
-#include <unistd.h>
 #include <sched.h>
+#include <unistd.h>
 
 #include <aaudio/AAudio.h>
-#include <atomic>
 #include "AAudioArgsParser.h"
 #include "SineGenerator.h"
 
@@ -31,12 +30,12 @@
 #define SHARING_MODE  AAUDIO_SHARING_MODE_SHARED
 #define PERFORMANCE_MODE AAUDIO_PERFORMANCE_MODE_NONE
 
-// Arbitrary period for glitches, once per second at 48000 Hz.
-#define FORCED_UNDERRUN_PERIOD_FRAMES    48000
+// Arbitrary period for glitches
+#define FORCED_UNDERRUN_PERIOD_FRAMES    (2 * 48000)
 // How long to sleep in a callback to cause an intentional glitch. For testing.
 #define FORCED_UNDERRUN_SLEEP_MICROS     (10 * 1000)
 
-#define MAX_TIMESTAMPS   16
+#define MAX_TIMESTAMPS                   16
 
 typedef struct Timestamp {
     int64_t position;
@@ -70,13 +69,6 @@
     }
 
     // TODO Extract a common base class for record and playback.
-    /**
-     * Also known as "sample rate"
-     * Only call this after open() has been called.
-     */
-    int32_t getFramesPerSecond() const {
-        return getSampleRate(); // alias
-    }
 
     /**
      * Only call this after open() has been called.
@@ -172,12 +164,12 @@
         result = AAudioStreamBuilder_openStream(builder, &mStream);
 
         AAudioStreamBuilder_delete(builder);
+
         return result;
     }
 
     aaudio_result_t close() {
         if (mStream != nullptr) {
-            printf("call AAudioStream_close(%p)\n", mStream);  fflush(stdout);
             AAudioStream_close(mStream);
             mStream = nullptr;
         }
@@ -212,13 +204,35 @@
         aaudio_result_t result = AAudioStream_requestStop(mStream);
         if (result != AAUDIO_OK) {
             printf("ERROR - AAudioStream_requestStop() returned %d %s\n",
-                    result, AAudio_convertResultToText(result));
+                   result, AAudio_convertResultToText(result));
         }
         int32_t xRunCount = AAudioStream_getXRunCount(mStream);
         printf("AAudioStream_getXRunCount %d\n", xRunCount);
         return result;
     }
 
+    // Pause the stream. AAudio will stop calling your callback function.
+    aaudio_result_t pause() {
+        aaudio_result_t result = AAudioStream_requestPause(mStream);
+        if (result != AAUDIO_OK) {
+            printf("ERROR - AAudioStream_requestPause() returned %d %s\n",
+                   result, AAudio_convertResultToText(result));
+        }
+        int32_t xRunCount = AAudioStream_getXRunCount(mStream);
+        printf("AAudioStream_getXRunCount %d\n", xRunCount);
+        return result;
+    }
+
+    // Flush the stream. AAudio will stop calling your callback function.
+    aaudio_result_t flush() {
+        aaudio_result_t result = AAudioStream_requestFlush(mStream);
+        if (result != AAUDIO_OK) {
+            printf("ERROR - AAudioStream_requestFlush() returned %d %s\n",
+                   result, AAudio_convertResultToText(result));
+        }
+        return result;
+    }
+
     AAudioStream *getStream() const {
         return mStream;
     }
@@ -232,23 +246,49 @@
 
 typedef struct SineThreadedData_s {
 
-    SineGenerator  sineOsc1;
-    SineGenerator  sineOsc2;
-    Timestamp      timestamps[MAX_TIMESTAMPS];
-    int64_t        framesTotal = 0;
-    int64_t        nextFrameToGlitch = FORCED_UNDERRUN_PERIOD_FRAMES;
-    int32_t        minNumFrames = INT32_MAX;
-    int32_t        maxNumFrames = 0;
-    int32_t        timestampCount = 0; // in timestamps
+    SineGenerator      sineOscillators[MAX_CHANNELS];
+    Timestamp          timestamps[MAX_TIMESTAMPS];
+    int64_t            framesTotal = 0;
+    int64_t            nextFrameToGlitch = FORCED_UNDERRUN_PERIOD_FRAMES;
+    int32_t            minNumFrames = INT32_MAX;
+    int32_t            maxNumFrames = 0;
+    int32_t            timestampCount = 0; // in timestamps
+    int32_t            sampleRate = 48000;
+    int32_t            prefixToneFrames = 0;
+    bool               sweepSetup = false;
 
-    int            scheduler = 0;
-    bool           schedulerChecked = false;
-    bool           forceUnderruns = false;
+    int                scheduler = 0;
+    bool               schedulerChecked = false;
+    bool               forceUnderruns = false;
 
     AAudioSimplePlayer simplePlayer;
     int32_t            callbackCount = 0;
     WakeUp             waker{AAUDIO_OK};
 
+    /**
+     * Set sampleRate first.
+     */
+    void setupSineBlip() {
+        for (int i = 0; i < MAX_CHANNELS; ++i) {
+            double centerFrequency = 880.0 * (i + 2);
+            sineOscillators[i].setup(centerFrequency, sampleRate);
+            sineOscillators[i].setSweep(centerFrequency, centerFrequency, 0.0);
+        }
+    }
+
+    void setupSineSweeps() {
+        for (int i = 0; i < MAX_CHANNELS; ++i) {
+            double centerFrequency = 220.0 * (i + 2);
+            sineOscillators[i].setup(centerFrequency, sampleRate);
+            double minFrequency = centerFrequency * 2.0 / 3.0;
+            // Change range slightly so they will go out of phase.
+            double maxFrequency = centerFrequency * 3.0 / 2.0;
+            double sweepSeconds = 5.0 + i;
+            sineOscillators[i].setSweep(minFrequency, maxFrequency, sweepSeconds);
+        }
+        sweepSetup = true;
+    }
+
 } SineThreadedData_t;
 
 // Callback function that fills the audio output buffer.
@@ -265,9 +305,11 @@
         return AAUDIO_CALLBACK_RESULT_STOP;
     }
     SineThreadedData_t *sineData = (SineThreadedData_t *) userData;
-    sineData->callbackCount++;
 
-    sineData->framesTotal += numFrames;
+    // Play an initial high tone so we can tell whether the beginning was truncated.
+    if (!sineData->sweepSetup && sineData->framesTotal >= sineData->prefixToneFrames) {
+        sineData->setupSineSweeps();
+    }
 
     if (sineData->forceUnderruns) {
         if (sineData->framesTotal > sineData->nextFrameToGlitch) {
@@ -301,33 +343,32 @@
     }
 
     int32_t samplesPerFrame = AAudioStream_getChannelCount(stream);
-    // This code only plays on the first one or two channels.
-    // TODO Support arbitrary number of channels.
+
+
+    int numActiveOscilators = (samplesPerFrame > MAX_CHANNELS) ? MAX_CHANNELS : samplesPerFrame;
     switch (AAudioStream_getFormat(stream)) {
         case AAUDIO_FORMAT_PCM_I16: {
             int16_t *audioBuffer = (int16_t *) audioData;
-            // Render sine waves as shorts to first channel.
-            sineData->sineOsc1.render(&audioBuffer[0], samplesPerFrame, numFrames);
-            // Render sine waves to second channel if there is one.
-            if (samplesPerFrame > 1) {
-                sineData->sineOsc2.render(&audioBuffer[1], samplesPerFrame, numFrames);
+            for (int i = 0; i < numActiveOscilators; ++i) {
+                sineData->sineOscillators[i].render(&audioBuffer[i], samplesPerFrame,
+                                                    numFrames);
             }
         }
-        break;
+            break;
         case AAUDIO_FORMAT_PCM_FLOAT: {
             float *audioBuffer = (float *) audioData;
-            // Render sine waves as floats to first channel.
-            sineData->sineOsc1.render(&audioBuffer[0], samplesPerFrame, numFrames);
-            // Render sine waves to second channel if there is one.
-            if (samplesPerFrame > 1) {
-                sineData->sineOsc2.render(&audioBuffer[1], samplesPerFrame, numFrames);
+            for (int i = 0; i < numActiveOscilators; ++i) {
+                sineData->sineOscillators[i].render(&audioBuffer[i], samplesPerFrame,
+                                                    numFrames);
             }
         }
-        break;
+            break;
         default:
             return AAUDIO_CALLBACK_RESULT_STOP;
     }
 
+    sineData->callbackCount++;
+    sineData->framesTotal += numFrames;
     return AAUDIO_CALLBACK_RESULT_CONTINUE;
 }
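
The callback above renders one oscillator per channel by passing &audioBuffer[i] together with a stride equal to the channel count, so each oscillator only touches its own samples of the interleaved buffer. The following is a minimal standalone sketch of that stride pattern; the sine math is illustrative only and is not the actual SineGenerator implementation.

    #include <cmath>
    #include <cstdint>
    #include <vector>

    // Write a sine wave into one channel of an interleaved buffer.
    // channelStride is the distance between successive frames of this channel,
    // which for interleaved audio equals the channel count.
    static void renderChannel(float *buffer, int32_t channelStride, int32_t numFrames,
                              double frequency, double sampleRate, double *phase) {
        const double twoPi = 6.283185307179586;
        int sampleIndex = 0;
        for (int i = 0; i < numFrames; i++) {
            buffer[sampleIndex] = (float) (0.2 * std::sin(*phase));
            sampleIndex += channelStride;          // skip over the other channels
            *phase += twoPi * frequency / sampleRate;
            if (*phase > twoPi) *phase -= twoPi;
        }
    }

    int main() {
        const int32_t channelCount = 2;
        const int32_t numFrames = 256;
        std::vector<float> interleaved(channelCount * numFrames);
        double phases[2] = {0.0, 0.0};
        for (int ch = 0; ch < channelCount; ++ch) {
            // &interleaved[ch] points at this channel's first sample.
            renderChannel(&interleaved[ch], channelCount, numFrames,
                          440.0 * (ch + 1), 48000.0, &phases[ch]);
        }
        return 0;
    }
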
 
diff --git a/media/libaaudio/examples/utils/AAudioSimpleRecorder.h b/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
index 1344273..869fad0 100644
--- a/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
+++ b/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
@@ -178,7 +178,6 @@
 
     aaudio_result_t close() {
         if (mStream != nullptr) {
-            printf("call AAudioStream_close(%p)\n", mStream);  fflush(stdout);
             AAudioStream_close(mStream);
             mStream = nullptr;
         }
diff --git a/media/libaaudio/examples/utils/SineGenerator.h b/media/libaaudio/examples/utils/SineGenerator.h
index a755582..9e6d46d 100644
--- a/media/libaaudio/examples/utils/SineGenerator.h
+++ b/media/libaaudio/examples/utils/SineGenerator.h
@@ -31,20 +31,20 @@
     }
 
     void setSweep(double frequencyLow, double frequencyHigh, double seconds) {
-        mPhaseIncrementLow = frequencyLow * M_PI * 2 / mFrameRate;
-        mPhaseIncrementHigh = frequencyHigh * M_PI * 2 / mFrameRate;
-
-        double numFrames = seconds * mFrameRate;
-        mUpScaler = pow((frequencyHigh / frequencyLow), (1.0 / numFrames));
-        mDownScaler = 1.0 / mUpScaler;
-        mGoingUp = true;
-        mSweeping = true;
+        mSweeping = seconds > 0.0;
+        if (mSweeping) {
+            mPhaseIncrementLow = frequencyLow * M_PI * 2 / mFrameRate;
+            mPhaseIncrementHigh = frequencyHigh * M_PI * 2 / mFrameRate;
+            double numFrames = seconds * mFrameRate;
+            mUpScaler = pow((frequencyHigh / frequencyLow), (1.0 / numFrames));
+            mDownScaler = 1.0 / mUpScaler;
+        }
     }
 
     void render(int16_t *buffer, int32_t channelStride, int32_t numFrames) {
         int sampleIndex = 0;
         for (int i = 0; i < numFrames; i++) {
-            buffer[sampleIndex] = (int16_t) (32767 * sin(mPhase) * mAmplitude);
+            buffer[sampleIndex] = (int16_t) (INT16_MAX * sin(mPhase) * mAmplitude);
             sampleIndex += channelStride;
             advancePhase();
         }
@@ -61,6 +61,7 @@
     void setAmplitude(double amplitude) {
         mAmplitude = amplitude;
     }
+
     double getAmplitude() const {
         return mAmplitude;
     }
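
The rewritten setSweep() treats a non-positive duration as "no sweep" and otherwise derives a per-frame multiplier, upScaler = pow(frequencyHigh / frequencyLow, 1.0 / numFrames), so that scaling the phase increment once per frame moves the frequency exponentially from the low to the high value in exactly the requested number of seconds. A small self-contained check of that arithmetic, with illustrative values:

    #include <cmath>
    #include <cstdio>

    int main() {
        const double sampleRate = 48000.0;
        const double freqLow = 300.0;
        const double freqHigh = 600.0;
        const double seconds = 5.0;
        const double twoPi = 6.283185307179586;

        const double numFrames = seconds * sampleRate;
        // upScaler^numFrames == freqHigh / freqLow, so applying it every frame
        // sweeps the frequency from freqLow to freqHigh over `seconds`.
        const double upScaler = std::pow(freqHigh / freqLow, 1.0 / numFrames);

        double phaseIncrement = freqLow * twoPi / sampleRate;
        for (long i = 0; i < (long) numFrames; i++) {
            phaseIncrement *= upScaler;
        }
        // Should print approximately 600 Hz.
        printf("frequency after sweep = %.2f Hz\n", phaseIncrement * sampleRate / twoPi);
        return 0;
    }
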
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
new file mode 100644
index 0000000..aa25e67
--- /dev/null
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -0,0 +1,15 @@
+cc_test {
+    name: "write_sine",
+    srcs: ["src/write_sine.cpp"],
+    cflags: ["-Wall", "-Werror"],
+    shared_libs: ["libaaudio"],
+    header_libs: ["libaaudio_example_utils"],
+}
+
+cc_test {
+    name: "write_sine_callback",
+    srcs: ["src/write_sine_callback.cpp"],
+    cflags: ["-Wall", "-Werror"],
+    shared_libs: ["libaaudio"],
+    header_libs: ["libaaudio_example_utils"],
+}
diff --git a/media/libaaudio/examples/write_sine/src/write_sine.cpp b/media/libaaudio/examples/write_sine/src/write_sine.cpp
index 8c6f783..38e1e4c 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine.cpp
@@ -47,6 +47,7 @@
     int32_t  framesToPlay = 0;
     int32_t  framesLeft = 0;
     int32_t  xRunCount = 0;
+    int      numActiveOscilators = 0;
     float   *floatData = nullptr;
     int16_t *shortData = nullptr;
 
@@ -76,8 +77,8 @@
     actualSampleRate = AAudioStream_getSampleRate(aaudioStream);
     actualDataFormat = AAudioStream_getFormat(aaudioStream);
 
-    myData.sineOsc1.setup(440.0, actualSampleRate);
-    myData.sineOsc2.setup(660.0, actualSampleRate);
+    myData.sampleRate = actualSampleRate;
+    myData.setupSineSweeps();
 
     // Some DMA might use very short bursts of 16 frames. We don't need to write such small
     // buffers. But it helps to use a multiple of the burst size for predictable scheduling.
@@ -116,19 +117,18 @@
     // Play for a while.
     framesToPlay = actualSampleRate * argParser.getDurationSeconds();
     framesLeft = framesToPlay;
+    numActiveOscilators = (actualChannelCount > MAX_CHANNELS) ? MAX_CHANNELS : actualChannelCount;
     while (framesLeft > 0) {
-
+        // Render as FLOAT or PCM
         if (actualDataFormat == AAUDIO_FORMAT_PCM_FLOAT) {
-            // Render sine waves to left and right channels.
-            myData.sineOsc1.render(&floatData[0], actualChannelCount, framesPerWrite);
-            if (actualChannelCount > 1) {
-                myData.sineOsc2.render(&floatData[1], actualChannelCount, framesPerWrite);
+            for (int i = 0; i < numActiveOscilators; ++i) {
+                myData.sineOscillators[i].render(&floatData[i], actualChannelCount,
+                                                  framesPerWrite);
             }
         } else if (actualDataFormat == AAUDIO_FORMAT_PCM_I16) {
-            // Render sine waves to left and right channels.
-            myData.sineOsc1.render(&shortData[0], actualChannelCount, framesPerWrite);
-            if (actualChannelCount > 1) {
-                myData.sineOsc2.render(&shortData[1], actualChannelCount, framesPerWrite);
+            for (int i = 0; i < numActiveOscilators; ++i) {
+                myData.sineOscillators[i].render(&shortData[i], actualChannelCount,
+                                                  framesPerWrite);
             }
         }
 
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index 4f9cde6..5d41fd0 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -28,7 +28,7 @@
 #include <aaudio/AAudio.h>
 #include "AAudioExampleUtils.h"
 #include "AAudioSimplePlayer.h"
-#include "../../utils/AAudioSimplePlayer.h"
+#include "AAudioArgsParser.h"
 
 /**
  * Open stream, play some sine waves, then close the stream.
@@ -36,37 +36,39 @@
  * @param argParser
  * @return AAUDIO_OK or negative error code
  */
-static aaudio_result_t testOpenPlayClose(AAudioArgsParser &argParser)
+static aaudio_result_t testOpenPlayClose(AAudioArgsParser &argParser,
+                                         int32_t loopCount,
+                                         int32_t prefixToneMsec,
+                                         bool forceUnderruns)
 {
     SineThreadedData_t myData;
     AAudioSimplePlayer &player = myData.simplePlayer;
     aaudio_result_t    result = AAUDIO_OK;
     bool               disconnected = false;
+    bool               bailOut = false;
     int64_t            startedAtNanos;
 
     printf("----------------------- run complete test --------------------------\n");
     myData.schedulerChecked = false;
     myData.callbackCount = 0;
-    myData.forceUnderruns = false; // set true to test AAudioStream_getXRunCount()
+    myData.forceUnderruns = forceUnderruns; // test AAudioStream_getXRunCount()
 
     result = player.open(argParser,
                          SimplePlayerDataCallbackProc, SimplePlayerErrorCallbackProc, &myData);
     if (result != AAUDIO_OK) {
-        fprintf(stderr, "ERROR -  player.open() returned %d\n", result);
+        fprintf(stderr, "ERROR -  player.open() returned %s\n",
+                AAudio_convertResultToText(result));
         goto error;
     }
 
     argParser.compareWithStream(player.getStream());
 
-    // Setup sine wave generators.
-    {
-        int32_t actualSampleRate = player.getSampleRate();
-        myData.sineOsc1.setup(440.0, actualSampleRate);
-        myData.sineOsc1.setSweep(300.0, 600.0, 5.0);
-        myData.sineOsc1.setAmplitude(0.2);
-        myData.sineOsc2.setup(660.0, actualSampleRate);
-        myData.sineOsc2.setSweep(350.0, 900.0, 7.0);
-        myData.sineOsc2.setAmplitude(0.2);
+    myData.sampleRate = player.getSampleRate();
+    myData.prefixToneFrames = prefixToneMsec * myData.sampleRate / 1000;
+    if (myData.prefixToneFrames > 0) {
+        myData.setupSineBlip();
+    } else {
+        myData.setupSineSweeps();
     }
 
 #if 0
@@ -78,42 +80,93 @@
     }
 #endif
 
-    result = player.start();
-    if (result != AAUDIO_OK) {
-        fprintf(stderr, "ERROR - player.start() returned %d\n", result);
-        goto error;
-    }
+    for (int loopIndex = 0; loopIndex < loopCount; loopIndex++) {
+        // Only play data on every other loop so we can hear if there is stale data.
+        double amplitude;
+        int32_t durationSeconds;
+        if ((loopIndex & 1) == 0) {
+            printf("--------------- SINE ------\n");
+            amplitude = 0.2;
+            durationSeconds = argParser.getDurationSeconds();
+        } else {
+            printf("--------------- QUIET -----\n");
+            amplitude = 0.0;
+            durationSeconds = 2; // just wait briefly when quiet
+        }
+        for (int i = 0; i < MAX_CHANNELS; ++i) {
+            myData.sineOscillators[i].setAmplitude(amplitude);
+        }
 
-    // Play a sine wave in the background.
-    printf("Sleep for %d seconds while audio plays in a callback thread.\n",
-           argParser.getDurationSeconds());
-    startedAtNanos = getNanoseconds(CLOCK_MONOTONIC);
-    for (int second = 0; second < argParser.getDurationSeconds(); second++)
-    {
-        // Sleep a while. Wake up early if there is an error, for example a DISCONNECT.
-        long ret = myData.waker.wait(AAUDIO_OK, NANOS_PER_SECOND);
-        int64_t millis = (getNanoseconds(CLOCK_MONOTONIC) - startedAtNanos) / NANOS_PER_MILLISECOND;
-        result = myData.waker.get();
-        printf("wait() returns %ld, aaudio_result = %d, at %6d millis"
-               ", second = %d, framesWritten = %8d, underruns = %d\n",
-               ret, result, (int) millis,
-               second,
-               (int) AAudioStream_getFramesWritten(player.getStream()),
-               (int) AAudioStream_getXRunCount(player.getStream()));
+        result = player.start();
         if (result != AAUDIO_OK) {
-            if (result == AAUDIO_ERROR_DISCONNECTED) {
-                disconnected = true;
+            fprintf(stderr, "ERROR - player.start() returned %d\n", result);
+            goto error;
+        }
+
+        // Play a sine wave in the background.
+        printf("Sleep for %d seconds while audio plays in a callback thread. %d of %d\n",
+               argParser.getDurationSeconds(), (loopIndex + 1), loopCount);
+        startedAtNanos = getNanoseconds(CLOCK_MONOTONIC);
+        for (int second = 0; second < durationSeconds; second++) {
+            // Sleep a while. Wake up early if there is an error, for example a DISCONNECT.
+            long ret = myData.waker.wait(AAUDIO_OK, NANOS_PER_SECOND);
+            int64_t millis =
+                    (getNanoseconds(CLOCK_MONOTONIC) - startedAtNanos) / NANOS_PER_MILLISECOND;
+            result = myData.waker.get();
+            printf("wait() returns %ld, aaudio_result = %d, at %6d millis"
+                           ", second = %3d, framesWritten = %8d, underruns = %d\n",
+                   ret, result, (int) millis,
+                   second,
+                   (int) AAudioStream_getFramesWritten(player.getStream()),
+                   (int) AAudioStream_getXRunCount(player.getStream()));
+            if (result != AAUDIO_OK) {
+                disconnected = (result == AAUDIO_ERROR_DISCONNECTED);
+                bailOut = true;
+                break;
             }
+        }
+        printf("AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
+
+        // Alternate between using stop or pause for each sine/quiet pair.
+        // Repeat this pattern: {sine-stop-quiet-stop-sine-pause-quiet-pause}
+        if ((loopIndex & 2) == 0) {
+            printf("STOP, callback # = %d\n", myData.callbackCount);
+            result = player.stop();
+        } else {
+            printf("PAUSE/FLUSH, callback # = %d\n", myData.callbackCount);
+            result = player.pause();
+            if (result != AAUDIO_OK) {
+                goto error;
+            }
+            result = player.flush();
+        }
+        if (result != AAUDIO_OK) {
+            goto error;
+        }
+
+        if (bailOut) {
             break;
         }
-    }
-    printf("AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
 
-    printf("call stop() callback # = %d\n", myData.callbackCount);
-    result = player.stop();
-    if (result != AAUDIO_OK) {
-        goto error;
+        {
+            aaudio_stream_state_t state = AAudioStream_getState(player.getStream());
+            aaudio_stream_state_t finalState = AAUDIO_STREAM_STATE_UNINITIALIZED;
+            int64_t timeoutNanos = 2000 * NANOS_PER_MILLISECOND;
+            result = AAudioStream_waitForStateChange(player.getStream(), state,
+                                                     &finalState, timeoutNanos);
+            printf("waitForStateChange returns %s, state = %s\n",
+                   AAudio_convertResultToText(result),
+                   AAudio_convertStreamStateToText(finalState));
+            int64_t written = AAudioStream_getFramesWritten(player.getStream());
+            int64_t read = AAudioStream_getFramesRead(player.getStream());
+            printf("   framesWritten = %lld, framesRead = %lld, diff = %d\n",
+                   (long long) written,
+                   (long long) read,
+                   (int) (written - read));
+        }
+
     }
+
     printf("call close()\n");
     result = player.close();
     if (result != AAUDIO_OK) {
@@ -147,23 +200,59 @@
     return disconnected ? AAUDIO_ERROR_DISCONNECTED : result;
 }
 
+static void usage() {
+    AAudioArgsParser::usage();
+    printf("      -l{count} loopCount start/stop, every other one is silent\n");
+    printf("      -t{msec}  play a high pitched tone at the beginning\n");
+    printf("      -u        force periodic Underruns by sleeping in callback\n");
+}
+
 int main(int argc, const char **argv)
 {
     AAudioArgsParser   argParser;
     aaudio_result_t    result;
+    int32_t            loopCount = 1;
+    int32_t            prefixToneMsec = 0;
+    bool               forceUnderruns = false;
 
     // Make printf print immediately so that debug info is not stuck
     // in a buffer if we hang or crash.
     setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
 
-    printf("%s - Play a sine sweep using an AAudio callback V0.1.2\n", argv[0]);
+    printf("%s - Play a sine sweep using an AAudio callback V0.1.3\n", argv[0]);
 
-    if (argParser.parseArgs(argc, argv)) {
-        return EXIT_FAILURE;
+    for (int i = 1; i < argc; i++) {
+        const char *arg = argv[i];
+        if (argParser.parseArg(arg)) {
+            // Handle options that are not handled by the ArgParser
+            if (arg[0] == '-') {
+                char option = arg[1];
+                switch (option) {
+                    case 'l':
+                        loopCount = atoi(&arg[2]);
+                        break;
+                    case 't':
+                        prefixToneMsec = atoi(&arg[2]);
+                        break;
+                    case 'u':
+                        forceUnderruns = true;
+                        break;
+                    default:
+                        usage();
+                        exit(EXIT_FAILURE);
+                        break;
+                }
+            } else {
+                usage();
+                exit(EXIT_FAILURE);
+                break;
+            }
+        }
     }
 
     // Keep looping until we can complete the test without disconnecting.
-    while((result = testOpenPlayClose(argParser)) == AAUDIO_ERROR_DISCONNECTED);
+    while((result = testOpenPlayClose(argParser, loopCount, prefixToneMsec, forceUnderruns))
+            == AAUDIO_ERROR_DISCONNECTED);
 
     return (result) ? EXIT_FAILURE : EXIT_SUCCESS;
 }
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 3c23736..00c43dc 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -137,6 +137,149 @@
 };
 typedef int32_t aaudio_performance_mode_t;
 
+/**
+ * The USAGE attribute expresses "why" you are playing a sound, i.e. what the sound is used for.
+ * This information is used by certain platforms or routing policies
+ * to make more refined volume or routing decisions.
+ *
+ * Note that these match the equivalent values in AudioAttributes in the Android Java API.
+ */
+enum {
+    /**
+     * Use this for streaming media, music performance, video, podcasts, etcetera.
+     */
+    AAUDIO_USAGE_MEDIA = 1,
+
+    /**
+     * Use this for voice over IP, telephony, etcetera.
+     */
+    AAUDIO_USAGE_VOICE_COMMUNICATION = 2,
+
+    /**
+     * Use this for sounds associated with telephony such as busy tones, DTMF, etcetera.
+     */
+    AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING = 3,
+
+    /**
+     * Use this to demand the user's attention.
+     */
+    AAUDIO_USAGE_ALARM = 4,
+
+    /**
+     * Use this for notifying the user when a message has arrived or some
+     * other background event has occurred.
+     */
+    AAUDIO_USAGE_NOTIFICATION = 5,
+
+    /**
+     * Use this when the phone rings.
+     */
+    AAUDIO_USAGE_NOTIFICATION_RINGTONE = 6,
+
+    /**
+     * Use this to attract the user's attention when, for example, the battery is low.
+     */
+    AAUDIO_USAGE_NOTIFICATION_EVENT = 10,
+
+    /**
+     * Use this for screen readers, etcetera.
+     */
+    AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY = 11,
+
+    /**
+     * Use this for driving or navigation directions.
+     */
+    AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE = 12,
+
+    /**
+     * Use this for user interface sounds, beeps, etcetera.
+     */
+    AAUDIO_USAGE_ASSISTANCE_SONIFICATION = 13,
+
+    /**
+     * Use this for game audio and sound effects.
+     */
+    AAUDIO_USAGE_GAME = 14,
+
+    /**
+     * Use this for audio responses to user queries, audio instructions or help utterances.
+     */
+    AAUDIO_USAGE_ASSISTANT = 16
+};
+typedef int32_t aaudio_usage_t;
+
+/**
+ * The CONTENT_TYPE attribute describes "what" you are playing.
+ * It expresses the general category of the content. This information is optional,
+ * but when it is known (for instance {@link #AAUDIO_CONTENT_TYPE_MOVIE} for a
+ * movie streaming service or {@link #AAUDIO_CONTENT_TYPE_SPEECH} for
+ * an audio book application) the audio framework may use it to
+ * enforce audio focus.
+ *
+ * Note that these match the equivalent values in AudioAttributes in the Android Java API.
+ */
+enum {
+
+    /**
+     * Use this for spoken voice, audio books, etcetera.
+     */
+    AAUDIO_CONTENT_TYPE_SPEECH = 1,
+
+    /**
+     * Use this for pre-recorded or live music.
+     */
+    AAUDIO_CONTENT_TYPE_MUSIC = 2,
+
+    /**
+     * Use this for a movie or video soundtrack.
+     */
+    AAUDIO_CONTENT_TYPE_MOVIE = 3,
+
+    /**
+     * Use this for a sound that is designed to accompany a user action,
+     * such as a click or beep sound made when the user presses a button.
+     */
+    AAUDIO_CONTENT_TYPE_SONIFICATION = 4
+};
+typedef int32_t aaudio_content_type_t;
+
+/**
+ * Defines the audio source.
+ * An audio source defines both a default physical source of audio signal and a recording
+ * configuration.
+ *
+ * Note that these match the equivalent values in MediaRecorder.AudioSource in the Android Java API.
+ */
+enum {
+    /**
+     * Use this preset when other presets do not apply.
+     */
+    AAUDIO_INPUT_PRESET_GENERIC = 1,
+
+    /**
+     * Use this preset when recording video.
+     */
+    AAUDIO_INPUT_PRESET_CAMCORDER = 5,
+
+    /**
+     * Use this preset when doing speech recognition.
+     */
+    AAUDIO_INPUT_PRESET_VOICE_RECOGNITION = 6,
+
+    /**
+     * Use this preset when doing telephony or voice messaging.
+     */
+    AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION = 7,
+
+    /**
+     * Use this preset to obtain an input with no effects.
+     * Note that this input will not have automatic gain control
+     * so the recorded volume may be very low.
+     */
+    AAUDIO_INPUT_PRESET_UNPROCESSED = 9,
+};
+typedef int32_t aaudio_input_preset_t;
+
 typedef struct AAudioStreamStruct         AAudioStream;
 typedef struct AAudioStreamBuilderStruct  AAudioStreamBuilder;
 
@@ -308,6 +451,52 @@
                                                 aaudio_performance_mode_t mode);
 
 /**
+ * Set the intended use case for the stream.
+ *
+ * The AAudio system will use this information to optimize the
+ * behavior of the stream.
+ * This could, for example, affect how volume and focus are handled for the stream.
+ *
+ * The default, if you do not call this function, is AAUDIO_USAGE_MEDIA.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param usage the desired usage, e.g. AAUDIO_USAGE_GAME
+ */
+AAUDIO_API void AAudioStreamBuilder_setUsage(AAudioStreamBuilder* builder,
+                                                       aaudio_usage_t usage);
+
+/**
+ * Set the type of audio data that the stream will carry.
+ *
+ * The AAudio system will use this information to optimize the
+ * behavior of the stream.
+ * This could, for example, affect whether a stream is paused when a notification occurs.
+ *
+ * The default, if you do not call this function, is AAUDIO_CONTENT_TYPE_MUSIC.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param contentType the type of audio data, e.g. AAUDIO_CONTENT_TYPE_SPEECH
+ */
+AAUDIO_API void AAudioStreamBuilder_setContentType(AAudioStreamBuilder* builder,
+                                             aaudio_content_type_t contentType);
+
+/**
+ * Set the input (capture) preset for the stream.
+ *
+ * The AAudio system will use this information to optimize the
+ * behavior of the stream.
+ * This could, for example, affect which microphones are used and how the
+ * recorded data is processed.
+ *
+ * The default, if you do not call this function, is AAUDIO_INPUT_PRESET_GENERIC.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param inputPreset the desired configuration for recording
+ */
+AAUDIO_API void AAudioStreamBuilder_setInputPreset(AAudioStreamBuilder* builder,
+                                                   aaudio_input_preset_t inputPreset);
+
+/**
  * Return one of these values from the data callback function.
  */
 enum {
@@ -820,6 +1009,30 @@
                                       int64_t *framePosition,
                                       int64_t *timeNanoseconds);
 
+/**
+ * Return the use case for the stream.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return usage, for example AAUDIO_USAGE_MEDIA
+ */
+AAUDIO_API aaudio_usage_t AAudioStream_getUsage(AAudioStream* stream);
+
+/**
+ * Return the content type for the stream.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return content type, for example AAUDIO_CONTENT_TYPE_MUSIC
+ */
+AAUDIO_API aaudio_content_type_t AAudioStream_getContentType(AAudioStream* stream);
+
+/**
+ * Return the input preset for the stream.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return input preset, for example AAUDIO_INPUT_PRESET_CAMCORDER
+ */
+AAUDIO_API aaudio_input_preset_t AAudioStream_getInputPreset(AAudioStream* stream);
+
 #ifdef __cplusplus
 }
 #endif
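
The new attribute setters are optional hints applied on the builder before the stream is opened, and the matching getters read back what the opened stream is using. A minimal sketch of the playback side, with most error handling elided; it only uses entry points that already exist in this API (AAudio_createStreamBuilder(), AAudioStreamBuilder_openStream(), AAudioStreamBuilder_delete()):

    #include <aaudio/AAudio.h>

    aaudio_result_t openGameStream(AAudioStream **streamOut) {
        AAudioStreamBuilder *builder = nullptr;
        aaudio_result_t result = AAudio_createStreamBuilder(&builder);
        if (result != AAUDIO_OK) return result;

        // Optional hints; the defaults are AAUDIO_USAGE_MEDIA and AAUDIO_CONTENT_TYPE_MUSIC.
        AAudioStreamBuilder_setUsage(builder, AAUDIO_USAGE_GAME);
        AAudioStreamBuilder_setContentType(builder, AAUDIO_CONTENT_TYPE_SONIFICATION);

        result = AAudioStreamBuilder_openStream(builder, streamOut);
        AAudioStreamBuilder_delete(builder);  // the builder is no longer needed once opened
        return result;
    }

For a capture stream, AAudioStreamBuilder_setInputPreset() plays the same role, for example AAUDIO_INPUT_PRESET_VOICE_RECOGNITION when feeding a speech recognizer.
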
diff --git a/media/libaaudio/libaaudio.map.txt b/media/libaaudio/libaaudio.map.txt
index 2ba5250..fca7b50 100644
--- a/media/libaaudio/libaaudio.map.txt
+++ b/media/libaaudio/libaaudio.map.txt
@@ -17,6 +17,9 @@
     AAudioStreamBuilder_setSharingMode;
     AAudioStreamBuilder_setDirection;
     AAudioStreamBuilder_setBufferCapacityInFrames;
+    AAudioStreamBuilder_setUsage;
+    AAudioStreamBuilder_setContentType;
+    AAudioStreamBuilder_setInputPreset;
     AAudioStreamBuilder_openStream;
     AAudioStreamBuilder_delete;
     AAudioStream_close;
@@ -42,6 +45,9 @@
     AAudioStream_getFormat;
     AAudioStream_getSharingMode;
     AAudioStream_getDirection;
+    AAudioStream_getUsage;
+    AAudioStream_getContentType;
+    AAudioStream_getInputPreset;
     AAudioStream_getFramesWritten;
     AAudioStream_getFramesRead;
     AAudioStream_getTimestamp;
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 07ee2de..dd620e3 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -15,7 +15,7 @@
  */
 
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "AAudioBinderClient"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -61,11 +61,11 @@
         , Singleton<AAudioBinderClient>() {
     gKeepBinderClient = this; // so this singleton won't get deleted
     mAAudioClient = new AAudioClient(this);
-    ALOGV("AAudioBinderClient() this = %p, created mAAudioClient = %p", this, mAAudioClient.get());
+    ALOGV("%s - this = %p, created mAAudioClient = %p", __func__, this, mAAudioClient.get());
 }
 
 AAudioBinderClient::~AAudioBinderClient() {
-    ALOGV("AAudioBinderClient()::~AAudioBinderClient() destroying %p", this);
+    ALOGV("%s - destroying %p", __func__, this);
     Mutex::Autolock _l(mServiceLock);
     if (mAAudioService != 0) {
         IInterface::asBinder(mAAudioService)->unlinkToDeath(mAAudioClient);
@@ -137,7 +137,7 @@
         stream = service->openStream(request, configurationOutput);
 
         if (stream == AAUDIO_ERROR_NO_SERVICE) {
-            ALOGE("AAudioBinderClient::openStream lost connection to AAudioService.");
+            ALOGE("openStream lost connection to AAudioService.");
             dropAAudioService(); // force a reconnect
         } else {
             break;
diff --git a/media/libaaudio/src/binding/AAudioServiceMessage.h b/media/libaaudio/src/binding/AAudioServiceMessage.h
index 54e8001..9779f24 100644
--- a/media/libaaudio/src/binding/AAudioServiceMessage.h
+++ b/media/libaaudio/src/binding/AAudioServiceMessage.h
@@ -38,13 +38,16 @@
     AAUDIO_SERVICE_EVENT_FLUSHED,
     AAUDIO_SERVICE_EVENT_CLOSED,
     AAUDIO_SERVICE_EVENT_DISCONNECTED,
-    AAUDIO_SERVICE_EVENT_VOLUME
+    AAUDIO_SERVICE_EVENT_VOLUME,
+    AAUDIO_SERVICE_EVENT_XRUN
 } aaudio_service_event_t;
 
 struct AAudioMessageEvent {
     aaudio_service_event_t event;
-    double                 dataDouble;
-    int64_t                dataLong;
+    union {
+        double  dataDouble;
+        int64_t dataLong;
+    };
 };
 
 typedef struct AAudioServiceMessage_s {
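
Turning dataDouble and dataLong into an anonymous union works because each event type carries only one of the two payloads: AAUDIO_SERVICE_EVENT_VOLUME stores a double and the new AAUDIO_SERVICE_EVENT_XRUN stores an int64_t counter, as the client-side handling later in this change shows. A small self-contained illustration of that pattern, using local stand-in types rather than the real header:

    #include <cstdint>
    #include <cstdio>

    // Stand-in types for illustration; the real definitions are in AAudioServiceMessage.h.
    enum demo_event_t { DEMO_EVENT_VOLUME, DEMO_EVENT_XRUN };

    struct DemoMessageEvent {
        demo_event_t event;
        union {                 // only one payload is meaningful for a given event type
            double  dataDouble; // used by VOLUME
            int64_t dataLong;   // used by XRUN
        };
    };

    int main() {
        DemoMessageEvent msg;
        msg.event = DEMO_EVENT_XRUN;
        msg.dataLong = 3;       // e.g. three underruns reported by the service

        switch (msg.event) {
            case DEMO_EVENT_VOLUME:
                printf("volume = %f\n", msg.dataDouble);
                break;
            case DEMO_EVENT_XRUN:
                printf("xRunCount = %d\n", (int) msg.dataLong);
                break;
        }
        return 0;
    }
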
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index 153fce3..97672a0 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -50,6 +50,12 @@
     if (status != NO_ERROR) goto error;
     status = parcel->writeInt32(getBufferCapacity());
     if (status != NO_ERROR) goto error;
+    status = parcel->writeInt32((int32_t) getUsage());
+    if (status != NO_ERROR) goto error;
+    status = parcel->writeInt32((int32_t) getContentType());
+    if (status != NO_ERROR) goto error;
+    status = parcel->writeInt32((int32_t) getInputPreset());
+    if (status != NO_ERROR) goto error;
     return NO_ERROR;
 error:
     ALOGE("AAudioStreamConfiguration.writeToParcel(): write failed = %d", status);
@@ -69,16 +75,25 @@
     setSamplesPerFrame(value);
     status = parcel->readInt32(&value);
     if (status != NO_ERROR) goto error;
-    setSharingMode(value);
+    setSharingMode((aaudio_sharing_mode_t) value);
     status = parcel->readInt32(&value);
     if (status != NO_ERROR) goto error;
-    setFormat(value);
+    setFormat((aaudio_format_t) value);
     status = parcel->readInt32(&value);
     if (status != NO_ERROR) goto error;
     setDirection((aaudio_direction_t) value);
     status = parcel->readInt32(&value);
     if (status != NO_ERROR) goto error;
     setBufferCapacity(value);
+    status = parcel->readInt32(&value);
+    if (status != NO_ERROR) goto error;
+    setUsage((aaudio_usage_t) value);
+    status = parcel->readInt32(&value);
+    if (status != NO_ERROR) goto error;
+    setContentType((aaudio_content_type_t) value);
+    status = parcel->readInt32(&value);
+    if (status != NO_ERROR) goto error;
+    setInputPreset((aaudio_input_preset_t) value);
     return NO_ERROR;
 error:
     ALOGE("AAudioStreamConfiguration.readFromParcel(): read failed = %d", status);
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.cpp b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
index 1200ab2..c30c5b9 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "AAudioStreamRequest"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -58,7 +58,7 @@
     return NO_ERROR;
 
 error:
-    ALOGE("AAudioStreamRequest.writeToParcel(): write failed = %d", status);
+    ALOGE("writeToParcel(): write failed = %d", status);
     return status;
 }
 
@@ -80,7 +80,7 @@
     return NO_ERROR;
 
 error:
-    ALOGE("AAudioStreamRequest.readFromParcel(): read failed = %d", status);
+    ALOGE("readFromParcel(): read failed = %d", status);
     return status;
 }
 
@@ -89,9 +89,9 @@
 }
 
 void AAudioStreamRequest::dump() const {
-    ALOGD("AAudioStreamRequest mUserId    = %d", mUserId);
-    ALOGD("AAudioStreamRequest mProcessId = %d", mProcessId);
-    ALOGD("AAudioStreamRequest mSharingModeMatchRequired = %d", mSharingModeMatchRequired);
-    ALOGD("AAudioStreamRequest mInService = %d", mInService);
+    ALOGD("mUserId    = %d", mUserId);
+    ALOGD("mProcessId = %d", mProcessId);
+    ALOGD("mSharingModeMatchRequired = %d", mSharingModeMatchRequired);
+    ALOGD("mInService = %d", mInService);
     mConfiguration.dump();
 }
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
index 1a97555..9eed96d 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "AudioEndpointParcelable"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -112,49 +112,49 @@
 aaudio_result_t AudioEndpointParcelable::validate() {
     aaudio_result_t result;
     if (mNumSharedMemories < 0 || mNumSharedMemories >= MAX_SHARED_MEMORIES) {
-        ALOGE("AudioEndpointParcelable invalid mNumSharedMemories = %d", mNumSharedMemories);
+        ALOGE("invalid mNumSharedMemories = %d", mNumSharedMemories);
         return AAUDIO_ERROR_INTERNAL;
     }
     for (int i = 0; i < mNumSharedMemories; i++) {
         result = mSharedMemories[i].validate();
         if (result != AAUDIO_OK) {
-            ALOGE("AudioEndpointParcelable invalid mSharedMemories[%d] = %d", i, result);
+            ALOGE("invalid mSharedMemories[%d] = %d", i, result);
             return result;
         }
     }
     if ((result = mUpMessageQueueParcelable.validate()) != AAUDIO_OK) {
-        ALOGE("AudioEndpointParcelable invalid mUpMessageQueueParcelable = %d", result);
+        ALOGE("invalid mUpMessageQueueParcelable = %d", result);
         return result;
     }
     if ((result = mDownMessageQueueParcelable.validate()) != AAUDIO_OK) {
-        ALOGE("AudioEndpointParcelable invalid mDownMessageQueueParcelable = %d", result);
+        ALOGE("invalid mDownMessageQueueParcelable = %d", result);
         return result;
     }
     if ((result = mUpDataQueueParcelable.validate()) != AAUDIO_OK) {
-        ALOGE("AudioEndpointParcelable invalid mUpDataQueueParcelable = %d", result);
+        ALOGE("invalid mUpDataQueueParcelable = %d", result);
         return result;
     }
     if ((result = mDownDataQueueParcelable.validate()) != AAUDIO_OK) {
-        ALOGE("AudioEndpointParcelable invalid mDownDataQueueParcelable = %d", result);
+        ALOGE("invalid mDownDataQueueParcelable = %d", result);
         return result;
     }
     return AAUDIO_OK;
 }
 
 void AudioEndpointParcelable::dump() {
-    ALOGD("AudioEndpointParcelable ======================================= BEGIN");
-    ALOGD("AudioEndpointParcelable mNumSharedMemories = %d", mNumSharedMemories);
+    ALOGD("======================================= BEGIN");
+    ALOGD("mNumSharedMemories = %d", mNumSharedMemories);
     for (int i = 0; i < mNumSharedMemories; i++) {
         mSharedMemories[i].dump();
     }
-    ALOGD("AudioEndpointParcelable mUpMessageQueueParcelable =========");
+    ALOGD("mUpMessageQueueParcelable =========");
     mUpMessageQueueParcelable.dump();
-    ALOGD("AudioEndpointParcelable mDownMessageQueueParcelable =======");
+    ALOGD("mDownMessageQueueParcelable =======");
     mDownMessageQueueParcelable.dump();
-    ALOGD("AudioEndpointParcelable mUpDataQueueParcelable ============");
+    ALOGD("mUpDataQueueParcelable ============");
     mUpDataQueueParcelable.dump();
-    ALOGD("AudioEndpointParcelable mDownDataQueueParcelable ==========");
+    ALOGD("mDownDataQueueParcelable ==========");
     mDownDataQueueParcelable.dump();
-    ALOGD("AudioEndpointParcelable ======================================= END");
+    ALOGD("======================================= END");
 }
 
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.cpp b/media/libaaudio/src/binding/RingBufferParcelable.cpp
index 6b74b21..2babbff 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.cpp
+++ b/media/libaaudio/src/binding/RingBufferParcelable.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "RingBufferParcelable"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -97,7 +97,7 @@
     }
     return NO_ERROR;
 error:
-    ALOGE("RingBufferParcelable::writeToParcel() error = %d", status);
+    ALOGE("writeToParcel() error = %d", status);
     return status;
 }
 
@@ -120,7 +120,7 @@
     }
     return NO_ERROR;
 error:
-    ALOGE("RingBufferParcelable::readFromParcel() error = %d", status);
+    ALOGE("readFromParcel() error = %d", status);
     return status;
 }
 
@@ -154,27 +154,27 @@
 aaudio_result_t RingBufferParcelable::validate() {
     aaudio_result_t result;
     if (mCapacityInFrames < 0 || mCapacityInFrames >= 32 * 1024) {
-        ALOGE("RingBufferParcelable invalid mCapacityInFrames = %d", mCapacityInFrames);
+        ALOGE("invalid mCapacityInFrames = %d", mCapacityInFrames);
         return AAUDIO_ERROR_INTERNAL;
     }
     if (mBytesPerFrame < 0 || mBytesPerFrame >= 256) {
-        ALOGE("RingBufferParcelable invalid mBytesPerFrame = %d", mBytesPerFrame);
+        ALOGE("invalid mBytesPerFrame = %d", mBytesPerFrame);
         return AAUDIO_ERROR_INTERNAL;
     }
     if (mFramesPerBurst < 0 || mFramesPerBurst >= 16 * 1024) {
-        ALOGE("RingBufferParcelable invalid mFramesPerBurst = %d", mFramesPerBurst);
+        ALOGE("invalid mFramesPerBurst = %d", mFramesPerBurst);
         return AAUDIO_ERROR_INTERNAL;
     }
     if ((result = mReadCounterParcelable.validate()) != AAUDIO_OK) {
-        ALOGE("RingBufferParcelable invalid mReadCounterParcelable = %d", result);
+        ALOGE("invalid mReadCounterParcelable = %d", result);
         return result;
     }
     if ((result = mWriteCounterParcelable.validate()) != AAUDIO_OK) {
-        ALOGE("RingBufferParcelable invalid mWriteCounterParcelable = %d", result);
+        ALOGE("invalid mWriteCounterParcelable = %d", result);
         return result;
     }
     if ((result = mDataParcelable.validate()) != AAUDIO_OK) {
-        ALOGE("RingBufferParcelable invalid mDataParcelable = %d", result);
+        ALOGE("invalid mDataParcelable = %d", result);
         return result;
     }
     return AAUDIO_OK;
@@ -182,11 +182,11 @@
 
 
 void RingBufferParcelable::dump() {
-    ALOGD("RingBufferParcelable mCapacityInFrames = %d ---------", mCapacityInFrames);
+    ALOGD("mCapacityInFrames = %d ---------", mCapacityInFrames);
     if (mCapacityInFrames > 0) {
-        ALOGD("RingBufferParcelable mBytesPerFrame = %d", mBytesPerFrame);
-        ALOGD("RingBufferParcelable mFramesPerBurst = %d", mFramesPerBurst);
-        ALOGD("RingBufferParcelable mFlags = %u", mFlags);
+        ALOGD("mBytesPerFrame = %d", mBytesPerFrame);
+        ALOGD("mFramesPerBurst = %d", mFramesPerBurst);
+        ALOGD("mFlags = %u", mFlags);
         mReadCounterParcelable.dump();
         mWriteCounterParcelable.dump();
         mDataParcelable.dump();
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index 90217ab..4e3e5d1 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "SharedMemoryParcelable"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -43,8 +43,7 @@
 
 void SharedMemoryParcelable::setup(const unique_fd& fd, int32_t sizeInBytes) {
     mFd.reset(dup(fd.get())); // store a duplicate fd
-    ALOGV("SharedMemoryParcelable::setup(%d -> %d, %d) this = %p\n",
-          fd.get(), mFd.get(), sizeInBytes, this);
+    ALOGV("setup(%d -> %d, %d) this = %p\n", fd.get(), mFd.get(), sizeInBytes, this);
     mSizeInBytes = sizeInBytes;
 }
 
@@ -52,7 +51,7 @@
     status_t status = parcel->writeInt32(mSizeInBytes);
     if (status != NO_ERROR) return status;
     if (mSizeInBytes > 0) {
-        ALOGV("SharedMemoryParcelable::writeToParcel() mFd = %d, this = %p\n", mFd.get(), this);
+        ALOGV("writeToParcel() mFd = %d, this = %p\n", mFd.get(), this);
         status = parcel->writeUniqueFileDescriptor(mFd);
         ALOGE_IF(status != NO_ERROR, "SharedMemoryParcelable writeDupFileDescriptor failed : %d",
                  status);
@@ -70,8 +69,7 @@
         unique_fd mmapFd;
         status = parcel->readUniqueFileDescriptor(&mmapFd);
         if (status != NO_ERROR) {
-            ALOGE("SharedMemoryParcelable::readFromParcel() readUniqueFileDescriptor() failed : %d",
-                  status);
+            ALOGE("readFromParcel() readUniqueFileDescriptor() failed : %d", status);
         } else {
             // Resolve the memory now while we still have the FD from the Parcel.
             // Closing the FD will not affect the shared memory once mmap() has been called.
@@ -85,7 +83,7 @@
     if (mResolvedAddress != MMAP_UNRESOLVED_ADDRESS) {
         int err = munmap(mResolvedAddress, mSizeInBytes);
         if (err < 0) {
-            ALOGE("SharedMemoryParcelable::close() munmap() failed %d", err);
+            ALOGE("close() munmap() failed %d", err);
             return AAudioConvert_androidToAAudioResult(err);
         }
         mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
@@ -97,8 +95,7 @@
     mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ | PROT_WRITE,
                                         MAP_SHARED, fd.get(), 0);
     if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
-        ALOGE("SharedMemoryParcelable mmap() failed for fd = %d, errno = %s",
-              fd.get(), strerror(errno));
+        ALOGE("mmap() failed for fd = %d, errno = %s", fd.get(), strerror(errno));
         return AAUDIO_ERROR_INTERNAL;
     }
     return AAUDIO_OK;
@@ -107,10 +104,10 @@
 aaudio_result_t SharedMemoryParcelable::resolve(int32_t offsetInBytes, int32_t sizeInBytes,
                                               void **regionAddressPtr) {
     if (offsetInBytes < 0) {
-        ALOGE("SharedMemoryParcelable illegal offsetInBytes = %d", offsetInBytes);
+        ALOGE("illegal offsetInBytes = %d", offsetInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     } else if ((offsetInBytes + sizeInBytes) > mSizeInBytes) {
-        ALOGE("SharedMemoryParcelable out of range, offsetInBytes = %d, "
+        ALOGE("out of range, offsetInBytes = %d, "
                       "sizeInBytes = %d, mSizeInBytes = %d",
               offsetInBytes, sizeInBytes, mSizeInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
@@ -122,16 +119,15 @@
         if (mFd.get() != -1) {
             result = resolveSharedMemory(mFd);
         } else {
-            ALOGE("SharedMemoryParcelable has no file descriptor for shared memory.");
+            ALOGE("has no file descriptor for shared memory.");
             result = AAUDIO_ERROR_INTERNAL;
         }
     }
 
     if (result == AAUDIO_OK && mResolvedAddress != MMAP_UNRESOLVED_ADDRESS) {
         *regionAddressPtr = mResolvedAddress + offsetInBytes;
-        ALOGV("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
-        ALOGV("SharedMemoryParcelable offset by %d, *regionAddressPtr = %p",
-              offsetInBytes, *regionAddressPtr);
+        ALOGV("mResolvedAddress = %p", mResolvedAddress);
+        ALOGV("offset by %d, *regionAddressPtr = %p", offsetInBytes, *regionAddressPtr);
     }
     return result;
 }
@@ -142,14 +138,14 @@
 
 aaudio_result_t SharedMemoryParcelable::validate() {
     if (mSizeInBytes < 0 || mSizeInBytes >= MAX_MMAP_SIZE_BYTES) {
-        ALOGE("SharedMemoryParcelable invalid mSizeInBytes = %d", mSizeInBytes);
+        ALOGE("invalid mSizeInBytes = %d", mSizeInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
     return AAUDIO_OK;
 }
 
 void SharedMemoryParcelable::dump() {
-    ALOGD("SharedMemoryParcelable mFd = %d", mFd.get());
-    ALOGD("SharedMemoryParcelable mSizeInBytes = %d", mSizeInBytes);
-    ALOGD("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
+    ALOGD("mFd = %d", mFd.get());
+    ALOGD("mSizeInBytes = %d", mSizeInBytes);
+    ALOGD("mResolvedAddress = %p", mResolvedAddress);
 }
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.cpp b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
index 7381dcb..7aa80bf 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "SharedRegionParcelable"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -71,7 +71,7 @@
         return AAUDIO_OK;
     }
     if (mSharedMemoryIndex < 0) {
-        ALOGE("SharedRegionParcelable invalid mSharedMemoryIndex = %d", mSharedMemoryIndex);
+        ALOGE("invalid mSharedMemoryIndex = %d", mSharedMemoryIndex);
         return AAUDIO_ERROR_INTERNAL;
     }
     SharedMemoryParcelable *memoryParcel = &memoryParcels[mSharedMemoryIndex];
@@ -80,16 +80,16 @@
 
 aaudio_result_t SharedRegionParcelable::validate() {
     if (mSizeInBytes < 0 || mSizeInBytes >= MAX_MMAP_SIZE_BYTES) {
-        ALOGE("SharedRegionParcelable invalid mSizeInBytes = %d", mSizeInBytes);
+        ALOGE("invalid mSizeInBytes = %d", mSizeInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
     if (mSizeInBytes > 0) {
         if (mOffsetInBytes < 0 || mOffsetInBytes >= MAX_MMAP_OFFSET_BYTES) {
-            ALOGE("SharedRegionParcelable invalid mOffsetInBytes = %d", mOffsetInBytes);
+            ALOGE("invalid mOffsetInBytes = %d", mOffsetInBytes);
             return AAUDIO_ERROR_OUT_OF_RANGE;
         }
         if (mSharedMemoryIndex < 0 || mSharedMemoryIndex >= MAX_SHARED_MEMORIES) {
-            ALOGE("SharedRegionParcelable invalid mSharedMemoryIndex = %d", mSharedMemoryIndex);
+            ALOGE("invalid mSharedMemoryIndex = %d", mSharedMemoryIndex);
             return AAUDIO_ERROR_INTERNAL;
         }
     }
@@ -97,9 +97,9 @@
 }
 
 void SharedRegionParcelable::dump() {
-    ALOGD("SharedRegionParcelable mSizeInBytes = %d -----", mSizeInBytes);
+    ALOGD("mSizeInBytes = %d -----", mSizeInBytes);
     if (mSizeInBytes > 0) {
-        ALOGD("SharedRegionParcelable mSharedMemoryIndex = %d", mSharedMemoryIndex);
-        ALOGD("SharedRegionParcelable mOffsetInBytes = %d", mOffsetInBytes);
+        ALOGD("mSharedMemoryIndex = %d", mSharedMemoryIndex);
+        ALOGD("mOffsetInBytes = %d", mOffsetInBytes);
     }
 }
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index 604eed5..f8e34d1 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "AudioEndpoint"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -45,6 +45,7 @@
     delete mUpCommandQueue;
 }
 
+// TODO Consider moving to a method in RingBufferDescriptor
 static aaudio_result_t AudioEndpoint_validateQueueDescriptor(const char *type,
                                                   const RingBufferDescriptor *descriptor) {
     if (descriptor == nullptr) {
@@ -127,19 +128,19 @@
     // ============================ up message queue =============================
     const RingBufferDescriptor *descriptor = &pEndpointDescriptor->upMessageQueueDescriptor;
     if(descriptor->bytesPerFrame != sizeof(AAudioServiceMessage)) {
-        ALOGE("AudioEndpoint.configure() bytesPerFrame != sizeof(AAudioServiceMessage) = %d",
+        ALOGE("configure() bytesPerFrame != sizeof(AAudioServiceMessage) = %d",
               descriptor->bytesPerFrame);
         return AAUDIO_ERROR_INTERNAL;
     }
 
     if(descriptor->readCounterAddress == nullptr || descriptor->writeCounterAddress == nullptr) {
-        ALOGE("AudioEndpoint.configure() NULL counter address");
+        ALOGE("configure() NULL counter address");
         return AAUDIO_ERROR_NULL;
     }
 
     // Prevent memory leak and reuse.
     if(mUpCommandQueue != nullptr || mDataQueue != nullptr) {
-        ALOGE("AudioEndpoint.configure() endpoint already used");
+        ALOGE("configure() endpoint already used");
         return AAUDIO_ERROR_INTERNAL;
     }
 
@@ -153,8 +154,8 @@
 
     // ============================ data queue =============================
     descriptor = &pEndpointDescriptor->dataQueueDescriptor;
-    ALOGV("AudioEndpoint.configure() data framesPerBurst = %d", descriptor->framesPerBurst);
-    ALOGV("AudioEndpoint.configure() data readCounterAddress = %p",
+    ALOGV("configure() data framesPerBurst = %d", descriptor->framesPerBurst);
+    ALOGV("configure() data readCounterAddress = %p",
           descriptor->readCounterAddress);
 
     // An example of free running is when the other side is read or written by hardware DMA
@@ -163,7 +164,7 @@
                              ? descriptor->readCounterAddress // read by other side
                              : descriptor->writeCounterAddress; // written by other side
     mFreeRunning = (remoteCounter == nullptr);
-    ALOGV("AudioEndpoint.configure() mFreeRunning = %d", mFreeRunning ? 1 : 0);
+    ALOGV("configure() mFreeRunning = %d", mFreeRunning ? 1 : 0);
 
     int64_t *readCounterAddress = (descriptor->readCounterAddress == nullptr)
                                   ? &mDataReadCounter
@@ -258,8 +259,8 @@
 }
 
 void AudioEndpoint::dump() const {
-    ALOGD("AudioEndpoint: data readCounter  = %lld", (long long) mDataQueue->getReadCounter());
-    ALOGD("AudioEndpoint: data writeCounter = %lld", (long long) mDataQueue->getWriteCounter());
+    ALOGD("data readCounter  = %lld", (long long) mDataQueue->getReadCounter());
+    ALOGD("data writeCounter = %lld", (long long) mDataQueue->getWriteCounter());
 }
 
 void AudioEndpoint::eraseDataMemory() {
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 2fdbfaf..6d5a64f 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -61,15 +61,14 @@
         , mClockModel()
         , mAudioEndpoint()
         , mServiceStreamHandle(AAUDIO_HANDLE_INVALID)
-        , mFramesPerBurst(16)
         , mInService(inService)
         , mServiceInterface(serviceInterface)
         , mAtomicTimestamp()
         , mWakeupDelayNanos(AAudioProperty_getWakeupDelayMicros() * AAUDIO_NANOS_PER_MICROSECOND)
         , mMinimumSleepNanos(AAudioProperty_getMinimumSleepMicros() * AAUDIO_NANOS_PER_MICROSECOND)
         {
-    ALOGD("AudioStreamInternal(): mWakeupDelayNanos = %d, mMinimumSleepNanos = %d",
-          mWakeupDelayNanos, mMinimumSleepNanos);
+    ALOGD("%s - mWakeupDelayNanos = %d, mMinimumSleepNanos = %d",
+          __func__, mWakeupDelayNanos, mMinimumSleepNanos);
 }
 
 AudioStreamInternal::~AudioStreamInternal() {
@@ -79,11 +78,12 @@
 
     aaudio_result_t result = AAUDIO_OK;
     int32_t capacity;
+    int32_t framesPerBurst;
     AAudioStreamRequest request;
     AAudioStreamConfiguration configurationOutput;
 
     if (getState() != AAUDIO_STREAM_STATE_UNINITIALIZED) {
-        ALOGE("AudioStreamInternal::open(): already open! state = %d", getState());
+        ALOGE("%s - already open! state = %d", __func__, getState());
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
@@ -117,7 +117,7 @@
     mServiceStreamHandle = mServiceInterface.openStream(request, configurationOutput);
     if (mServiceStreamHandle < 0) {
         result = mServiceStreamHandle;
-        ALOGE("AudioStreamInternal::open(): openStream() returned %d", result);
+        ALOGE("%s - openStream() returned %d", __func__, result);
         return result;
     }
 
@@ -151,17 +151,19 @@
         goto error;
     }
 
-    mFramesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
-    capacity = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
 
     // Validate result from server.
-    if (mFramesPerBurst < 16 || mFramesPerBurst > 16 * 1024) {
-        ALOGE("AudioStreamInternal::open(): framesPerBurst out of range = %d", mFramesPerBurst);
+    framesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+    if (framesPerBurst < MIN_FRAMES_PER_BURST || framesPerBurst > MAX_FRAMES_PER_BURST) {
+        ALOGE("%s - framesPerBurst out of range = %d", __func__, framesPerBurst);
         result = AAUDIO_ERROR_OUT_OF_RANGE;
         goto error;
     }
-    if (capacity < mFramesPerBurst || capacity > 32 * 1024) {
-        ALOGE("AudioStreamInternal::open(): bufferCapacity out of range = %d", capacity);
+    mFramesPerBurst = framesPerBurst; // only save good value
+
+    capacity = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
+    if (capacity < mFramesPerBurst || capacity > MAX_BUFFER_CAPACITY_IN_FRAMES) {
+        ALOGE("%s - bufferCapacity out of range = %d", __func__, capacity);
         result = AAUDIO_ERROR_OUT_OF_RANGE;
         goto error;
     }
@@ -169,16 +171,16 @@
     mClockModel.setSampleRate(getSampleRate());
     mClockModel.setFramesPerBurst(mFramesPerBurst);
 
-    if (getDataCallbackProc()) {
+    if (isDataCallbackSet()) {
         mCallbackFrames = builder.getFramesPerDataCallback();
         if (mCallbackFrames > getBufferCapacity() / 2) {
-            ALOGE("AudioStreamInternal::open(): framesPerCallback too big = %d, capacity = %d",
-                  mCallbackFrames, getBufferCapacity());
+            ALOGE("%s - framesPerCallback too big = %d, capacity = %d",
+                  __func__, mCallbackFrames, getBufferCapacity());
             result = AAUDIO_ERROR_OUT_OF_RANGE;
             goto error;
 
         } else if (mCallbackFrames < 0) {
-            ALOGE("AudioStreamInternal::open(): framesPerCallback negative");
+            ALOGE("%s - framesPerCallback negative", __func__);
             result = AAUDIO_ERROR_OUT_OF_RANGE;
             goto error;
 
@@ -240,7 +242,7 @@
 static void *aaudio_callback_thread_proc(void *context)
 {
     AudioStreamInternal *stream = (AudioStreamInternal *)context;
-    //LOGD("AudioStreamInternal(): oboe_callback_thread, stream = %p", stream);
+    //LOGD("oboe_callback_thread, stream = %p", stream);
     if (stream != NULL) {
         return stream->callbackLoop();
     } else {
@@ -288,7 +290,7 @@
     mNeedCatchUp.request();  // Ask data processing code to catch up when first timestamp received.
 
     // Start data callback thread.
-    if (result == AAUDIO_OK && getDataCallbackProc() != nullptr) {
+    if (result == AAUDIO_OK && isDataCallbackSet()) {
         // Launch the callback loop thread.
         int64_t periodNanos = mCallbackFrames
                               * AAUDIO_NANOS_PER_SECOND
@@ -448,32 +450,32 @@
     aaudio_result_t result = AAUDIO_OK;
     switch (message->event.event) {
         case AAUDIO_SERVICE_EVENT_STARTED:
-            ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_STARTED");
+            ALOGD("%s - got AAUDIO_SERVICE_EVENT_STARTED", __func__);
             if (getState() == AAUDIO_STREAM_STATE_STARTING) {
                 setState(AAUDIO_STREAM_STATE_STARTED);
             }
             break;
         case AAUDIO_SERVICE_EVENT_PAUSED:
-            ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_PAUSED");
+            ALOGD("%s - got AAUDIO_SERVICE_EVENT_PAUSED", __func__);
             if (getState() == AAUDIO_STREAM_STATE_PAUSING) {
                 setState(AAUDIO_STREAM_STATE_PAUSED);
             }
             break;
         case AAUDIO_SERVICE_EVENT_STOPPED:
-            ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_STOPPED");
+            ALOGD("%s - got AAUDIO_SERVICE_EVENT_STOPPED", __func__);
             if (getState() == AAUDIO_STREAM_STATE_STOPPING) {
                 setState(AAUDIO_STREAM_STATE_STOPPED);
             }
             break;
         case AAUDIO_SERVICE_EVENT_FLUSHED:
-            ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_FLUSHED");
+            ALOGD("%s - got AAUDIO_SERVICE_EVENT_FLUSHED", __func__);
             if (getState() == AAUDIO_STREAM_STATE_FLUSHING) {
                 setState(AAUDIO_STREAM_STATE_FLUSHED);
                 onFlushFromServer();
             }
             break;
         case AAUDIO_SERVICE_EVENT_CLOSED:
-            ALOGD("AudioStreamInternal::onEventFromServer() got AAUDIO_SERVICE_EVENT_CLOSED");
+            ALOGD("%s - got AAUDIO_SERVICE_EVENT_CLOSED", __func__);
             setState(AAUDIO_STREAM_STATE_CLOSED);
             break;
         case AAUDIO_SERVICE_EVENT_DISCONNECTED:
@@ -483,18 +485,18 @@
             }
             result = AAUDIO_ERROR_DISCONNECTED;
             setState(AAUDIO_STREAM_STATE_DISCONNECTED);
-            ALOGW("WARNING - AudioStreamInternal::onEventFromServer()"
-                          " AAUDIO_SERVICE_EVENT_DISCONNECTED - FIFO cleared");
+            ALOGW("%s - AAUDIO_SERVICE_EVENT_DISCONNECTED - FIFO cleared", __func__);
             break;
         case AAUDIO_SERVICE_EVENT_VOLUME:
             mStreamVolume = (float)message->event.dataDouble;
             doSetVolume();
-            ALOGD("AudioStreamInternal::onEventFromServer() AAUDIO_SERVICE_EVENT_VOLUME %lf",
-                     message->event.dataDouble);
+            ALOGD("%s - AAUDIO_SERVICE_EVENT_VOLUME %lf", __func__, message->event.dataDouble);
+            break;
+        case AAUDIO_SERVICE_EVENT_XRUN:
+            mXRunCount = static_cast<int32_t>(message->event.dataLong);
             break;
         default:
-            ALOGW("WARNING - AudioStreamInternal::onEventFromServer() Unrecognized event = %d",
-                 (int) message->event.event);
+            ALOGE("%s - Unrecognized event = %d", __func__, (int) message->event.event);
             break;
     }
     return result;
@@ -519,8 +521,7 @@
                 break;
 
             default:
-                ALOGE("WARNING - drainTimestampsFromService() Unrecognized what = %d",
-                      (int) message.what);
+                ALOGE("%s - unrecognized message.what = %d", __func__, (int) message.what);
                 result = AAUDIO_ERROR_INTERNAL;
                 break;
         }
@@ -533,7 +534,6 @@
     aaudio_result_t result = AAUDIO_OK;
 
     while (result == AAUDIO_OK) {
-        //ALOGD("AudioStreamInternal::processCommands() - looping, %d", result);
         AAudioServiceMessage message;
         if (mAudioEndpoint.readUpCommand(&message) != 1) {
             break; // no command this time, no problem
@@ -552,8 +552,7 @@
             break;
 
         default:
-            ALOGE("WARNING - processCommands() Unrecognized what = %d",
-                 (int) message.what);
+            ALOGE("%s - unrecognized message.what = %d", __func__, (int) message.what);
             result = AAUDIO_ERROR_INTERNAL;
             break;
         }
@@ -614,13 +613,13 @@
             if (wakeTimeNanos > deadlineNanos) {
                 // If we time out, just return the framesWritten so far.
                 // TODO remove after we fix the deadline bug
-                ALOGW("AudioStreamInternal::processData(): entered at %lld nanos, currently %lld",
+                ALOGW("processData(): entered at %lld nanos, currently %lld",
                       (long long) entryTimeNanos, (long long) currentTimeNanos);
-                ALOGW("AudioStreamInternal::processData(): TIMEOUT after %lld nanos",
+                ALOGW("processData(): TIMEOUT after %lld nanos",
                       (long long) timeoutNanoseconds);
-                ALOGW("AudioStreamInternal::processData(): wakeTime = %lld, deadline = %lld nanos",
+                ALOGW("processData(): wakeTime = %lld, deadline = %lld nanos",
                       (long long) wakeTimeNanos, (long long) deadlineNanos);
-                ALOGW("AudioStreamInternal::processData(): past deadline by %d micros",
+                ALOGW("processData(): past deadline by %d micros",
                       (int)((wakeTimeNanos - deadlineNanos) / AAUDIO_NANOS_PER_MICROSECOND));
                 mClockModel.dump();
                 mAudioEndpoint.dump();
@@ -655,14 +654,29 @@
 }
 
 aaudio_result_t AudioStreamInternal::setBufferSize(int32_t requestedFrames) {
+    int32_t adjustedFrames = requestedFrames;
     int32_t actualFrames = 0;
-    // Round to the next highest burst size.
-    if (getFramesPerBurst() > 0) {
-        int32_t numBursts = (requestedFrames + getFramesPerBurst() - 1) / getFramesPerBurst();
-        requestedFrames = numBursts * getFramesPerBurst();
+    int32_t maximumSize = getBufferCapacity();
+
+    // Clip to minimum size so that rounding up will work better.
+    if (adjustedFrames < 1) {
+        adjustedFrames = 1;
     }
 
-    aaudio_result_t result = mAudioEndpoint.setBufferSizeInFrames(requestedFrames, &actualFrames);
+    if (adjustedFrames > maximumSize) {
+        // Clip to maximum size.
+        adjustedFrames = maximumSize;
+    } else {
+        // Round to the next highest burst size.
+        int32_t numBursts = (adjustedFrames + mFramesPerBurst - 1) / mFramesPerBurst;
+        adjustedFrames = numBursts * mFramesPerBurst;
+        // Rounding may have gone above maximum.
+        if (adjustedFrames > maximumSize) {
+            adjustedFrames = maximumSize;
+        }
+    }
+
+    aaudio_result_t result = mAudioEndpoint.setBufferSizeInFrames(adjustedFrames, &actualFrames);
     ALOGD("setBufferSize() req = %d => %d", requestedFrames, actualFrames);
     if (result < 0) {
         return result;
@@ -680,7 +694,7 @@
 }
 
 int32_t AudioStreamInternal::getFramesPerBurst() const {
-    return mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+    return mFramesPerBurst;
 }
 
 aaudio_result_t AudioStreamInternal::joinThread(void** returnArg) {
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 47024c0..117756d 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -34,6 +34,12 @@
 
 namespace aaudio {
 
+    // These are intended to be outside the range of what is normally encountered.
+    // TODO MAXes should probably be much bigger.
+    constexpr int32_t MIN_FRAMES_PER_BURST = 16; // arbitrary
+    constexpr int32_t MAX_FRAMES_PER_BURST = 16 * 1024;  // arbitrary
+    constexpr int32_t MAX_BUFFER_CAPACITY_IN_FRAMES = 32 * 1024;  // arbitrary
+
 // A stream that talks to the AAudioService or directly to a HAL.
 class AudioStreamInternal : public AudioStream {
 
@@ -141,7 +147,7 @@
     AudioEndpoint            mAudioEndpoint;   // source for reads or sink for writes
     aaudio_handle_t          mServiceStreamHandle; // opaque handle returned from service
 
-    int32_t                  mFramesPerBurst;     // frames per HAL transfer
+    int32_t                  mFramesPerBurst = MIN_FRAMES_PER_BURST; // frames per HAL transfer
     int32_t                  mXRunCount = 0;      // how many underrun events?
 
     // Offset from underlying frame position.
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index b792ecd..62f0fc8 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -14,7 +14,8 @@
  * limitations under the License.
  */
 
-#define LOG_TAG (mInService ? "AAudioService" : "AAudio")
+#define LOG_TAG (mInService ? "AudioStreamInternalCapture_Service" \
+                          : "AudioStreamInternalCapture_Client")
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -101,7 +102,8 @@
     }
 
     // If the write index passed the read index then consider it an overrun.
-    if (mAudioEndpoint.getEmptyFramesAvailable() < 0) {
+    // For shared streams, the xRunCount is passed up from the service.
+    if (mAudioEndpoint.isFreeRunning() && mAudioEndpoint.getEmptyFramesAvailable() < 0) {
         mXRunCount++;
         if (ATRACE_ENABLED()) {
             ATRACE_INT("aaOverRuns", mXRunCount);
@@ -152,7 +154,7 @@
 
 aaudio_result_t AudioStreamInternalCapture::readNowWithConversion(void *buffer,
                                                                 int32_t numFrames) {
-    // ALOGD("AudioStreamInternalCapture::readNowWithConversion(%p, %d)",
+    // ALOGD("readNowWithConversion(%p, %d)",
     //              buffer, numFrames);
     WrappingBuffer wrappingBuffer;
     uint8_t *destination = (uint8_t *) buffer;
@@ -201,7 +203,7 @@
     int32_t framesProcessed = numFrames - framesLeft;
     mAudioEndpoint.advanceReadIndex(framesProcessed);
 
-    //ALOGD("AudioStreamInternalCapture::readNowWithConversion() returns %d", framesProcessed);
+    //ALOGD("readNowWithConversion() returns %d", framesProcessed);
     return framesProcessed;
 }
 
@@ -215,14 +217,14 @@
     // Prevent retrograde motion.
     mLastFramesWritten = std::max(mLastFramesWritten,
                                   framesWrittenHardware + mFramesOffsetFromService);
-    //ALOGD("AudioStreamInternalCapture::getFramesWritten() returns %lld",
+    //ALOGD("getFramesWritten() returns %lld",
     //      (long long)mLastFramesWritten);
     return mLastFramesWritten;
 }
 
 int64_t AudioStreamInternalCapture::getFramesRead() {
     int64_t frames = mAudioEndpoint.getDataReadCounter() + mFramesOffsetFromService;
-    //ALOGD("AudioStreamInternalCapture::getFramesRead() returns %lld", (long long)frames);
+    //ALOGD("getFramesRead() returns %lld", (long long)frames);
     return frames;
 }
 
@@ -230,8 +232,7 @@
 void *AudioStreamInternalCapture::callbackLoop() {
     aaudio_result_t result = AAUDIO_OK;
     aaudio_data_callback_result_t callbackResult = AAUDIO_CALLBACK_RESULT_CONTINUE;
-    AAudioStream_dataCallback appCallback = getDataCallbackProc();
-    if (appCallback == nullptr) return NULL;
+    if (!isDataCallbackSet()) return NULL;
 
     // result might be a frame count
     while (mCallbackEnabled.load() && isActive() && (result >= 0)) {
@@ -242,35 +243,25 @@
         // This is a BLOCKING READ!
         result = read(mCallbackBuffer, mCallbackFrames, timeoutNanos);
         if ((result != mCallbackFrames)) {
-            ALOGE("AudioStreamInternalCapture(): callbackLoop: read() returned %d", result);
+            ALOGE("callbackLoop: read() returned %d", result);
             if (result >= 0) {
                 // Only read some of the frames requested. Must have timed out.
                 result = AAUDIO_ERROR_TIMEOUT;
             }
-            AAudioStream_errorCallback errorCallback = getErrorCallbackProc();
-            if (errorCallback != nullptr) {
-                (*errorCallback)(
-                        (AAudioStream *) this,
-                        getErrorCallbackUserData(),
-                        result);
-            }
+            maybeCallErrorCallback(result);
             break;
         }
 
         // Call application using the AAudio callback interface.
-        callbackResult = (*appCallback)(
-                (AAudioStream *) this,
-                getDataCallbackUserData(),
-                mCallbackBuffer,
-                mCallbackFrames);
+        callbackResult = maybeCallDataCallback(mCallbackBuffer, mCallbackFrames);
 
         if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
-            ALOGD("AudioStreamInternalCapture(): callback returned AAUDIO_CALLBACK_RESULT_STOP");
+            ALOGD("callback returned AAUDIO_CALLBACK_RESULT_STOP");
             break;
         }
     }
 
-    ALOGD("AudioStreamInternalCapture(): callbackLoop() exiting, result = %d, isActive() = %d",
+    ALOGD("callbackLoop() exiting, result = %d, isActive() = %d",
           result, (int) isActive());
     return NULL;
 }
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 1e02eee..5de6a11 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -14,7 +14,8 @@
  * limitations under the License.
  */
 
-#define LOG_TAG (mInService ? "AAudioService" : "AAudio")
+#define LOG_TAG (mInService ? "AudioStreamInternalPlay_Service" \
+                          : "AudioStreamInternalPlay_Client")
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -139,7 +140,8 @@
     }
 
     // If the read index passed the write index then consider it an underrun.
-    if (mAudioEndpoint.getFullFramesAvailable() < 0) {
+    // For shared streams, the xRunCount is passed up from the service.
+    if (mAudioEndpoint.isFreeRunning() && mAudioEndpoint.getFullFramesAvailable() < 0) {
         mXRunCount++;
         if (ATRACE_ENABLED()) {
             ATRACE_INT("aaUnderRuns", mXRunCount);
@@ -218,8 +220,7 @@
             // Data conversion.
             float levelFrom;
             float levelTo;
-            bool ramping = mVolumeRamp.nextSegment(framesToWrite * getSamplesPerFrame(),
-                                                   &levelFrom, &levelTo);
+            bool ramping = mVolumeRamp.nextSegment(framesToWrite, &levelFrom, &levelTo);
             // The formats are validated when the stream is opened so we do not have to
             // check for illegal combinations here.
             // TODO factor this out into a utility function
@@ -322,18 +323,13 @@
 void *AudioStreamInternalPlay::callbackLoop() {
     aaudio_result_t result = AAUDIO_OK;
     aaudio_data_callback_result_t callbackResult = AAUDIO_CALLBACK_RESULT_CONTINUE;
-    AAudioStream_dataCallback appCallback = getDataCallbackProc();
-    if (appCallback == nullptr) return NULL;
+    if (!isDataCallbackSet()) return NULL;
     int64_t timeoutNanos = calculateReasonableTimeout(mCallbackFrames);
 
     // result might be a frame count
     while (mCallbackEnabled.load() && isActive() && (result >= 0)) {
         // Call application using the AAudio callback interface.
-        callbackResult = (*appCallback)(
-                (AAudioStream *) this,
-                getDataCallbackUserData(),
-                mCallbackBuffer,
-                mCallbackFrames);
+        callbackResult = maybeCallDataCallback(mCallbackBuffer, mCallbackFrames);
 
         if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
             // Write audio data to stream. This is a BLOCKING WRITE!
@@ -344,13 +340,7 @@
                     // Only wrote some of the frames requested. Must have timed out.
                     result = AAUDIO_ERROR_TIMEOUT;
                 }
-                AAudioStream_errorCallback errorCallback = getErrorCallbackProc();
-                if (errorCallback != nullptr) {
-                    (*errorCallback)(
-                            (AAudioStream *) this,
-                            getErrorCallbackUserData(),
-                            result);
-                }
+                maybeCallErrorCallback(result);
                 break;
             }
         } else if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index bac69f1..95b52be 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "IsochronousClockModel"
 //#define LOG_NDEBUG 0
 #include <log/log.h>
 
@@ -41,20 +41,20 @@
 }
 
 void IsochronousClockModel::setPositionAndTime(int64_t framePosition, int64_t nanoTime) {
-    ALOGV("IsochronousClockModel::setPositionAndTime(%lld, %lld)",
+    ALOGV("setPositionAndTime(%lld, %lld)",
           (long long) framePosition, (long long) nanoTime);
     mMarkerFramePosition = framePosition;
     mMarkerNanoTime = nanoTime;
 }
 
 void IsochronousClockModel::start(int64_t nanoTime) {
-    ALOGV("IsochronousClockModel::start(nanos = %lld)\n", (long long) nanoTime);
+    ALOGV("start(nanos = %lld)\n", (long long) nanoTime);
     mMarkerNanoTime = nanoTime;
     mState = STATE_STARTING;
 }
 
 void IsochronousClockModel::stop(int64_t nanoTime) {
-    ALOGV("IsochronousClockModel::stop(nanos = %lld)\n", (long long) nanoTime);
+    ALOGV("stop(nanos = %lld)\n", (long long) nanoTime);
     setPositionAndTime(convertTimeToPosition(nanoTime), nanoTime);
     // TODO should we set position?
     mState = STATE_STOPPED;
@@ -156,7 +156,7 @@
     int64_t framesDelta = nextBurstPosition - mMarkerFramePosition;
     int64_t nanosDelta = convertDeltaPositionToTime(framesDelta);
     int64_t time = mMarkerNanoTime + nanosDelta;
-//    ALOGD("IsochronousClockModel::convertPositionToTime: pos = %llu --> time = %llu",
+//    ALOGD("convertPositionToTime: pos = %llu --> time = %llu",
 //         (unsigned long long)framePosition,
 //         (unsigned long long)time);
     return time;
@@ -171,19 +171,19 @@
     int64_t nextBurstPosition = mMarkerFramePosition + framesDelta;
     int64_t nextBurstIndex = nextBurstPosition / mFramesPerBurst;
     int64_t position = nextBurstIndex * mFramesPerBurst;
-//    ALOGD("IsochronousClockModel::convertTimeToPosition: time = %llu --> pos = %llu",
+//    ALOGD("convertTimeToPosition: time = %llu --> pos = %llu",
 //         (unsigned long long)nanoTime,
 //         (unsigned long long)position);
-//    ALOGD("IsochronousClockModel::convertTimeToPosition: framesDelta = %llu, mFramesPerBurst = %d",
+//    ALOGD("convertTimeToPosition: framesDelta = %llu, mFramesPerBurst = %d",
 //         (long long) framesDelta, mFramesPerBurst);
     return position;
 }
 
 void IsochronousClockModel::dump() const {
-    ALOGD("IsochronousClockModel::mMarkerFramePosition = %lld", (long long) mMarkerFramePosition);
-    ALOGD("IsochronousClockModel::mMarkerNanoTime      = %lld", (long long) mMarkerNanoTime);
-    ALOGD("IsochronousClockModel::mSampleRate          = %6d", mSampleRate);
-    ALOGD("IsochronousClockModel::mFramesPerBurst      = %6d", mFramesPerBurst);
-    ALOGD("IsochronousClockModel::mMaxLatenessInNanos  = %6d", mMaxLatenessInNanos);
-    ALOGD("IsochronousClockModel::mState               = %6d", mState);
+    ALOGD("mMarkerFramePosition = %lld", (long long) mMarkerFramePosition);
+    ALOGD("mMarkerNanoTime      = %lld", (long long) mMarkerNanoTime);
+    ALOGD("mSampleRate          = %6d", mSampleRate);
+    ALOGD("mFramesPerBurst      = %6d", mFramesPerBurst);
+    ALOGD("mMaxLatenessInNanos  = %6d", mMaxLatenessInNanos);
+    ALOGD("mState               = %6d", mState);
 }
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 1eaee81..9e5ca8e 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -18,6 +18,8 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <inttypes.h>
+#include <mutex>
 #include <time.h>
 #include <pthread.h>
 
@@ -175,6 +177,24 @@
     streamBuilder->setSharingMode(sharingMode);
 }
 
+AAUDIO_API void AAudioStreamBuilder_setUsage(AAudioStreamBuilder* builder,
+                                             aaudio_usage_t usage) {
+    AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+    streamBuilder->setUsage(usage);
+}
+
+AAUDIO_API void AAudioStreamBuilder_setContentType(AAudioStreamBuilder* builder,
+                                                   aaudio_content_type_t contentType) {
+    AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+    streamBuilder->setContentType(contentType);
+}
+
+AAUDIO_API void AAudioStreamBuilder_setInputPreset(AAudioStreamBuilder* builder,
+                                                   aaudio_input_preset_t inputPreset) {
+    AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+    streamBuilder->setInputPreset(inputPreset);
+}
+
 AAUDIO_API void AAudioStreamBuilder_setBufferCapacityInFrames(AAudioStreamBuilder* builder,
                                                         int32_t frames)
 {
@@ -238,15 +258,26 @@
 
 AAUDIO_API aaudio_result_t  AAudioStream_close(AAudioStream* stream)
 {
+    aaudio_result_t result = AAUDIO_ERROR_NULL;
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
-    ALOGD("AAudioStream_close(%p)", stream);
+    ALOGD("AAudioStream_close(%p) called ---------------", stream);
     if (audioStream != nullptr) {
-        audioStream->close();
-        audioStream->unregisterPlayerBase();
-        delete audioStream;
-        return AAUDIO_OK;
+        result = audioStream->safeClose();
+        // Close will only fail if called illegally, for example, from a callback.
+        // That would result in deleting an active stream, which would cause a crash.
+        if (result == AAUDIO_OK) {
+            audioStream->unregisterPlayerBase();
+            delete audioStream;
+        } else {
+            ALOGW("%s attempt to close failed. Close it from another thread.", __func__);
+        }
     }
-    return AAUDIO_ERROR_NULL;
+    // We're potentially freeing `stream` above, so its use here makes some
+    // static analysis tools unhappy. Casting to uintptr_t helps assure
+    // said tools that we're not doing anything bad here.
+    ALOGD("AAudioStream_close(%#" PRIxPTR ") returned %d ---------",
+          reinterpret_cast<uintptr_t>(stream), result);
+    return result;
 }
 
 AAUDIO_API aaudio_result_t  AAudioStream_requestStart(AAudioStream* stream)
@@ -269,7 +300,7 @@
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
     ALOGD("AAudioStream_requestFlush(%p)", stream);
-    return audioStream->requestFlush();
+    return audioStream->safeFlush();
 }
 
 AAUDIO_API aaudio_result_t  AAudioStream_requestStop(AAudioStream* stream)
@@ -324,7 +355,7 @@
     }
 
     // Don't allow writes when playing with a callback.
-    if (audioStream->getDataCallbackProc() != nullptr && audioStream->isActive()) {
+    if (audioStream->isDataCallbackActive()) {
         ALOGE("Cannot write to a callback stream when running.");
         return AAUDIO_ERROR_INVALID_STATE;
     }
@@ -434,6 +465,24 @@
     return audioStream->getSharingMode();
 }
 
+AAUDIO_API aaudio_usage_t AAudioStream_getUsage(AAudioStream* stream)
+{
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    return audioStream->getUsage();
+}
+
+AAUDIO_API aaudio_content_type_t AAudioStream_getContentType(AAudioStream* stream)
+{
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    return audioStream->getContentType();
+}
+
+AAUDIO_API aaudio_input_preset_t AAudioStream_getInputPreset(AAudioStream* stream)
+{
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    return audioStream->getInputPreset();
+}
+
 AAUDIO_API int64_t AAudioStream_getFramesWritten(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 82445e7..23c4eb8 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -15,7 +15,7 @@
  */
 
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "AAudioStreamParameters"
 #include <utils/Log.h>
 #include <hardware/audio.h>
 
@@ -42,17 +42,20 @@
     mAudioFormat     = other.mAudioFormat;
     mDirection       = other.mDirection;
     mBufferCapacity  = other.mBufferCapacity;
+    mUsage           = other.mUsage;
+    mContentType     = other.mContentType;
+    mInputPreset     = other.mInputPreset;
 }
 
 aaudio_result_t AAudioStreamParameters::validate() const {
     if (mSamplesPerFrame != AAUDIO_UNSPECIFIED
         && (mSamplesPerFrame < SAMPLES_PER_FRAME_MIN || mSamplesPerFrame > SAMPLES_PER_FRAME_MAX)) {
-        ALOGE("AAudioStreamParameters: channelCount out of range = %d", mSamplesPerFrame);
+        ALOGE("channelCount out of range = %d", mSamplesPerFrame);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
 
     if (mDeviceId < 0) {
-        ALOGE("AAudioStreamParameters: deviceId out of range = %d", mDeviceId);
+        ALOGE("deviceId out of range = %d", mDeviceId);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
 
@@ -61,7 +64,7 @@
         case AAUDIO_SHARING_MODE_SHARED:
             break;
         default:
-            ALOGE("AAudioStreamParameters: illegal sharingMode = %d", mSharingMode);
+            ALOGE("illegal sharingMode = %d", mSharingMode);
             return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
             // break;
     }
@@ -72,19 +75,19 @@
         case AAUDIO_FORMAT_PCM_FLOAT:
             break; // valid
         default:
-            ALOGE("AAudioStreamParameters: audioFormat not valid = %d", mAudioFormat);
+            ALOGE("audioFormat not valid = %d", mAudioFormat);
             return AAUDIO_ERROR_INVALID_FORMAT;
             // break;
     }
 
     if (mSampleRate != AAUDIO_UNSPECIFIED
         && (mSampleRate < SAMPLE_RATE_HZ_MIN || mSampleRate > SAMPLE_RATE_HZ_MAX)) {
-        ALOGE("AAudioStreamParameters: sampleRate out of range = %d", mSampleRate);
+        ALOGE("sampleRate out of range = %d", mSampleRate);
         return AAUDIO_ERROR_INVALID_RATE;
     }
 
     if (mBufferCapacity < 0) {
-        ALOGE("AAudioStreamParameters: bufferCapacity out of range = %d", mBufferCapacity);
+        ALOGE("bufferCapacity out of range = %d", mBufferCapacity);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
 
@@ -93,7 +96,55 @@
         case AAUDIO_DIRECTION_OUTPUT:
             break; // valid
         default:
-            ALOGE("AAudioStreamParameters: direction not valid = %d", mDirection);
+            ALOGE("direction not valid = %d", mDirection);
+            return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+            // break;
+    }
+
+    switch (mUsage) {
+        case AAUDIO_UNSPECIFIED:
+        case AAUDIO_USAGE_MEDIA:
+        case AAUDIO_USAGE_VOICE_COMMUNICATION:
+        case AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
+        case AAUDIO_USAGE_ALARM:
+        case AAUDIO_USAGE_NOTIFICATION:
+        case AAUDIO_USAGE_NOTIFICATION_RINGTONE:
+        case AAUDIO_USAGE_NOTIFICATION_EVENT:
+        case AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
+        case AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+        case AAUDIO_USAGE_ASSISTANCE_SONIFICATION:
+        case AAUDIO_USAGE_GAME:
+        case AAUDIO_USAGE_ASSISTANT:
+            break; // valid
+        default:
+            ALOGE("usage not valid = %d", mUsage);
+            return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+            // break;
+    }
+
+    switch (mContentType) {
+        case AAUDIO_UNSPECIFIED:
+        case AAUDIO_CONTENT_TYPE_MUSIC:
+        case AAUDIO_CONTENT_TYPE_MOVIE:
+        case AAUDIO_CONTENT_TYPE_SONIFICATION:
+        case AAUDIO_CONTENT_TYPE_SPEECH:
+            break; // valid
+        default:
+            ALOGE("content type not valid = %d", mContentType);
+            return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+            // break;
+    }
+
+    switch (mInputPreset) {
+        case AAUDIO_UNSPECIFIED:
+        case AAUDIO_INPUT_PRESET_GENERIC:
+        case AAUDIO_INPUT_PRESET_CAMCORDER:
+        case AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION:
+        case AAUDIO_INPUT_PRESET_VOICE_RECOGNITION:
+        case AAUDIO_INPUT_PRESET_UNPROCESSED:
+            break; // valid
+        default:
+            ALOGE("input preset not valid = %d", mInputPreset);
             return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
             // break;
     }
@@ -102,12 +153,15 @@
 }
 
 void AAudioStreamParameters::dump() const {
-    ALOGD("AAudioStreamParameters mDeviceId        = %d", mDeviceId);
-    ALOGD("AAudioStreamParameters mSampleRate      = %d", mSampleRate);
-    ALOGD("AAudioStreamParameters mSamplesPerFrame = %d", mSamplesPerFrame);
-    ALOGD("AAudioStreamParameters mSharingMode     = %d", (int)mSharingMode);
-    ALOGD("AAudioStreamParameters mAudioFormat     = %d", (int)mAudioFormat);
-    ALOGD("AAudioStreamParameters mDirection       = %d", mDirection);
-    ALOGD("AAudioStreamParameters mBufferCapacity  = %d", mBufferCapacity);
+    ALOGD("mDeviceId        = %6d", mDeviceId);
+    ALOGD("mSampleRate      = %6d", mSampleRate);
+    ALOGD("mSamplesPerFrame = %6d", mSamplesPerFrame);
+    ALOGD("mSharingMode     = %6d", (int)mSharingMode);
+    ALOGD("mAudioFormat     = %6d", (int)mAudioFormat);
+    ALOGD("mDirection       = %6d", mDirection);
+    ALOGD("mBufferCapacity  = %6d", mBufferCapacity);
+    ALOGD("mUsage           = %6d", mUsage);
+    ALOGD("mContentType     = %6d", mContentType);
+    ALOGD("mInputPreset     = %6d", mInputPreset);
 }
 
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index 5e67c93..0c173f5 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -88,6 +88,30 @@
         mDirection = direction;
     }
 
+    aaudio_usage_t getUsage() const {
+        return mUsage;
+    }
+
+    void setUsage(aaudio_usage_t usage) {
+        mUsage = usage;
+    }
+
+    aaudio_content_type_t getContentType() const {
+        return mContentType;
+    }
+
+    void setContentType(aaudio_content_type_t contentType) {
+        mContentType = contentType;
+    }
+
+    aaudio_input_preset_t getInputPreset() const {
+        return mInputPreset;
+    }
+
+    void setInputPreset(aaudio_input_preset_t inputPreset) {
+        mInputPreset = inputPreset;
+    }
+
     int32_t calculateBytesPerFrame() const {
         return getSamplesPerFrame() * AAudioConvert_formatToSizeInBytes(getFormat());
     }
@@ -109,6 +133,9 @@
     aaudio_sharing_mode_t      mSharingMode     = AAUDIO_SHARING_MODE_SHARED;
     aaudio_format_t            mAudioFormat     = AAUDIO_FORMAT_UNSPECIFIED;
     aaudio_direction_t         mDirection       = AAUDIO_DIRECTION_OUTPUT;
+    aaudio_usage_t             mUsage           = AAUDIO_UNSPECIFIED;
+    aaudio_content_type_t      mContentType     = AAUDIO_UNSPECIFIED;
+    aaudio_input_preset_t      mInputPreset     = AAUDIO_UNSPECIFIED;
     int32_t                    mBufferCapacity  = AAUDIO_UNSPECIFIED;
 };
 
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 8dcc37a..289e0db 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -43,7 +43,7 @@
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
                           || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
                           || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
-                        "aaudio stream still in use, state = %s",
+                        "~AudioStream() - still in use, state = %s",
                         AAudio_convertStreamStateToText(getState()));
 
     mPlayerBase->clearParentReference(); // remove reference to this AudioStream
@@ -74,15 +74,28 @@
     }
 
     // Copy parameters from the Builder because the Builder may be deleted after this call.
+    // TODO AudioStream should be a subclass of AudioStreamParameters
     mSamplesPerFrame = builder.getSamplesPerFrame();
     mSampleRate = builder.getSampleRate();
     mDeviceId = builder.getDeviceId();
     mFormat = builder.getFormat();
     mSharingMode = builder.getSharingMode();
     mSharingModeMatchRequired = builder.isSharingModeMatchRequired();
-
     mPerformanceMode = builder.getPerformanceMode();
 
+    mUsage = builder.getUsage();
+    if (mUsage == AAUDIO_UNSPECIFIED) {
+        mUsage = AAUDIO_USAGE_MEDIA;
+    }
+    mContentType = builder.getContentType();
+    if (mContentType == AAUDIO_UNSPECIFIED) {
+        mContentType = AAUDIO_CONTENT_TYPE_MUSIC;
+    }
+    mInputPreset = builder.getInputPreset();
+    if (mInputPreset == AAUDIO_UNSPECIFIED) {
+        mInputPreset = AAUDIO_INPUT_PRESET_GENERIC;
+    }
+
     // callbacks
     mFramesPerDataCallback = builder.getFramesPerDataCallback();
     mDataCallbackProc = builder.getDataCallbackProc();
@@ -91,18 +104,62 @@
     mErrorCallbackUserData = builder.getErrorCallbackUserData();
 
     // This is very helpful for debugging in the future. Please leave it in.
-    ALOGI("AudioStream::open() rate = %d, channels = %d, format = %d, sharing = %s, dir = %s",
+    ALOGI("open() rate = %d, channels = %d, format = %d, sharing = %s, dir = %s",
           mSampleRate, mSamplesPerFrame, mFormat,
           AudioStream_convertSharingModeToShortText(mSharingMode),
           (getDirection() == AAUDIO_DIRECTION_OUTPUT) ? "OUTPUT" : "INPUT");
-    ALOGI("AudioStream::open() device = %d, perfMode = %d, callback: %s with frames = %d",
+    ALOGI("open() device = %d, perfMode = %d, callback: %s with frames = %d",
           mDeviceId, mPerformanceMode,
-          (mDataCallbackProc == nullptr ? "OFF" : "ON"),
+          (isDataCallbackSet() ? "ON" : "OFF"),
           mFramesPerDataCallback);
 
     return AAUDIO_OK;
 }
 
+aaudio_result_t AudioStream::safeStart() {
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    if (collidesWithCallback()) {
+        ALOGE("%s cannot be called from a callback!", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    return requestStart();
+}
+
+aaudio_result_t AudioStream::safePause() {
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    if (collidesWithCallback()) {
+        ALOGE("%s cannot be called from a callback!", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    return requestPause();
+}
+
+aaudio_result_t AudioStream::safeFlush() {
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    if (collidesWithCallback()) {
+        ALOGE("%s cannot be called from a callback!", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    return requestFlush();
+}
+
+aaudio_result_t AudioStream::safeStop() {
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    if (collidesWithCallback()) {
+        ALOGE("%s cannot be called from a callback!", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    return requestStop();
+}
+
+aaudio_result_t AudioStream::safeClose() {
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    if (collidesWithCallback()) {
+        ALOGE("%s cannot be called from a callback!", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    return close();
+}
 
 aaudio_result_t AudioStream::waitForStateChange(aaudio_stream_state_t currentState,
                                                 aaudio_stream_state_t *nextState,
@@ -163,7 +220,7 @@
                                      void* threadArg)
 {
     if (mHasThread) {
-        ALOGE("AudioStream::createThread() - mHasThread already true");
+        ALOGE("createThread() - mHasThread already true");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     if (threadProc == nullptr) {
@@ -185,7 +242,7 @@
 aaudio_result_t AudioStream::joinThread(void** returnArg, int64_t timeoutNanoseconds)
 {
     if (!mHasThread) {
-        ALOGE("AudioStream::joinThread() - but has no thread");
+        ALOGE("joinThread() - but has no thread");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 #if 0
@@ -200,6 +257,58 @@
 }
 
 
+aaudio_data_callback_result_t AudioStream::maybeCallDataCallback(void *audioData,
+                                                                 int32_t numFrames) {
+    aaudio_data_callback_result_t result = AAUDIO_CALLBACK_RESULT_STOP;
+    AAudioStream_dataCallback dataCallback = getDataCallbackProc();
+    if (dataCallback != nullptr) {
+        // Store thread ID of caller to detect stop() and close() calls from callback.
+        pid_t expected = CALLBACK_THREAD_NONE;
+        if (mDataCallbackThread.compare_exchange_strong(expected, gettid())) {
+            result = (*dataCallback)(
+                    (AAudioStream *) this,
+                    getDataCallbackUserData(),
+                    audioData,
+                    numFrames);
+            mDataCallbackThread.store(CALLBACK_THREAD_NONE);
+        } else {
+            ALOGW("%s() data callback already running!", __func__);
+        }
+    }
+    return result;
+}
+
+void AudioStream::maybeCallErrorCallback(aaudio_result_t result) {
+    AAudioStream_errorCallback errorCallback = getErrorCallbackProc();
+    if (errorCallback != nullptr) {
+        // Store thread ID of caller to detect stop() and close() calls from callback.
+        pid_t expected = CALLBACK_THREAD_NONE;
+        if (mErrorCallbackThread.compare_exchange_strong(expected, gettid())) {
+            (*errorCallback)(
+                    (AAudioStream *) this,
+                    getErrorCallbackUserData(),
+                    result);
+            mErrorCallbackThread.store(CALLBACK_THREAD_NONE);
+        } else {
+            ALOGW("%s() error callback already running!", __func__);
+        }
+    }
+}
+
+// Is this running on the same thread as a callback?
+// Note: This cannot be implemented with a thread_local flag because a
+// thread_local would be shared across streams. That would incorrectly prevent
+// stopping or closing stream A from a callback on stream B, which is
+// currently legal and not so terrible.
+bool AudioStream::collidesWithCallback() const {
+    pid_t thisThread = gettid();
+    // Compare the current thread ID with the thread ID of the callback
+    // threads to see if they match. If so, then this code is being
+    // called from one of the stream callback functions.
+    return ((mErrorCallbackThread.load() == thisThread)
+            || (mDataCallbackThread.load() == thisThread));
+}
+
 #if AAUDIO_USE_VOLUME_SHAPER
 android::media::VolumeShaper::Status AudioStream::applyVolumeShaper(
         const android::media::VolumeShaper::Configuration& configuration __unused,
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 34202d2..82e7189 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -39,6 +39,8 @@
 
 class AudioStreamBuilder;
 
+constexpr pid_t        CALLBACK_THREAD_NONE = 0;
+
 /**
  * AAudio audio stream.
  */
@@ -49,8 +51,22 @@
 
     virtual ~AudioStream();
 
+    /**
+     * Lock a mutex and make sure we are not calling from a callback function.
+     * @return result of requestStart();
+     */
+    aaudio_result_t safeStart();
+
+    aaudio_result_t safePause();
+
+    aaudio_result_t safeFlush();
+
+    aaudio_result_t safeStop();
+
+    aaudio_result_t safeClose();
 
     // =========== Begin ABSTRACT methods ===========================
+protected:
 
     /* Asynchronous requests.
      * Use waitForStateChange() to wait for completion.
@@ -70,6 +86,7 @@
 
     virtual aaudio_result_t requestStop() = 0;
 
+public:
     virtual aaudio_result_t getTimestamp(clockid_t clockId,
                                        int64_t *framePosition,
                                        int64_t *timeNanoseconds) = 0;
@@ -81,7 +98,6 @@
      */
     virtual aaudio_result_t updateStateMachine() = 0;
 
-
     // =========== End ABSTRACT methods ===========================
 
     virtual aaudio_result_t waitForStateChange(aaudio_stream_state_t currentState,
@@ -188,6 +204,18 @@
 
     virtual aaudio_direction_t getDirection() const = 0;
 
+    aaudio_usage_t getUsage() const {
+        return mUsage;
+    }
+
+    aaudio_content_type_t getContentType() const {
+        return mContentType;
+    }
+
+    aaudio_input_preset_t getInputPreset() const {
+        return mInputPreset;
+    }
+
     /**
      * This is only valid after setSamplesPerFrame() and setFormat() have been called.
      */
@@ -209,13 +237,19 @@
     AAudioStream_dataCallback getDataCallbackProc() const {
         return mDataCallbackProc;
     }
+
     AAudioStream_errorCallback getErrorCallbackProc() const {
         return mErrorCallbackProc;
     }
 
+    aaudio_data_callback_result_t maybeCallDataCallback(void *audioData, int32_t numFrames);
+
+    void maybeCallErrorCallback(aaudio_result_t result);
+
     void *getDataCallbackUserData() const {
         return mDataCallbackUserData;
     }
+
     void *getErrorCallbackUserData() const {
         return mErrorCallbackUserData;
     }
@@ -224,10 +258,25 @@
         return mFramesPerDataCallback;
     }
 
-    bool isDataCallbackActive() {
-        return (mDataCallbackProc != nullptr) && isActive();
+    /**
+     * @return true if data callback has been specified
+     */
+    bool isDataCallbackSet() const {
+        return mDataCallbackProc != nullptr;
     }
 
+    /**
+     * @return true if data callback has been specified and stream is running
+     */
+    bool isDataCallbackActive() const {
+        return isDataCallbackSet() && isActive();
+    }
+
+    /**
+     * @return true if called from the same thread as the callback
+     */
+    bool collidesWithCallback() const;
+
     // ============== I/O ===========================
     // A Stream will only implement read() or write() depending on its direction.
     virtual aaudio_result_t write(const void *buffer __unused,
@@ -248,7 +297,7 @@
         doSetVolume(); // apply this change
     }
 
-    float getDuckAndMuteVolume() {
+    float getDuckAndMuteVolume() const {
         return mDuckAndMuteVolume;
     }
 
@@ -331,17 +380,17 @@
 
         android::status_t playerStart() override {
             // mParent should NOT be null. So go ahead and crash if it is.
-            mResult = mParent->requestStart();
+            mResult = mParent->safeStart();
             return AAudioConvert_aaudioToAndroidStatus(mResult);
         }
 
         android::status_t playerPause() override {
-            mResult = mParent->requestPause();
+            mResult = mParent->safePause();
             return AAudioConvert_aaudioToAndroidStatus(mResult);
         }
 
         android::status_t playerStop() override {
-            mResult = mParent->requestStop();
+            mResult = mParent->safeStop();
             return AAudioConvert_aaudioToAndroidStatus(mResult);
         }
 
@@ -405,8 +454,6 @@
         mDeviceId = deviceId;
     }
 
-    std::mutex           mStreamMutex;
-
     std::atomic<bool>    mCallbackEnabled{false};
 
     float                mDuckAndMuteVolume = 1.0f;
@@ -422,39 +469,45 @@
     }
 
 private:
+
+    std::mutex                 mStreamLock;
+
     const android::sp<MyPlayerBase>   mPlayerBase;
 
     // These do not change after open().
-    int32_t                mSamplesPerFrame = AAUDIO_UNSPECIFIED;
-    int32_t                mSampleRate = AAUDIO_UNSPECIFIED;
-    int32_t                mDeviceId = AAUDIO_UNSPECIFIED;
-    aaudio_sharing_mode_t  mSharingMode = AAUDIO_SHARING_MODE_SHARED;
-    bool                   mSharingModeMatchRequired = false; // must match sharing mode requested
-    aaudio_format_t        mFormat = AAUDIO_FORMAT_UNSPECIFIED;
-    aaudio_stream_state_t  mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
-
-    aaudio_performance_mode_t mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
+    int32_t                     mSamplesPerFrame = AAUDIO_UNSPECIFIED;
+    int32_t                     mSampleRate = AAUDIO_UNSPECIFIED;
+    int32_t                     mDeviceId = AAUDIO_UNSPECIFIED;
+    aaudio_sharing_mode_t       mSharingMode = AAUDIO_SHARING_MODE_SHARED;
+    bool                        mSharingModeMatchRequired = false; // must match sharing mode requested
+    aaudio_format_t             mFormat = AAUDIO_FORMAT_UNSPECIFIED;
+    aaudio_stream_state_t       mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
+    aaudio_performance_mode_t   mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
+    aaudio_usage_t              mUsage           = AAUDIO_USAGE_MEDIA;
+    aaudio_content_type_t       mContentType     = AAUDIO_CONTENT_TYPE_MUSIC;
+    aaudio_input_preset_t       mInputPreset     = AAUDIO_INPUT_PRESET_GENERIC;
 
     // callback ----------------------------------
 
     AAudioStream_dataCallback   mDataCallbackProc = nullptr;  // external callback functions
     void                       *mDataCallbackUserData = nullptr;
     int32_t                     mFramesPerDataCallback = AAUDIO_UNSPECIFIED; // frames
+    std::atomic<pid_t>          mDataCallbackThread{CALLBACK_THREAD_NONE};
 
     AAudioStream_errorCallback  mErrorCallbackProc = nullptr;
     void                       *mErrorCallbackUserData = nullptr;
+    std::atomic<pid_t>          mErrorCallbackThread{CALLBACK_THREAD_NONE};
 
     // background thread ----------------------------------
-    bool                   mHasThread = false;
-    pthread_t              mThread; // initialized in constructor
+    bool                        mHasThread = false;
+    pthread_t                   mThread; // initialized in constructor
 
     // These are set by the application thread and then read by the audio pthread.
-    std::atomic<int64_t>   mPeriodNanoseconds; // for tuning SCHED_FIFO threads
+    std::atomic<int64_t>        mPeriodNanoseconds; // for tuning SCHED_FIFO threads
     // TODO make atomic?
-    aaudio_audio_thread_proc_t mThreadProc = nullptr;
-    void*                  mThreadArg = nullptr;
-    aaudio_result_t        mThreadRegistrationResult = AAUDIO_OK;
-
+    aaudio_audio_thread_proc_t  mThreadProc = nullptr;
+    void                       *mThreadArg = nullptr;
+    aaudio_result_t             mThreadRegistrationResult = AAUDIO_OK;
 
 };
 
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 09ebb3e..f7cb8d6 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudio"
+#define LOG_TAG "AudioStreamBuilder"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -87,7 +87,7 @@
             break;
 
         default:
-            ALOGE("AudioStreamBuilder(): bad direction = %d", direction);
+            ALOGE("bad direction = %d", direction);
             result = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
     }
     return result;
@@ -99,7 +99,7 @@
 aaudio_result_t AudioStreamBuilder::build(AudioStream** streamPtr) {
     AudioStream *audioStream = nullptr;
     if (streamPtr == nullptr) {
-        ALOGE("AudioStreamBuilder::build() streamPtr is null");
+        ALOGE("build() streamPtr is null");
         return AAUDIO_ERROR_NULL;
     }
     *streamPtr = nullptr;
@@ -124,13 +124,13 @@
     if (mapExclusivePolicy == AAUDIO_UNSPECIFIED) {
         mapExclusivePolicy = AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT;
     }
-    ALOGD("AudioStreamBuilder(): mmapPolicy = %d, mapExclusivePolicy = %d",
+    ALOGD("mmapPolicy = %d, mapExclusivePolicy = %d",
           mmapPolicy, mapExclusivePolicy);
 
     aaudio_sharing_mode_t sharingMode = getSharingMode();
     if ((sharingMode == AAUDIO_SHARING_MODE_EXCLUSIVE)
         && (mapExclusivePolicy == AAUDIO_POLICY_NEVER)) {
-        ALOGW("AudioStreamBuilder(): EXCLUSIVE sharing mode not supported. Use SHARED.");
+        ALOGW("EXCLUSIVE sharing mode not supported. Use SHARED.");
         sharingMode = AAUDIO_SHARING_MODE_SHARED;
         setSharingMode(sharingMode);
     }
@@ -156,7 +156,7 @@
             audioStream = nullptr;
 
             if (isMMap && allowLegacy) {
-                ALOGD("AudioStreamBuilder.build() MMAP stream did not open so try Legacy path");
+                ALOGD("build() MMAP stream did not open so try Legacy path");
                 // If MMAP stream failed to open then TRY using a legacy stream.
                 result = builder_createStream(getDirection(), sharingMode,
                                               false, &audioStream);
@@ -190,7 +190,7 @@
         case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY:
             break;
         default:
-            ALOGE("AudioStreamBuilder: illegal performanceMode = %d", mPerformanceMode);
+            ALOGE("illegal performanceMode = %d", mPerformanceMode);
             return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
             // break;
     }
@@ -199,7 +199,7 @@
     if (mFramesPerDataCallback != AAUDIO_UNSPECIFIED
         && (mFramesPerDataCallback < FRAMES_PER_DATA_CALLBACK_MIN
             || mFramesPerDataCallback > FRAMES_PER_DATA_CALLBACK_MAX)) {
-        ALOGE("AudioStreamBuilder: framesPerDataCallback out of range = %d",
+        ALOGE("framesPerDataCallback out of range = %d",
               mFramesPerDataCallback);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index a869886..e6e7c8e 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -43,7 +43,7 @@
     int32_t bytesPerBuffer = bytesPerFrame * capacityInFrames;
     mStorage = new uint8_t[bytesPerBuffer];
     mStorageOwned = true;
-    ALOGD("FifoBuffer: capacityInFrames = %d, bytesPerFrame = %d",
+    ALOGD("capacityInFrames = %d, bytesPerFrame = %d",
           capacityInFrames, bytesPerFrame);
 }
 
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index ee2504d..c5dfb7c 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -48,19 +48,14 @@
     return AudioStreamLegacy_callback;
 }
 
-int32_t AudioStreamLegacy::callDataCallbackFrames(uint8_t *buffer, int32_t numFrames) {
+aaudio_data_callback_result_t AudioStreamLegacy::callDataCallbackFrames(uint8_t *buffer, int32_t numFrames) {
     if (getDirection() == AAUDIO_DIRECTION_INPUT) {
         // Increment before because we already got the data from the device.
         incrementFramesRead(numFrames);
     }
 
     // Call using the AAudio callback interface.
-    AAudioStream_dataCallback appCallback = getDataCallbackProc();
-    aaudio_data_callback_result_t callbackResult = (*appCallback)(
-            (AAudioStream *) this,
-            getDataCallbackUserData(),
-            buffer,
-            numFrames);
+    aaudio_data_callback_result_t callbackResult = maybeCallDataCallback(buffer, numFrames);
 
     if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE
             && getDirection() == AAUDIO_DIRECTION_OUTPUT) {
@@ -73,22 +68,26 @@
 // Implement FixedBlockProcessor
 int32_t AudioStreamLegacy::onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) {
     int32_t numFrames = numBytes / getBytesPerFrame();
-    return callDataCallbackFrames(buffer, numFrames);
+    return (int32_t) callDataCallbackFrames(buffer, numFrames);
 }
 
 void AudioStreamLegacy::processCallbackCommon(aaudio_callback_operation_t opcode, void *info) {
     aaudio_data_callback_result_t callbackResult;
+    // This illegal size can be used to tell AudioFlinger to stop calling us.
+    // This takes advantage of AudioFlinger killing the stream.
+    // TODO need API change in AudioRecord and AudioTrack
+    const size_t SIZE_STOP_CALLBACKS = SIZE_MAX;
 
     switch (opcode) {
         case AAUDIO_CALLBACK_OPERATION_PROCESS_DATA: {
-            checkForDisconnectRequest();
+            (void) checkForDisconnectRequest(true);
 
             // Note that this code assumes an AudioTrack::Buffer is the same as
             // AudioRecord::Buffer
             // TODO define our own AudioBuffer and pass it from the subclasses.
             AudioTrack::Buffer *audioBuffer = static_cast<AudioTrack::Buffer *>(info);
             if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED || !mCallbackEnabled.load()) {
-                audioBuffer->size = 0; // silence the buffer
+                audioBuffer->size = SIZE_STOP_CALLBACKS;
             } else {
                 if (audioBuffer->frameCount == 0) {
                     return;
@@ -106,8 +105,11 @@
                 }
                 if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
                     audioBuffer->size = audioBuffer->frameCount * getBytesPerFrame();
-                } else {
-                    audioBuffer->size = 0;
+                } else { // STOP or invalid result
+                    ALOGW("%s() stop stream by faking an error", __func__);
+                    audioBuffer->size = SIZE_STOP_CALLBACKS;
+                    // Disable the callback just in case AudioFlinger keeps trying to call us.
+                    mCallbackEnabled.store(false);
                 }
 
                 if (updateStateMachine() != AAUDIO_OK) {
@@ -130,26 +132,23 @@
     }
 }
 
-
-
-void AudioStreamLegacy::checkForDisconnectRequest() {
+aaudio_result_t AudioStreamLegacy::checkForDisconnectRequest(bool errorCallbackEnabled) {
     if (mRequestDisconnect.isRequested()) {
         ALOGD("checkForDisconnectRequest() mRequestDisconnect acknowledged");
-        forceDisconnect();
+        forceDisconnect(errorCallbackEnabled);
         mRequestDisconnect.acknowledge();
         mCallbackEnabled.store(false);
+        return AAUDIO_ERROR_DISCONNECTED;
+    } else {
+        return AAUDIO_OK;
     }
 }
 
-void AudioStreamLegacy::forceDisconnect() {
+void AudioStreamLegacy::forceDisconnect(bool errorCallbackEnabled) {
     if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED) {
         setState(AAUDIO_STREAM_STATE_DISCONNECTED);
-        if (getErrorCallbackProc() != nullptr) {
-            (*getErrorCallbackProc())(
-                    (AAudioStream *) this,
-                    getErrorCallbackUserData(),
-                    AAUDIO_ERROR_DISCONNECTED
-            );
+        if (errorCallbackEnabled) {
+            maybeCallErrorCallback(AAUDIO_ERROR_DISCONNECTED);
         }
     }
 }
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.h b/media/libaaudio/src/legacy/AudioStreamLegacy.h
index 7e28579..6a506b3 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.h
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.h
@@ -112,9 +112,14 @@
 
     void onAudioDeviceUpdate(audio_port_handle_t deviceId);
 
-    void checkForDisconnectRequest();
+    /*
+     * Check to see whether a callback thread has requested a disconnect.
+     * @param errorCallbackEnabled set true to call errorCallback on disconnect
+     * @return AAUDIO_OK or AAUDIO_ERROR_DISCONNECTED
+     */
+    aaudio_result_t checkForDisconnectRequest(bool errorCallbackEnabled);
 
-    void forceDisconnect();
+    void forceDisconnect(bool errorCallbackEnabled = true);
 
     void onStart() { mCallbackEnabled.store(true); }
     void onStop() { mCallbackEnabled.store(false); }
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index bc6e60c..5f4ab9b 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -96,16 +96,32 @@
     }
     mCallbackBufferSize = builder.getFramesPerDataCallback();
 
-    ALOGD("AudioStreamRecord::open(), request notificationFrames = %u, frameCount = %u",
+    ALOGD("open(), request notificationFrames = %u, frameCount = %u",
           notificationFrames, (uint)frameCount);
+
+    // Don't call mAudioRecord->setInputDevice() because it will be overwritten by set()!
+    audio_port_handle_t selectedDeviceId = (getDeviceId() == AAUDIO_UNSPECIFIED)
+                                           ? AUDIO_PORT_HANDLE_NONE
+                                           : getDeviceId();
+
+    const audio_content_type_t contentType =
+            AAudioConvert_contentTypeToInternal(builder.getContentType());
+    const audio_source_t source =
+            AAudioConvert_inputPresetToAudioSource(builder.getInputPreset());
+
+    const audio_attributes_t attributes = {
+            .content_type = contentType,
+            .usage = AUDIO_USAGE_UNKNOWN, // only used for output
+            .source = source,
+            .flags = flags, // If attributes are set then the other flags parameter is ignored.
+            .tags = ""
+    };
+
     mAudioRecord = new AudioRecord(
             mOpPackageName // const String16& opPackageName TODO does not compile
             );
-    if (getDeviceId() != AAUDIO_UNSPECIFIED) {
-        mAudioRecord->setInputDevice(getDeviceId());
-    }
     mAudioRecord->set(
-            AUDIO_SOURCE_VOICE_RECOGNITION,
+            AUDIO_SOURCE_DEFAULT, // ignored because we pass attributes below
             getSampleRate(),
             format,
             channelMask,
@@ -116,17 +132,18 @@
             false /*threadCanCallJava*/,
             AUDIO_SESSION_ALLOCATE,
             streamTransferType,
-            flags
-            //   int uid = -1,
-            //   pid_t pid = -1,
-            //   const audio_attributes_t* pAttributes = nullptr
+            flags,
+            AUDIO_UID_INVALID, // DEFAULT uid
+            -1,                // DEFAULT pid
+            &attributes,
+            selectedDeviceId
             );
 
     // Did we get a valid track?
     status_t status = mAudioRecord->initCheck();
     if (status != OK) {
         close();
-        ALOGE("AudioStreamRecord::open(), initCheck() returned %d", status);
+        ALOGE("open(), initCheck() returned %d", status);
         return AAudioConvert_androidToAAudioResult(status);
     }
 
@@ -136,7 +153,7 @@
 
     int32_t actualSampleRate = mAudioRecord->getSampleRate();
     ALOGW_IF(actualSampleRate != getSampleRate(),
-             "AudioStreamRecord::open() sampleRate changed from %d to %d",
+             "open() sampleRate changed from %d to %d",
              getSampleRate(), actualSampleRate);
     setSampleRate(actualSampleRate);
 
@@ -164,10 +181,10 @@
 
     // Log warning if we did not get what we asked for.
     ALOGW_IF(actualFlags != flags,
-             "AudioStreamRecord::open() flags changed from 0x%08X to 0x%08X",
+             "open() flags changed from 0x%08X to 0x%08X",
              flags, actualFlags);
     ALOGW_IF(actualPerformanceMode != perfMode,
-             "AudioStreamRecord::open() perfMode changed from %d to %d",
+             "open() perfMode changed from %d to %d",
              perfMode, actualPerformanceMode);
 
     setState(AAUDIO_STREAM_STATE_OPEN);
@@ -238,8 +255,9 @@
     mAudioRecord->stop();
     mFramesRead.reset32();
     mTimestampPosition.reset32();
-    checkForDisconnectRequest();
-    return AAUDIO_OK;
+    // Pass false to prevent the error callback from being called after a disconnect
+    // when the app has already requested a stop().
+    return checkForDisconnectRequest(false);
 }
 
 aaudio_result_t AudioStreamRecord::updateStateMachine()
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 0e9aaef..17a8d52 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -113,14 +113,30 @@
     }
     mCallbackBufferSize = builder.getFramesPerDataCallback();
 
-    ALOGD("AudioStreamTrack::open(), request notificationFrames = %d, frameCount = %u",
+    ALOGD("open(), request notificationFrames = %d, frameCount = %u",
           notificationFrames, (uint)frameCount);
-    mAudioTrack = new AudioTrack(); // TODO review
-    if (getDeviceId() != AAUDIO_UNSPECIFIED) {
-        mAudioTrack->setOutputDevice(getDeviceId());
-    }
+
+    // Don't call mAudioTrack->setDeviceId() because it will be overwritten by set()!
+    audio_port_handle_t selectedDeviceId = (getDeviceId() == AAUDIO_UNSPECIFIED)
+                                           ? AUDIO_PORT_HANDLE_NONE
+                                           : getDeviceId();
+
+    const audio_content_type_t contentType =
+            AAudioConvert_contentTypeToInternal(builder.getContentType());
+    const audio_usage_t usage =
+            AAudioConvert_usageToInternal(builder.getUsage());
+
+    const audio_attributes_t attributes = {
+            .content_type = contentType,
+            .usage = usage,
+            .source = AUDIO_SOURCE_DEFAULT, // only used for recording
+            .flags = flags, // If attributes are set then the other flags parameter is ignored.
+            .tags = ""
+    };
+
+    mAudioTrack = new AudioTrack();
     mAudioTrack->set(
-            (audio_stream_type_t) AUDIO_STREAM_MUSIC,
+            AUDIO_STREAM_DEFAULT,  // ignored because we pass attributes below
             getSampleRate(),
             format,
             channelMask,
@@ -129,17 +145,26 @@
             callback,
             callbackData,
             notificationFrames,
-            0 /*sharedBuffer*/,
-            false /*threadCanCallJava*/,
+            0,       // DEFAULT sharedBuffer
+            false,   // DEFAULT threadCanCallJava
             AUDIO_SESSION_ALLOCATE,
-            streamTransferType
-            );
+            streamTransferType,
+            NULL,    // DEFAULT audio_offload_info_t
+            AUDIO_UID_INVALID, // DEFAULT uid
+            -1,      // DEFAULT pid
+            &attributes,
+            // WARNING - If doNotReconnect set true then audio stops after plugging and unplugging
+            // headphones a few times.
+            false,   // DEFAULT doNotReconnect,
+            1.0f,    // DEFAULT maxRequiredSpeed
+            selectedDeviceId
+    );
 
     // Did we get a valid track?
     status_t status = mAudioTrack->initCheck();
     if (status != NO_ERROR) {
         close();
-        ALOGE("AudioStreamTrack::open(), initCheck() returned %d", status);
+        ALOGE("open(), initCheck() returned %d", status);
         return AAudioConvert_androidToAAudioResult(status);
     }
 
@@ -153,7 +178,7 @@
 
     int32_t actualSampleRate = mAudioTrack->getSampleRate();
     ALOGW_IF(actualSampleRate != getSampleRate(),
-             "AudioStreamTrack::open() sampleRate changed from %d to %d",
+             "open() sampleRate changed from %d to %d",
              getSampleRate(), actualSampleRate);
     setSampleRate(actualSampleRate);
 
@@ -186,10 +211,10 @@
 
     // Log warning if we did not get what we asked for.
     ALOGW_IF(actualFlags != flags,
-             "AudioStreamTrack::open() flags changed from 0x%08X to 0x%08X",
+             "open() flags changed from 0x%08X to 0x%08X",
              flags, actualFlags);
     ALOGW_IF(actualPerformanceMode != perfMode,
-             "AudioStreamTrack::open() perfMode changed from %d to %d",
+             "open() perfMode changed from %d to %d",
              perfMode, actualPerformanceMode);
 
     return AAUDIO_OK;
@@ -224,10 +249,8 @@
 }
 
 aaudio_result_t AudioStreamTrack::requestStart() {
-    std::lock_guard<std::mutex> lock(mStreamMutex);
-
     if (mAudioTrack.get() == nullptr) {
-        ALOGE("AudioStreamTrack::requestStart() no AudioTrack");
+        ALOGE("requestStart() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     // Get current position so we can detect when the track is playing.
@@ -247,13 +270,12 @@
 }
 
 aaudio_result_t AudioStreamTrack::requestPause() {
-    std::lock_guard<std::mutex> lock(mStreamMutex);
-
     if (mAudioTrack.get() == nullptr) {
         ALOGE("requestPause() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
     } else if (getState() != AAUDIO_STREAM_STATE_STARTING
             && getState() != AAUDIO_STREAM_STATE_STARTED) {
+        // TODO What about DISCONNECTED?
         ALOGE("requestPause(), called when state is %s",
               AAudio_convertStreamStateToText(getState()));
         return AAUDIO_ERROR_INVALID_STATE;
@@ -261,22 +283,19 @@
     onStop();
     setState(AAUDIO_STREAM_STATE_PAUSING);
     mAudioTrack->pause();
-    checkForDisconnectRequest();
     status_t err = mAudioTrack->getPosition(&mPositionWhenPausing);
     if (err != OK) {
         return AAudioConvert_androidToAAudioResult(err);
     }
-    return AAUDIO_OK;
+    return checkForDisconnectRequest(false);
 }
 
 aaudio_result_t AudioStreamTrack::requestFlush() {
-    std::lock_guard<std::mutex> lock(mStreamMutex);
-
     if (mAudioTrack.get() == nullptr) {
-        ALOGE("AudioStreamTrack::requestFlush() no AudioTrack");
+        ALOGE("requestFlush() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
     } else if (getState() != AAUDIO_STREAM_STATE_PAUSED) {
-        ALOGE("AudioStreamTrack::requestFlush() not paused");
+        ALOGE("requestFlush() not paused");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     setState(AAUDIO_STREAM_STATE_FLUSHING);
@@ -288,10 +307,8 @@
 }
 
 aaudio_result_t AudioStreamTrack::requestStop() {
-    std::lock_guard<std::mutex> lock(mStreamMutex);
-
     if (mAudioTrack.get() == nullptr) {
-        ALOGE("AudioStreamTrack::requestStop() no AudioTrack");
+        ALOGE("requestStop() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     onStop();
@@ -301,8 +318,7 @@
     mFramesWritten.reset32();
     mTimestampPosition.reset32();
     mAudioTrack->stop();
-    checkForDisconnectRequest();
-    return AAUDIO_OK;
+    return checkForDisconnectRequest(false);
 }
 
 aaudio_result_t AudioStreamTrack::updateStateMachine()
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 612ad27..c6adf33 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -25,6 +25,8 @@
 
 #include "aaudio/AAudio.h"
 #include <aaudio/AAudioTesting.h>
+#include <math.h>
+#include <system/audio-base.h>
 
 #include "utility/AAudioUtilities.h"
 
@@ -50,44 +52,10 @@
     return size;
 }
 
-
 // TODO expose and call clamp16_from_float function in primitives.h
 static inline int16_t clamp16_from_float(float f) {
-    /* Offset is used to expand the valid range of [-1.0, 1.0) into the 16 lsbs of the
-     * floating point significand. The normal shift is 3<<22, but the -15 offset
-     * is used to multiply by 32768.
-     */
-    static const float offset = (float)(3 << (22 - 15));
-    /* zero = (0x10f << 22) =  0x43c00000 (not directly used) */
-    static const int32_t limneg = (0x10f << 22) /*zero*/ - 32768; /* 0x43bf8000 */
-    static const int32_t limpos = (0x10f << 22) /*zero*/ + 32767; /* 0x43c07fff */
-
-    union {
-        float f;
-        int32_t i;
-    } u;
-
-    u.f = f + offset; /* recenter valid range */
-    /* Now the valid range is represented as integers between [limneg, limpos].
-     * Clamp using the fact that float representation (as an integer) is an ordered set.
-     */
-    if (u.i < limneg)
-        u.i = -32768;
-    else if (u.i > limpos)
-        u.i = 32767;
-    return u.i; /* Return lower 16 bits, the part of interest in the significand. */
-}
-
-// Same but without clipping.
-// Convert -1.0f to +1.0f to -32768 to +32767
-static inline int16_t floatToInt16(float f) {
-    static const float offset = (float)(3 << (22 - 15));
-    union {
-        float f;
-        int32_t i;
-    } u;
-    u.f = f + offset; /* recenter valid range */
-    return u.i; /* Return lower 16 bits, the part of interest in the significand. */
+    static const float scale = 1 << 15;
+    return (int16_t) roundf(fmaxf(fminf(f * scale, scale - 1.f), -scale));
 }
 
 static float clipAndClampFloatToPcm16(float sample, float scaler) {
@@ -188,13 +156,14 @@
                        int32_t samplesPerFrame,
                        float amplitude1,
                        float amplitude2) {
-    float scaler = amplitude1 / SHORT_SCALE;
-    float delta = (amplitude2 - amplitude1) / (SHORT_SCALE * (float) numFrames);
+    // Because we are converting from int16 to int16, we do not have to scale by 1/32768.
+    float scaler = amplitude1;
+    float delta = (amplitude2 - amplitude1) / numFrames;
     for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
         for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
             // No need to clip because int16_t range is inherently limited.
             float sample =  *source++ * scaler;
-            *destination++ =  floatToInt16(sample);
+            *destination++ = (int16_t) roundf(sample);
         }
         scaler += delta;
     }
@@ -315,6 +284,61 @@
     return aaudioFormat;
 }
 
+// Make a message string from the condition.
+#define STATIC_ASSERT(condition) static_assert(condition, #condition)
+
+audio_usage_t AAudioConvert_usageToInternal(aaudio_usage_t usage) {
+    // The public aaudio_usage_t constants are supposed to have the same
+    // values as the internal audio_usage_t values.
+    STATIC_ASSERT(AAUDIO_USAGE_MEDIA == AUDIO_USAGE_MEDIA);
+    STATIC_ASSERT(AAUDIO_USAGE_VOICE_COMMUNICATION == AUDIO_USAGE_VOICE_COMMUNICATION);
+    STATIC_ASSERT(AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING
+                  == AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING);
+    STATIC_ASSERT(AAUDIO_USAGE_ALARM == AUDIO_USAGE_ALARM);
+    STATIC_ASSERT(AAUDIO_USAGE_NOTIFICATION == AUDIO_USAGE_NOTIFICATION);
+    STATIC_ASSERT(AAUDIO_USAGE_NOTIFICATION_RINGTONE
+                  == AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE);
+    STATIC_ASSERT(AAUDIO_USAGE_NOTIFICATION_EVENT == AUDIO_USAGE_NOTIFICATION_EVENT);
+    STATIC_ASSERT(AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY == AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY);
+    STATIC_ASSERT(AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE
+                  == AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE);
+    STATIC_ASSERT(AAUDIO_USAGE_ASSISTANCE_SONIFICATION == AUDIO_USAGE_ASSISTANCE_SONIFICATION);
+    STATIC_ASSERT(AAUDIO_USAGE_GAME == AUDIO_USAGE_GAME);
+    STATIC_ASSERT(AAUDIO_USAGE_ASSISTANT == AUDIO_USAGE_ASSISTANT);
+    if (usage == AAUDIO_UNSPECIFIED) {
+        usage = AAUDIO_USAGE_MEDIA;
+    }
+    return (audio_usage_t) usage; // same value
+}
+
+audio_content_type_t AAudioConvert_contentTypeToInternal(aaudio_content_type_t contentType) {
+    // The public aaudio_content_type_t constants are supposed to have the same
+    // values as the internal audio_content_type_t values.
+    STATIC_ASSERT(AAUDIO_CONTENT_TYPE_MUSIC == AUDIO_CONTENT_TYPE_MUSIC);
+    STATIC_ASSERT(AAUDIO_CONTENT_TYPE_SPEECH == AUDIO_CONTENT_TYPE_SPEECH);
+    STATIC_ASSERT(AAUDIO_CONTENT_TYPE_SONIFICATION == AUDIO_CONTENT_TYPE_SONIFICATION);
+    STATIC_ASSERT(AAUDIO_CONTENT_TYPE_MOVIE == AUDIO_CONTENT_TYPE_MOVIE);
+    if (contentType == AAUDIO_UNSPECIFIED) {
+        contentType = AAUDIO_CONTENT_TYPE_MUSIC;
+    }
+    return (audio_content_type_t) contentType; // same value
+}
+
+audio_source_t AAudioConvert_inputPresetToAudioSource(aaudio_input_preset_t preset) {
+    // The public aaudio_input_preset_t constants are supposed to have the same
+    // values as the internal audio_source_t values.
+    STATIC_ASSERT(AAUDIO_UNSPECIFIED == AUDIO_SOURCE_DEFAULT);
+    STATIC_ASSERT(AAUDIO_INPUT_PRESET_GENERIC == AUDIO_SOURCE_MIC);
+    STATIC_ASSERT(AAUDIO_INPUT_PRESET_CAMCORDER == AUDIO_SOURCE_CAMCORDER);
+    STATIC_ASSERT(AAUDIO_INPUT_PRESET_VOICE_RECOGNITION == AUDIO_SOURCE_VOICE_RECOGNITION);
+    STATIC_ASSERT(AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION == AUDIO_SOURCE_VOICE_COMMUNICATION);
+    STATIC_ASSERT(AAUDIO_INPUT_PRESET_UNPROCESSED == AUDIO_SOURCE_UNPROCESSED);
+    if (preset == AAUDIO_UNSPECIFIED) {
+        preset = AAUDIO_INPUT_PRESET_GENERIC;
+    }
+    return (audio_source_t) preset; // same value
+}
+
 int32_t AAudioConvert_framesToBytes(int32_t numFrames,
                                             int32_t bytesPerFrame,
                                             int32_t *sizeInBytes) {
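
The rewritten clamp16_from_float() above replaces the bit-twiddling version with a scale-clamp-round expression. A small standalone sketch, assuming it mirrors that function (the helper name and the main() harness are illustrative); the expected values agree with the float_to_i16 test added in test_linear_ramp.cpp later in this patch:

    #include <math.h>
    #include <stdint.h>
    #include <stdio.h>

    // Mirrors the simplified clamp16_from_float(): scale by 2^15, clamp, round.
    static int16_t clampFloatToI16(float f) {
        static const float scale = 1 << 15;  // 32768.0f
        return (int16_t) roundf(fmaxf(fminf(f * scale, scale - 1.f), -scale));
    }

    int main() {
        printf("%d %d %d %d %d\n",
               clampFloatToI16(0.5f),    // 16384
               clampFloatToI16(0.1f),    // 3277 (roundf(3276.8))
               clampFloatToI16(1.0f),    // 32767, clamped to scale - 1
               clampFloatToI16(-1.0f),   // -32768
               clampFloatToI16(-2.0f));  // -32768, clamped
        return 0;
    }
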
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index 3afa976..f2347f5 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -167,6 +167,29 @@
 
 aaudio_format_t AAudioConvert_androidToAAudioDataFormat(audio_format_t format);
 
+
+/**
+ * Note that this function does not validate the passed in value.
+ * That is done somewhere else.
+ * @return internal value
+ */
+audio_usage_t AAudioConvert_usageToInternal(aaudio_usage_t usage);
+
+/**
+ * Note that this function does not validate the passed in value.
+ * That is done somewhere else.
+ * @return internal value
+ */
+audio_content_type_t AAudioConvert_contentTypeToInternal(aaudio_content_type_t contentType);
+
+/**
+ * Note that this function does not validate the passed in value.
+ * That is done somewhere else.
+ * @return internal audio source
+ */
+audio_source_t AAudioConvert_inputPresetToAudioSource(aaudio_input_preset_t preset);
+
 /**
  * @return the size of a sample of the given format in bytes or AAUDIO_ERROR_ILLEGAL_ARGUMENT
  */
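
The three declarations above are the helpers implemented in AAudioUtilities.cpp earlier in this patch. A short usage sketch of the behavior they implement, based on those implementations (values pass through unchanged; AAUDIO_UNSPECIFIED is mapped to a default):

    audio_usage_t usage = AAudioConvert_usageToInternal(AAUDIO_UNSPECIFIED);
    //   -> AUDIO_USAGE_MEDIA (default substituted for UNSPECIFIED)
    audio_content_type_t content = AAudioConvert_contentTypeToInternal(AAUDIO_UNSPECIFIED);
    //   -> AUDIO_CONTENT_TYPE_MUSIC (default substituted for UNSPECIFIED)
    audio_source_t source = AAudioConvert_inputPresetToAudioSource(AAUDIO_INPUT_PRESET_VOICE_RECOGNITION);
    //   -> AUDIO_SOURCE_VOICE_RECOGNITION (same numeric value)
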
diff --git a/media/libaaudio/src/utility/LinearRamp.h b/media/libaaudio/src/utility/LinearRamp.h
index ff09dce..2b1b8e0 100644
--- a/media/libaaudio/src/utility/LinearRamp.h
+++ b/media/libaaudio/src/utility/LinearRamp.h
@@ -87,7 +87,7 @@
 
     std::atomic<float>   mTarget;
 
-    int32_t mLengthInFrames  = 48000 / 50; // 20 msec at 48000 Hz
+    int32_t mLengthInFrames  = 48000 / 100; // 10 msec at 48000 Hz
     int32_t mRemaining       = 0;
     float   mLevelFrom       = 0.0f;
     float   mLevelTo         = 0.0f;
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 19c56d3..33718fc 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -27,6 +27,7 @@
 
 cc_test {
     name: "test_timestamps",
+    defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_timestamps.cpp"],
     header_libs: ["libaaudio_example_utils"],
     shared_libs: ["libaaudio"],
@@ -86,3 +87,52 @@
         "libutils",
     ],
 }
+
+cc_test {
+    name: "test_bad_disconnect",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_bad_disconnect.cpp"],
+    shared_libs: [
+        "libaaudio",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
+
+cc_test {
+    name: "test_various",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_various.cpp"],
+    shared_libs: [
+        "libaaudio",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
+
+cc_test {
+    name: "test_aaudio_monkey",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_aaudio_monkey.cpp"],
+    header_libs: ["libaaudio_example_utils"],
+    shared_libs: [
+        "libaaudio",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
+
+cc_test {
+    name: "test_attributes",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_attributes.cpp"],
+    shared_libs: [
+        "libaaudio",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
diff --git a/media/libaaudio/tests/test_aaudio_monkey.cpp b/media/libaaudio/tests/test_aaudio_monkey.cpp
new file mode 100644
index 0000000..be54835
--- /dev/null
+++ b/media/libaaudio/tests/test_aaudio_monkey.cpp
@@ -0,0 +1,307 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Try to trigger bugs by playing randomly on multiple streams.
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <vector>
+
+#include <aaudio/AAudio.h>
+#include "AAudioArgsParser.h"
+#include "AAudioExampleUtils.h"
+#include "AAudioSimplePlayer.h"
+#include "SineGenerator.h"
+
+#define DEFAULT_TIMEOUT_NANOS  (1 * NANOS_PER_SECOND)
+
+#define NUM_LOOPS          1000
+#define MAX_MICROS_DELAY   (2 * 1000 * 1000)
+
+// TODO Consider adding an input stream.
+#define PROB_START   (0.20)
+#define PROB_PAUSE   (PROB_START + 0.10)
+#define PROB_FLUSH   (PROB_PAUSE + 0.10)
+#define PROB_STOP    (PROB_FLUSH + 0.10)
+#define PROB_CLOSE   (PROB_STOP + 0.10)
+static_assert(PROB_CLOSE < 0.9, "Probability sum too high.");
+
+aaudio_data_callback_result_t AAudioMonkeyDataCallback(
+        AAudioStream *stream,
+        void *userData,
+        void *audioData,
+        int32_t numFrames);
+
+void AAudioMonkeyErrorCallbackProc(
+        AAudioStream *stream __unused,
+        void *userData __unused,
+        aaudio_result_t error) {
+    printf("Error Callback, error: %d\n",(int)error);
+}
+
+// This function is not thread safe. Only use this from a single thread.
+double nextRandomDouble() {
+    return drand48();
+}
+
+class AAudioMonkey : public AAudioSimplePlayer {
+public:
+
+    AAudioMonkey(int index, AAudioArgsParser *argParser)
+            : mArgParser(argParser)
+            , mIndex(index) {}
+
+    aaudio_result_t open() {
+        printf("Monkey # %d ---------------------------------------------- OPEN\n", mIndex);
+        double offset = mIndex * 50;
+        mSine1.setup(440.0, 48000);
+        mSine1.setSweep(300.0 + offset, 600.0 + offset, 5.0);
+        mSine2.setup(660.0, 48000);
+        mSine2.setSweep(350.0 + offset, 900.0 + offset, 7.0);
+
+        aaudio_result_t result = AAudioSimplePlayer::open(*mArgParser,
+                                      AAudioMonkeyDataCallback,
+                                      AAudioMonkeyErrorCallbackProc,
+                                      this);
+        if (result != AAUDIO_OK) {
+            printf("ERROR -  player.open() returned %d\n", result);
+        }
+
+        mArgParser->compareWithStream(getStream());
+        return result;
+    }
+
+    bool isOpen() {
+        return (getStream() != nullptr);
+    }
+
+    /**
+     * @return true if stream passes tests
+     */
+    bool validate() {
+        if (!isOpen()) return true; // closed is OK
+
+        // update and query stream state
+        aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+        aaudio_result_t result = AAudioStream_waitForStateChange(getStream(),
+            AAUDIO_STREAM_STATE_UNKNOWN, &state, 0);
+        if (result != AAUDIO_OK) {
+            printf("ERROR - AAudioStream_waitForStateChange returned %d\n", result);
+            return false;
+        }
+
+        int64_t framesRead = AAudioStream_getFramesRead(getStream());
+        int64_t framesWritten = AAudioStream_getFramesWritten(getStream());
+        int32_t xRuns = AAudioStream_getXRunCount(getStream());
+        // Print status
+        printf("%30s, framesWritten = %8lld, framesRead = %8lld, xRuns = %d\n",
+               AAudio_convertStreamStateToText(state),
+               (long long) framesWritten,
+               (long long) framesRead,
+               xRuns);
+
+        if (framesWritten < framesRead) {
+            printf("WARNING - UNDERFLOW - diff = %d !!!!!!!!!!!!\n",
+                   (int) (framesWritten - framesRead));
+        }
+        return true;
+    }
+
+    aaudio_result_t invoke() {
+        aaudio_result_t result = AAUDIO_OK;
+        if (!isOpen()) {
+            result = open();
+            if (result != AAUDIO_OK) return result;
+        }
+
+        if (!validate()) {
+            return -1;
+        }
+
+        double dice = nextRandomDouble();
+        // Select an action based on a weighted probability.
+        if (dice < PROB_START) {
+            printf("start\n");
+            result = AAudioStream_requestStart(getStream());
+        } else if (dice < PROB_PAUSE) {
+            printf("pause\n");
+            result = AAudioStream_requestPause(getStream());
+        } else if (dice < PROB_FLUSH) {
+            printf("flush\n");
+            result = AAudioStream_requestFlush(getStream());
+        } else if (dice < PROB_STOP) {
+            printf("stop\n");
+            result = AAudioStream_requestStop(getStream());
+        } else if (dice < PROB_CLOSE) {
+            printf("close\n");
+            result = close();
+        } else {
+            printf("do nothing\n");
+        }
+
+        if (result == AAUDIO_ERROR_INVALID_STATE) {
+            printf("    got AAUDIO_ERROR_INVALID_STATE - expected from a monkey\n");
+            result = AAUDIO_OK;
+        }
+        if (result == AAUDIO_OK && isOpen()) {
+            if (!validate()) {
+                result = -1;
+            }
+        }
+        return result;
+    }
+
+    aaudio_data_callback_result_t renderAudio(
+            AAudioStream *stream,
+            void *audioData,
+            int32_t numFrames) {
+
+        int32_t samplesPerFrame = AAudioStream_getChannelCount(stream);
+        // This code only plays on the first one or two channels.
+        // TODO Support arbitrary number of channels.
+        switch (AAudioStream_getFormat(stream)) {
+            case AAUDIO_FORMAT_PCM_I16: {
+                int16_t *audioBuffer = (int16_t *) audioData;
+                // Render sine waves as shorts to first channel.
+                mSine1.render(&audioBuffer[0], samplesPerFrame, numFrames);
+                // Render sine waves to second channel if there is one.
+                if (samplesPerFrame > 1) {
+                    mSine2.render(&audioBuffer[1], samplesPerFrame, numFrames);
+                }
+            }
+                break;
+            case AAUDIO_FORMAT_PCM_FLOAT: {
+                float *audioBuffer = (float *) audioData;
+                // Render sine waves as floats to first channel.
+                mSine1.render(&audioBuffer[0], samplesPerFrame, numFrames);
+                // Render sine waves to second channel if there is one.
+                if (samplesPerFrame > 1) {
+                    mSine2.render(&audioBuffer[1], samplesPerFrame, numFrames);
+                }
+            }
+                break;
+            default:
+                return AAUDIO_CALLBACK_RESULT_STOP;
+        }
+        return AAUDIO_CALLBACK_RESULT_CONTINUE;
+    }
+
+private:
+    const AAudioArgsParser  *mArgParser;
+    const int                mIndex;
+    SineGenerator            mSine1;
+    SineGenerator            mSine2;
+};
+
+// Callback function that fills the audio output buffer.
+aaudio_data_callback_result_t AAudioMonkeyDataCallback(
+        AAudioStream *stream,
+        void *userData,
+        void *audioData,
+        int32_t numFrames
+) {
+    // should not happen but just in case...
+    if (userData == nullptr) {
+        printf("ERROR - AAudioMonkeyDataCallback needs userData\n");
+        return AAUDIO_CALLBACK_RESULT_STOP;
+    }
+    AAudioMonkey *monkey = (AAudioMonkey *) userData;
+    return monkey->renderAudio(stream, audioData, numFrames);
+}
+
+
+static void usage() {
+    AAudioArgsParser::usage();
+    printf("      -i{seed}  Initial random seed\n");
+    printf("      -t{count} number of monkeys in the Troop\n");
+}
+
+int main(int argc, const char **argv) {
+    AAudioArgsParser argParser;
+    std::vector<AAudioMonkey> monkeys;
+    aaudio_result_t result;
+    int numMonkeys = 1;
+
+    // Make printf print immediately so that debug info is not stuck
+    // in a buffer if we hang or crash.
+    setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+    printf("%s - Monkeys\n", argv[0]);
+
+    long int seed = (long int)getNanoseconds();  // different every time by default
+
+    for (int i = 1; i < argc; i++) {
+        const char *arg = argv[i];
+        if (argParser.parseArg(arg)) {
+            // Handle options that are not handled by the ArgParser
+            if (arg[0] == '-') {
+                char option = arg[1];
+                switch (option) {
+                    case 'i':
+                        seed = atol(&arg[2]);
+                        break;
+                    case 't':
+                        numMonkeys = atoi(&arg[2]);
+                        break;
+                    default:
+                        usage();
+                        exit(EXIT_FAILURE);
+                        break;
+                }
+            } else {
+                usage();
+                exit(EXIT_FAILURE);
+                break;
+            }
+        }
+    }
+
+    srand48(seed);
+    printf("seed = %ld, nextRandomDouble() = %f\n", seed, nextRandomDouble());
+
+    for (int m = 0; m < numMonkeys; m++) {
+        monkeys.emplace_back(m, &argParser);
+    }
+
+    for (int i = 0; i < NUM_LOOPS; i++) {
+        // pick a random monkey and invoke it
+        double dice = nextRandomDouble();
+        int monkeyIndex = floor(dice * numMonkeys);
+        printf("----------- Monkey #%d\n", monkeyIndex);
+        result = monkeys[monkeyIndex].invoke();
+        if (result != AAUDIO_OK) {
+            goto error;
+        }
+
+        // sleep some random time
+        dice = nextRandomDouble();
+        dice = dice * dice * dice; // skew towards smaller delays
+        int micros = (int) (dice * MAX_MICROS_DELAY);
+        usleep(micros);
+
+        // TODO consider making this multi-threaded, one thread per monkey, to catch more bugs
+    }
+
+    printf("PASS\n");
+    return EXIT_SUCCESS;
+
+error:
+    printf("FAIL - AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
+    usleep(1000 * 1000); // give me time to stop the logcat
+    return EXIT_FAILURE;
+}
+
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
new file mode 100644
index 0000000..9cbf113
--- /dev/null
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Test AAudio attributes such as Usage, ContentType and InputPreset.
+
+#include <stdio.h>
+#include <unistd.h>
+
+#include <aaudio/AAudio.h>
+#include <gtest/gtest.h>
+
+constexpr int64_t kNanosPerSecond = 1000000000;
+constexpr int kNumFrames = 256;
+constexpr int kChannelCount = 2;
+
+constexpr int32_t DONT_SET = -1000;
+
+static void checkAttributes(aaudio_performance_mode_t perfMode,
+                            aaudio_usage_t usage,
+                            aaudio_content_type_t contentType,
+                            aaudio_input_preset_t preset = DONT_SET,
+                            aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT) {
+
+    float *buffer = new float[kNumFrames * kChannelCount];
+
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+    // Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+    // Request stream properties.
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
+    AAudioStreamBuilder_setDirection(aaudioBuilder, direction);
+
+    // Set the attribute in the builder.
+    if (usage != DONT_SET) {
+        AAudioStreamBuilder_setUsage(aaudioBuilder, usage);
+    }
+    if (contentType != DONT_SET) {
+        AAudioStreamBuilder_setContentType(aaudioBuilder, contentType);
+    }
+    if (preset != DONT_SET) {
+        AAudioStreamBuilder_setInputPreset(aaudioBuilder, preset);
+    }
+
+    // Create an AAudioStream using the Builder.
+    ASSERT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+    AAudioStreamBuilder_delete(aaudioBuilder);
+
+    // Make sure we get the same attributes back from the stream.
+    aaudio_usage_t expectedUsage =
+            (usage == DONT_SET || usage == AAUDIO_UNSPECIFIED)
+            ? AAUDIO_USAGE_MEDIA // default
+            : usage;
+    EXPECT_EQ(expectedUsage, AAudioStream_getUsage(aaudioStream));
+
+    aaudio_content_type_t expectedContentType =
+            (contentType == DONT_SET || contentType == AAUDIO_UNSPECIFIED)
+            ? AAUDIO_CONTENT_TYPE_MUSIC // default
+            : contentType;
+    EXPECT_EQ(expectedContentType, AAudioStream_getContentType(aaudioStream));
+
+    aaudio_input_preset_t expectedPreset =
+            (preset == DONT_SET || preset == AAUDIO_UNSPECIFIED)
+            ? AAUDIO_INPUT_PRESET_GENERIC // default
+            : preset;
+    EXPECT_EQ(expectedPreset, AAudioStream_getInputPreset(aaudioStream));
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
+
+    if (direction == AAUDIO_DIRECTION_INPUT) {
+        EXPECT_EQ(kNumFrames,
+                  AAudioStream_read(aaudioStream, buffer, kNumFrames, kNanosPerSecond));
+    } else {
+        EXPECT_EQ(kNumFrames,
+                  AAudioStream_write(aaudioStream, buffer, kNumFrames, kNanosPerSecond));
+    }
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
+    delete[] buffer;
+}
+
+static const aaudio_usage_t sUsages[] = {
+    DONT_SET,
+    AAUDIO_UNSPECIFIED,
+    AAUDIO_USAGE_MEDIA,
+    AAUDIO_USAGE_VOICE_COMMUNICATION,
+    AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    AAUDIO_USAGE_ALARM,
+    AAUDIO_USAGE_NOTIFICATION,
+    AAUDIO_USAGE_NOTIFICATION_RINGTONE,
+    AAUDIO_USAGE_NOTIFICATION_EVENT,
+    AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+    AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+    AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
+    AAUDIO_USAGE_GAME,
+    AAUDIO_USAGE_ASSISTANT
+};
+
+static const aaudio_content_type_t sContentypes[] = {
+    DONT_SET,
+    AAUDIO_UNSPECIFIED,
+    AAUDIO_CONTENT_TYPE_SPEECH,
+    AAUDIO_CONTENT_TYPE_MUSIC,
+    AAUDIO_CONTENT_TYPE_MOVIE,
+    AAUDIO_CONTENT_TYPE_SONIFICATION
+};
+
+static const aaudio_input_preset_t sInputPresets[] = {
+    DONT_SET,
+    AAUDIO_UNSPECIFIED,
+    AAUDIO_INPUT_PRESET_GENERIC,
+    AAUDIO_INPUT_PRESET_CAMCORDER,
+    AAUDIO_INPUT_PRESET_VOICE_RECOGNITION,
+    AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION,
+    AAUDIO_INPUT_PRESET_UNPROCESSED,
+};
+
+static void checkAttributesUsage(aaudio_performance_mode_t perfMode) {
+    for (aaudio_usage_t usage : sUsages) {
+        checkAttributes(perfMode, usage, DONT_SET);
+    }
+}
+
+static void checkAttributesContentType(aaudio_performance_mode_t perfMode) {
+    for (aaudio_content_type_t contentType : sContentypes) {
+        checkAttributes(perfMode, DONT_SET, contentType);
+    }
+}
+
+static void checkAttributesInputPreset(aaudio_performance_mode_t perfMode) {
+    for (aaudio_input_preset_t inputPreset : sInputPresets) {
+        checkAttributes(perfMode,
+                        DONT_SET,
+                        DONT_SET,
+                        inputPreset,
+                        AAUDIO_DIRECTION_INPUT);
+    }
+}
+
+TEST(test_attributes, aaudio_usage_perfnone) {
+    checkAttributesUsage(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
+TEST(test_attributes, aaudio_content_type_perfnone) {
+    checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
+TEST(test_attributes, aaudio_input_preset_perfnone) {
+    checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
+TEST(test_attributes, aaudio_usage_lowlat) {
+    checkAttributesUsage(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
+TEST(test_attributes, aaudio_content_type_lowlat) {
+    checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
+TEST(test_attributes, aaudio_input_preset_lowlat) {
+    checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
diff --git a/media/libaaudio/tests/test_bad_disconnect.cpp b/media/libaaudio/tests/test_bad_disconnect.cpp
new file mode 100644
index 0000000..435990d
--- /dev/null
+++ b/media/libaaudio/tests/test_bad_disconnect.cpp
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Handle a DISCONNECT by only opening and starting a new stream
+ * without stopping and closing the old one.
+ * This caused the new stream to use the old disconnected device.
+ */
+
+#include <stdio.h>
+#include <thread>
+#include <unistd.h>
+
+#include <aaudio/AAudio.h>
+
+#define DEFAULT_TIMEOUT_NANOS  ((int64_t)1000000000)
+
+static void s_myErrorCallbackProc(
+        AAudioStream *stream,
+        void *userData,
+        aaudio_result_t error);
+
+struct AudioEngine {
+    AAudioStreamBuilder *builder = nullptr;
+    AAudioStream *stream = nullptr;
+    std::thread *thread = nullptr;
+    int64_t framesRead = 0;
+};
+
+AudioEngine s_AudioEngine;
+
+// Callback function that fills the audio output buffer.
+static aaudio_data_callback_result_t s_myDataCallbackProc(
+        AAudioStream *stream,
+        void *userData,
+        void *audioData,
+        int32_t numFrames
+) {
+    (void) userData;
+    (void) audioData;
+    (void) numFrames;
+    s_AudioEngine.framesRead = AAudioStream_getFramesRead(stream);
+    return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+static aaudio_result_t s_StartAudio() {
+    int32_t framesPerBurst = 0;
+    int32_t deviceId = 0;
+
+    // Use an AAudioStreamBuilder to contain requested parameters.
+    aaudio_result_t result = AAudio_createStreamBuilder(&s_AudioEngine.builder);
+    if (result != AAUDIO_OK) {
+        printf("AAudio_createStreamBuilder returned %s",
+               AAudio_convertResultToText(result));
+        return result;
+    }
+
+    // Request stream properties.
+    AAudioStreamBuilder_setFormat(s_AudioEngine.builder, AAUDIO_FORMAT_PCM_FLOAT);
+    AAudioStreamBuilder_setPerformanceMode(s_AudioEngine.builder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+    AAudioStreamBuilder_setDataCallback(s_AudioEngine.builder, s_myDataCallbackProc, nullptr);
+    AAudioStreamBuilder_setErrorCallback(s_AudioEngine.builder, s_myErrorCallbackProc, nullptr);
+
+    // Create an AAudioStream using the Builder.
+    result = AAudioStreamBuilder_openStream(s_AudioEngine.builder, &s_AudioEngine.stream);
+    if (result != AAUDIO_OK) {
+        printf("AAudioStreamBuilder_openStream returned %s",
+               AAudio_convertResultToText(result));
+        return result;
+    }
+
+    result = AAudioStream_requestStart(s_AudioEngine.stream);
+    if (result != AAUDIO_OK) {
+        printf("AAudioStream_requestStart returned %s",
+               AAudio_convertResultToText(result));
+    }
+
+    // Check to see what kind of stream we actually got.
+    deviceId = AAudioStream_getDeviceId(s_AudioEngine.stream);
+    framesPerBurst = AAudioStream_getFramesPerBurst(s_AudioEngine.stream);
+
+    printf("-------- started: deviceId = %3d, framesPerBurst = %3d\n", deviceId, framesPerBurst);
+
+    return result;
+}
+
+static aaudio_result_t s_StopAudio() {
+    aaudio_result_t result = AAUDIO_OK;
+    if (s_AudioEngine.stream != nullptr) {
+        result = AAudioStream_requestStop(s_AudioEngine.stream);
+        if (result != AAUDIO_OK) {
+            printf("AAudioStream_requestStop returned %s\n",
+                   AAudio_convertResultToText(result));
+        }
+        result = AAudioStream_close(s_AudioEngine.stream);
+        if (result != AAUDIO_OK) {
+            printf("AAudioStream_close returned %s\n",
+                   AAudio_convertResultToText(result));
+        }
+        s_AudioEngine.stream = nullptr;
+        AAudioStreamBuilder_delete(s_AudioEngine.builder);
+        s_AudioEngine.builder = nullptr;
+    }
+    return result;
+}
+
+static void s_StartThreadProc() {
+    // A good app would call s_StopAudio here! This test simulates a bad app.
+    s_StartAudio();
+    s_AudioEngine.thread = nullptr;
+}
+
+static void s_myErrorCallbackProc(
+        AAudioStream *stream __unused,
+        void *userData __unused,
+        aaudio_result_t error) {
+    if (error == AAUDIO_ERROR_DISCONNECTED) {
+        // Handle stream restart on a separate thread
+        if (s_AudioEngine.thread == nullptr) {
+            s_AudioEngine.thread = new std::thread(s_StartThreadProc);
+        }
+    }
+}
+
+int main(int argc, char **argv) {
+    (void) argc;
+    (void) argv;
+
+    aaudio_result_t result = AAUDIO_OK;
+
+    // Make printf print immediately so that debug info is not stuck
+    // in a buffer if we hang or crash.
+    setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+    printf("Test Bad Disconnect V1.0\n");
+    printf("\n=========== Please PLUG and UNPLUG headphones! ==============\n\n");
+    printf("You should see the deviceID change on each plug event.\n");
+    printf("Headphones will generally get a new deviceId each time.\n");
+    printf("Speakers will have the same deviceId each time.\n");
+    printf("The framesRead should reset on each plug event then increase over time.\n");
+    printf("\n");
+
+    result = s_StartAudio();
+
+    if (result == AAUDIO_OK) {
+        for (int i = 20; i > 0; i--) {
+            sleep(1);
+            printf("playing silence #%d, framesRead = %d\n", i, (int) s_AudioEngine.framesRead);
+        }
+    }
+
+    s_StopAudio();
+
+    printf("result = %d = %s\n", result, AAudio_convertResultToText(result));
+}
diff --git a/media/libaaudio/tests/test_linear_ramp.cpp b/media/libaaudio/tests/test_linear_ramp.cpp
index 5c53982..93226ba 100644
--- a/media/libaaudio/tests/test_linear_ramp.cpp
+++ b/media/libaaudio/tests/test_linear_ramp.cpp
@@ -15,13 +15,13 @@
  */
 
 #include <iostream>
+#include <math.h>
 
 #include <gtest/gtest.h>
 
 #include "utility/AAudioUtilities.h"
 #include "utility/LinearRamp.h"
 
-
 TEST(test_linear_ramp, linear_ramp_segments) {
     LinearRamp ramp;
     const float source[4] = {1.0f, 1.0f, 1.0f, 1.0f };
@@ -32,40 +32,40 @@
     ramp.setLengthInFrames(8);
     ramp.setTarget(8.0f);
 
-    ASSERT_EQ(8, ramp.getLengthInFrames());
+    EXPECT_EQ(8, ramp.getLengthInFrames());
 
     bool ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(1, ramping);
-    ASSERT_EQ(0.0f, levelFrom);
-    ASSERT_EQ(4.0f, levelTo);
+    EXPECT_EQ(1, ramping);
+    EXPECT_EQ(0.0f, levelFrom);
+    EXPECT_EQ(4.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(0.0f, destination[0]);
-    ASSERT_EQ(1.0f, destination[1]);
-    ASSERT_EQ(2.0f, destination[2]);
-    ASSERT_EQ(3.0f, destination[3]);
+    EXPECT_EQ(0.0f, destination[0]);
+    EXPECT_EQ(1.0f, destination[1]);
+    EXPECT_EQ(2.0f, destination[2]);
+    EXPECT_EQ(3.0f, destination[3]);
 
     ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(1, ramping);
-    ASSERT_EQ(4.0f, levelFrom);
-    ASSERT_EQ(8.0f, levelTo);
+    EXPECT_EQ(1, ramping);
+    EXPECT_EQ(4.0f, levelFrom);
+    EXPECT_EQ(8.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(4.0f, destination[0]);
-    ASSERT_EQ(5.0f, destination[1]);
-    ASSERT_EQ(6.0f, destination[2]);
-    ASSERT_EQ(7.0f, destination[3]);
+    EXPECT_EQ(4.0f, destination[0]);
+    EXPECT_EQ(5.0f, destination[1]);
+    EXPECT_EQ(6.0f, destination[2]);
+    EXPECT_EQ(7.0f, destination[3]);
 
     ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(0, ramping);
-    ASSERT_EQ(8.0f, levelFrom);
-    ASSERT_EQ(8.0f, levelTo);
+    EXPECT_EQ(0, ramping);
+    EXPECT_EQ(8.0f, levelFrom);
+    EXPECT_EQ(8.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(8.0f, destination[0]);
-    ASSERT_EQ(8.0f, destination[1]);
-    ASSERT_EQ(8.0f, destination[2]);
-    ASSERT_EQ(8.0f, destination[3]);
+    EXPECT_EQ(8.0f, destination[0]);
+    EXPECT_EQ(8.0f, destination[1]);
+    EXPECT_EQ(8.0f, destination[2]);
+    EXPECT_EQ(8.0f, destination[3]);
 
 };
 
@@ -80,29 +80,101 @@
     ramp.setLengthInFrames(4);
     ramp.setTarget(8.0f);
     ramp.forceCurrent(4.0f);
-    ASSERT_EQ(4.0f, ramp.getCurrent());
+    EXPECT_EQ(4.0f, ramp.getCurrent());
 
     bool ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(1, ramping);
-    ASSERT_EQ(4.0f, levelFrom);
-    ASSERT_EQ(8.0f, levelTo);
+    EXPECT_EQ(1, ramping);
+    EXPECT_EQ(4.0f, levelFrom);
+    EXPECT_EQ(8.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(4.0f, destination[0]);
-    ASSERT_EQ(5.0f, destination[1]);
-    ASSERT_EQ(6.0f, destination[2]);
-    ASSERT_EQ(7.0f, destination[3]);
+    EXPECT_EQ(4.0f, destination[0]);
+    EXPECT_EQ(5.0f, destination[1]);
+    EXPECT_EQ(6.0f, destination[2]);
+    EXPECT_EQ(7.0f, destination[3]);
 
     ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(0, ramping);
-    ASSERT_EQ(8.0f, levelFrom);
-    ASSERT_EQ(8.0f, levelTo);
+    EXPECT_EQ(0, ramping);
+    EXPECT_EQ(8.0f, levelFrom);
+    EXPECT_EQ(8.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(8.0f, destination[0]);
-    ASSERT_EQ(8.0f, destination[1]);
-    ASSERT_EQ(8.0f, destination[2]);
-    ASSERT_EQ(8.0f, destination[3]);
+    EXPECT_EQ(8.0f, destination[0]);
+    EXPECT_EQ(8.0f, destination[1]);
+    EXPECT_EQ(8.0f, destination[2]);
+    EXPECT_EQ(8.0f, destination[3]);
 
 };
 
+constexpr int16_t kMaxI16 = INT16_MAX;
+constexpr int16_t kMinI16 = INT16_MIN;
+constexpr int16_t kHalfI16 = 16384;
+constexpr int16_t kTenthI16 = 3277;
+
+//void AAudioConvert_floatToPcm16(const float *source,
+//                                int16_t *destination,
+//                                int32_t numSamples,
+//                                float amplitude);
+TEST(test_linear_ramp, float_to_i16) {
+    const float source[] = {12345.6f, 1.0f, 0.5f, 0.1f, 0.0f, -0.1f, -0.5f, -1.0f, -12345.6f};
+    constexpr size_t count = sizeof(source) / sizeof(source[0]);
+    int16_t destination[count];
+    const int16_t expected[count] = {kMaxI16, kMaxI16, kHalfI16, kTenthI16, 0,
+                                     -kTenthI16, -kHalfI16, kMinI16, kMinI16};
+
+    AAudioConvert_floatToPcm16(source, destination, count, 1.0f);
+    for (size_t i = 0; i < count; i++) {
+        EXPECT_EQ(expected[i], destination[i]);
+    }
+
+}
+
+//void AAudioConvert_pcm16ToFloat(const int16_t *source,
+//                                float *destination,
+//                                int32_t numSamples,
+//                                float amplitude);
+TEST(test_linear_ramp, i16_to_float) {
+    const int16_t source[] = {kMaxI16, kHalfI16, kTenthI16, 0,
+                              -kTenthI16, -kHalfI16, kMinI16};
+    constexpr size_t count = sizeof(source) / sizeof(source[0]);
+    float destination[count];
+    const float expected[count] = {(32767.0f / 32768.0f), 0.5f, 0.1f, 0.0f, -0.1f, -0.5f, -1.0f};
+
+    AAudioConvert_pcm16ToFloat(source, destination, count, 1.0f);
+    for (size_t i = 0; i < count; i++) {
+        EXPECT_NEAR(expected[i], destination[i], 0.0001f);
+    }
+
+}
+
+//void AAudio_linearRamp(const int16_t *source,
+//                       int16_t *destination,
+//                       int32_t numFrames,
+//                       int32_t samplesPerFrame,
+//                       float amplitude1,
+//                       float amplitude2);
+TEST(test_linear_ramp, ramp_i16_to_i16) {
+    const int16_t source[] = {1, 1, 1, 1, 1, 1, 1, 1};
+    constexpr size_t count = sizeof(source) / sizeof(source[0]);
+    int16_t destination[count];
+    // Ramp will sweep from -1 to almost +1
+    const int16_t expected[count] = {
+            -1, // from -1.00
+            -1, // from -0.75
+            -1, // from -0.50, round away from zero
+            0,  // from -0.25, round up to zero
+            0,  // from  0.00
+            0,  // from  0.25, round down to zero
+            1,  // from  0.50, round away from zero
+            1   // from  0.75
+    };
+
+    // sweep across zero to test symmetry
+    constexpr float amplitude1 = -1.0;
+    constexpr float amplitude2 = 1.0;
+    AAudio_linearRamp(source, destination, count, 1, amplitude1, amplitude2);
+    for (size_t i = 0; i < count; i++) {
+        EXPECT_EQ(expected[i], destination[i]);
+    }
+
+}
diff --git a/media/libaaudio/tests/test_timestamps.cpp b/media/libaaudio/tests/test_timestamps.cpp
index b57f0a4..dfa7815 100644
--- a/media/libaaudio/tests/test_timestamps.cpp
+++ b/media/libaaudio/tests/test_timestamps.cpp
@@ -22,8 +22,7 @@
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
-#include "utils/AAudioExampleUtils.h"
-#include "../examples/utils/AAudioExampleUtils.h"
+#include "AAudioExampleUtils.h"
 
 // Arbitrary period for glitches, once per second at 48000 Hz.
 #define FORCED_UNDERRUN_PERIOD_FRAMES    48000
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
new file mode 100644
index 0000000..dc19985
--- /dev/null
+++ b/media/libaaudio/tests/test_various.cpp
@@ -0,0 +1,327 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Test various AAudio features including AAudioStream_setBufferSizeInFrames().
+
+#include <condition_variable>
+#include <mutex>
+#include <stdio.h>
+
+#include <android-base/macros.h>
+#include <aaudio/AAudio.h>
+
+#include <gtest/gtest.h>
+#include <unistd.h>
+
+
+// Callback function that does nothing.
+aaudio_data_callback_result_t NoopDataCallbackProc(
+        AAudioStream *stream,
+        void *userData,
+        void *audioData,
+        int32_t numFrames
+) {
+    (void) stream;
+    (void) userData;
+    (void) audioData;
+    (void) numFrames;
+    return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+// Test AAudioStream_setBufferSizeInFrames()
+
+constexpr int64_t NANOS_PER_MILLISECOND = 1000 * 1000;
+
+//int foo() { // To fix Android Studio formatting when editing.
+TEST(test_various, aaudio_stop_when_open) {
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+// Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+// Request stream properties.
+    AAudioStreamBuilder_setDataCallback(aaudioBuilder, NoopDataCallbackProc, nullptr);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+// Create an AAudioStream using the Builder.
+    EXPECT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+
+    aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+                                                         AAUDIO_STREAM_STATE_UNKNOWN, &state,
+                                                         1000 * NANOS_PER_MILLISECOND));
+    EXPECT_EQ(AAUDIO_STREAM_STATE_OPEN, state);
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
+
+    state = AAUDIO_STREAM_STATE_UNKNOWN;
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+                                                         AAUDIO_STREAM_STATE_UNKNOWN, &state, 0));
+    EXPECT_EQ(AAUDIO_STREAM_STATE_OPEN, state);
+
+    AAudioStream_close(aaudioStream);
+    AAudioStreamBuilder_delete(aaudioBuilder);
+}
+
+//int boo() { // To fix Android Studio formatting when editing.
+TEST(test_various, aaudio_flush_when_started) {
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+// Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+// Request stream properties.
+    AAudioStreamBuilder_setDataCallback(aaudioBuilder, NoopDataCallbackProc, nullptr);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+// Create an AAudioStream using the Builder.
+    EXPECT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
+
+    aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+                                                         AAUDIO_STREAM_STATE_STARTING, &state,
+                                                         1000 * NANOS_PER_MILLISECOND));
+    EXPECT_EQ(AAUDIO_STREAM_STATE_STARTED, state);
+
+    EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestFlush(aaudioStream));
+
+    state = AAUDIO_STREAM_STATE_UNKNOWN;
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+                                                         AAUDIO_STREAM_STATE_UNKNOWN, &state, 0));
+    EXPECT_EQ(AAUDIO_STREAM_STATE_STARTED, state);
+
+    AAudioStream_close(aaudioStream);
+    AAudioStreamBuilder_delete(aaudioBuilder);
+}
+
+//int main() { // To fix Android Studio formatting when editing.
+TEST(test_various, aaudio_set_buffer_size) {
+
+    int32_t bufferCapacity;
+    int32_t framesPerBurst = 0;
+    int32_t actualSize = 0;
+
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+    // Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+    // Request stream properties.
+    AAudioStreamBuilder_setDataCallback(aaudioBuilder, NoopDataCallbackProc, nullptr);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+    // Create an AAudioStream using the Builder.
+    EXPECT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+
+    // This is the number of frames that are read in one chunk by a DMA controller
+    // or a DSP or a mixer.
+    framesPerBurst = AAudioStream_getFramesPerBurst(aaudioStream);
+    bufferCapacity = AAudioStream_getBufferCapacityInFrames(aaudioStream);
+    printf("          bufferCapacity = %d, remainder = %d\n",
+           bufferCapacity, bufferCapacity % framesPerBurst);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 0);
+    EXPECT_GT(actualSize, 0);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 2 * framesPerBurst);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, bufferCapacity - 1);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, bufferCapacity);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, bufferCapacity + 1);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 1234567);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, INT32_MAX);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, INT32_MIN);
+    EXPECT_GT(actualSize, 0);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    AAudioStream_close(aaudioStream);
+    AAudioStreamBuilder_delete(aaudioBuilder);
+}
+
+
+// ************************************************************
+// Test to make sure that AAUDIO_CALLBACK_RESULT_STOP works.
+
+// Callback function that counts calls.
+aaudio_data_callback_result_t CallbackOnceProc(
+        AAudioStream *stream,
+        void *userData,
+        void *audioData,
+        int32_t numFrames
+) {
+    (void) stream;
+    (void) audioData;
+    (void) numFrames;
+
+    std::atomic<int32_t> *callbackCountPtr = (std::atomic<int32_t> *)userData;
+    (*callbackCountPtr)++;
+
+    return AAUDIO_CALLBACK_RESULT_STOP;
+}
+
+void checkCallbackOnce(aaudio_performance_mode_t perfMode) {
+
+    std::atomic<int32_t>   callbackCount{0};
+
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+    // Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+    // Request stream properties.
+    AAudioStreamBuilder_setDataCallback(aaudioBuilder, CallbackOnceProc, &callbackCount);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
+
+    // Create an AAudioStream using the Builder.
+    ASSERT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+    AAudioStreamBuilder_delete(aaudioBuilder);
+
+    ASSERT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
+
+    sleep(1); // Give callback a chance to run many times.
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
+
+    EXPECT_EQ(1, callbackCount.load()); // should stop after first call
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
+}
+
+TEST(test_various, aaudio_callback_once_none) {
+    checkCallbackOnce(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
+TEST(test_various, aaudio_callback_once_lowlat) {
+    checkCallbackOnce(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
+// ************************************************************
+struct WakeUpCallbackData {
+    void wakeOther() {
+        // signal waiting test to wake up
+        {
+            std::lock_guard <std::mutex> lock(mutex);
+            finished = true;
+        }
+        conditionVariable.notify_one();
+    }
+
+    void waitForFinished() {
+        std::unique_lock <std::mutex> aLock(mutex);
+        conditionVariable.wait(aLock, [=] { return finished; });
+    }
+
+    // For signalling foreground test when callback finished
+    std::mutex              mutex;
+    std::condition_variable conditionVariable;
+    bool                    finished = false;
+};
+
+// Test to make sure we cannot call recursively into the system from a callback.
+struct DangerousData : public WakeUpCallbackData {
+    aaudio_result_t resultStart = AAUDIO_OK;
+    aaudio_result_t resultStop = AAUDIO_OK;
+    aaudio_result_t resultPause = AAUDIO_OK;
+    aaudio_result_t resultFlush = AAUDIO_OK;
+    aaudio_result_t resultClose = AAUDIO_OK;
+};
+
+// Callback function that tries to call back into the stream.
+aaudio_data_callback_result_t DangerousDataCallbackProc(
+        AAudioStream *stream,
+        void *userData,
+        void *audioData,
+        int32_t numFrames) {
+    (void) audioData;
+    (void) numFrames;
+
+    DangerousData *data = (DangerousData *)userData;
+    data->resultStart = AAudioStream_requestStart(stream);
+    data->resultStop = AAudioStream_requestStop(stream);
+    data->resultPause = AAudioStream_requestPause(stream);
+    data->resultFlush = AAudioStream_requestFlush(stream);
+    data->resultClose = AAudioStream_close(stream);
+
+    data->wakeOther();
+
+    return AAUDIO_CALLBACK_RESULT_STOP;
+}
+
+//int main() { // To fix Android Studio formatting when editing.
+void checkDangerousCallback(aaudio_performance_mode_t perfMode) {
+    DangerousData        dangerousData;
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream        *aaudioStream = nullptr;
+
+    // Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+    // Request stream properties.
+    AAudioStreamBuilder_setDataCallback(aaudioBuilder, DangerousDataCallbackProc, &dangerousData);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
+
+    // Create an AAudioStream using the Builder.
+    ASSERT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+    AAudioStreamBuilder_delete(aaudioBuilder);
+
+    ASSERT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
+
+    dangerousData.waitForFinished();
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
+
+    EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, dangerousData.resultStart);
+    EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, dangerousData.resultStop);
+    EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, dangerousData.resultPause);
+    EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, dangerousData.resultFlush);
+    EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, dangerousData.resultClose);
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
+}
+
+//int main() { // To fix Android Studio formatting when editing.
+
+TEST(test_various, aaudio_callback_blockers_none) {
+    checkDangerousCallback(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
+TEST(test_various, aaudio_callback_blockers_lowlat) {
+    checkDangerousCallback(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
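
The buffer-size test above leans on two properties of AAudioStream_setBufferSizeInFrames(): it clamps any request, including INT32_MIN and INT32_MAX, into a usable range, and it returns the size actually granted rather than echoing the request. A minimal sketch of how an application might rely on that, targeting two bursts of buffering (the two-burst figure is an illustrative choice, not something the test mandates):

    #include <aaudio/AAudio.h>

    // Open a low-latency output stream and ask for a buffer of two bursts,
    // trusting the framework to clamp the request to what the device supports.
    static aaudio_result_t openWithTwoBurstBuffer(AAudioStream **streamOut) {
        AAudioStreamBuilder *builder = nullptr;
        aaudio_result_t result = AAudio_createStreamBuilder(&builder);
        if (result != AAUDIO_OK) return result;

        AAudioStreamBuilder_setPerformanceMode(builder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
        result = AAudioStreamBuilder_openStream(builder, streamOut);
        AAudioStreamBuilder_delete(builder);
        if (result != AAUDIO_OK) return result;

        const int32_t burst = AAudioStream_getFramesPerBurst(*streamOut);
        // Returns the granted size (already clamped to the capacity), or a negative error.
        const int32_t granted = AAudioStream_setBufferSizeInFrames(*streamOut, 2 * burst);
        return (granted >= 0) ? AAUDIO_OK : granted;
    }
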
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 61c946c..94253a4 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -6,7 +6,22 @@
 
 cc_library_shared {
     name: "libaudioclient",
+
+    aidl: {
+        export_aidl_headers: true,
+        local_include_dirs: ["aidl"],
+        include_dirs: [
+            "frameworks/av/media/libaudioclient/aidl",
+        ],
+    },
+
     srcs: [
+        // AIDL files for audioclient interfaces
+        // The headers for these interfaces will be available to any modules that
+        // include libaudioclient, at the path "aidl/package/path/BnFoo.h"
+        "aidl/android/media/IAudioRecord.aidl",
+        ":libaudioclient_aidl",
+
         "AudioEffect.cpp",
         "AudioPolicy.cpp",
         "AudioRecord.cpp",
@@ -17,7 +32,6 @@
         "IAudioFlingerClient.cpp",
         "IAudioPolicyService.cpp",
         "IAudioPolicyServiceClient.cpp",
-        "IAudioRecord.cpp",
         "IAudioTrack.cpp",
         "IEffect.cpp",
         "IEffectClient.cpp",
@@ -33,10 +47,13 @@
         "libdl",
         "libaudioutils",
         "libaudiomanager",
+        "libmedia_helper",
+        "libmediametrics",
+        "libstagefright_foundation",
     ],
     export_shared_lib_headers: ["libbinder"],
 
-    local_include_dirs: ["include/media"],
+    local_include_dirs: ["include/media", "aidl"],
     header_libs: ["libaudioclient_headers"],
     export_header_lib_headers: ["libaudioclient_headers"],
 
@@ -56,3 +73,11 @@
         ],
     },
 }
+
+// AIDL interface between libaudioclient and framework.jar
+filegroup {
+    name: "libaudioclient_aidl",
+    srcs: [
+        "aidl/android/media/IPlayer.aidl",
+    ],
+}
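
Because the aidl block sets export_aidl_headers: true, any module that links against libaudioclient can include the classes generated from these .aidl files directly. A rough sketch of a server-side consumer, assuming the usual aidl-cpp output layout (the generated header path, namespace, and int32_t parameter types are inferred from the android.media package and from the calls elsewhere in this change, not spelled out here):

    #include <android/media/BnAudioRecord.h>  // generated from aidl/android/media/IAudioRecord.aidl

    // Hypothetical stub; the real implementation lives in AudioFlinger.
    class RecordHandle : public android::media::BnAudioRecord {
    public:
        android::binder::Status start(int32_t event, int32_t triggerSession) override;
        android::binder::Status stop() override;
    };
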
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index ba4acc6..bc294c5 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -26,6 +26,8 @@
 #include <utils/Log.h>
 #include <private/media/AudioTrackShared.h>
 #include <media/IAudioFlinger.h>
+#include <media/MediaAnalyticsItem.h>
+#include <media/TypeConverter.h>
 
 #define WAIT_PERIOD_MS          10
 
@@ -65,12 +67,39 @@
 
 // ---------------------------------------------------------------------------
 
+static std::string audioFormatTypeString(audio_format_t value) {
+    std::string formatType;
+    if (FormatConverter::toString(value, formatType)) {
+        return formatType;
+    }
+    char rawbuffer[16];  // room for "%d"
+    snprintf(rawbuffer, sizeof(rawbuffer), "%d", value);
+    return rawbuffer;
+}
+
+void AudioRecord::MediaMetrics::gather(const AudioRecord *record)
+{
+    // key for media statistics is defined in the header
+    // attrs for media statistics
+    static constexpr char kAudioRecordChannelCount[] = "android.media.audiorecord.channels";
+    static constexpr char kAudioRecordFormat[] = "android.media.audiorecord.format";
+    static constexpr char kAudioRecordLatency[] = "android.media.audiorecord.latency";
+    static constexpr char kAudioRecordSampleRate[] = "android.media.audiorecord.samplerate";
+
+    // constructor guarantees mAnalyticsItem is valid
+
+    mAnalyticsItem->setInt32(kAudioRecordLatency, record->mLatency);
+    mAnalyticsItem->setInt32(kAudioRecordSampleRate, record->mSampleRate);
+    mAnalyticsItem->setInt32(kAudioRecordChannelCount, record->mChannelCount);
+    mAnalyticsItem->setCString(kAudioRecordFormat,
+                               audioFormatTypeString(record->mFormat).c_str());
+}
+
 AudioRecord::AudioRecord(const String16 &opPackageName)
     : mActive(false), mStatus(NO_INIT), mOpPackageName(opPackageName),
       mSessionId(AUDIO_SESSION_ALLOCATE),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE), mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE), mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE)
 {
 }
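
The MediaMetrics helper added above collects a handful of per-stream attributes into a MediaAnalyticsItem when the AudioRecord is torn down. For orientation, a rough sketch of the gathering side; only the setters that this change itself uses appear here, and creation and submission of the item are left out because they are not part of this hunk:

    #include <media/MediaAnalyticsItem.h>

    // Populate an already-constructed item, mirroring MediaMetrics::gather() above.
    static void gatherRecordMetrics(android::MediaAnalyticsItem *item,
                                    int32_t sampleRate, int32_t channelCount,
                                    int32_t latencyMs, const char *format) {
        item->setInt32("android.media.audiorecord.samplerate", sampleRate);
        item->setInt32("android.media.audiorecord.channels", channelCount);
        item->setInt32("android.media.audiorecord.latency", latencyMs);
        item->setCString("android.media.audiorecord.format", format);
    }
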
 
@@ -89,24 +118,25 @@
         audio_input_flags_t flags,
         uid_t uid,
         pid_t pid,
-        const audio_attributes_t* pAttributes)
+        const audio_attributes_t* pAttributes,
+        audio_port_handle_t selectedDeviceId)
     : mActive(false),
       mStatus(NO_INIT),
       mOpPackageName(opPackageName),
       mSessionId(AUDIO_SESSION_ALLOCATE),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
-      mProxy(NULL),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mProxy(NULL)
 {
-    mStatus = set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
+    (void)set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
             notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags,
-            uid, pid, pAttributes);
+            uid, pid, pAttributes, selectedDeviceId);
 }
 
 AudioRecord::~AudioRecord()
 {
+    mMediaMetrics.gather(this);
+
     if (mStatus == NO_ERROR) {
         // Make sure that callback function exits in the case where
         // it is looping on buffer empty condition in obtainBuffer().
@@ -148,14 +178,22 @@
         audio_input_flags_t flags,
         uid_t uid,
         pid_t pid,
-        const audio_attributes_t* pAttributes)
+        const audio_attributes_t* pAttributes,
+        audio_port_handle_t selectedDeviceId)
 {
+    status_t status = NO_ERROR;
+    uint32_t channelCount;
+    pid_t callingPid;
+    pid_t myPid;
+
     ALOGV("set(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "notificationFrames %u, sessionId %d, transferType %d, flags %#x, opPackageName %s "
           "uid %d, pid %d",
           inputSource, sampleRate, format, channelMask, frameCount, notificationFrames,
           sessionId, transferType, flags, String8(mOpPackageName).string(), uid, pid);
 
+    mSelectedDeviceId = selectedDeviceId;
+
     switch (transferType) {
     case TRANSFER_DEFAULT:
         if (cbf == NULL || threadCanCallJava) {
@@ -167,7 +205,8 @@
     case TRANSFER_CALLBACK:
         if (cbf == NULL) {
             ALOGE("Transfer type TRANSFER_CALLBACK but cbf == NULL");
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         break;
     case TRANSFER_OBTAIN:
@@ -175,14 +214,16 @@
         break;
     default:
         ALOGE("Invalid transfer type %d", transferType);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mTransfer = transferType;
 
     // invariant that mAudioRecord != 0 is true only after set() returns successfully
     if (mAudioRecord != 0) {
         ALOGE("Track already in use");
-        return INVALID_OPERATION;
+        status = INVALID_OPERATION;
+        goto exit;
     }
 
     if (pAttributes == NULL) {
@@ -206,16 +247,18 @@
     // AudioFlinger capture only supports linear PCM
     if (!audio_is_valid_format(format) || !audio_is_linear_pcm(format)) {
         ALOGE("Format %#x is not linear pcm", format);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mFormat = format;
 
     if (!audio_is_input_channel(channelMask)) {
         ALOGE("Invalid channel mask %#x", channelMask);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mChannelMask = channelMask;
-    uint32_t channelCount = audio_channel_count_from_in_mask(channelMask);
+    channelCount = audio_channel_count_from_in_mask(channelMask);
     mChannelCount = channelCount;
 
     if (audio_is_linear_pcm(format)) {
@@ -224,28 +267,24 @@
         mFrameSize = sizeof(uint8_t);
     }
 
-    // mFrameCount is initialized in openRecord_l
+    // mFrameCount is initialized in createRecord_l
     mReqFrameCount = frameCount;
 
     mNotificationFramesReq = notificationFrames;
-    // mNotificationFramesAct is initialized in openRecord_l
+    // mNotificationFramesAct is initialized in createRecord_l
 
-    if (sessionId == AUDIO_SESSION_ALLOCATE) {
-        mSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
-    } else {
-        mSessionId = sessionId;
-    }
+    mSessionId = sessionId;
     ALOGV("set(): mSessionId %d", mSessionId);
 
-    int callingpid = IPCThreadState::self()->getCallingPid();
-    int mypid = getpid();
-    if (uid == AUDIO_UID_INVALID || (callingpid != mypid)) {
+    callingPid = IPCThreadState::self()->getCallingPid();
+    myPid = getpid();
+    if (uid == AUDIO_UID_INVALID || (callingPid != myPid)) {
         mClientUid = IPCThreadState::self()->getCallingUid();
     } else {
         mClientUid = uid;
     }
-    if (pid == -1 || (callingpid != mypid)) {
-        mClientPid = callingpid;
+    if (pid == -1 || (callingPid != myPid)) {
+        mClientPid = callingPid;
     } else {
         mClientPid = pid;
     }
@@ -260,7 +299,7 @@
     }
 
     // create the IAudioRecord
-    status_t status = openRecord_l(0 /*epoch*/, mOpPackageName);
+    status = createRecord_l(0 /*epoch*/, mOpPackageName);
 
     if (status != NO_ERROR) {
         if (mAudioRecordThread != 0) {
@@ -268,10 +307,9 @@
             mAudioRecordThread->requestExitAndWait();
             mAudioRecordThread.clear();
         }
-        return status;
+        goto exit;
     }
 
-    mStatus = NO_ERROR;
     mUserData = user;
     // TODO: add audio hardware input latency here
     mLatency = (1000LL * mFrameCount) / mSampleRate;
@@ -286,7 +324,9 @@
     mFramesRead = 0;
     mFramesReadServerOffset = 0;
 
-    return NO_ERROR;
+exit:
+    mStatus = status;
+    return status;
 }
 
 // -------------------------------------------------------------------------
@@ -323,7 +363,7 @@
 
     status_t status = NO_ERROR;
     if (!(flags & CBLK_INVALID)) {
-        status = mAudioRecord->start(event, triggerSession);
+        status = mAudioRecord->start(event, triggerSession).transactionError();
         if (status == DEAD_OBJECT) {
             flags |= CBLK_INVALID;
         }
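
Now that IAudioRecord is an AIDL-generated interface, start() hands back a binder::Status rather than a status_t, and the .transactionError() call above deliberately keeps the old behaviour by looking only at transport-level failures. A small sketch of the fuller conversion a caller could do if it also cared about service-specific errors (the helper name is illustrative, not part of this change):

    #include <binder/Status.h>
    #include <utils/Errors.h>

    // Collapse a binder::Status into a single status_t.
    static android::status_t statusFromBinder(const android::binder::Status &s) {
        if (s.isOk()) return android::OK;
        const android::status_t transport = s.transactionError();
        if (transport != android::OK) return transport;  // binder transport failed
        return android::UNKNOWN_ERROR;                   // exception or service-specific error
    }
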
@@ -489,6 +529,7 @@
                 mAudioRecord->stop();
             }
             android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+            mProxy->interrupt();
         }
     }
     return NO_ERROR;
@@ -521,6 +562,27 @@
     return mRoutedDeviceId;
 }
 
+status_t AudioRecord::dump(int fd, const Vector<String16>& args __unused) const
+{
+    String8 result;
+
+    result.append(" AudioRecord::dump\n");
+    result.appendFormat("  status(%d), active(%d), session Id(%d)\n",
+                        mStatus, mActive, mSessionId);
+    result.appendFormat("  flags(%#x), req. flags(%#x), audio source(%d)\n",
+                        mFlags, mOrigFlags, mAttributes.source);
+    result.appendFormat("  format(%#x), channel mask(%#x), channel count(%u), sample rate(%u)\n",
+                  mFormat, mChannelMask, mChannelCount, mSampleRate);
+    result.appendFormat("  frame count(%zu), req. frame count(%zu)\n",
+                  mFrameCount, mReqFrameCount);
+    result.appendFormat("  notif. frame count(%u), req. notif. frame count(%u)\n",
+             mNotificationFramesAct, mNotificationFramesReq);
+    result.appendFormat("  input(%d), latency(%u), selected device Id(%d), routed device Id(%d)\n",
+                        mInput, mLatency, mSelectedDeviceId, mRoutedDeviceId);
+    ::write(fd, result.string(), result.size());
+    return NO_ERROR;
+}
+
 // -------------------------------------------------------------------------
 // TODO Move this macro to a common header file for enum to string conversion in audio framework.
 #define MEDIA_CASE_ENUM(name) case name: return #name
@@ -536,70 +598,29 @@
 }
 
 // must be called with mLock held
-status_t AudioRecord::openRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName)
+status_t AudioRecord::createRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName)
 {
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
+    IAudioFlinger::CreateRecordInput input;
+    IAudioFlinger::CreateRecordOutput output;
+    audio_session_t originalSessionId;
+    sp<media::IAudioRecord> record;
+    void *iMemPointer;
+    audio_track_cblk_t* cblk;
+    status_t status;
+
     if (audioFlinger == 0) {
         ALOGE("Could not get audioflinger");
-        return NO_INIT;
+        status = NO_INIT;
+        goto exit;
     }
 
-    audio_io_handle_t input;
-
     // mFlags (not mOrigFlags) is modified depending on whether fast request is accepted.
     // After fast request is denied, we will request again if IAudioRecord is re-created.
 
-    status_t status;
-
-    // Not a conventional loop, but a retry loop for at most two iterations total.
-    // Try first maybe with FAST flag then try again without FAST flag if that fails.
-    // Exits loop normally via a return at the bottom, or with error via a break.
-    // The sp<> references will be dropped when re-entering scope.
-    // The lack of indentation is deliberate, to reduce code churn and ease merges.
-    for (;;) {
-    audio_config_base_t config  = {
-            .sample_rate = mSampleRate,
-            .channel_mask = mChannelMask,
-            .format = mFormat
-        };
-    mRoutedDeviceId = mSelectedDeviceId;
-    status = AudioSystem::getInputForAttr(&mAttributes, &input,
-                                        mSessionId,
-                                        // FIXME compare to AudioTrack
-                                        mClientPid,
-                                        mClientUid,
-                                        &config,
-                                        mFlags, &mRoutedDeviceId, &mPortId);
-
-    if (status != NO_ERROR || input == AUDIO_IO_HANDLE_NONE) {
-        ALOGE("Could not get audio input for session %d, record source %d, sample rate %u, "
-              "format %#x, channel mask %#x, flags %#x",
-              mSessionId, mAttributes.source, mSampleRate, mFormat, mChannelMask, mFlags);
-        return BAD_VALUE;
-    }
-
     // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
     // we must release it ourselves if anything goes wrong.
 
-#if 0
-    size_t afFrameCount;
-    status = AudioSystem::getFrameCount(input, &afFrameCount);
-    if (status != NO_ERROR) {
-        ALOGE("getFrameCount(input=%d) status %d", input, status);
-        break;
-    }
-#endif
-
-    uint32_t afSampleRate;
-    status = AudioSystem::getSamplingRate(input, &afSampleRate);
-    if (status != NO_ERROR) {
-        ALOGE("getSamplingRate(input=%d) status %d", input, status);
-        break;
-    }
-    if (mSampleRate == 0) {
-        mSampleRate = afSampleRate;
-    }
-
     // Client can only express a preference for FAST.  Server will perform additional tests.
     if (mFlags & AUDIO_INPUT_FLAG_FAST) {
         bool useCaseAllowed =
@@ -618,66 +639,41 @@
         if (!useCaseAllowed) {
             ALOGW("AUDIO_INPUT_FLAG_FAST denied, incompatible transfer = %s",
                   convertTransferToText(mTransfer));
-        }
-
-        // sample rates must also match
-        bool sampleRateAllowed = mSampleRate == afSampleRate;
-        if (!sampleRateAllowed) {
-            ALOGW("AUDIO_INPUT_FLAG_FAST denied, rates do not match %u Hz, require %u Hz",
-                  mSampleRate, afSampleRate);
-        }
-
-        bool fastAllowed = useCaseAllowed && sampleRateAllowed;
-        if (!fastAllowed) {
             mFlags = (audio_input_flags_t) (mFlags & ~(AUDIO_INPUT_FLAG_FAST |
                     AUDIO_INPUT_FLAG_RAW));
-            AudioSystem::releaseInput(input, mSessionId);
-            continue;   // retry
         }
     }
 
-    // The notification frame count is the period between callbacks, as suggested by the client
-    // but moderated by the server.  For record, the calculations are done entirely on server side.
-    size_t notificationFrames = mNotificationFramesReq;
-    size_t frameCount = mReqFrameCount;
-
-    audio_input_flags_t flags = mFlags;
-
-    pid_t tid = -1;
+    input.attr = mAttributes;
+    input.config.sample_rate = mSampleRate;
+    input.config.channel_mask = mChannelMask;
+    input.config.format = mFormat;
+    input.clientInfo.clientUid = mClientUid;
+    input.clientInfo.clientPid = mClientPid;
+    input.clientInfo.clientTid = -1;
     if (mFlags & AUDIO_INPUT_FLAG_FAST) {
         if (mAudioRecordThread != 0) {
-            tid = mAudioRecordThread->getTid();
+            input.clientInfo.clientTid = mAudioRecordThread->getTid();
         }
     }
+    input.opPackageName = opPackageName;
 
-    size_t temp = frameCount;   // temp may be replaced by a revised value of frameCount,
-                                // but we will still need the original value also
-    audio_session_t originalSessionId = mSessionId;
+    input.flags = mFlags;
+    // The notification frame count is the period between callbacks, as suggested by the client
+    // but moderated by the server.  For record, the calculations are done entirely on server side.
+    input.frameCount = mReqFrameCount;
+    input.notificationFrameCount = mNotificationFramesReq;
+    input.selectedDeviceId = mSelectedDeviceId;
+    input.sessionId = mSessionId;
+    originalSessionId = mSessionId;
 
-    sp<IMemory> iMem;           // for cblk
-    sp<IMemory> bufferMem;
-    sp<IAudioRecord> record = audioFlinger->openRecord(input,
-                                                       mSampleRate,
-                                                       mFormat,
-                                                       mChannelMask,
-                                                       opPackageName,
-                                                       &temp,
-                                                       &flags,
-                                                       mClientPid,
-                                                       tid,
-                                                       mClientUid,
-                                                       &mSessionId,
-                                                       &notificationFrames,
-                                                       iMem,
-                                                       bufferMem,
-                                                       &status,
-                                                       mPortId);
-    ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId,
-            "session ID changed from %d to %d", originalSessionId, mSessionId);
+    record = audioFlinger->createRecord(input,
+                                        output,
+                                        &status);
 
     if (status != NO_ERROR) {
         ALOGE("AudioFlinger could not create record track, status: %d", status);
-        break;
+        goto exit;
     }
     ALOG_ASSERT(record != 0);
 
@@ -685,41 +681,41 @@
     // so we are no longer responsible for releasing it.
 
     mAwaitBoost = false;
-    if (mFlags & AUDIO_INPUT_FLAG_FAST) {
-        if (flags & AUDIO_INPUT_FLAG_FAST) {
-            ALOGI("AUDIO_INPUT_FLAG_FAST successful; frameCount %zu -> %zu", frameCount, temp);
-            mAwaitBoost = true;
-        } else {
-            ALOGW("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %zu -> %zu", frameCount, temp);
-            mFlags = (audio_input_flags_t) (mFlags & ~(AUDIO_INPUT_FLAG_FAST |
-                    AUDIO_INPUT_FLAG_RAW));
-            continue;   // retry
-        }
+    if (output.flags & AUDIO_INPUT_FLAG_FAST) {
+        ALOGI("AUDIO_INPUT_FLAG_FAST successful; frameCount %zu -> %zu",
+              mReqFrameCount, output.frameCount);
+        mAwaitBoost = true;
     }
-    mFlags = flags;
+    mFlags = output.flags;
+    mRoutedDeviceId = output.selectedDeviceId;
+    mSessionId = output.sessionId;
+    mSampleRate = output.sampleRate;
 
-    if (iMem == 0) {
+    if (output.cblk == 0) {
         ALOGE("Could not get control block");
-        return NO_INIT;
+        status = NO_INIT;
+        goto exit;
     }
-    void *iMemPointer = iMem->pointer();
+    iMemPointer = output.cblk->pointer();
     if (iMemPointer == NULL) {
         ALOGE("Could not get control block pointer");
-        return NO_INIT;
+        status = NO_INIT;
+        goto exit;
     }
-    audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
+    cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
 
     // Starting address of buffers in shared memory.
     // The buffers are either immediately after the control block,
     // or in a separate area at discretion of server.
     void *buffers;
-    if (bufferMem == 0) {
+    if (output.buffers == 0) {
         buffers = cblk + 1;
     } else {
-        buffers = bufferMem->pointer();
+        buffers = output.buffers->pointer();
         if (buffers == NULL) {
             ALOGE("Could not get buffer pointer");
-            return NO_INIT;
+            status = NO_INIT;
+            goto exit;
         }
     }
 
@@ -729,43 +725,42 @@
         mDeathNotifier.clear();
     }
     mAudioRecord = record;
-    mCblkMemory = iMem;
-    mBufferMemory = bufferMem;
+    mCblkMemory = output.cblk;
+    mBufferMemory = output.buffers;
     IPCThreadState::self()->flushCommands();
 
     mCblk = cblk;
-    // note that temp is the (possibly revised) value of frameCount
-    if (temp < frameCount || (frameCount == 0 && temp == 0)) {
-        ALOGW("Requested frameCount %zu but received frameCount %zu", frameCount, temp);
+    // note that output.frameCount is the (possibly revised) value of mReqFrameCount
+    if (output.frameCount < mReqFrameCount || (mReqFrameCount == 0 && output.frameCount == 0)) {
+        ALOGW("Requested frameCount %zu but received frameCount %zu",
+              mReqFrameCount,  output.frameCount);
     }
-    frameCount = temp;
 
     // Make sure that application is notified with sufficient margin before overrun.
     // The computation is done on server side.
-    if (mNotificationFramesReq > 0 && notificationFrames != mNotificationFramesReq) {
+    if (mNotificationFramesReq > 0 && output.notificationFrameCount != mNotificationFramesReq) {
         ALOGW("Server adjusted notificationFrames from %u to %zu for frameCount %zu",
-                mNotificationFramesReq, notificationFrames, frameCount);
+                mNotificationFramesReq, output.notificationFrameCount, output.frameCount);
     }
-    mNotificationFramesAct = (uint32_t) notificationFrames;
-
+    mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
 
     //mInput != input includes the case where mInput == AUDIO_IO_HANDLE_NONE for first creation
-    if (mDeviceCallback != 0 && mInput != input) {
+    if (mDeviceCallback != 0 && mInput != output.inputId) {
         if (mInput != AUDIO_IO_HANDLE_NONE) {
             AudioSystem::removeAudioDeviceCallback(this, mInput);
         }
-        AudioSystem::addAudioDeviceCallback(this, input);
+        AudioSystem::addAudioDeviceCallback(this, output.inputId);
     }
 
     // We retain a copy of the I/O handle, but don't own the reference
-    mInput = input;
+    mInput = output.inputId;
     mRefreshRemaining = true;
 
-    mFrameCount = frameCount;
+    mFrameCount = output.frameCount;
     // If IAudioRecord is re-created, don't let the requested frameCount
     // decrease.  This can confuse clients that cache frameCount().
-    if (frameCount > mReqFrameCount) {
-        mReqFrameCount = frameCount;
+    if (mFrameCount > mReqFrameCount) {
+        mReqFrameCount = mFrameCount;
     }
 
     // update proxy
@@ -776,17 +771,9 @@
     mDeathNotifier = new DeathNotifier(this);
     IInterface::asBinder(mAudioRecord)->linkToDeath(mDeathNotifier, this);
 
-    return NO_ERROR;
-
-    // End of retry loop.
-    // The lack of indentation is deliberate, to reduce code churn and ease merges.
-    }
-
-// Arrive here on error, via a break
-    AudioSystem::releaseInput(input, mSessionId);
-    if (status == NO_ERROR) {
-        status = NO_INIT;
-    }
+exit:
+    mStatus = status;
+    // sp<IAudioRecord> record destructor will cause releaseInput() to be called by AudioFlinger
     return status;
 }
 
@@ -1218,17 +1205,18 @@
 
     mFlags = mOrigFlags;
 
-    // if the new IAudioRecord is created, openRecord_l() will modify the
+    // if the new IAudioRecord is created, createRecord_l() will modify the
     // following member variables: mAudioRecord, mCblkMemory, mCblk, mBufferMemory.
     // It will also delete the strong references on previous IAudioRecord and IMemory
     Modulo<uint32_t> position(mProxy->getPosition());
     mNewPosition = position + mUpdatePeriod;
-    status_t result = openRecord_l(position, mOpPackageName);
+    status_t result = createRecord_l(position, mOpPackageName);
     if (result == NO_ERROR) {
         if (mActive) {
             // callback thread or sync event hasn't changed
             // FIXME this fails if we have a new AudioFlinger instance
-            result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, AUDIO_SESSION_NONE);
+            result = mAudioRecord->start(
+                AudioSystem::SYNC_EVENT_SAME, AUDIO_SESSION_NONE).transactionError();
         }
         mFramesReadServerOffset = mFramesRead; // server resets to zero so we need an offset.
     }
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index cdc75ac..50fe385 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
+#include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
 #include <media/IAudioFlinger.h>
 #include <media/IAudioPolicyService.h>
@@ -253,6 +254,31 @@
     return volume ? 100 - int(dBConvertInverse * log(volume) + 0.5) : 0;
 }
 
+/* static */ size_t AudioSystem::calculateMinFrameCount(
+        uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate,
+        uint32_t sampleRate, float speed /*, uint32_t notificationsPerBufferReq*/)
+{
+    // Ensure that buffer depth covers at least audio hardware latency
+    uint32_t minBufCount = afLatencyMs / ((1000 * afFrameCount) / afSampleRate);
+    if (minBufCount < 2) {
+        minBufCount = 2;
+    }
+#if 0
+    // The notificationsPerBufferReq parameter is not yet used for non-fast tracks,
+    // but keeping the code here to make it easier to add later.
+    if (minBufCount < notificationsPerBufferReq) {
+        minBufCount = notificationsPerBufferReq;
+    }
+#endif
+    ALOGV("calculateMinFrameCount afLatency %u  afFrameCount %u  afSampleRate %u  "
+            "sampleRate %u  speed %f  minBufCount: %u" /*"  notificationsPerBufferReq %u"*/,
+            afLatencyMs, afFrameCount, afSampleRate, sampleRate, speed, minBufCount
+            /*, notificationsPerBufferReq*/);
+    return minBufCount * sourceFramesNeededWithTimestretch(
+            sampleRate, afFrameCount, afSampleRate, speed);
+}
+
+
 status_t AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream_type_t streamType)
 {
     audio_io_handle_t output;
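
To put numbers on the formula above (illustrative values only): with afLatencyMs = 50, afFrameCount = 960 and afSampleRate = 48000, one hardware buffer lasts (1000 * 960) / 48000 = 20 ms, so minBufCount = 50 / 20 = 2, which is already at the floor of 2. For a client also running at 48000 Hz with speed 1.0, sourceFramesNeededWithTimestretch() needs roughly afFrameCount source frames per buffer, so the minimum works out to about 2 * 960 ≈ 1920 frames, i.e. roughly 40 ms of buffering.
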
@@ -605,7 +631,7 @@
         || (channelMask != mInChannelMask)) {
         size_t inBuffSize = af->getInputBufferSize(sampleRate, format, channelMask);
         if (inBuffSize == 0) {
-            ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %x",
+            ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %#x",
                     sampleRate, format, channelMask);
             return BAD_VALUE;
         }
@@ -822,16 +848,11 @@
 }
 
 
-audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream,
-                                    uint32_t samplingRate,
-                                    audio_format_t format,
-                                    audio_channel_mask_t channelMask,
-                                    audio_output_flags_t flags,
-                                    const audio_offload_info_t *offloadInfo)
+audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return 0;
-    return aps->getOutput(stream, samplingRate, format, channelMask, flags, offloadInfo);
+    return aps->getOutput(stream);
 }
 
 status_t AudioSystem::getOutputForAttr(const audio_attributes_t *attr,
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index c6622cd..7670982 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -31,6 +31,8 @@
 #include <media/IAudioFlinger.h>
 #include <media/AudioPolicyHelper.h>
 #include <media/AudioResamplerPublic.h>
+#include <media/MediaAnalyticsItem.h>
+#include <media/TypeConverter.h>
 
 #define WAIT_PERIOD_MS                  10
 #define WAIT_STREAM_END_TIMEOUT_SEC     120
@@ -39,6 +41,8 @@
 namespace android {
 // ---------------------------------------------------------------------------
 
+using media::VolumeShaper;
+
 // TODO: Move to a separate .h
 
 template <typename T>
@@ -97,32 +101,6 @@
     return kFixPitch ? AUDIO_TIMESTRETCH_PITCH_NORMAL : pitch;
 }
 
-// Must match similar computation in createTrack_l in Threads.cpp.
-// TODO: Move to a common library
-static size_t calculateMinFrameCount(
-        uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate,
-        uint32_t sampleRate, float speed /*, uint32_t notificationsPerBufferReq*/)
-{
-    // Ensure that buffer depth covers at least audio hardware latency
-    uint32_t minBufCount = afLatencyMs / ((1000 * afFrameCount) / afSampleRate);
-    if (minBufCount < 2) {
-        minBufCount = 2;
-    }
-#if 0
-    // The notificationsPerBufferReq parameter is not yet used for non-fast tracks,
-    // but keeping the code here to make it easier to add later.
-    if (minBufCount < notificationsPerBufferReq) {
-        minBufCount = notificationsPerBufferReq;
-    }
-#endif
-    ALOGV("calculateMinFrameCount afLatency %u  afFrameCount %u  afSampleRate %u  "
-            "sampleRate %u  speed %f  minBufCount: %u" /*"  notificationsPerBufferReq %u"*/,
-            afLatencyMs, afFrameCount, afSampleRate, sampleRate, speed, minBufCount
-            /*, notificationsPerBufferReq*/);
-    return minBufCount * sourceFramesNeededWithTimestretch(
-            sampleRate, afFrameCount, afSampleRate, speed);
-}
-
 // static
 status_t AudioTrack::getMinFrameCount(
         size_t* frameCount,
@@ -163,8 +141,8 @@
 
     // When called from createTrack, speed is 1.0f (normal speed).
     // This is rechecked again on setting playback rate (TODO: on setting sample rate, too).
-    *frameCount = calculateMinFrameCount(afLatency, afFrameCount, afSampleRate, sampleRate, 1.0f
-            /*, 0 notificationsPerBufferReq*/);
+    *frameCount = AudioSystem::calculateMinFrameCount(afLatency, afFrameCount, afSampleRate,
+                                              sampleRate, 1.0f /*, 0 notificationsPerBufferReq*/);
 
     // The formula above should always produce a non-zero value under normal circumstances:
     // AudioTrack.SAMPLE_RATE_HZ_MIN <= sampleRate <= AudioTrack.SAMPLE_RATE_HZ_MAX.
@@ -181,6 +159,65 @@
 
 // ---------------------------------------------------------------------------
 
+static std::string audioContentTypeString(audio_content_type_t value) {
+    std::string contentType;
+    if (AudioContentTypeConverter::toString(value, contentType)) {
+        return contentType;
+    }
+    char rawbuffer[16];  // room for "%d"
+    snprintf(rawbuffer, sizeof(rawbuffer), "%d", value);
+    return rawbuffer;
+}
+
+static std::string audioUsageString(audio_usage_t value) {
+    std::string usage;
+    if (UsageTypeConverter::toString(value, usage)) {
+        return usage;
+    }
+    char rawbuffer[16];  // room for "%d"
+    snprintf(rawbuffer, sizeof(rawbuffer), "%d", value);
+    return rawbuffer;
+}
+
+void AudioTrack::MediaMetrics::gather(const AudioTrack *track)
+{
+
+    // key for media statistics is defined in the header
+    // attrs for media statistics
+    static constexpr char kAudioTrackStreamType[] = "android.media.audiotrack.streamtype";
+    static constexpr char kAudioTrackContentType[] = "android.media.audiotrack.type";
+    static constexpr char kAudioTrackUsage[] = "android.media.audiotrack.usage";
+    static constexpr char kAudioTrackSampleRate[] = "android.media.audiotrack.samplerate";
+    static constexpr char kAudioTrackChannelMask[] = "android.media.audiotrack.channelmask";
+    static constexpr char kAudioTrackUnderrunFrames[] = "android.media.audiotrack.underrunframes";
+    static constexpr char kAudioTrackStartupGlitch[] = "android.media.audiotrack.glitch.startup";
+
+    // constructor guarantees mAnalyticsItem is valid
+
+    // must gather underrun info before cleaning mProxy information.
+    const int32_t underrunFrames = track->getUnderrunFrames();
+    if (underrunFrames != 0) {
+        mAnalyticsItem->setInt32(kAudioTrackUnderrunFrames, underrunFrames);
+    }
+
+    if (track->mTimestampStartupGlitchReported) {
+        mAnalyticsItem->setInt32(kAudioTrackStartupGlitch, 1);
+    }
+
+    if (track->mStreamType != -1) {
+        // deprecated, but this will tell us who still uses it.
+        mAnalyticsItem->setInt32(kAudioTrackStreamType, track->mStreamType);
+    }
+    // XXX: consider including from mAttributes: source type
+    mAnalyticsItem->setCString(kAudioTrackContentType,
+                               audioContentTypeString(track->mAttributes.content_type).c_str());
+    mAnalyticsItem->setCString(kAudioTrackUsage,
+                               audioUsageString(track->mAttributes.usage).c_str());
+    mAnalyticsItem->setInt32(kAudioTrackSampleRate, track->mSampleRate);
+    mAnalyticsItem->setInt64(kAudioTrackChannelMask, track->mChannelMask);
+}
+
+
 AudioTrack::AudioTrack()
     : mStatus(NO_INIT),
       mState(STATE_STOPPED),
@@ -188,8 +225,7 @@
       mPreviousSchedulingGroup(SP_DEFAULT),
       mPausedPosition(0),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE)
 {
     mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
     mAttributes.usage = AUDIO_USAGE_UNKNOWN;
@@ -214,19 +250,18 @@
         pid_t pid,
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
-        float maxRequiredSpeed)
+        float maxRequiredSpeed,
+        audio_port_handle_t selectedDeviceId)
     : mStatus(NO_INIT),
       mState(STATE_STOPPED),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mPausedPosition(0)
 {
-    mStatus = set(streamType, sampleRate, format, channelMask,
+    (void)set(streamType, sampleRate, format, channelMask,
             frameCount, flags, cbf, user, notificationFrames,
             0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType,
-            offloadInfo, uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed);
+            offloadInfo, uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
 }
 
 AudioTrack::AudioTrack(
@@ -252,10 +287,9 @@
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
       mPausedPosition(0),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE)
 {
-    mStatus = set(streamType, sampleRate, format, channelMask,
+    (void)set(streamType, sampleRate, format, channelMask,
             0 /*frameCount*/, flags, cbf, user, notificationFrames,
             sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
             uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed);
@@ -263,6 +297,9 @@
 
 AudioTrack::~AudioTrack()
 {
+    // pull together the numbers, before we clean up our structures
+    mMediaMetrics.gather(this);
+
     if (mStatus == NO_ERROR) {
         // Make sure that callback function exits in the case where
         // it is looping on buffer full condition in obtainBuffer().
@@ -308,14 +345,22 @@
         pid_t pid,
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
-        float maxRequiredSpeed)
+        float maxRequiredSpeed,
+        audio_port_handle_t selectedDeviceId)
 {
+    status_t status;
+    uint32_t channelCount;
+    pid_t callingPid;
+    pid_t myPid;
+
     ALOGV("set(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "flags #%x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
           streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
           sessionId, transferType, uid, pid);
 
     mThreadCanCallJava = threadCanCallJava;
+    mSelectedDeviceId = selectedDeviceId;
+    mSessionId = sessionId;
 
     switch (transferType) {
     case TRANSFER_DEFAULT:
@@ -330,25 +375,29 @@
     case TRANSFER_CALLBACK:
         if (cbf == NULL || sharedBuffer != 0) {
             ALOGE("Transfer type TRANSFER_CALLBACK but cbf == NULL || sharedBuffer != 0");
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         break;
     case TRANSFER_OBTAIN:
     case TRANSFER_SYNC:
         if (sharedBuffer != 0) {
             ALOGE("Transfer type TRANSFER_OBTAIN but sharedBuffer != 0");
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         break;
     case TRANSFER_SHARED:
         if (sharedBuffer == 0) {
             ALOGE("Transfer type TRANSFER_SHARED but sharedBuffer == 0");
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         break;
     default:
         ALOGE("Invalid transfer type %d", transferType);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mSharedBuffer = sharedBuffer;
     mTransfer = transferType;
@@ -362,7 +411,8 @@
     // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
         ALOGE("Track already in use");
-        return INVALID_OPERATION;
+        status = INVALID_OPERATION;
+        goto exit;
     }
 
     // handle default values first.
@@ -372,7 +422,8 @@
     if (pAttributes == NULL) {
         if (uint32_t(streamType) >= AUDIO_STREAM_PUBLIC_CNT) {
             ALOGE("Invalid stream type %d", streamType);
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         mStreamType = streamType;
 
@@ -404,16 +455,18 @@
     // validate parameters
     if (!audio_is_valid_format(format)) {
         ALOGE("Invalid format %#x", format);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mFormat = format;
 
     if (!audio_is_output_channel(channelMask)) {
         ALOGE("Invalid channel mask %#x", channelMask);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mChannelMask = channelMask;
-    uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
+    channelCount = audio_channel_count_from_out_mask(channelMask);
     mChannelCount = channelCount;
 
     // force direct flag if format is not linear PCM
@@ -448,7 +501,8 @@
 
     // sampling rate must be specified for direct outputs
     if (sampleRate == 0 && (flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mSampleRate = sampleRate;
     mOriginalSampleRate = sampleRate;
@@ -479,12 +533,14 @@
         if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) {
             ALOGE("notificationFrames=%d not permitted for non-fast track",
                     notificationFrames);
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         if (frameCount > 0) {
             ALOGE("notificationFrames=%d not permitted with non-zero frameCount=%zu",
                     notificationFrames, frameCount);
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         mNotificationFramesReq = 0;
         const uint32_t minNotificationsPerBuffer = 1;
@@ -496,20 +552,15 @@
                 notificationFrames, minNotificationsPerBuffer, maxNotificationsPerBuffer);
     }
     mNotificationFramesAct = 0;
-    if (sessionId == AUDIO_SESSION_ALLOCATE) {
-        mSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
-    } else {
-        mSessionId = sessionId;
-    }
-    int callingpid = IPCThreadState::self()->getCallingPid();
-    int mypid = getpid();
-    if (uid == AUDIO_UID_INVALID || (callingpid != mypid)) {
+    callingPid = IPCThreadState::self()->getCallingPid();
+    myPid = getpid();
+    if (uid == AUDIO_UID_INVALID || (callingPid != myPid)) {
         mClientUid = IPCThreadState::self()->getCallingUid();
     } else {
         mClientUid = uid;
     }
-    if (pid == -1 || (callingpid != mypid)) {
-        mClientPid = callingpid;
+    if (pid == -1 || (callingPid != myPid)) {
+        mClientPid = callingPid;
     } else {
         mClientPid = pid;
     }
@@ -524,7 +575,7 @@
     }
 
     // create the IAudioTrack
-    status_t status = createTrack_l();
+    status = createTrack_l();
 
     if (status != NO_ERROR) {
         if (mAudioTrackThread != 0) {
@@ -532,10 +583,9 @@
             mAudioTrackThread->requestExitAndWait();
             mAudioTrackThread.clear();
         }
-        return status;
+        goto exit;
     }
 
-    mStatus = NO_ERROR;
     mUserData = user;
     mLoopCount = 0;
     mLoopStart = 0;
@@ -562,8 +612,11 @@
     mFramesWritten = 0;
     mFramesWrittenServerOffset = 0;
     mFramesWrittenAtRestore = -1; // -1 is a unique initializer.
-    mVolumeHandler = new VolumeHandler();
-    return NO_ERROR;
+    mVolumeHandler = new media::VolumeHandler();
+
+exit:
+    mStatus = status;
+    return status;
 }
 
 // -------------------------------------------------------------------------
@@ -1219,6 +1272,7 @@
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
             android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+            mProxy->interrupt();
         }
     }
     return NO_ERROR;
@@ -1306,76 +1360,19 @@
 
 status_t AudioTrack::createTrack_l()
 {
+    status_t status;
+    bool callbackAdded = false;
+
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
     if (audioFlinger == 0) {
         ALOGE("Could not get audioflinger");
-        return NO_INIT;
+        status = NO_INIT;
+        goto exit;
     }
 
-    audio_io_handle_t output;
-    audio_stream_type_t streamType = mStreamType;
-    audio_attributes_t *attr = (mStreamType == AUDIO_STREAM_DEFAULT) ? &mAttributes : NULL;
-    bool callbackAdded = false;
-
+    {
     // mFlags (not mOrigFlags) is modified depending on whether fast request is accepted.
     // After fast request is denied, we will request again if IAudioTrack is re-created.
-
-    status_t status;
-    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-    config.sample_rate = mSampleRate;
-    config.channel_mask = mChannelMask;
-    config.format = mFormat;
-    config.offload_info = mOffloadInfoCopy;
-    mRoutedDeviceId = mSelectedDeviceId;
-    status = AudioSystem::getOutputForAttr(attr, &output,
-                                           mSessionId, &streamType, mClientUid,
-                                           &config,
-                                           mFlags, &mRoutedDeviceId, &mPortId);
-
-    if (status != NO_ERROR || output == AUDIO_IO_HANDLE_NONE) {
-        ALOGE("Could not get audio output for session %d, stream type %d, usage %d, sample rate %u,"
-              " format %#x, channel mask %#x, flags %#x",
-              mSessionId, streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask,
-              mFlags);
-        return BAD_VALUE;
-    }
-    {
-    // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
-    // we must release it ourselves if anything goes wrong.
-
-    // Not all of these values are needed under all conditions, but it is easier to get them all
-    status = AudioSystem::getLatency(output, &mAfLatency);
-    if (status != NO_ERROR) {
-        ALOGE("getLatency(%d) failed status %d", output, status);
-        goto release;
-    }
-    ALOGV("createTrack_l() output %d afLatency %u", output, mAfLatency);
-
-    status = AudioSystem::getFrameCount(output, &mAfFrameCount);
-    if (status != NO_ERROR) {
-        ALOGE("getFrameCount(output=%d) status %d", output, status);
-        goto release;
-    }
-
-    // TODO consider making this a member variable if there are other uses for it later
-    size_t afFrameCountHAL;
-    status = AudioSystem::getFrameCountHAL(output, &afFrameCountHAL);
-    if (status != NO_ERROR) {
-        ALOGE("getFrameCountHAL(output=%d) status %d", output, status);
-        goto release;
-    }
-    ALOG_ASSERT(afFrameCountHAL > 0);
-
-    status = AudioSystem::getSamplingRate(output, &mAfSampleRate);
-    if (status != NO_ERROR) {
-        ALOGE("getSamplingRate(output=%d) status %d", output, status);
-        goto release;
-    }
-    if (mSampleRate == 0) {
-        mSampleRate = mAfSampleRate;
-        mOriginalSampleRate = mAfSampleRate;
-    }
-
     // Client can only express a preference for FAST.  Server will perform additional tests.
     if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
         // either of these use cases:
@@ -1389,130 +1386,81 @@
             // use case 4: synchronous write
             ((mTransfer == TRANSFER_SYNC) && mThreadCanCallJava);
 
-        bool useCaseAllowed = sharedBuffer || transferAllowed;
-        if (!useCaseAllowed) {
+        bool fastAllowed = sharedBuffer || transferAllowed;
+        if (!fastAllowed) {
             ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client, not shared buffer and transfer = %s",
                   convertTransferToText(mTransfer));
-        }
-
-        // sample rates must also match
-        bool sampleRateAllowed = mSampleRate == mAfSampleRate;
-        if (!sampleRateAllowed) {
-            ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client, sample rate %u Hz but HAL needs %u Hz",
-                  mSampleRate, mAfSampleRate);
-        }
-
-        bool fastAllowed = useCaseAllowed && sampleRateAllowed;
-        if (!fastAllowed) {
             mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
         }
     }
 
-    mNotificationFramesAct = mNotificationFramesReq;
-
-    size_t frameCount = mReqFrameCount;
-    if (!audio_has_proportional_frames(mFormat)) {
-
-        if (mSharedBuffer != 0) {
-            // Same comment as below about ignoring frameCount parameter for set()
-            frameCount = mSharedBuffer->size();
-        } else if (frameCount == 0) {
-            frameCount = mAfFrameCount;
-        }
-        if (mNotificationFramesAct != frameCount) {
-            mNotificationFramesAct = frameCount;
-        }
-    } else if (mSharedBuffer != 0) {
-        // FIXME: Ensure client side memory buffers need
-        // not have additional alignment beyond sample
-        // (e.g. 16 bit stereo accessed as 32 bit frame).
-        size_t alignment = audio_bytes_per_sample(mFormat);
-        if (alignment & 1) {
-            // for AUDIO_FORMAT_PCM_24_BIT_PACKED (not exposed through Java).
-            alignment = 1;
-        }
-        if (mChannelCount > 1) {
-            // More than 2 channels does not require stronger alignment than stereo
-            alignment <<= 1;
-        }
-        if (((uintptr_t)mSharedBuffer->pointer() & (alignment - 1)) != 0) {
-            ALOGE("Invalid buffer alignment: address %p, channel count %u",
-                    mSharedBuffer->pointer(), mChannelCount);
-            status = BAD_VALUE;
-            goto release;
-        }
-
-        // When initializing a shared buffer AudioTrack via constructors,
-        // there's no frameCount parameter.
-        // But when initializing a shared buffer AudioTrack via set(),
-        // there _is_ a frameCount parameter.  We silently ignore it.
-        frameCount = mSharedBuffer->size() / mFrameSize;
+    IAudioFlinger::CreateTrackInput input;
+    if (mStreamType != AUDIO_STREAM_DEFAULT) {
+        stream_type_to_audio_attributes(mStreamType, &input.attr);
     } else {
-        size_t minFrameCount = 0;
-        // For fast tracks the frame count calculations and checks are mostly done by server,
-        // but we try to respect the application's request for notifications per buffer.
-        if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
-            if (mNotificationsPerBufferReq > 0) {
-                // Avoid possible arithmetic overflow during multiplication.
-                // mNotificationsPerBuffer is clamped to a small integer earlier, so it is unlikely.
-                if (mNotificationsPerBufferReq > SIZE_MAX / afFrameCountHAL) {
-                    ALOGE("Requested notificationPerBuffer=%u ignored for HAL frameCount=%zu",
-                            mNotificationsPerBufferReq, afFrameCountHAL);
-                } else {
-                    minFrameCount = afFrameCountHAL * mNotificationsPerBufferReq;
-                }
-            }
-        } else {
-            // for normal tracks precompute the frame count based on speed.
-            const float speed = !isPurePcmData_l() || isOffloadedOrDirect_l() ? 1.0f :
-                            max(mMaxRequiredSpeed, mPlaybackRate.mSpeed);
-            minFrameCount = calculateMinFrameCount(
-                    mAfLatency, mAfFrameCount, mAfSampleRate, mSampleRate,
-                    speed /*, 0 mNotificationsPerBufferReq*/);
-        }
-        if (frameCount < minFrameCount) {
-            frameCount = minFrameCount;
-        }
+        input.attr = mAttributes;
     }
-
-    audio_output_flags_t flags = mFlags;
-
-    pid_t tid = -1;
+    input.config = AUDIO_CONFIG_INITIALIZER;
+    input.config.sample_rate = mSampleRate;
+    input.config.channel_mask = mChannelMask;
+    input.config.format = mFormat;
+    input.config.offload_info = mOffloadInfoCopy;
+    input.clientInfo.clientUid = mClientUid;
+    input.clientInfo.clientPid = mClientPid;
+    input.clientInfo.clientTid = -1;
     if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
         // It is currently meaningless to request SCHED_FIFO for a Java thread.  Even if the
         // application-level code follows all non-blocking design rules, the language runtime
         // doesn't also follow those rules, so the thread will not benefit overall.
         if (mAudioTrackThread != 0 && !mThreadCanCallJava) {
-            tid = mAudioTrackThread->getTid();
+            input.clientInfo.clientTid = mAudioTrackThread->getTid();
         }
     }
+    input.sharedBuffer = mSharedBuffer;
+    input.notificationsPerBuffer = mNotificationsPerBufferReq;
+    input.speed = 1.0;
+    if (audio_has_proportional_frames(mFormat) && mSharedBuffer == 0 &&
+            (mFlags & AUDIO_OUTPUT_FLAG_FAST) == 0) {
+        input.speed = !isPurePcmData_l() || isOffloadedOrDirect_l() ? 1.0f :
+                        max(mMaxRequiredSpeed, mPlaybackRate.mSpeed);
+    }
+    input.flags = mFlags;
+    input.frameCount = mReqFrameCount;
+    input.notificationFrameCount = mNotificationFramesReq;
+    input.selectedDeviceId = mSelectedDeviceId;
+    input.sessionId = mSessionId;
 
-    size_t temp = frameCount;   // temp may be replaced by a revised value of frameCount,
-                                // but we will still need the original value also
-    audio_session_t originalSessionId = mSessionId;
-    sp<IAudioTrack> track = audioFlinger->createTrack(streamType,
-                                                      mSampleRate,
-                                                      mFormat,
-                                                      mChannelMask,
-                                                      &temp,
-                                                      &flags,
-                                                      mSharedBuffer,
+    IAudioFlinger::CreateTrackOutput output;
+
+    sp<IAudioTrack> track = audioFlinger->createTrack(input,
                                                       output,
-                                                      mClientPid,
-                                                      tid,
-                                                      &mSessionId,
-                                                      mClientUid,
-                                                      &status,
-                                                      mPortId);
-    ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId,
-            "session ID changed from %d to %d", originalSessionId, mSessionId);
+                                                      &status);
 
-    if (status != NO_ERROR) {
-        ALOGE("AudioFlinger could not create track, status: %d", status);
-        goto release;
+    if (status != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
+        ALOGE("AudioFlinger could not create track, status: %d output %d", status, output.outputId);
+        if (status == NO_ERROR) {
+            status = NO_INIT;
+        }
+        goto exit;
     }
     ALOG_ASSERT(track != 0);
 
+    mFrameCount = output.frameCount;
+    mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
+    mRoutedDeviceId = output.selectedDeviceId;
+    mSessionId = output.sessionId;
+
+    mSampleRate = output.sampleRate;
+    if (mOriginalSampleRate == 0) {
+        mOriginalSampleRate = mSampleRate;
+    }
+
+    mAfFrameCount = output.afFrameCount;
+    mAfSampleRate = output.afSampleRate;
+    mAfLatency = output.afLatencyMs;
+
+    mLatency = mAfLatency + (1000LL * mFrameCount) / mSampleRate;
+
     // AudioFlinger now owns the reference to the I/O handle,
     // so we are no longer responsible for releasing it.
 
@@ -1521,13 +1469,13 @@
     if (iMem == 0) {
         ALOGE("Could not get control block");
         status = NO_INIT;
-        goto release;
+        goto exit;
     }
     void *iMemPointer = iMem->pointer();
     if (iMemPointer == NULL) {
         ALOGE("Could not get control block pointer");
         status = NO_INIT;
-        goto release;
+        goto exit;
     }
     // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
@@ -1540,75 +1488,33 @@
 
     audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
     mCblk = cblk;
-    // note that temp is the (possibly revised) value of frameCount
-    if (temp < frameCount || (frameCount == 0 && temp == 0)) {
-        // In current design, AudioTrack client checks and ensures frame count validity before
-        // passing it to AudioFlinger so AudioFlinger should not return a different value except
-        // for fast track as it uses a special method of assigning frame count.
-        ALOGW("Requested frameCount %zu but received frameCount %zu", frameCount, temp);
-    }
-    frameCount = temp;
 
     mAwaitBoost = false;
     if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
-        if (flags & AUDIO_OUTPUT_FLAG_FAST) {
-            ALOGI("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %zu -> %zu", frameCount, temp);
+        if (output.flags & AUDIO_OUTPUT_FLAG_FAST) {
+            ALOGI("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %zu -> %zu",
+                  mReqFrameCount, mFrameCount);
             if (!mThreadCanCallJava) {
                 mAwaitBoost = true;
             }
         } else {
-            ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu -> %zu", frameCount,
-                    temp);
+            ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu -> %zu", mReqFrameCount,
+                  mFrameCount);
         }
     }
-    mFlags = flags;
-
-    // Make sure that application is notified with sufficient margin before underrun.
-    // The client can divide the AudioTrack buffer into sub-buffers,
-    // and expresses its desire to server as the notification frame count.
-    if (mSharedBuffer == 0 && audio_is_linear_pcm(mFormat)) {
-        size_t maxNotificationFrames;
-        if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
-            // notify every HAL buffer, regardless of the size of the track buffer
-            maxNotificationFrames = afFrameCountHAL;
-        } else {
-            // For normal tracks, use at least double-buffering if no sample rate conversion,
-            // or at least triple-buffering if there is sample rate conversion
-            const int nBuffering = mOriginalSampleRate == mAfSampleRate ? 2 : 3;
-            maxNotificationFrames = frameCount / nBuffering;
-            // If client requested a fast track but this was denied, then use the smaller maximum.
-            // FMS_20 is the minimum task wakeup period in ms for which CFS operates reliably.
-#define FMS_20 20   // FIXME share a common declaration with the same symbol in Threads.cpp
-            if (mOrigFlags & AUDIO_OUTPUT_FLAG_FAST) {
-                size_t maxNotificationFramesFastDenied = FMS_20 * mSampleRate / 1000;
-                if (maxNotificationFrames > maxNotificationFramesFastDenied) {
-                    maxNotificationFrames = maxNotificationFramesFastDenied;
-                }
-            }
-        }
-        if (mNotificationFramesAct == 0 || mNotificationFramesAct > maxNotificationFrames) {
-            if (mNotificationFramesAct == 0) {
-                ALOGD("Client defaulted notificationFrames to %zu for frameCount %zu",
-                    maxNotificationFrames, frameCount);
-            } else {
-                ALOGW("Client adjusted notificationFrames from %u to %zu for frameCount %zu",
-                    mNotificationFramesAct, maxNotificationFrames, frameCount);
-            }
-            mNotificationFramesAct = (uint32_t) maxNotificationFrames;
-        }
-    }
+    mFlags = output.flags;
 
     //mOutput != output includes the case where mOutput == AUDIO_IO_HANDLE_NONE for first creation
-    if (mDeviceCallback != 0 && mOutput != output) {
+    if (mDeviceCallback != 0 && mOutput != output.outputId) {
         if (mOutput != AUDIO_IO_HANDLE_NONE) {
             AudioSystem::removeAudioDeviceCallback(this, mOutput);
         }
-        AudioSystem::addAudioDeviceCallback(this, output);
+        AudioSystem::addAudioDeviceCallback(this, output.outputId);
         callbackAdded = true;
     }
 
     // We retain a copy of the I/O handle, but don't own the reference
-    mOutput = output;
+    mOutput = output.outputId;
     mRefreshRemaining = true;
 
     // Starting address of buffers in shared memory.  If there is a shared buffer, buffers
@@ -1623,18 +1529,16 @@
         if (buffers == NULL) {
             ALOGE("Could not get buffer pointer");
             status = NO_INIT;
-            goto release;
+            goto exit;
         }
     }
 
     mAudioTrack->attachAuxEffect(mAuxEffectId);
-    mFrameCount = frameCount;
-    updateLatency_l();  // this refetches mAfLatency and sets mLatency
 
     // If IAudioTrack is re-created, don't let the requested frameCount
     // decrease.  This can confuse clients that cache frameCount().
-    if (frameCount > mReqFrameCount) {
-        mReqFrameCount = frameCount;
+    if (mFrameCount > mReqFrameCount) {
+        mReqFrameCount = mFrameCount;
     }
 
     // reset server position to 0 as we have new cblk.
@@ -1643,9 +1547,9 @@
     // update proxy
     if (mSharedBuffer == 0) {
         mStaticProxy.clear();
-        mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize);
+        mProxy = new AudioTrackClientProxy(cblk, buffers, mFrameCount, mFrameSize);
     } else {
-        mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize);
+        mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, mFrameCount, mFrameSize);
         mProxy = mStaticProxy;
     }
 
@@ -1668,18 +1572,17 @@
     mDeathNotifier = new DeathNotifier(this);
     IInterface::asBinder(mAudioTrack)->linkToDeath(mDeathNotifier, this);
 
-    return NO_ERROR;
     }
 
-release:
-    AudioSystem::releaseOutput(output, streamType, mSessionId);
-    if (callbackAdded) {
+exit:
+    if (status != NO_ERROR && callbackAdded) {
         // note: mOutput is always valid if callbackAdded is true
         AudioSystem::removeAudioDeviceCallback(this, mOutput);
     }
-    if (status == NO_ERROR) {
-        status = NO_INIT;
-    }
+
+    mStatus = status;
+
+    // sp<IAudioTrack> track destructor will cause releaseOutput() to be called by AudioFlinger
     return status;
 }
 
@@ -2415,8 +2318,8 @@
         return true; // static tracks do not have issues with buffer sizing.
     }
     const size_t minFrameCount =
-            calculateMinFrameCount(mAfLatency, mAfFrameCount, mAfSampleRate, sampleRate, speed
-                /*, 0 mNotificationsPerBufferReq*/);
+            AudioSystem::calculateMinFrameCount(mAfLatency, mAfFrameCount, mAfSampleRate,
+                                            sampleRate, speed /*, 0 mNotificationsPerBufferReq*/);
     const bool allowed = mFrameCount >= minFrameCount;
     ALOGD_IF(!allowed,
             "isSampleRateSpeedAllowed_l denied "
@@ -2832,23 +2735,28 @@
 
 status_t AudioTrack::dump(int fd, const Vector<String16>& args __unused) const
 {
-
-    const size_t SIZE = 256;
-    char buffer[SIZE];
     String8 result;
 
     result.append(" AudioTrack::dump\n");
-    snprintf(buffer, 255, "  stream type(%d), left - right volume(%f, %f)\n", mStreamType,
-            mVolume[AUDIO_INTERLEAVE_LEFT], mVolume[AUDIO_INTERLEAVE_RIGHT]);
-    result.append(buffer);
-    snprintf(buffer, 255, "  format(%d), channel count(%d), frame count(%zu)\n", mFormat,
-            mChannelCount, mFrameCount);
-    result.append(buffer);
-    snprintf(buffer, 255, "  sample rate(%u), speed(%f), status(%d)\n",
-            mSampleRate, mPlaybackRate.mSpeed, mStatus);
-    result.append(buffer);
-    snprintf(buffer, 255, "  state(%d), latency (%d)\n", mState, mLatency);
-    result.append(buffer);
+    result.appendFormat("  status(%d), state(%d), session Id(%d), flags(%#x)\n",
+                        mStatus, mState, mSessionId, mFlags);
+    result.appendFormat("  stream type(%d), left - right volume(%f, %f)\n",
+                        (mStreamType == AUDIO_STREAM_DEFAULT) ?
+                                audio_attributes_to_stream_type(&mAttributes) : mStreamType,
+                        mVolume[AUDIO_INTERLEAVE_LEFT], mVolume[AUDIO_INTERLEAVE_RIGHT]);
+    result.appendFormat("  format(%#x), channel mask(%#x), channel count(%u)\n",
+                  mFormat, mChannelMask, mChannelCount);
+    result.appendFormat("  sample rate(%u), original sample rate(%u), speed(%f)\n",
+                  mSampleRate, mOriginalSampleRate, mPlaybackRate.mSpeed);
+    result.appendFormat("  frame count(%zu), req. frame count(%zu)\n",
+                  mFrameCount, mReqFrameCount);
+    result.appendFormat("  notif. frame count(%u), req. notif. frame count(%u),"
+            " req. notif. per buff(%u)\n",
+             mNotificationFramesAct, mNotificationFramesReq, mNotificationsPerBufferReq);
+    result.appendFormat("  latency (%d), selected device Id(%d), routed device Id(%d)\n",
+                        mLatency, mSelectedDeviceId, mRoutedDeviceId);
+    result.appendFormat("  output(%d) AF latency (%u) AF frame count(%zu) AF SampleRate(%u)\n",
+                        mOutput, mAfLatency, mAfFrameCount, mAfSampleRate);
     ::write(fd, result.string(), result.size());
     return NO_ERROR;
 }
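
The AudioTrack changes above replace the long positional argument list of
AudioFlinger::createTrack() with a CreateTrackInput/CreateTrackOutput pair: the
client bundles everything it requests, and the server reports back everything it
actually granted. A minimal sketch of that exchange, using only field names that
appear in the hunks above (illustrative, not code from this patch):

    IAudioFlinger::CreateTrackInput input;
    stream_type_to_audio_attributes(AUDIO_STREAM_MUSIC, &input.attr);
    input.config = AUDIO_CONFIG_INITIALIZER;
    input.config.sample_rate = 48000;
    input.config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
    input.config.format = AUDIO_FORMAT_PCM_16_BIT;
    input.flags = AUDIO_OUTPUT_FLAG_NONE;
    input.frameCount = 0;                      // 0 lets the server pick a minimum
    input.sessionId = AUDIO_SESSION_ALLOCATE;

    IAudioFlinger::CreateTrackOutput output;
    status_t status = NO_INIT;
    sp<IAudioTrack> track = audioFlinger->createTrack(input, output, &status);
    if (status == NO_ERROR) {
        // The server returns the values it actually granted.
        ALOGV("granted frameCount %zu on output %d", output.frameCount, output.outputId);
    }
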
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 14feada..56ddd4f 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -22,6 +22,7 @@
 #include <stdint.h>
 #include <sys/types.h>
 
+#include <binder/IPCThreadState.h>
 #include <binder/Parcel.h>
 
 #include "IAudioFlinger.h"
@@ -30,7 +31,7 @@
 
 enum {
     CREATE_TRACK = IBinder::FIRST_CALL_TRANSACTION,
-    OPEN_RECORD,
+    CREATE_RECORD,
     SAMPLE_RATE,
     RESERVED,   // obsolete, was CHANNEL_COUNT
     FORMAT,
@@ -95,182 +96,74 @@
     {
     }
 
-    virtual sp<IAudioTrack> createTrack(
-                                audio_stream_type_t streamType,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                size_t *pFrameCount,
-                                audio_output_flags_t *flags,
-                                const sp<IMemory>& sharedBuffer,
-                                audio_io_handle_t output,
-                                pid_t pid,
-                                pid_t tid,
-                                audio_session_t *sessionId,
-                                int clientUid,
-                                status_t *status,
-                                audio_port_handle_t portId)
+    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
+                                        CreateTrackOutput& output,
+                                        status_t *status)
     {
         Parcel data, reply;
         sp<IAudioTrack> track;
         data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) streamType);
-        data.writeInt32(sampleRate);
-        data.writeInt32(format);
-        data.writeInt32(channelMask);
-        size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
-        data.writeInt64(frameCount);
-        audio_output_flags_t lFlags = flags != NULL ? *flags : AUDIO_OUTPUT_FLAG_NONE;
-        data.writeInt32(lFlags);
-        // haveSharedBuffer
-        if (sharedBuffer != 0) {
-            data.writeInt32(true);
-            data.writeStrongBinder(IInterface::asBinder(sharedBuffer));
-        } else {
-            data.writeInt32(false);
+
+        if (status == nullptr) {
+            return track;
         }
-        data.writeInt32((int32_t) output);
-        data.writeInt32((int32_t) pid);
-        data.writeInt32((int32_t) tid);
-        audio_session_t lSessionId = AUDIO_SESSION_ALLOCATE;
-        if (sessionId != NULL) {
-            lSessionId = *sessionId;
-        }
-        data.writeInt32(lSessionId);
-        data.writeInt32(clientUid);
-        data.writeInt32(portId);
+
+        input.writeToParcel(&data);
+
         status_t lStatus = remote()->transact(CREATE_TRACK, data, &reply);
         if (lStatus != NO_ERROR) {
-            ALOGE("createTrack error: %s", strerror(-lStatus));
-        } else {
-            frameCount = reply.readInt64();
-            if (pFrameCount != NULL) {
-                *pFrameCount = frameCount;
-            }
-            lFlags = (audio_output_flags_t)reply.readInt32();
-            if (flags != NULL) {
-                *flags = lFlags;
-            }
-            lSessionId = (audio_session_t) reply.readInt32();
-            if (sessionId != NULL) {
-                *sessionId = lSessionId;
-            }
-            lStatus = reply.readInt32();
-            track = interface_cast<IAudioTrack>(reply.readStrongBinder());
-            if (lStatus == NO_ERROR) {
-                if (track == 0) {
-                    ALOGE("createTrack should have returned an IAudioTrack");
-                    lStatus = UNKNOWN_ERROR;
-                }
-            } else {
-                if (track != 0) {
-                    ALOGE("createTrack returned an IAudioTrack but with status %d", lStatus);
-                    track.clear();
-                }
-            }
+            ALOGE("createTrack transaction error %d", lStatus);
+            *status = DEAD_OBJECT;
+            return track;
         }
-        if (status != NULL) {
-            *status = lStatus;
+        *status = reply.readInt32();
+        if (*status != NO_ERROR) {
+            ALOGE("createTrack returned error %d", *status);
+            return track;
         }
+        track = interface_cast<IAudioTrack>(reply.readStrongBinder());
+        if (track == 0) {
+            ALOGE("createTrack returned an NULL IAudioTrack with status OK");
+            *status = DEAD_OBJECT;
+            return track;
+        }
+        output.readFromParcel(&reply);
         return track;
     }
 
-    virtual sp<IAudioRecord> openRecord(
-                                audio_io_handle_t input,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                const String16& opPackageName,
-                                size_t *pFrameCount,
-                                audio_input_flags_t *flags,
-                                pid_t pid,
-                                pid_t tid,
-                                int clientUid,
-                                audio_session_t *sessionId,
-                                size_t *notificationFrames,
-                                sp<IMemory>& cblk,
-                                sp<IMemory>& buffers,
-                                status_t *status,
-                                audio_port_handle_t portId)
+    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
+                                                 CreateRecordOutput& output,
+                                                 status_t *status)
     {
         Parcel data, reply;
-        sp<IAudioRecord> record;
+        sp<media::IAudioRecord> record;
         data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) input);
-        data.writeInt32(sampleRate);
-        data.writeInt32(format);
-        data.writeInt32(channelMask);
-        data.writeString16(opPackageName);
-        size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
-        data.writeInt64(frameCount);
-        audio_input_flags_t lFlags = flags != NULL ? *flags : AUDIO_INPUT_FLAG_NONE;
-        data.writeInt32(lFlags);
-        data.writeInt32((int32_t) pid);
-        data.writeInt32((int32_t) tid);
-        data.writeInt32((int32_t) clientUid);
-        audio_session_t lSessionId = AUDIO_SESSION_ALLOCATE;
-        if (sessionId != NULL) {
-            lSessionId = *sessionId;
+
+        if (status == nullptr) {
+            return record;
         }
-        data.writeInt32(lSessionId);
-        data.writeInt64(notificationFrames != NULL ? *notificationFrames : 0);
-        data.writeInt32(portId);
-        cblk.clear();
-        buffers.clear();
-        status_t lStatus = remote()->transact(OPEN_RECORD, data, &reply);
+
+        input.writeToParcel(&data);
+
+        status_t lStatus = remote()->transact(CREATE_RECORD, data, &reply);
         if (lStatus != NO_ERROR) {
-            ALOGE("openRecord error: %s", strerror(-lStatus));
-        } else {
-            frameCount = reply.readInt64();
-            if (pFrameCount != NULL) {
-                *pFrameCount = frameCount;
-            }
-            lFlags = (audio_input_flags_t)reply.readInt32();
-            if (flags != NULL) {
-                *flags = lFlags;
-            }
-            lSessionId = (audio_session_t) reply.readInt32();
-            if (sessionId != NULL) {
-                *sessionId = lSessionId;
-            }
-            size_t lNotificationFrames = (size_t) reply.readInt64();
-            if (notificationFrames != NULL) {
-                *notificationFrames = lNotificationFrames;
-            }
-            lStatus = reply.readInt32();
-            record = interface_cast<IAudioRecord>(reply.readStrongBinder());
-            cblk = interface_cast<IMemory>(reply.readStrongBinder());
-            if (cblk != 0 && cblk->pointer() == NULL) {
-                cblk.clear();
-            }
-            buffers = interface_cast<IMemory>(reply.readStrongBinder());
-            if (buffers != 0 && buffers->pointer() == NULL) {
-                buffers.clear();
-            }
-            if (lStatus == NO_ERROR) {
-                if (record == 0) {
-                    ALOGE("openRecord should have returned an IAudioRecord");
-                    lStatus = UNKNOWN_ERROR;
-                } else if (cblk == 0) {
-                    ALOGE("openRecord should have returned a cblk");
-                    lStatus = NO_MEMORY;
-                }
-                // buffers is permitted to be 0
-            } else {
-                if (record != 0 || cblk != 0 || buffers != 0) {
-                    ALOGE("openRecord returned an IAudioRecord, cblk, "
-                          "or buffers but with status %d", lStatus);
-                }
-            }
-            if (lStatus != NO_ERROR) {
-                record.clear();
-                cblk.clear();
-                buffers.clear();
-            }
+            ALOGE("createRecord transaction error %d", lStatus);
+            *status = DEAD_OBJECT;
+            return record;
         }
-        if (status != NULL) {
-            *status = lStatus;
+        *status = reply.readInt32();
+        if (*status != NO_ERROR) {
+            ALOGE("createRecord returned error %d", *status);
+            return record;
         }
+
+        record = interface_cast<media::IAudioRecord>(reply.readStrongBinder());
+        if (record == 0) {
+            ALOGE("createRecord returned a NULL IAudioRecord with status OK");
+            *status = DEAD_OBJECT;
+            return record;
+        }
+        output.readFromParcel(&reply);
         return record;
     }
 
@@ -944,21 +837,46 @@
 status_t BnAudioFlinger::onTransact(
     uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
 {
+    // make sure transactions reserved to AudioPolicyManager do not come from other processes
+    switch (code) {
+        case SET_STREAM_VOLUME:
+        case SET_STREAM_MUTE:
+        case SET_MODE:
+        case OPEN_OUTPUT:
+        case OPEN_DUPLICATE_OUTPUT:
+        case CLOSE_OUTPUT:
+        case SUSPEND_OUTPUT:
+        case RESTORE_OUTPUT:
+        case OPEN_INPUT:
+        case CLOSE_INPUT:
+        case INVALIDATE_STREAM:
+        case SET_VOICE_VOLUME:
+        case MOVE_EFFECTS:
+        case LOAD_HW_MODULE:
+        case LIST_AUDIO_PORTS:
+        case GET_AUDIO_PORT:
+        case CREATE_AUDIO_PATCH:
+        case RELEASE_AUDIO_PATCH:
+        case LIST_AUDIO_PATCHES:
+        case SET_AUDIO_PORT_CONFIG:
+            ALOGW("%s: transaction %d received from PID %d",
+                  __func__, code, IPCThreadState::self()->getCallingPid());
+            return INVALID_OPERATION;
+        default:
+            break;
+    }
+
     // Whitelist of relevant events to trigger log merging.
     // Log merging should activate during audio activity of any kind. These are considered the
     // most relevant events.
     // TODO should select more wisely the items from the list
     switch (code) {
         case CREATE_TRACK:
-        case OPEN_RECORD:
+        case CREATE_RECORD:
         case SET_MASTER_VOLUME:
         case SET_MASTER_MUTE:
-        case SET_STREAM_VOLUME:
-        case SET_STREAM_MUTE:
         case SET_MIC_MUTE:
         case SET_PARAMETERS:
-        case OPEN_INPUT:
-        case SET_VOICE_VOLUME:
         case CREATE_EFFECT:
         case SYSTEM_READY: {
             requestLogMerge();
@@ -967,77 +885,56 @@
         default:
             break;
     }
+
     switch (code) {
         case CREATE_TRACK: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int streamType = data.readInt32();
-            uint32_t sampleRate = data.readInt32();
-            audio_format_t format = (audio_format_t) data.readInt32();
-            audio_channel_mask_t channelMask = data.readInt32();
-            size_t frameCount = data.readInt64();
-            audio_output_flags_t flags = (audio_output_flags_t) data.readInt32();
-            bool haveSharedBuffer = data.readInt32() != 0;
-            sp<IMemory> buffer;
-            if (haveSharedBuffer) {
-                buffer = interface_cast<IMemory>(data.readStrongBinder());
+
+            CreateTrackInput input;
+            if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
+                reply->writeInt32(DEAD_OBJECT);
+                return NO_ERROR;
             }
-            audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
-            pid_t pid = (pid_t) data.readInt32();
-            pid_t tid = (pid_t) data.readInt32();
-            audio_session_t sessionId = (audio_session_t) data.readInt32();
-            int clientUid = data.readInt32();
-            audio_port_handle_t portId = (audio_port_handle_t) data.readInt32();
-            status_t status = NO_ERROR;
-            sp<IAudioTrack> track;
-            if ((haveSharedBuffer && (buffer == 0)) ||
-                    ((buffer != 0) && (buffer->pointer() == NULL))) {
-                ALOGW("CREATE_TRACK: cannot retrieve shared memory");
-                status = DEAD_OBJECT;
-            } else {
-                track = createTrack(
-                        (audio_stream_type_t) streamType, sampleRate, format,
-                        channelMask, &frameCount, &flags, buffer, output, pid, tid,
-                        &sessionId, clientUid, &status, portId);
-                LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
-            }
-            reply->writeInt64(frameCount);
-            reply->writeInt32(flags);
-            reply->writeInt32(sessionId);
+
+            status_t status;
+            CreateTrackOutput output;
+
+            sp<IAudioTrack> track= createTrack(input,
+                                               output,
+                                               &status);
+
+            LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
             reply->writeInt32(status);
+            if (status != NO_ERROR) {
+                return NO_ERROR;
+            }
             reply->writeStrongBinder(IInterface::asBinder(track));
+            output.writeToParcel(reply);
             return NO_ERROR;
         } break;
-        case OPEN_RECORD: {
+        case CREATE_RECORD: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_io_handle_t input = (audio_io_handle_t) data.readInt32();
-            uint32_t sampleRate = data.readInt32();
-            audio_format_t format = (audio_format_t) data.readInt32();
-            audio_channel_mask_t channelMask = data.readInt32();
-            const String16& opPackageName = data.readString16();
-            size_t frameCount = data.readInt64();
-            audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
-            pid_t pid = (pid_t) data.readInt32();
-            pid_t tid = (pid_t) data.readInt32();
-            int clientUid = data.readInt32();
-            audio_session_t sessionId = (audio_session_t) data.readInt32();
-            size_t notificationFrames = data.readInt64();
-            audio_port_handle_t portId = (audio_port_handle_t) data.readInt32();
-            sp<IMemory> cblk;
-            sp<IMemory> buffers;
-            status_t status = NO_ERROR;
-            sp<IAudioRecord> record = openRecord(input,
-                    sampleRate, format, channelMask, opPackageName, &frameCount, &flags,
-                    pid, tid, clientUid, &sessionId, &notificationFrames, cblk, buffers,
-                    &status, portId);
+
+            CreateRecordInput input;
+            if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
+                reply->writeInt32(DEAD_OBJECT);
+                return NO_ERROR;
+            }
+
+            status_t status;
+            CreateRecordOutput output;
+
+            sp<media::IAudioRecord> record = createRecord(input,
+                                                          output,
+                                                          &status);
+
             LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
-            reply->writeInt64(frameCount);
-            reply->writeInt32(flags);
-            reply->writeInt32(sessionId);
-            reply->writeInt64(notificationFrames);
             reply->writeInt32(status);
+            if (status != NO_ERROR) {
+                return NO_ERROR;
+            }
             reply->writeStrongBinder(IInterface::asBinder(record));
-            reply->writeStrongBinder(IInterface::asBinder(cblk));
-            reply->writeStrongBinder(IInterface::asBinder(buffers));
+            output.writeToParcel(reply);
             return NO_ERROR;
         } break;
         case SAMPLE_RATE: {
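
The proxy and stub above no longer hand-marshal each createTrack()/createRecord()
argument; all request and reply fields travel through the Input/Output structs'
own writeToParcel()/readFromParcel() methods. A sketch of the symmetric convention
this relies on, with an invented field set (the real CreateTrackInput/Output
definitions live in IAudioFlinger.h and are not part of this hunk):

    struct ExamplePayload : public Parcelable {
        audio_config_t config;
        audio_output_flags_t flags;

        // Both sides must write and read the fields in exactly the same order.
        status_t writeToParcel(Parcel* parcel) const override {
            parcel->write(&config, sizeof(audio_config_t));
            parcel->writeInt32(flags);
            return NO_ERROR;
        }
        status_t readFromParcel(const Parcel* parcel) override {
            parcel->read(&config, sizeof(audio_config_t));
            flags = static_cast<audio_output_flags_t>(parcel->readInt32());
            return NO_ERROR;
        }
    };
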
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index d838975..53bc1b7 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -22,6 +22,7 @@
 #include <math.h>
 #include <sys/types.h>
 
+#include <binder/IPCThreadState.h>
 #include <binder/Parcel.h>
 
 #include <media/AudioEffect.h>
@@ -160,28 +161,11 @@
         return static_cast <audio_policy_forced_cfg_t> (reply.readInt32());
     }
 
-    virtual audio_io_handle_t getOutput(
-                                        audio_stream_type_t stream,
-                                        uint32_t samplingRate,
-                                        audio_format_t format,
-                                        audio_channel_mask_t channelMask,
-                                        audio_output_flags_t flags,
-                                        const audio_offload_info_t *offloadInfo)
+    virtual audio_io_handle_t getOutput(audio_stream_type_t stream)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
         data.writeInt32(static_cast <uint32_t>(stream));
-        data.writeInt32(samplingRate);
-        data.writeInt32(static_cast <uint32_t>(format));
-        data.writeInt32(channelMask);
-        data.writeInt32(static_cast <uint32_t>(flags));
-        // hasOffloadInfo
-        if (offloadInfo == NULL) {
-            data.writeInt32(0);
-        } else {
-            data.writeInt32(1);
-            data.write(offloadInfo, sizeof(audio_offload_info_t));
-        }
         remote()->transact(GET_OUTPUT, data, &reply);
         return static_cast <audio_io_handle_t> (reply.readInt32());
     }
@@ -848,10 +832,33 @@
 
 // ----------------------------------------------------------------------
 
-
 status_t BnAudioPolicyService::onTransact(
     uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
 {
+    // make sure transactions reserved to AudioFlinger do not come from other processes
+    switch (code) {
+        case START_OUTPUT:
+        case STOP_OUTPUT:
+        case RELEASE_OUTPUT:
+        case GET_INPUT_FOR_ATTR:
+        case START_INPUT:
+        case STOP_INPUT:
+        case RELEASE_INPUT:
+        case GET_STRATEGY_FOR_STREAM:
+        case GET_OUTPUT_FOR_EFFECT:
+        case REGISTER_EFFECT:
+        case UNREGISTER_EFFECT:
+        case SET_EFFECT_ENABLED:
+        case GET_OUTPUT_FOR_ATTR:
+        case ACQUIRE_SOUNDTRIGGER_SESSION:
+        case RELEASE_SOUNDTRIGGER_SESSION:
+            ALOGW("%s: transaction %d received from PID %d",
+                  __func__, code, IPCThreadState::self()->getCallingPid());
+            return INVALID_OPERATION;
+        default:
+            break;
+    }
+
     switch (code) {
         case SET_DEVICE_CONNECTION_STATE: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
@@ -934,22 +941,7 @@
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             audio_stream_type_t stream =
                     static_cast <audio_stream_type_t>(data.readInt32());
-            uint32_t samplingRate = data.readInt32();
-            audio_format_t format = (audio_format_t) data.readInt32();
-            audio_channel_mask_t channelMask = data.readInt32();
-            audio_output_flags_t flags =
-                    static_cast <audio_output_flags_t>(data.readInt32());
-            bool hasOffloadInfo = data.readInt32() != 0;
-            audio_offload_info_t offloadInfo;
-            if (hasOffloadInfo) {
-                data.read(&offloadInfo, sizeof(audio_offload_info_t));
-            }
-            audio_io_handle_t output = getOutput(stream,
-                                                 samplingRate,
-                                                 format,
-                                                 channelMask,
-                                                 flags,
-                                                 hasOffloadInfo ? &offloadInfo : NULL);
+            audio_io_handle_t output = getOutput(stream);
             reply->writeInt32(static_cast <int>(output));
             return NO_ERROR;
         } break;
diff --git a/media/libaudioclient/IAudioRecord.cpp b/media/libaudioclient/IAudioRecord.cpp
deleted file mode 100644
index 1331c0d..0000000
--- a/media/libaudioclient/IAudioRecord.cpp
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#define LOG_TAG "IAudioRecord"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioRecord.h>
-
-namespace android {
-
-enum {
-    UNUSED_WAS_GET_CBLK = IBinder::FIRST_CALL_TRANSACTION,
-    START,
-    STOP
-};
-
-class BpAudioRecord : public BpInterface<IAudioRecord>
-{
-public:
-    explicit BpAudioRecord(const sp<IBinder>& impl)
-        : BpInterface<IAudioRecord>(impl)
-    {
-    }
-
-    virtual status_t start(int /*AudioSystem::sync_event_t*/ event, audio_session_t triggerSession)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor());
-        data.writeInt32(event);
-        data.writeInt32(triggerSession);
-        status_t status = remote()->transact(START, data, &reply);
-        if (status == NO_ERROR) {
-            status = reply.readInt32();
-        } else {
-            ALOGW("start() error: %s", strerror(-status));
-        }
-        return status;
-    }
-
-    virtual void stop()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor());
-        remote()->transact(STOP, data, &reply);
-    }
-
-};
-
-IMPLEMENT_META_INTERFACE(AudioRecord, "android.media.IAudioRecord");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioRecord::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-        case START: {
-            CHECK_INTERFACE(IAudioRecord, data, reply);
-            int /*AudioSystem::sync_event_t*/ event = data.readInt32();
-            audio_session_t triggerSession = (audio_session_t) data.readInt32();
-            reply->writeInt32(start(event, triggerSession));
-            return NO_ERROR;
-        } break;
-        case STOP: {
-            CHECK_INTERFACE(IAudioRecord, data, reply);
-            stop();
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-} // namespace android
diff --git a/media/libaudioclient/IAudioTrack.cpp b/media/libaudioclient/IAudioTrack.cpp
index 79e864d..adff057 100644
--- a/media/libaudioclient/IAudioTrack.cpp
+++ b/media/libaudioclient/IAudioTrack.cpp
@@ -28,6 +28,8 @@
 
 namespace android {
 
+using media::VolumeShaper;
+
 enum {
     GET_CBLK = IBinder::FIRST_CALL_TRANSACTION,
     START,
@@ -185,7 +187,7 @@
             return nullptr;
         }
         sp<VolumeShaper::State> state = new VolumeShaper::State;
-        status = state->readFromParcel(reply);
+        status = state->readFromParcel(&reply);
         if (status != NO_ERROR) {
             return nullptr;
         }
@@ -263,12 +265,12 @@
             status_t status = data.readInt32(&present);
             if (status == NO_ERROR && present != 0) {
                 configuration = new VolumeShaper::Configuration();
-                status = configuration->readFromParcel(data);
+                status = configuration->readFromParcel(&data);
             }
             status = status ?: data.readInt32(&present);
             if (status == NO_ERROR && present != 0) {
                 operation = new VolumeShaper::Operation();
-                status = operation->readFromParcel(data);
+                status = operation->readFromParcel(&data);
             }
             if (status == NO_ERROR) {
                 status = (status_t)applyVolumeShaper(configuration, operation);
diff --git a/media/libaudioclient/PlayerBase.cpp b/media/libaudioclient/PlayerBase.cpp
index 7868318..b0c68e5 100644
--- a/media/libaudioclient/PlayerBase.cpp
+++ b/media/libaudioclient/PlayerBase.cpp
@@ -22,6 +22,8 @@
 
 namespace android {
 
+using media::VolumeShaper;
+
 //--------------------------------------------------------------------------------------------------
 PlayerBase::PlayerBase() : BnPlayer(),
         mPanMultiplierL(1.0f), mPanMultiplierR(1.0f),
@@ -117,23 +119,26 @@
 
 //------------------------------------------------------------------------------
 // Implementation of IPlayer
-void PlayerBase::start() {
+binder::Status PlayerBase::start() {
     ALOGD("PlayerBase::start() from IPlayer");
     (void)startWithStatus();
+    return binder::Status::ok();
 }
 
-void PlayerBase::pause() {
+binder::Status PlayerBase::pause() {
     ALOGD("PlayerBase::pause() from IPlayer");
     (void)pauseWithStatus();
+    return binder::Status::ok();
 }
 
 
-void PlayerBase::stop() {
+binder::Status PlayerBase::stop() {
     ALOGD("PlayerBase::stop() from IPlayer");
     (void)stopWithStatus();
+    return binder::Status::ok();
 }
 
-void PlayerBase::setVolume(float vol) {
+binder::Status PlayerBase::setVolume(float vol) {
     ALOGD("PlayerBase::setVolume() from IPlayer");
     {
         Mutex::Autolock _l(mSettingsLock);
@@ -144,9 +149,10 @@
     if (status != NO_ERROR) {
         ALOGW("PlayerBase::setVolume() error %d", status);
     }
+    return binder::Status::fromStatusT(status);
 }
 
-void PlayerBase::setPan(float pan) {
+binder::Status PlayerBase::setPan(float pan) {
     ALOGD("PlayerBase::setPan() from IPlayer");
     {
         Mutex::Autolock _l(mSettingsLock);
@@ -163,22 +169,19 @@
     if (status != NO_ERROR) {
         ALOGW("PlayerBase::setPan() error %d", status);
     }
+    return binder::Status::fromStatusT(status);
 }
 
-void PlayerBase::setStartDelayMs(int32_t delayMs __unused) {
+binder::Status PlayerBase::setStartDelayMs(int32_t delayMs __unused) {
     ALOGW("setStartDelay() is not supported");
+    return binder::Status::ok();
 }
 
-void PlayerBase::applyVolumeShaper(
-        const sp<VolumeShaper::Configuration>& configuration  __unused,
-        const sp<VolumeShaper::Operation>& operation __unused) {
+binder::Status PlayerBase::applyVolumeShaper(
+            const VolumeShaper::Configuration& configuration __unused,
+            const VolumeShaper::Operation& operation __unused) {
     ALOGW("applyVolumeShaper() is not supported");
-}
-
-status_t PlayerBase::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    return BnPlayer::onTransact(code, data, reply, flags);
+    return binder::Status::ok();
 }
 
 } // namespace android
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index cfb5be6..5a33975 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -20,6 +20,7 @@
 #include <math.h>
 #include <utils/Log.h>
 #include <cutils/properties.h>
+#include <media/AudioPolicyHelper.h>
 #include "media/ToneGenerator.h"
 
 
@@ -1044,7 +1045,7 @@
         }
     }
 
-    ALOGV("startTone");
+    ALOGV("startTone toneType %d", toneType);
 
     mLock.lock();
 
@@ -1196,9 +1197,16 @@
     mpAudioTrack = new AudioTrack();
     ALOGV("AudioTrack(%p) created", mpAudioTrack.get());
 
+    audio_attributes_t attr;
+    audio_stream_type_t streamType = mStreamType;
+    if (mStreamType == AUDIO_STREAM_VOICE_CALL) {
+        streamType = AUDIO_STREAM_DTMF;
+    }
+    stream_type_to_audio_attributes(streamType, &attr);
+
     const size_t frameCount = mProcessSize;
     status_t status = mpAudioTrack->set(
-            mStreamType,
+            AUDIO_STREAM_DEFAULT,
             0,    // sampleRate
             AUDIO_FORMAT_PCM_16_BIT,
             AUDIO_CHANNEL_OUT_MONO,
@@ -1210,7 +1218,11 @@
             0,    // sharedBuffer
             mThreadCanCallJava,
             AUDIO_SESSION_ALLOCATE,
-            AudioTrack::TRANSFER_CALLBACK);
+            AudioTrack::TRANSFER_CALLBACK,
+            nullptr,
+            AUDIO_UID_INVALID,
+            -1,
+            &attr);
 
     if (status != NO_ERROR) {
         ALOGE("AudioTrack(%p) set failed with error %d", mpAudioTrack.get(), status);
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index 48cd803..0a914fc 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -18,6 +18,8 @@
 
 namespace android {
 
+using media::VolumeShaper;
+
 //--------------------------------------------------------------------------------------------------
 TrackPlayerBase::TrackPlayerBase() : PlayerBase(),
         mPlayerVolumeL(1.0f), mPlayerVolumeR(1.0f)
@@ -103,18 +105,24 @@
 }
 
 
-void TrackPlayerBase::applyVolumeShaper(
-        const sp<VolumeShaper::Configuration>& configuration,
-        const sp<VolumeShaper::Operation>& operation) {
+binder::Status TrackPlayerBase::applyVolumeShaper(
+        const VolumeShaper::Configuration& configuration,
+        const VolumeShaper::Operation& operation) {
+
+    sp<VolumeShaper::Configuration> spConfiguration = new VolumeShaper::Configuration(configuration);
+    sp<VolumeShaper::Operation> spOperation = new VolumeShaper::Operation(operation);
+
     if (mAudioTrack != 0) {
         ALOGD("TrackPlayerBase::applyVolumeShaper() from IPlayer");
-        VolumeShaper::Status status = mAudioTrack->applyVolumeShaper(configuration, operation);
+        VolumeShaper::Status status = mAudioTrack->applyVolumeShaper(spConfiguration, spOperation);
         if (status < 0) { // a non-negative value is the volume shaper id.
             ALOGE("TrackPlayerBase::applyVolumeShaper() failed with status %d", status);
         }
+        return binder::Status::fromStatusT(status);
     } else {
         ALOGD("TrackPlayerBase::applyVolumeShaper()"
-                " no AudioTrack for volume control from IPlayer");
+              " no AudioTrack for volume control from IPlayer");
+        return binder::Status::ok();
     }
 }
 
diff --git a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
new file mode 100644
index 0000000..7572671
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/* Native code must specify namespace media (media::IAudioRecord) when referring to this class */
+interface IAudioRecord {
+
+  /* After it's created the track is not active. Call start() to
+   * make it active.
+   */
+  void start(int /*AudioSystem::sync_event_t*/ event,
+             int /*audio_session_t*/ triggerSession);
+
+  /* Stop a track. If set, the callback will cease being called and
+   * obtainBuffer will return an error. Buffers that are already released
+   * will be processed, unless flush() is called.
+   */
+  void stop();
+}
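
Together with the removal of the hand-written IAudioRecord.cpp earlier in this
patch, this file moves the record-control interface to AIDL-generated code. A
hypothetical server-side implementation of the generated stub could look like
the sketch below; RecordTrack and its methods are assumptions for illustration:

    #include "android/media/BnAudioRecord.h"

    class RecordHandle : public media::BnAudioRecord {
    public:
        binder::Status start(int32_t event, int32_t triggerSession) override {
            // Forward to the server-side track, mapping status_t onto binder::Status.
            status_t status = mTrack->start(event,
                                            static_cast<audio_session_t>(triggerSession));
            return binder::Status::fromStatusT(status);
        }
        binder::Status stop() override {
            mTrack->stop();
            return binder::Status::ok();
        }
    private:
        sp<RecordTrack> mTrack;  // hypothetical server-side record object
    };
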
diff --git a/media/libaudioclient/aidl/android/media/IPlayer.aidl b/media/libaudioclient/aidl/android/media/IPlayer.aidl
new file mode 100644
index 0000000..a90fcdd
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IPlayer.aidl
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.VolumeShaper.Configuration;
+import android.media.VolumeShaper.Operation;
+
+/**
+ * @hide
+ */
+interface IPlayer {
+    oneway void start();
+    oneway void pause();
+    oneway void stop();
+    oneway void setVolume(float vol);
+    oneway void setPan(float pan);
+    oneway void setStartDelayMs(int delayMs);
+    oneway void applyVolumeShaper(in Configuration configuration,
+                                  in Operation operation);
+}
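
Every IPlayer method is declared oneway, so calls through the generated proxy are
queued to the remote process and do not block on the server. An illustrative
client-side use (how the player binder is obtained is assumed, not part of this
patch):

    #include "android/media/IPlayer.h"

    void duckAndPause(const sp<IBinder>& playerBinder) {
        sp<media::IPlayer> player = interface_cast<media::IPlayer>(playerBinder);
        // Oneway calls return immediately; failures only surface in the server log.
        player->setVolume(0.2f);
        player->pause();
    }
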
diff --git a/media/libstagefright/MediaSource.cpp b/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl
similarity index 72%
copy from media/libstagefright/MediaSource.cpp
copy to media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl
index a17757a..fd0e60f 100644
--- a/media/libstagefright/MediaSource.cpp
+++ b/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2009 The Android Open Source Project
+ * Copyright (C) 2017 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,12 +14,6 @@
  * limitations under the License.
  */
 
-#include <media/stagefright/MediaSource.h>
+package android.media.VolumeShaper;
 
-namespace android {
-
-MediaSource::MediaSource() {}
-
-MediaSource::~MediaSource() {}
-
-}  // namespace android
+parcelable Configuration cpp_header "media/VolumeShaper.h";
diff --git a/media/libstagefright/MediaSource.cpp b/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl
similarity index 72%
copy from media/libstagefright/MediaSource.cpp
copy to media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl
index a17757a..4290d9d 100644
--- a/media/libstagefright/MediaSource.cpp
+++ b/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2009 The Android Open Source Project
+ * Copyright (C) 2017 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,12 +14,6 @@
  * limitations under the License.
  */
 
-#include <media/stagefright/MediaSource.h>
+package android.media.VolumeShaper;
 
-namespace android {
-
-MediaSource::MediaSource() {}
-
-MediaSource::~MediaSource() {}
-
-}  // namespace android
+parcelable Operation cpp_header "media/VolumeShaper.h";
diff --git a/media/libstagefright/MediaSource.cpp b/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl
similarity index 72%
rename from media/libstagefright/MediaSource.cpp
rename to media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl
index a17757a..f6a22b8 100644
--- a/media/libstagefright/MediaSource.cpp
+++ b/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2009 The Android Open Source Project
+ * Copyright (C) 2017 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,12 +14,6 @@
  * limitations under the License.
  */
 
-#include <media/stagefright/MediaSource.h>
+package android.media.VolumeShaper;
 
-namespace android {
-
-MediaSource::MediaSource() {}
-
-MediaSource::~MediaSource() {}
-
-}  // namespace android
+parcelable State cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/include/media/AudioClient.h b/media/libaudioclient/include/media/AudioClient.h
index 9efd76d..247af9e 100644
--- a/media/libaudioclient/include/media/AudioClient.h
+++ b/media/libaudioclient/include/media/AudioClient.h
@@ -18,19 +18,38 @@
 #ifndef ANDROID_AUDIO_CLIENT_H
 #define ANDROID_AUDIO_CLIENT_H
 
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
 #include <system/audio.h>
 #include <utils/String16.h>
 
 namespace android {
 
-class AudioClient {
+class AudioClient : public Parcelable {
  public:
     AudioClient() :
-        clientUid(-1), clientPid(-1), packageName("") {}
+        clientUid(-1), clientPid(-1), clientTid(-1), packageName("") {}
 
     uid_t clientUid;
     pid_t clientPid;
+    pid_t clientTid;
     String16 packageName;
+
+    status_t readFromParcel(const Parcel *parcel) override {
+        clientUid = parcel->readInt32();
+        clientPid = parcel->readInt32();
+        clientTid = parcel->readInt32();
+        packageName = parcel->readString16();
+        return NO_ERROR;
+    }
+
+    status_t writeToParcel(Parcel *parcel) const override {
+        parcel->writeInt32(clientUid);
+        parcel->writeInt32(clientPid);
+        parcel->writeInt32(clientTid);
+        parcel->writeString16(packageName);
+        return NO_ERROR;
+    }
 };
 
 }; // namespace android
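
AudioClient now also carries the client thread id and knows how to serialize
itself; readFromParcel() and writeToParcel() must stay field-for-field symmetric.
A standalone round-trip sketch (not code from this patch):

    AudioClient clientIn;
    clientIn.clientUid = getuid();
    clientIn.clientPid = getpid();
    clientIn.clientTid = gettid();
    clientIn.packageName = String16("com.example.app");

    Parcel p;
    clientIn.writeToParcel(&p);
    p.setDataPosition(0);          // rewind before reading back

    AudioClient clientOut;
    clientOut.readFromParcel(&p);  // fields come back in the order they were written
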
diff --git a/media/libaudioclient/include/media/AudioMixer.h b/media/libaudioclient/include/media/AudioMixer.h
index 2bd2d01..d4ce417 100644
--- a/media/libaudioclient/include/media/AudioMixer.h
+++ b/media/libaudioclient/include/media/AudioMixer.h
@@ -25,7 +25,7 @@
 #include <media/AudioResampler.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/BufferProviders.h>
-#include <media/nbaio/NBLog.h>
+#include <media/nblog/NBLog.h>
 #include <system/audio.h>
 #include <utils/Compat.h>
 #include <utils/threads.h>
@@ -33,6 +33,9 @@
 // FIXME This is actually unity gain, which might not be max in future, expressed in U.12
 #define MAX_GAIN_INT AudioMixer::UNITY_GAIN_INT
 
+// This must match frameworks/av/services/audioflinger/Configuration.h
+#define FLOAT_AUX
+
 namespace android {
 
 // ----------------------------------------------------------------------------
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index dd72170..fea973a 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -17,13 +17,17 @@
 #ifndef ANDROID_AUDIORECORD_H
 #define ANDROID_AUDIORECORD_H
 
+#include <binder/IMemory.h>
 #include <cutils/sched_policy.h>
 #include <media/AudioSystem.h>
 #include <media/AudioTimestamp.h>
-#include <media/IAudioRecord.h>
+#include <media/MediaAnalyticsItem.h>
 #include <media/Modulo.h>
+#include <utils/RefBase.h>
 #include <utils/threads.h>
 
+#include "android/media/IAudioRecord.h"
+
 namespace android {
 
 // ----------------------------------------------------------------------------
@@ -182,7 +186,8 @@
                                     audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
                                     uid_t uid = AUDIO_UID_INVALID,
                                     pid_t pid = -1,
-                                    const audio_attributes_t* pAttributes = NULL);
+                                    const audio_attributes_t* pAttributes = NULL,
+                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     /* Terminates the AudioRecord and unregisters it from AudioFlinger.
      * Also destroys all resources associated with the AudioRecord.
@@ -220,7 +225,8 @@
                             audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
                             uid_t uid = AUDIO_UID_INVALID,
                             pid_t pid = -1,
-                            const audio_attributes_t* pAttributes = NULL);
+                            const audio_attributes_t* pAttributes = NULL,
+                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     /* Result of constructing the AudioRecord. This must be checked for successful initialization
      * before using any AudioRecord API (except for set()), because using
@@ -516,6 +522,11 @@
     /* Get the flags */
             audio_input_flags_t getFlags() const { AutoMutex _l(mLock); return mFlags; }
 
+    /*
+     * Dumps the state of an audio record.
+     */
+            status_t    dump(int fd, const Vector<String16>& args) const;
+
 private:
     /* copying audio record objects is not allowed */
                         AudioRecord(const AudioRecord& other);
@@ -565,7 +576,7 @@
 
             // caller must hold lock on mLock for all _l methods
 
-            status_t openRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName);
+            status_t createRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName);
 
             // FIXME enum is faster than strcmp() for parameter 'from'
             status_t restoreRecord_l(const char *from);
@@ -635,7 +646,7 @@
 
     // Next 5 fields may be changed if IAudioRecord is re-created, but always != 0
     // provided the initial set() was successful
-    sp<IAudioRecord>        mAudioRecord;
+    sp<media::IAudioRecord> mAudioRecord;
     sp<IMemory>             mCblkMemory;
     audio_track_cblk_t*     mCblk;              // re-load after mLock.unlock()
     sp<IMemory>             mBufferMemory;
@@ -677,8 +688,25 @@
                                               // May not match the app selection depending on other
                                               // activity and connected devices
     wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
-    audio_port_handle_t    mPortId;  // unique ID allocated by audio policy
 
+private:
+    class MediaMetrics {
+      public:
+        MediaMetrics() : mAnalyticsItem(new MediaAnalyticsItem("audiorecord")) {
+        }
+        ~MediaMetrics() {
+            // mAnalyticsItem alloc failure will be flagged in the constructor
+            // don't log empty records
+            if (mAnalyticsItem->count() > 0) {
+                mAnalyticsItem->setFinalized(true);
+                mAnalyticsItem->selfrecord();
+            }
+        }
+        void gather(const AudioRecord *record);
+      private:
+        std::unique_ptr<MediaAnalyticsItem> mAnalyticsItem;
+    };
+    MediaMetrics mMediaMetrics;
 };
 
 }; // namespace android
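The MediaMetrics helper added to AudioRecord above follows a collect-then-publish pattern: fields are gathered into an analytics item while the owning object lives, and the item records itself exactly once from the destructor, skipping empty records. A minimal standalone sketch of that pattern follows; Item is a hypothetical stand-in, not the real MediaAnalyticsItem API.

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <memory>
    #include <string>

    // Hypothetical stand-in for MediaAnalyticsItem: a keyed bag of values that
    // knows how many entries it holds and can "self-record" them somewhere.
    class Item {
    public:
        explicit Item(std::string kind) : mKind(std::move(kind)) {}
        void setInt32(const std::string& key, int32_t value) { mValues[key] = value; }
        size_t count() const { return mValues.size(); }
        void selfrecord() const {
            for (const auto& entry : mValues) {
                std::cout << mKind << ": " << entry.first << "=" << entry.second << "\n";
            }
        }
    private:
        std::string mKind;
        std::map<std::string, int32_t> mValues;
    };

    // Same shape as the nested MediaMetrics class in the diff: gather during the
    // owner's lifetime, publish from the destructor, never log empty records.
    class Metrics {
    public:
        Metrics() : mItem(new Item("audiorecord")) {}
        ~Metrics() {
            if (mItem->count() > 0) {
                mItem->selfrecord();
            }
        }
        void gather(uint32_t sampleRate) { mItem->setInt32("sample-rate", sampleRate); }
    private:
        std::unique_ptr<Item> mItem;
    };

    int main() {
        Metrics metrics;
        metrics.gather(48000);  // published automatically when metrics goes out of scope
        return 0;
    }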
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 5a81d83..24a6e22 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -106,6 +106,9 @@
 
     static float linearToLog(int volume);
     static int logToLinear(float volume);
+    static size_t calculateMinFrameCount(
+            uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate,
+            uint32_t sampleRate, float speed /*, uint32_t notificationsPerBufferReq*/);
 
     // Returned samplingRate and frameCount output values are guaranteed
     // to be non-zero if status == NO_ERROR
@@ -209,14 +212,6 @@
     static status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
     static audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
 
-    // Client must successfully hand off the handle reference to AudioFlinger via createTrack(),
-    // or release it with releaseOutput().
-    static audio_io_handle_t getOutput(audio_stream_type_t stream,
-                                        uint32_t samplingRate = 0,
-                                        audio_format_t format = AUDIO_FORMAT_DEFAULT,
-                                        audio_channel_mask_t channelMask = AUDIO_CHANNEL_OUT_STEREO,
-                                        audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
-                                        const audio_offload_info_t *offloadInfo = NULL);
     static status_t getOutputForAttr(const audio_attributes_t *attr,
                                      audio_io_handle_t *output,
                                      audio_session_t session,
@@ -236,7 +231,7 @@
                               audio_stream_type_t stream,
                               audio_session_t session);
 
-    // Client must successfully hand off the handle reference to AudioFlinger via openRecord(),
+    // Client must successfully hand off the handle reference to AudioFlinger via createRecord(),
     // or release it with releaseInput().
     static status_t getInputForAttr(const audio_attributes_t *attr,
                                     audio_io_handle_t *input,
@@ -450,6 +445,7 @@
         Vector <sp <AudioPortCallback> >    mAudioPortCallbacks;
     };
 
+    static audio_io_handle_t getOutput(audio_stream_type_t stream);
     static const sp<AudioFlingerClient> getAudioFlingerClient();
     static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
 
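The new AudioSystem::calculateMinFrameCount() declaration centralizes the client-side minimum buffer sizing that AudioTrack previously computed inline. A rough standalone sketch of the usual heuristic is shown below: keep enough buffers to cover the reported output latency, then scale by the resampling ratio and playback speed. Names and rounding details are assumptions for illustration, not the exact libaudioclient implementation.

    #include <algorithm>
    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    // afLatencyMs/afFrameCount/afSampleRate describe the AudioFlinger output stream;
    // sampleRate/speed describe the client source feeding it.
    static size_t calculateMinFrameCountSketch(
            uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate,
            uint32_t sampleRate, float speed) {
        // Number of hardware buffers needed to span the reported output latency.
        const uint32_t afBufferMs = (1000 * afFrameCount) / afSampleRate;
        uint32_t minBufCount = afBufferMs > 0 ? afLatencyMs / afBufferMs : 2;
        if (minBufCount < 2) minBufCount = 2;

        // Source frames needed to fill one hardware buffer, accounting for
        // resampling (sampleRate/afSampleRate) and time stretching (speed).
        const size_t srcFramesPerAfBuffer = static_cast<size_t>(std::ceil(
                static_cast<double>(afFrameCount) * sampleRate * speed / afSampleRate));

        return minBufCount * std::max<size_t>(srcFramesPerAfBuffer, 1);
    }

    int main() {
        // Example: 48 kHz client on a 48 kHz output with 960-frame buffers, 40 ms latency.
        std::printf("min frames = %zu\n",
                    calculateMinFrameCountSketch(40, 960, 48000, 48000, 1.0f));
        return 0;
    }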
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 47d87e9..c146db9 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -22,6 +22,7 @@
 #include <media/AudioTimestamp.h>
 #include <media/IAudioTrack.h>
 #include <media/AudioResamplerPublic.h>
+#include <media/MediaAnalyticsItem.h>
 #include <media/Modulo.h>
 #include <utils/threads.h>
 
@@ -218,6 +219,8 @@
      *                     maxRequiredSpeed playback. Values less than 1.0f and greater than
      *                     AUDIO_TIMESTRETCH_SPEED_MAX will be clamped.  For non-PCM tracks
      *                     and direct or offloaded tracks, this parameter is ignored.
+     * selectedDeviceId:   Id of the audio device explicitly selected by the app when requesting
+     *                     that the AudioTrack open on a specific device, or AUDIO_PORT_HANDLE_NONE.
      * threadCanCallJava:  Not present in parameter list, and so is fixed at false.
      */
 
@@ -237,7 +240,8 @@
                                     pid_t pid = -1,
                                     const audio_attributes_t* pAttributes = NULL,
                                     bool doNotReconnect = false,
-                                    float maxRequiredSpeed = 1.0f);
+                                    float maxRequiredSpeed = 1.0f,
+                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     /* Creates an audio track and registers it with AudioFlinger.
      * With this constructor, the track is configured for static buffer mode.
@@ -313,7 +317,8 @@
                             pid_t pid = -1,
                             const audio_attributes_t* pAttributes = NULL,
                             bool doNotReconnect = false,
-                            float maxRequiredSpeed = 1.0f);
+                            float maxRequiredSpeed = 1.0f,
+                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     /* Result of constructing the AudioTrack. This must be checked for successful initialization
      * before using any AudioTrack API (except for set()), because using
@@ -748,12 +753,12 @@
             status_t    setParameters(const String8& keyValuePairs);
 
     /* Sets the volume shaper object */
-            VolumeShaper::Status applyVolumeShaper(
-                    const sp<VolumeShaper::Configuration>& configuration,
-                    const sp<VolumeShaper::Operation>& operation);
+            media::VolumeShaper::Status applyVolumeShaper(
+                    const sp<media::VolumeShaper::Configuration>& configuration,
+                    const sp<media::VolumeShaper::Operation>& operation);
 
     /* Gets the volume shaper state */
-            sp<VolumeShaper::State> getVolumeShaperState(int id);
+            sp<media::VolumeShaper::State> getVolumeShaperState(int id);
 
     /* Get parameters */
             String8     getParameters(const String8& keys);
@@ -990,7 +995,7 @@
     sp<IAudioTrack>         mAudioTrack;
     sp<IMemory>             mCblkMemory;
     audio_track_cblk_t*     mCblk;                  // re-load after mLock.unlock()
-    audio_io_handle_t       mOutput;                // returned by AudioSystem::getOutput()
+    audio_io_handle_t       mOutput;                // returned by AudioSystem::getOutputForAttr()
 
     sp<AudioTrackThread>    mAudioTrackThread;
     bool                    mThreadCanCallJava;
@@ -1160,7 +1165,7 @@
                                               // May not match the app selection depending on other
                                               // activity and connected devices.
 
-    sp<VolumeHandler>       mVolumeHandler;
+    sp<media::VolumeHandler>       mVolumeHandler;
 
 private:
     class DeathNotifier : public IBinder::DeathRecipient {
@@ -1178,7 +1183,25 @@
     pid_t                   mClientPid;
 
     wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
-    audio_port_handle_t     mPortId;  // unique ID allocated by audio policy
+
+private:
+    class MediaMetrics {
+      public:
+        MediaMetrics() : mAnalyticsItem(new MediaAnalyticsItem("audiotrack")) {
+        }
+        ~MediaMetrics() {
+            // mAnalyticsItem alloc failure will be flagged in the constructor
+            // don't log empty records
+            if (mAnalyticsItem->count() > 0) {
+                mAnalyticsItem->setFinalized(true);
+                mAnalyticsItem->selfrecord();
+            }
+        }
+        void gather(const AudioTrack *track);
+      private:
+        std::unique_ptr<MediaAnalyticsItem> mAnalyticsItem;
+    };
+    MediaMetrics mMediaMetrics;
 };
 
 }; // namespace android
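For illustration, a hedged sketch of a call site that uses the new trailing selectedDeviceId argument. The argument order mirrors the set() call in test_create_audiotrack.cpp later in this patch; only the final argument differs, and preferredDevice is a hypothetical audio_port_handle_t obtained elsewhere (for example from audio port enumeration).

    #include <unistd.h>

    #include <media/AudioTrack.h>

    using namespace android;

    // Sketch only: open a streaming AudioTrack routed to an explicitly selected device.
    static status_t openTrackOnDevice(const sp<AudioTrack>& track,
                                      audio_port_handle_t preferredDevice) {
        audio_attributes_t attributes = {};
        attributes.usage = AUDIO_USAGE_MEDIA;
        attributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;

        return track->set(AUDIO_STREAM_DEFAULT,
                          48000,
                          AUDIO_FORMAT_PCM_16_BIT,
                          AUDIO_CHANNEL_OUT_STEREO,
                          0 /* frameCount: let the framework choose */,
                          AUDIO_OUTPUT_FLAG_NONE,
                          nullptr /* callback */,
                          nullptr /* user */,
                          0 /* notificationFrames */,
                          nullptr /* sharedBuffer */,
                          false /* threadCanCallJava */,
                          AUDIO_SESSION_ALLOCATE,
                          AudioTrack::TRANSFER_DEFAULT,
                          nullptr /* offloadInfo */,
                          getuid(),
                          getpid(),
                          &attributes,
                          false /* doNotReconnect */,
                          1.0f /* maxRequiredSpeed */,
                          preferredDevice /* new selectedDeviceId argument */);
    }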
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 0ad4231..57d9778 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -24,8 +24,10 @@
 #include <utils/RefBase.h>
 #include <utils/Errors.h>
 #include <binder/IInterface.h>
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+#include <media/AudioClient.h>
 #include <media/IAudioTrack.h>
-#include <media/IAudioRecord.h>
 #include <media/IAudioFlingerClient.h>
 #include <system/audio.h>
 #include <system/audio_effect.h>
@@ -34,6 +36,8 @@
 #include <media/IEffectClient.h>
 #include <utils/String8.h>
 
+#include "android/media/IAudioRecord.h"
+
 namespace android {
 
 // ----------------------------------------------------------------------------
@@ -43,6 +47,271 @@
 public:
     DECLARE_META_INTERFACE(AudioFlinger);
 
+    /* CreateTrackInput contains all input arguments sent by AudioTrack to AudioFlinger
+     * when calling createTrack(), including arguments that will be updated by AudioFlinger
+     * and returned in the CreateTrackOutput object.
+     */
+    class CreateTrackInput : public Parcelable {
+    public:
+        status_t readFromParcel(const Parcel *parcel) override {
+            /* input arguments*/
+            memset(&attr, 0, sizeof(audio_attributes_t));
+            if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE -1] = '\0';
+            memset(&config, 0, sizeof(audio_config_t));
+            if (parcel->read(&config, sizeof(audio_config_t)) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            if (parcel->readInt32() != 0) {
+                sharedBuffer = interface_cast<IMemory>(parcel->readStrongBinder());
+                if (sharedBuffer == 0 || sharedBuffer->pointer() == NULL) {
+                    return BAD_VALUE;
+                }
+            }
+            notificationsPerBuffer = parcel->readInt32();
+            speed = parcel->readFloat();
+
+            /* input/output arguments*/
+            (void)parcel->read(&flags, sizeof(audio_output_flags_t));
+            frameCount = parcel->readInt64();
+            notificationFrameCount = parcel->readInt64();
+            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->read(&sessionId, sizeof(audio_session_t));
+            return NO_ERROR;
+        }
+
+        status_t writeToParcel(Parcel *parcel) const override {
+            /* input arguments*/
+            (void)parcel->write(&attr, sizeof(audio_attributes_t));
+            (void)parcel->write(&config, sizeof(audio_config_t));
+            (void)clientInfo.writeToParcel(parcel);
+            if (sharedBuffer != 0) {
+                (void)parcel->writeInt32(1);
+                (void)parcel->writeStrongBinder(IInterface::asBinder(sharedBuffer));
+            } else {
+                (void)parcel->writeInt32(0);
+            }
+            (void)parcel->writeInt32(notificationsPerBuffer);
+            (void)parcel->writeFloat(speed);
+
+            /* input/output arguments*/
+            (void)parcel->write(&flags, sizeof(audio_output_flags_t));
+            (void)parcel->writeInt64(frameCount);
+            (void)parcel->writeInt64(notificationFrameCount);
+            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->write(&sessionId, sizeof(audio_session_t));
+            return NO_ERROR;
+        }
+
+        /* input */
+        audio_attributes_t attr;
+        audio_config_t config;
+        AudioClient clientInfo;
+        sp<IMemory> sharedBuffer;
+        uint32_t notificationsPerBuffer;
+        float speed;
+
+        /* input/output */
+        audio_output_flags_t flags;
+        size_t frameCount;
+        size_t notificationFrameCount;
+        audio_port_handle_t selectedDeviceId;
+        audio_session_t sessionId;
+    };
+
+    /* CreateTrackOutput contains all output arguments returned by AudioFlinger to AudioTrack
+     * when calling createTrack(), including arguments that were passed as I/O for update by
+     * CreateTrackInput.
+     */
+    class CreateTrackOutput : public Parcelable {
+    public:
+        status_t readFromParcel(const Parcel *parcel) override {
+            /* input/output arguments*/
+            (void)parcel->read(&flags, sizeof(audio_output_flags_t));
+            frameCount = parcel->readInt64();
+            notificationFrameCount = parcel->readInt64();
+            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->read(&sessionId, sizeof(audio_session_t));
+
+            /* output arguments*/
+            sampleRate = parcel->readUint32();
+            afFrameCount = parcel->readInt64();
+            afSampleRate = parcel->readInt64();
+            afLatencyMs = parcel->readInt32();
+            (void)parcel->read(&outputId, sizeof(audio_io_handle_t));
+            return NO_ERROR;
+        }
+
+        status_t writeToParcel(Parcel *parcel) const override {
+            /* input/output arguments*/
+            (void)parcel->write(&flags, sizeof(audio_output_flags_t));
+            (void)parcel->writeInt64(frameCount);
+            (void)parcel->writeInt64(notificationFrameCount);
+            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->write(&sessionId, sizeof(audio_session_t));
+
+            /* output arguments*/
+            (void)parcel->writeUint32(sampleRate);
+            (void)parcel->writeInt64(afFrameCount);
+            (void)parcel->writeInt64(afSampleRate);
+            (void)parcel->writeInt32(afLatencyMs);
+            (void)parcel->write(&outputId, sizeof(audio_io_handle_t));
+            return NO_ERROR;
+        }
+
+        /* input/output */
+        audio_output_flags_t flags;
+        size_t frameCount;
+        size_t notificationFrameCount;
+        audio_port_handle_t selectedDeviceId;
+        audio_session_t sessionId;
+
+        /* output */
+        uint32_t sampleRate;
+        size_t   afFrameCount;
+        uint32_t afSampleRate;
+        uint32_t afLatencyMs;
+        audio_io_handle_t outputId;
+    };
+
+    /* CreateRecordInput contains all input arguments sent by AudioRecord to AudioFlinger
+     * when calling createRecord(), including arguments that will be updated by AudioFlinger
+     * and returned in the CreateRecordOutput object.
+     */
+    class CreateRecordInput : public Parcelable {
+    public:
+        status_t readFromParcel(const Parcel *parcel) override {
+            /* input arguments*/
+            memset(&attr, 0, sizeof(audio_attributes_t));
+            if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE -1] = '\0';
+            memset(&config, 0, sizeof(audio_config_base_t));
+            if (parcel->read(&config, sizeof(audio_config_base_t)) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            opPackageName = parcel->readString16();
+
+            /* input/output arguments*/
+            (void)parcel->read(&flags, sizeof(audio_input_flags_t));
+            frameCount = parcel->readInt64();
+            notificationFrameCount = parcel->readInt64();
+            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->read(&sessionId, sizeof(audio_session_t));
+            return NO_ERROR;
+        }
+
+        status_t writeToParcel(Parcel *parcel) const override {
+            /* input arguments*/
+            (void)parcel->write(&attr, sizeof(audio_attributes_t));
+            (void)parcel->write(&config, sizeof(audio_config_base_t));
+            (void)clientInfo.writeToParcel(parcel);
+            (void)parcel->writeString16(opPackageName);
+
+            /* input/output arguments*/
+            (void)parcel->write(&flags, sizeof(audio_input_flags_t));
+            (void)parcel->writeInt64(frameCount);
+            (void)parcel->writeInt64(notificationFrameCount);
+            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->write(&sessionId, sizeof(audio_session_t));
+            return NO_ERROR;
+        }
+
+        /* input */
+        audio_attributes_t attr;
+        audio_config_base_t config;
+        AudioClient clientInfo;
+        String16 opPackageName;
+
+        /* input/output */
+        audio_input_flags_t flags;
+        size_t frameCount;
+        size_t notificationFrameCount;
+        audio_port_handle_t selectedDeviceId;
+        audio_session_t sessionId;
+    };
+
+    /* CreateRecordOutput contains all output arguments returned by AudioFlinger to AudioRecord
+     * when calling createRecord(), including arguments that were passed as I/O for update by
+     * CreateRecordInput.
+     */
+    class CreateRecordOutput : public Parcelable {
+    public:
+        status_t readFromParcel(const Parcel *parcel) override {
+            /* input/output arguments*/
+            (void)parcel->read(&flags, sizeof(audio_input_flags_t));
+            frameCount = parcel->readInt64();
+            notificationFrameCount = parcel->readInt64();
+            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->read(&sessionId, sizeof(audio_session_t));
+
+            /* output arguments*/
+            sampleRate = parcel->readUint32();
+            (void)parcel->read(&inputId, sizeof(audio_io_handle_t));
+            if (parcel->readInt32() != 0) {
+                cblk = interface_cast<IMemory>(parcel->readStrongBinder());
+                if (cblk == 0 || cblk->pointer() == NULL) {
+                    return BAD_VALUE;
+                }
+            }
+            if (parcel->readInt32() != 0) {
+                buffers = interface_cast<IMemory>(parcel->readStrongBinder());
+                if (buffers == 0 || buffers->pointer() == NULL) {
+                    return BAD_VALUE;
+                }
+            }
+            return NO_ERROR;
+        }
+
+        status_t writeToParcel(Parcel *parcel) const override {
+            /* input/output arguments*/
+            (void)parcel->write(&flags, sizeof(audio_input_flags_t));
+            (void)parcel->writeInt64(frameCount);
+            (void)parcel->writeInt64(notificationFrameCount);
+            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->write(&sessionId, sizeof(audio_session_t));
+
+            /* output arguments*/
+            (void)parcel->writeUint32(sampleRate);
+            (void)parcel->write(&inputId, sizeof(audio_io_handle_t));
+            if (cblk != 0) {
+                (void)parcel->writeInt32(1);
+                (void)parcel->writeStrongBinder(IInterface::asBinder(cblk));
+            } else {
+                (void)parcel->writeInt32(0);
+            }
+            if (buffers != 0) {
+                (void)parcel->writeInt32(1);
+                (void)parcel->writeStrongBinder(IInterface::asBinder(buffers));
+            } else {
+                (void)parcel->writeInt32(0);
+            }
+
+            return NO_ERROR;
+        }
+
+        /* input/output */
+        audio_input_flags_t flags;
+        size_t frameCount;
+        size_t notificationFrameCount;
+        audio_port_handle_t selectedDeviceId;
+        audio_session_t sessionId;
+
+        /* output */
+        uint32_t sampleRate;
+        audio_io_handle_t inputId;
+        sp<IMemory> cblk;
+        sp<IMemory> buffers;
+    };
 
     // invariant on exit for all APIs that return an sp<>:
     //   (return value != 0) == (*status == NO_ERROR)
@@ -50,45 +319,13 @@
     /* create an audio track and registers it with AudioFlinger.
      * return null if the track cannot be created.
      */
-    virtual sp<IAudioTrack> createTrack(
-                                audio_stream_type_t streamType,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                size_t *pFrameCount,
-                                audio_output_flags_t *flags,
-                                const sp<IMemory>& sharedBuffer,
-                                // On successful return, AudioFlinger takes over the handle
-                                // reference and will release it when the track is destroyed.
-                                // However on failure, the client is responsible for release.
-                                audio_io_handle_t output,
-                                pid_t pid,
-                                pid_t tid,  // -1 means unused, otherwise must be valid non-0
-                                audio_session_t *sessionId,
-                                int clientUid,
-                                status_t *status,
-                                audio_port_handle_t portId) = 0;
+    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
+                                        CreateTrackOutput& output,
+                                        status_t *status) = 0;
 
-    virtual sp<IAudioRecord> openRecord(
-                                // On successful return, AudioFlinger takes over the handle
-                                // reference and will release it when the track is destroyed.
-                                // However on failure, the client is responsible for release.
-                                audio_io_handle_t input,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                const String16& callingPackage,
-                                size_t *pFrameCount,
-                                audio_input_flags_t *flags,
-                                pid_t pid,
-                                pid_t tid,  // -1 means unused, otherwise must be valid non-0
-                                int clientUid,
-                                audio_session_t *sessionId,
-                                size_t *notificationFrames,
-                                sp<IMemory>& cblk,
-                                sp<IMemory>& buffers,   // return value 0 means it follows cblk
-                                status_t *status,
-                                audio_port_handle_t portId) = 0;
+    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
+                                        CreateRecordOutput& output,
+                                        status_t *status) = 0;
 
     // FIXME Surprisingly, format/latency don't work for input handles
 
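The createTrack()/createRecord() methods above replace the long positional argument lists with the parcelable input/output classes defined earlier in this header. A hedged sketch of the client-side shape (error handling trimmed; the real AudioTrack::createTrack_l() does considerably more bookkeeping), assuming the usual AudioSystem::get_audio_flinger() accessor:

    #include <media/AudioSystem.h>
    #include <media/IAudioFlinger.h>

    using namespace android;

    // Sketch only: fill CreateTrackInput, call the new createTrack(), and read the
    // negotiated values back from CreateTrackOutput instead of pointer arguments.
    static sp<IAudioTrack> createTrackSketch(const audio_attributes_t& attr,
                                             const audio_config_t& config,
                                             const AudioClient& clientInfo) {
        sp<IAudioFlinger> audioFlinger = AudioSystem::get_audio_flinger();
        if (audioFlinger == nullptr) return nullptr;

        IAudioFlinger::CreateTrackInput input;
        input.attr = attr;
        input.config = config;
        input.clientInfo = clientInfo;
        input.sharedBuffer = nullptr;            // streaming mode
        input.notificationsPerBuffer = 0;
        input.speed = 1.0f;
        input.flags = AUDIO_OUTPUT_FLAG_NONE;    // in/out: server may strip flags
        input.frameCount = 0;                    // in/out: 0 lets the server choose
        input.notificationFrameCount = 0;
        input.selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
        input.sessionId = AUDIO_SESSION_ALLOCATE;

        IAudioFlinger::CreateTrackOutput output;
        status_t status = NO_ERROR;
        sp<IAudioTrack> track = audioFlinger->createTrack(input, output, &status);
        if (status != NO_ERROR || track == nullptr) return nullptr;

        // Negotiated values (frameCount, sampleRate, afLatencyMs, outputId, ...)
        // come back in 'output'.
        return track;
    }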
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index 60ba4ba..5558b77 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -55,12 +55,7 @@
     virtual status_t setForceUse(audio_policy_force_use_t usage,
                                     audio_policy_forced_cfg_t config) = 0;
     virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage) = 0;
-    virtual audio_io_handle_t getOutput(audio_stream_type_t stream,
-                                        uint32_t samplingRate = 0,
-                                        audio_format_t format = AUDIO_FORMAT_DEFAULT,
-                                        audio_channel_mask_t channelMask = 0,
-                                        audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
-                                        const audio_offload_info_t *offloadInfo = NULL) = 0;
+    virtual audio_io_handle_t getOutput(audio_stream_type_t stream) = 0;
     virtual status_t getOutputForAttr(const audio_attributes_t *attr,
                                       audio_io_handle_t *output,
                                       audio_session_t session,
diff --git a/media/libaudioclient/include/media/IAudioRecord.h b/media/libaudioclient/include/media/IAudioRecord.h
deleted file mode 100644
index 7768176..0000000
--- a/media/libaudioclient/include/media/IAudioRecord.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2007 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef IAUDIORECORD_H_
-#define IAUDIORECORD_H_
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <utils/RefBase.h>
-#include <utils/Errors.h>
-#include <binder/IInterface.h>
-#include <binder/IMemory.h>
-#include <system/audio.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-class IAudioRecord : public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(AudioRecord);
-
-    /* After it's created the track is not active. Call start() to
-     * make it active.
-     */
-    virtual status_t    start(int /*AudioSystem::sync_event_t*/ event,
-                              audio_session_t triggerSession) = 0;
-
-    /* Stop a track. If set, the callback will cease being called and
-     * obtainBuffer will return an error. Buffers that are already released
-     * will be processed, unless flush() is called.
-     */
-    virtual void        stop() = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnAudioRecord : public BnInterface<IAudioRecord>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif /*IAUDIORECORD_H_*/
diff --git a/media/libaudioclient/include/media/IAudioTrack.h b/media/libaudioclient/include/media/IAudioTrack.h
index 27a62d6..94afe3c 100644
--- a/media/libaudioclient/include/media/IAudioTrack.h
+++ b/media/libaudioclient/include/media/IAudioTrack.h
@@ -77,12 +77,12 @@
     virtual void        signal() = 0;
 
     /* Sets the volume shaper */
-    virtual VolumeShaper::Status applyVolumeShaper(
-            const sp<VolumeShaper::Configuration>& configuration,
-            const sp<VolumeShaper::Operation>& operation) = 0;
+    virtual media::VolumeShaper::Status applyVolumeShaper(
+            const sp<media::VolumeShaper::Configuration>& configuration,
+            const sp<media::VolumeShaper::Operation>& operation) = 0;
 
     /* gets the volume shaper state */
-    virtual sp<VolumeShaper::State> getVolumeShaperState(int id) = 0;
+    virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libaudioclient/include/media/PlayerBase.h b/media/libaudioclient/include/media/PlayerBase.h
index e63090b..e7a8abc 100644
--- a/media/libaudioclient/include/media/PlayerBase.h
+++ b/media/libaudioclient/include/media/PlayerBase.h
@@ -17,35 +17,31 @@
 #ifndef __ANDROID_PLAYER_BASE_H__
 #define __ANDROID_PLAYER_BASE_H__
 
-#include <audiomanager/IPlayer.h>
 #include <audiomanager/AudioManager.h>
 #include <audiomanager/IAudioManager.h>
 
+#include "android/media/BnPlayer.h"
 
 namespace android {
 
-class PlayerBase : public BnPlayer
+class PlayerBase : public ::android::media::BnPlayer
 {
 public:
     explicit PlayerBase();
-    virtual ~PlayerBase();
+    virtual ~PlayerBase() override;
 
     virtual void destroy() = 0;
 
     //IPlayer implementation
-    virtual void start();
-    virtual void pause();
-    virtual void stop();
-    virtual void setVolume(float vol);
-    virtual void setPan(float pan);
-    virtual void setStartDelayMs(int32_t delayMs);
-    virtual void applyVolumeShaper(
-            const sp<VolumeShaper::Configuration>& configuration,
-            const sp<VolumeShaper::Operation>& operation) override;
-
-    virtual status_t onTransact(
-                uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
-
+    virtual binder::Status start() override;
+    virtual binder::Status pause() override;
+    virtual binder::Status stop() override;
+    virtual binder::Status setVolume(float vol) override;
+    virtual binder::Status setPan(float pan) override;
+    virtual binder::Status setStartDelayMs(int32_t delayMs) override;
+    virtual binder::Status applyVolumeShaper(
+            const media::VolumeShaper::Configuration& configuration,
+            const media::VolumeShaper::Operation& operation) override;
 
             status_t startWithStatus();
             status_t pauseWithStatus();
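Because PlayerBase now derives from the AIDL-generated ::android::media::BnPlayer, the IPlayer methods return binder::Status rather than void, so overrides report success or failure through the returned status object. A minimal hedged sketch of that shape (not the actual PlayerBase implementation):

    #include <binder/Status.h>

    using android::binder::Status;

    // Sketch of the body of a BnPlayer-style override: map an internal condition
    // to either Status::ok() or a service-specific error.
    static Status startSketch(bool resourcesReady) {
        if (!resourcesReady) {
            return Status::fromServiceSpecificError(1 /* hypothetical error code */);
        }
        // ... kick off playback here ...
        return Status::ok();
    }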
diff --git a/media/libaudioclient/include/media/TrackPlayerBase.h b/media/libaudioclient/include/media/TrackPlayerBase.h
index 2d113c0..66e9b3b 100644
--- a/media/libaudioclient/include/media/TrackPlayerBase.h
+++ b/media/libaudioclient/include/media/TrackPlayerBase.h
@@ -32,9 +32,9 @@
     virtual void destroy();
 
     //IPlayer implementation
-    virtual void applyVolumeShaper(
-            const sp<VolumeShaper::Configuration>& configuration,
-            const sp<VolumeShaper::Operation>& operation);
+    virtual binder::Status applyVolumeShaper(
+            const media::VolumeShaper::Configuration& configuration,
+            const media::VolumeShaper::Operation& operation);
 
     //FIXME move to protected field, so far made public to minimize changes to AudioTrack logic
     sp<AudioTrack> mAudioTrack;
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
new file mode 100644
index 0000000..52bb2fb
--- /dev/null
+++ b/media/libaudioclient/tests/Android.bp
@@ -0,0 +1,35 @@
+cc_defaults {
+    name: "libaudioclient_tests_defaults",
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
+
+cc_test {
+    name: "test_create_audiotrack",
+    defaults: ["libaudioclient_tests_defaults"],
+    srcs: ["test_create_audiotrack.cpp",
+           "test_create_utils.cpp"],
+    shared_libs: [
+        "libaudioclient",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+    data: ["track_test_input_*.txt"],
+}
+
+cc_test {
+    name: "test_create_audiorecord",
+    defaults: ["libaudioclient_tests_defaults"],
+    srcs: ["test_create_audiorecord.cpp",
+           "test_create_utils.cpp"],
+    shared_libs: [
+        "libaudioclient",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+    data: ["record_test_input_*.txt"],
+}
diff --git a/media/libaudioclient/tests/record_test_input_v1.0_ref.txt b/media/libaudioclient/tests/record_test_input_v1.0_ref.txt
new file mode 100644
index 0000000..e01598e
--- /dev/null
+++ b/media/libaudioclient/tests/record_test_input_v1.0_ref.txt
@@ -0,0 +1,33 @@
+version 1.0
+# Input file for test_create_audiorecord
+# Add one line for each tested AudioRecord constructor with the following arguments:
+# sampleRate format   channelMask frameCount notificationFrames flags sessionId inputSource
+# sample rate tests
+  48000      0x1      0x10        4800       2400               0x0   0         0
+  24000      0x1      0x10        4800       2400               0x0   0         0
+  16000      0x1      0x10        4800       2400               0x0   0         0
+   8000      0x1      0x10        4800       2400               0x0   0         0
+  44100      0x1      0x10        4410       2205               0x0   0         0
+  22050      0x1      0x10        4410       2205               0x0   0         0
+  11025      0x1      0x10        4410       2205               0x0   0         0
+# format tests
+  48000      0x2      0x10        4800       2400               0x0   0         0
+  48000      0x3      0x10        4800       2400               0x0   0         0
+  48000      0x5      0x10        4800       2400               0x0   0         0
+# channel mask tests
+  48000      0x1      0x0C        4800       2400               0x0   0         0
+# frame count tests
+  48000      0x1      0x10        0          0                  0x0   0         0
+  48000      0x1      0x10        48000      0                  0x0   0         0
+  48000      0x1      0x10        12000      6000               0x0   0         0
+# flags test
+  48000      0x1      0x0C        0          0                  0x1   0         0
+  44100      0x1      0x0C        0          0                  0x5   0         0
+# session tests
+  48000      0x1      0x10        0          0                  0     1001      0
+# input source tests
+  48000      0x1      0x10        0          0                  0     0         1
+  48000      0x1      0x10        0          0                  0     0         5
+  48000      0x1      0x10        0          0                  0     0         6
+  48000      0x1      0x10        0          0                  0     0         7
+  48000      0x1      0x10        0          0                  0     0         9
diff --git a/media/libaudioclient/tests/record_test_output_v1.0_ref_walleye.txt b/media/libaudioclient/tests/record_test_output_v1.0_ref_walleye.txt
new file mode 100644
index 0000000..76608eb
--- /dev/null
+++ b/media/libaudioclient/tests/record_test_output_v1.0_ref_walleye.txt
@@ -0,0 +1,198 @@
+
+#### Test 1 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(65)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(960), req. notif. frame count(2400)
+  input(150), latency(100), selected device Id(0), routed device Id(11)
+
+#### Test 2 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(73)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(24000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(480), req. notif. frame count(2400)
+  input(158), latency(200), selected device Id(0), routed device Id(11)
+
+#### Test 3 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(81)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(16000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(320), req. notif. frame count(2400)
+  input(166), latency(300), selected device Id(0), routed device Id(11)
+
+#### Test 4 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(89)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(8000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(160), req. notif. frame count(2400)
+  input(174), latency(600), selected device Id(0), routed device Id(11)
+
+#### Test 5 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(97)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(44100)
+  frame count(4410), req. frame count(4410)
+  notif. frame count(896), req. notif. frame count(2205)
+  input(182), latency(100), selected device Id(0), routed device Id(11)
+
+#### Test 6 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(105)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(22050)
+  frame count(4410), req. frame count(4410)
+  notif. frame count(448), req. notif. frame count(2205)
+  input(190), latency(200), selected device Id(0), routed device Id(11)
+
+#### Test 7 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(113)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(11025)
+  frame count(4410), req. frame count(4410)
+  notif. frame count(224), req. notif. frame count(2205)
+  input(198), latency(400), selected device Id(0), routed device Id(11)
+
+#### Test 8 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(121)
+  flags(0), req. flags(0), audio source(0)
+  format(0x2), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(960), req. notif. frame count(2400)
+  input(206), latency(100), selected device Id(0), routed device Id(11)
+
+#### Test 9 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(129)
+  flags(0), req. flags(0), audio source(0)
+  format(0x3), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(960), req. notif. frame count(2400)
+  input(214), latency(100), selected device Id(0), routed device Id(11)
+
+#### Test 10 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(137)
+  flags(0), req. flags(0), audio source(0)
+  format(0x5), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(960), req. notif. frame count(2400)
+  input(222), latency(100), selected device Id(0), routed device Id(11)
+
+#### Test 11 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(145)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0xc), channel count(2), sample rate(48000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(960), req. notif. frame count(2400)
+  input(230), latency(100), selected device Id(0), routed device Id(11)
+
+#### Test 12 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(153)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(2880), req. frame count(2880)
+  notif. frame count(960), req. notif. frame count(0)
+  input(238), latency(60), selected device Id(0), routed device Id(11)
+
+#### Test 13 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(161)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(48000), req. frame count(48000)
+  notif. frame count(960), req. notif. frame count(0)
+  input(246), latency(1000), selected device Id(0), routed device Id(11)
+
+#### Test 14 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(169)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(12000), req. frame count(12000)
+  notif. frame count(960), req. notif. frame count(6000)
+  input(254), latency(250), selected device Id(0), routed device Id(11)
+
+#### Test 15 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(177)
+  flags(0x1), req. flags(0x1), audio source(0)
+  format(0x1), channel mask(0xc), channel count(2), sample rate(48000)
+  frame count(4096), req. frame count(4096)
+  notif. frame count(96), req. notif. frame count(0)
+  input(262), latency(85), selected device Id(0), routed device Id(11)
+
+#### Test 16 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(185)
+  flags(0), req. flags(0x5), audio source(0)
+  format(0x1), channel mask(0xc), channel count(2), sample rate(44100)
+  frame count(2664), req. frame count(2664)
+  notif. frame count(888), req. notif. frame count(0)
+  input(278), latency(60), selected device Id(0), routed device Id(11)
+
+#### Test 17 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(1001)
+  flags(0), req. flags(0), audio source(0)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(2880), req. frame count(2880)
+  notif. frame count(960), req. notif. frame count(0)
+  input(286), latency(60), selected device Id(0), routed device Id(11)
+
+#### Test 18 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(193)
+  flags(0), req. flags(0), audio source(1)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(2880), req. frame count(2880)
+  notif. frame count(960), req. notif. frame count(0)
+  input(294), latency(60), selected device Id(0), routed device Id(11)
+
+#### Test 19 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(201)
+  flags(0), req. flags(0), audio source(5)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(2880), req. frame count(2880)
+  notif. frame count(960), req. notif. frame count(0)
+  input(302), latency(60), selected device Id(0), routed device Id(12)
+
+#### Test 20 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(209)
+  flags(0), req. flags(0), audio source(6)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(2880), req. frame count(2880)
+  notif. frame count(960), req. notif. frame count(0)
+  input(310), latency(60), selected device Id(0), routed device Id(11)
+
+#### Test 21 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(217)
+  flags(0), req. flags(0), audio source(7)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(2880), req. frame count(2880)
+  notif. frame count(960), req. notif. frame count(0)
+  input(318), latency(60), selected device Id(0), routed device Id(11)
+
+#### Test 22 status 0
+ AudioRecord::dump
+  status(0), active(0), session Id(225)
+  flags(0), req. flags(0), audio source(9)
+  format(0x1), channel mask(0x10), channel count(1), sample rate(48000)
+  frame count(2880), req. frame count(2880)
+  notif. frame count(960), req. notif. frame count(0)
+  input(326), latency(60), selected device Id(0), routed device Id(11)
diff --git a/media/libaudioclient/tests/test_create_audiorecord.cpp b/media/libaudioclient/tests/test_create_audiorecord.cpp
new file mode 100644
index 0000000..cf6a734
--- /dev/null
+++ b/media/libaudioclient/tests/test_create_audiorecord.cpp
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryDealer.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/AudioRecord.h>
+
+#include "test_create_utils.h"
+
+#define NUM_ARGUMENTS 8
+#define VERSION_VALUE "1.0"
+#define PACKAGE_NAME  "AudioRecord test"
+
+namespace android {
+
+int testRecord(FILE *inputFile, int outputFileFd)
+{
+    char line[MAX_INPUT_FILE_LINE_LENGTH];
+    uint32_t testCount = 0;
+    Vector<String16> args;
+    int ret = 0;
+
+    if (inputFile == nullptr) {
+        sp<AudioRecord> record = new AudioRecord(AUDIO_SOURCE_DEFAULT,
+                                              0 /* sampleRate */,
+                                              AUDIO_FORMAT_DEFAULT,
+                                              AUDIO_CHANNEL_IN_MONO,
+                                              String16(PACKAGE_NAME));
+        if (record == 0 || record->initCheck() != NO_ERROR) {
+            write(outputFileFd, "Error creating AudioRecord\n",
+                  sizeof("Error creating AudioRecord\n"));
+        } else {
+            record->dump(outputFileFd, args);
+        }
+        return 0;
+    }
+
+    // check version
+    if (!checkVersion(inputFile, VERSION_VALUE)) {
+        return 1;
+    }
+
+    while (readLine(inputFile, line, MAX_INPUT_FILE_LINE_LENGTH) == 0) {
+        uint32_t sampleRate;
+        audio_format_t format;
+        audio_channel_mask_t channelMask;
+        size_t frameCount;
+        int32_t notificationFrames;
+        audio_input_flags_t flags;
+        audio_session_t sessionId;
+        audio_source_t inputSource;
+        audio_attributes_t attributes;
+        status_t status;
+        char statusStr[MAX_OUTPUT_FILE_LINE_LENGTH];
+        bool fast = false;
+
+        if (sscanf(line, " %u %x %x %zu %d %x %u %u",
+                   &sampleRate, &format, &channelMask,
+                   &frameCount, &notificationFrames,
+                   &flags, &sessionId, &inputSource) != NUM_ARGUMENTS) {
+            fprintf(stderr, "Malformed line for test #%u in input file\n", testCount+1);
+            ret = 1;
+            continue;
+        }
+        testCount++;
+
+        if ((flags & AUDIO_INPUT_FLAG_FAST) != 0) {
+            fast = true;
+        }
+
+        memset(&attributes, 0, sizeof(attributes));
+        attributes.source = inputSource;
+
+        sp<AudioRecord> record = new AudioRecord(String16(PACKAGE_NAME));
+
+        record->set(AUDIO_SOURCE_DEFAULT,
+                   sampleRate,
+                   format,
+                   channelMask,
+                   frameCount,
+                   fast ? callback : nullptr,
+                   nullptr,
+                   notificationFrames,
+                   false,
+                   sessionId,
+                   fast ? AudioRecord::TRANSFER_CALLBACK : AudioRecord::TRANSFER_DEFAULT,
+                   flags,
+                   getuid(),
+                   getpid(),
+                   &attributes,
+                   AUDIO_PORT_HANDLE_NONE);
+        status = record->initCheck();
+        sprintf(statusStr, "\n#### Test %u status %d\n", testCount, status);
+        write(outputFileFd, statusStr, strlen(statusStr));
+        if (status != NO_ERROR) {
+            continue;
+        }
+        record->dump(outputFileFd, args);
+    }
+    return ret;
+}
+
+}; // namespace android
+
+
+int main(int argc, char **argv)
+{
+    return android::main(argc, argv, android::testRecord);
+}
+
diff --git a/media/libaudioclient/tests/test_create_audiotrack.cpp b/media/libaudioclient/tests/test_create_audiotrack.cpp
new file mode 100644
index 0000000..cf9b925
--- /dev/null
+++ b/media/libaudioclient/tests/test_create_audiotrack.cpp
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryDealer.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/AudioTrack.h>
+
+#include "test_create_utils.h"
+
+#define NUM_ARGUMENTS 10
+#define VERSION_VALUE "1.0"
+
+namespace android {
+
+int testTrack(FILE *inputFile, int outputFileFd)
+{
+    char line[MAX_INPUT_FILE_LINE_LENGTH];
+    uint32_t testCount = 0;
+    Vector<String16> args;
+    int ret = 0;
+
+    if (inputFile == nullptr) {
+        sp<AudioTrack> track = new AudioTrack(AUDIO_STREAM_DEFAULT,
+                                              0 /* sampleRate */,
+                                              AUDIO_FORMAT_DEFAULT,
+                                              AUDIO_CHANNEL_OUT_STEREO);
+        if (track == 0 || track->initCheck() != NO_ERROR) {
+            write(outputFileFd, "Error creating AudioTrack\n",
+                  sizeof("Error creating AudioTrack\n"));
+        } else {
+            track->dump(outputFileFd, args);
+        }
+        return 0;
+    }
+
+    // check version
+    if (!checkVersion(inputFile, VERSION_VALUE)) {
+        return 1;
+    }
+
+    while (readLine(inputFile, line, MAX_INPUT_FILE_LINE_LENGTH) == 0) {
+        uint32_t sampleRate;
+        audio_format_t format;
+        audio_channel_mask_t channelMask;
+        size_t frameCount;
+        int32_t notificationFrames;
+        uint32_t useSharedBuffer;
+        audio_output_flags_t flags;
+        audio_session_t sessionId;
+        audio_usage_t usage;
+        audio_content_type_t contentType;
+        audio_attributes_t attributes;
+        sp<IMemory> sharedBuffer;
+        sp<MemoryDealer> heap;
+        audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
+        status_t status;
+        char statusStr[MAX_OUTPUT_FILE_LINE_LENGTH];
+        bool offload = false;
+        bool fast = false;
+
+        if (sscanf(line, " %u %x %x %zu %d %u %x %u %u %u",
+                   &sampleRate, &format, &channelMask,
+                   &frameCount, &notificationFrames, &useSharedBuffer,
+                   &flags, &sessionId, &usage, &contentType) != NUM_ARGUMENTS) {
+            fprintf(stderr, "Malformed line for test #%u in input file\n", testCount+1);
+            ret = 1;
+            continue;
+        }
+        testCount++;
+
+        if (useSharedBuffer != 0) {
+            size_t heapSize = audio_channel_count_from_out_mask(channelMask) *
+                    audio_bytes_per_sample(format) * frameCount;
+            heap = new MemoryDealer(heapSize, "AudioTrack Heap Base");
+            sharedBuffer = heap->allocate(heapSize);
+            frameCount = 0;
+            notificationFrames = 0;
+        }
+        if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+            offloadInfo.sample_rate = sampleRate;
+            offloadInfo.channel_mask = channelMask;
+            offloadInfo.format = format;
+            offload = true;
+        }
+        if ((flags & AUDIO_OUTPUT_FLAG_FAST) != 0) {
+            fast = true;
+        }
+
+        memset(&attributes, 0, sizeof(attributes));
+        attributes.content_type = contentType;
+        attributes.usage = usage;
+
+        sp<AudioTrack> track = new AudioTrack();
+
+        track->set(AUDIO_STREAM_DEFAULT,
+                   sampleRate,
+                   format,
+                   channelMask,
+                   frameCount,
+                   flags,
+                   (fast || offload) ? callback : nullptr,
+                   nullptr,
+                   notificationFrames,
+                   sharedBuffer,
+                   false,
+                   sessionId,
+                   ((fast && sharedBuffer == 0) || offload) ?
+                           AudioTrack::TRANSFER_CALLBACK : AudioTrack::TRANSFER_DEFAULT,
+                   offload ? &offloadInfo : nullptr,
+                   getuid(),
+                   getpid(),
+                   &attributes,
+                   false,
+                   1.0f,
+                   AUDIO_PORT_HANDLE_NONE);
+        status = track->initCheck();
+        sprintf(statusStr, "\n#### Test %u status %d\n", testCount, status);
+        write(outputFileFd, statusStr, strlen(statusStr));
+        if (status != NO_ERROR) {
+            continue;
+        }
+        track->dump(outputFileFd, args);
+    }
+    return ret;
+}
+
+}; // namespace android
+
+
+int main(int argc, char **argv)
+{
+    return android::main(argc, argv, android::testTrack);
+}
+
diff --git a/media/libaudioclient/tests/test_create_utils.cpp b/media/libaudioclient/tests/test_create_utils.cpp
new file mode 100644
index 0000000..8aa1f13
--- /dev/null
+++ b/media/libaudioclient/tests/test_create_utils.cpp
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+
+#include "test_create_utils.h"
+
+namespace android {
+
+int readLine(FILE *inputFile, char *line, int size) {
+    int ret = 0;
+    while (true) {
+        char *str = fgets(line, size, inputFile);
+        if (str == nullptr) {
+            ret = -1;
+            break;
+        }
+        if (feof(inputFile) != 0 || ferror(inputFile) != 0) {
+            ret = -1;
+            break;
+        }
+        if (strlen(str) != 0 && str[0] != COMMENT_CHAR) {
+            break;
+        }
+    }
+    return ret;
+}
+
+bool checkVersion(FILE *inputFile, const char *version)
+{
+    char line[MAX_INPUT_FILE_LINE_LENGTH];
+    char versionKey[MAX_INPUT_FILE_LINE_LENGTH];
+    char versionValue[MAX_INPUT_FILE_LINE_LENGTH];
+
+    if (readLine(inputFile, line, MAX_INPUT_FILE_LINE_LENGTH) != 0) {
+        fprintf(stderr, "Missing version in input file\n");
+        return false;
+    }
+
+    if (sscanf(line, " %s %s", versionKey, versionValue) != 2) {
+        fprintf(stderr, "Malformed version in input file\n");
+        return false;
+    }
+    if (strcmp(versionKey, VERSION_KEY) != 0) {
+        fprintf(stderr, "Malformed version in input file\n");
+        return false;
+    }
+    if (strcmp(versionValue, version) != 0) {
+        fprintf(stderr, "Wrong input file version %s expecting %s\n", versionValue, version);
+        return false;
+    }
+    return true;
+}
+
+void callback(int event __unused, void* user __unused, void *info __unused)
+{
+}
+
+int main(int argc, char **argv, test_func_t testFunc)
+{
+    FILE *inputFile = nullptr;
+    int outputFileFd = STDOUT_FILENO;
+    mode_t mode = S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH;
+    int ret = 0;
+
+    if (argc > 5) {
+        fprintf(stderr, "Usage: %s [-i input_params.txt] [-o output_params.txt]\n", argv[0]);
+        return 1;
+    }
+
+    argv++;
+    while (*argv) {
+        if (strcmp(*argv, "-i") == 0) {
+            argv++;
+            if (*argv) {
+                inputFile = fopen(*argv, "r");
+                if (inputFile == nullptr) {
+                    ret = 1;
+                }
+            } else {
+                ret = 1;
+            }
+        }
+        if (strcmp(*argv, "-o") == 0) {
+            argv++;
+            if (*argv) {
+                outputFileFd = open(*argv, O_WRONLY|O_CREAT, mode);
+                if (outputFileFd < 0) {
+                    ret = 1;
+                }
+            } else {
+                ret = 1;
+            }
+            argv++;
+        }
+        if (*argv) {
+            argv++;
+        }
+    }
+
+    if (ret != 0) {
+        return ret;
+    }
+
+    ret = testFunc(inputFile, outputFileFd);
+
+    if (inputFile) {
+        fclose(inputFile);
+    }
+    if (outputFileFd >= 0 && outputFileFd != STDOUT_FILENO) {
+        close(outputFileFd);
+    }
+
+    return ret;
+}
+
+}; // namespace android
+
diff --git a/media/libaudioclient/tests/test_create_utils.h b/media/libaudioclient/tests/test_create_utils.h
new file mode 100644
index 0000000..2ad646e
--- /dev/null
+++ b/media/libaudioclient/tests/test_create_utils.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+
+#define MAX_INPUT_FILE_LINE_LENGTH 512
+#define MAX_OUTPUT_FILE_LINE_LENGTH 512
+
+#define COMMENT_CHAR '#'
+#define VERSION_KEY "version"
+
+namespace android {
+
+int readLine(FILE *inputFile, char *line, int size);
+
+bool checkVersion(FILE *inputFile, const char *version);
+
+void callback(int event, void* user, void *info);
+
+typedef int (*test_func_t)(FILE *inputFile, int outputFileFd);
+
+int main(int argc, char **argv, test_func_t testFunc);
+
+}; // namespace android
diff --git a/media/libaudioclient/tests/track_test_input_v1.0_ref.txt b/media/libaudioclient/tests/track_test_input_v1.0_ref.txt
new file mode 100644
index 0000000..b923ff3
--- /dev/null
+++ b/media/libaudioclient/tests/track_test_input_v1.0_ref.txt
@@ -0,0 +1,40 @@
+version 1.0
+# Input file for test_create_audiotrack
+# Add one line for each tested AudioTrack constructor with the following arguments:
+# sampleRate format 	channelMask frameCount notificationFrames sharedBuffer flags sessionId usage contentType
+# sample rate tests
+  48000      0x1    	0x3         4800       2400               0            0x0   0         1     2
+  24000      0x1    	0x3         4800       2400               0            0x0   0         1     2
+  16000      0x1    	0x3         4800       2400               0            0x0   0         1     2
+   8000      0x1    	0x3         4800       2400               0            0x0   0         1     2
+  44100      0x1    	0x3         4410       2205               0            0x0   0         1     2
+  22050      0x1    	0x3         4410       2205               0            0x0   0         1     2
+  11025      0x1    	0x3         4410       2205               0            0x0   0         1     2
+# format tests
+  48000      0x2    	0x3         4800       2400               0            0x0   0         1     2
+  48000      0x3    	0x3         4800       2400               0            0x0   0         1     2
+  48000      0x5    	0x3         4800       2400               0            0x0   0         1     2
+# channel mask tests
+  48000      0x1    	0x1         4800       2400               0            0x0   0         1     2
+  48000      0x1    	0x3F        4800       2400               0            0x0   0         1     2
+  48000      0x1    	0x63F       4800       2400               0            0x0   0         1     2
+# framecount tests
+  48000      0x1    	0x3         0          0                  0            0x0   0         1     2
+  48000      0x1    	0x3         48000      0                  0            0x0   0         1     2
+  48000      0x1    	0x3         0          -2                 0            0x4   0         1     2
+# shared memory tests
+  48000      0x1    	0x3         4800       2400               1            0x0   0         1     2
+  48000      0x1    	0x3         4800       2400               1            0x4   0         1     2
+# flags test
+  48000      0x1    	0x3         4800       2400               0            0x4   0         1     2
+  48000      0x1    	0x3         4800       2400               0            0x8   0         1     2
+  44100      0x1000000  0x3         4800       2400               0            0x11  0         1     2
+# session tests
+  48000      0x1    	0x3         4800       2400               0            0x0   1001      1     2
+# attributes tests
+  48000      0x1    	0x3         4800       2400               0            0x0   0         0     0
+  48000      0x1    	0x3         4800       2400               0            0x0   0         2     1
+  48000      0x1    	0x3         4800       2400               0            0x0   0         4     2
+  48000      0x1    	0x3         4800       2400               0            0x0   0         5     2
+  48000      0x1    	0x3         4800       2400               0            0x0   0         11    1
+  48000      0x1    	0x3         4800       2400               0            0x0   0         12    1
diff --git a/media/libaudioclient/tests/track_test_output_v1.0_ref_walleye.txt b/media/libaudioclient/tests/track_test_output_v1.0_ref_walleye.txt
new file mode 100644
index 0000000..5fe433c
--- /dev/null
+++ b/media/libaudioclient/tests/track_test_output_v1.0_ref_walleye.txt
@@ -0,0 +1,308 @@
+
+#### Test 1 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(49), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 2 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(57), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(24000), original sample rate(24000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(1600), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (250), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 3 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(65), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(16000), original sample rate(16000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(1600), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (350), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 4 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(73), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(8000), original sample rate(8000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(1600), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (650), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 5 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(81), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(44100), original sample rate(44100), speed(1.000000)
+  frame count(4410), req. frame count(4410)
+  notif. frame count(1470), req. notif. frame count(2205), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 6 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(89), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(22050), original sample rate(22050), speed(1.000000)
+  frame count(4410), req. frame count(4410)
+  notif. frame count(1470), req. notif. frame count(2205), req. notif. per buff(0)
+  latency (250), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 7 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(97), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(11025), original sample rate(11025), speed(1.000000)
+  frame count(4410), req. frame count(4410)
+  notif. frame count(1470), req. notif. frame count(2205), req. notif. per buff(0)
+  latency (450), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 8 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(105), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(2), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 9 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(113), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(3), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (180), selected device Id(0), routed device Id(2)
+  output(29) AF latency (80) AF frame count(1920) AF SampleRate(48000)
+
+#### Test 10 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(121), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(5), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (180), selected device Id(0), routed device Id(2)
+  output(29) AF latency (80) AF frame count(1920) AF SampleRate(48000)
+
+#### Test 11 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(129), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(1), channel count(1)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 12 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(137), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3f), channel count(6)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 13 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(145), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(63f), channel count(8)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 14 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(153), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(1924), req. frame count(1924)
+  notif. frame count(962), req. notif. frame count(0), req. notif. per buff(0)
+  latency (90), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 15 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(161), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(48000), req. frame count(48000)
+  notif. frame count(24000), req. notif. frame count(0), req. notif. per buff(0)
+  latency (1050), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 16 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(169), flags(4)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(480), req. frame count(480)
+  notif. frame count(240), req. notif. frame count(0), req. notif. per buff(2)
+  latency (60), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 17 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(177), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(0), req. notif. frame count(0), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 18 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(185), flags(4)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(0), req. notif. frame count(0), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 19 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(193), flags(4)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(240), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 20 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(201), flags(8)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (180), selected device Id(0), routed device Id(2)
+  output(29) AF latency (80) AF frame count(1920) AF SampleRate(48000)
+
+#### Test 21 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(209), flags(11)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1000000), channel mask(3), channel count(2)
+  sample rate(44100), original sample rate(44100), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(4800), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (204), selected device Id(0), routed device Id(2)
+  output(53) AF latency (96) AF frame count(262144) AF SampleRate(44100)
+
+#### Test 22 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(1001), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 23 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(217), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 24 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(225), flags(0)
+  stream type(0), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (140), selected device Id(0), routed device Id(1)
+  output(45) AF latency (40) AF frame count(960) AF SampleRate(48000)
+
+#### Test 25 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(233), flags(0)
+  stream type(4), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(3)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 26 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(241), flags(0)
+  stream type(5), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(3)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 27 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(249), flags(0)
+  stream type(10), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 28 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(257), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
new file mode 100644
index 0000000..700de8e
--- /dev/null
+++ b/media/libaudiohal/Android.bp
@@ -0,0 +1,47 @@
+cc_library_shared {
+    name: "libaudiohal",
+
+    srcs: [
+        "DeviceHalLocal.cpp",
+        "DevicesFactoryHalHybrid.cpp",
+        "DevicesFactoryHalLocal.cpp",
+        "StreamHalLocal.cpp",
+
+        "ConversionHelperHidl.cpp",
+        "HalDeathHandlerHidl.cpp",
+        "DeviceHalHidl.cpp",
+        "DevicesFactoryHalHidl.cpp",
+        "EffectBufferHalHidl.cpp",
+        "EffectHalHidl.cpp",
+        "EffectsFactoryHalHidl.cpp",
+        "StreamHalHidl.cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    export_include_dirs: ["include"],
+
+    shared_libs: [
+        "libaudioutils",
+        "libcutils",
+        "liblog",
+        "libutils",
+        "libhardware",
+        "libbase",
+        "libfmq",
+        "libhwbinder",
+        "libhidlbase",
+        "libhidlmemory",
+        "libhidltransport",
+        "android.hardware.audio@2.0",
+        "android.hardware.audio.common@2.0",
+        "android.hardware.audio.common@2.0-util",
+        "android.hardware.audio.effect@2.0",
+        "android.hidl.allocator@1.0",
+        "android.hidl.memory@1.0",
+        "libmedia_helper",
+        "libmediautils",
+    ],
+}
diff --git a/media/libaudiohal/Android.mk b/media/libaudiohal/Android.mk
deleted file mode 100644
index 827908e..0000000
--- a/media/libaudiohal/Android.mk
+++ /dev/null
@@ -1,71 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SHARED_LIBRARIES := \
-    libaudioutils \
-    libcutils   \
-    liblog      \
-    libutils    \
-    libhardware
-
-LOCAL_SRC_FILES := \
-    DeviceHalLocal.cpp          \
-    DevicesFactoryHalHybrid.cpp \
-    DevicesFactoryHalLocal.cpp  \
-    StreamHalLocal.cpp
-
-LOCAL_CFLAGS := -Wall -Werror
-
-ifeq ($(USE_LEGACY_LOCAL_AUDIO_HAL), true)
-
-# Use audiohal directly w/o hwbinder middleware.
-# This is for performance comparison and debugging only.
-
-LOCAL_SRC_FILES += \
-    EffectBufferHalLocal.cpp    \
-    EffectsFactoryHalLocal.cpp  \
-    EffectHalLocal.cpp
-
-LOCAL_SHARED_LIBRARIES += \
-    libeffects
-
-LOCAL_CFLAGS += -DUSE_LEGACY_LOCAL_AUDIO_HAL
-
-else  # if !USE_LEGACY_LOCAL_AUDIO_HAL
-
-LOCAL_SRC_FILES += \
-    ConversionHelperHidl.cpp   \
-    HalDeathHandlerHidl.cpp    \
-    DeviceHalHidl.cpp          \
-    DevicesFactoryHalHidl.cpp  \
-    EffectBufferHalHidl.cpp    \
-    EffectHalHidl.cpp          \
-    EffectsFactoryHalHidl.cpp  \
-    StreamHalHidl.cpp
-
-LOCAL_SHARED_LIBRARIES += \
-    libbase          \
-    libfmq           \
-    libhwbinder      \
-    libhidlbase      \
-    libhidlmemory    \
-    libhidltransport \
-    android.hardware.audio@2.0             \
-    android.hardware.audio.common@2.0      \
-    android.hardware.audio.common@2.0-util \
-    android.hardware.audio.effect@2.0      \
-    android.hidl.allocator@1.0             \
-    android.hidl.memory@1.0                \
-    libmedia_helper  \
-    libmediautils
-
-endif  # USE_LEGACY_LOCAL_AUDIO_HAL
-
-LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
-
-LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include
-
-LOCAL_MODULE := libaudiohal
-
-include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libaudiohal/DevicesFactoryHalHidl.cpp b/media/libaudiohal/DevicesFactoryHalHidl.cpp
index 31da263..5b33592 100644
--- a/media/libaudiohal/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/DevicesFactoryHalHidl.cpp
@@ -43,6 +43,9 @@
         ALOGE("Failed to obtain IDevicesFactory service, terminating process.");
         exit(1);
     }
+    // The MSD factory is optional
+    mDevicesFactoryMsd = IDevicesFactory::getService(AUDIO_HAL_SERVICE_NAME_MSD);
+    // TODO: Register death handler, and add 'restart' directive to audioserver.rc
 }
 
 DevicesFactoryHalHidl::~DevicesFactoryHalHidl() {
diff --git a/media/libaudiohal/DevicesFactoryHalHidl.h b/media/libaudiohal/DevicesFactoryHalHidl.h
index e2f1ad1..0748849 100644
--- a/media/libaudiohal/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/DevicesFactoryHalHidl.h
@@ -39,6 +39,7 @@
     friend class DevicesFactoryHalHybrid;
 
     sp<IDevicesFactory> mDevicesFactory;
+    sp<IDevicesFactory> mDevicesFactoryMsd;
 
     static status_t nameFromHal(const char *name, IDevicesFactory::Device *device);
 
diff --git a/media/libaudiohal/DevicesFactoryHalHybrid.cpp b/media/libaudiohal/DevicesFactoryHalHybrid.cpp
index 454b03b..8dc1434 100644
--- a/media/libaudiohal/DevicesFactoryHalHybrid.cpp
+++ b/media/libaudiohal/DevicesFactoryHalHybrid.cpp
@@ -19,9 +19,7 @@
 
 #include "DevicesFactoryHalHybrid.h"
 #include "DevicesFactoryHalLocal.h"
-#ifndef USE_LEGACY_LOCAL_AUDIO_HAL
 #include "DevicesFactoryHalHidl.h"
-#endif
 
 namespace android {
 
@@ -32,13 +30,7 @@
 
 DevicesFactoryHalHybrid::DevicesFactoryHalHybrid()
         : mLocalFactory(new DevicesFactoryHalLocal()),
-          mHidlFactory(
-#ifdef USE_LEGACY_LOCAL_AUDIO_HAL
-                  nullptr
-#else
-                  new DevicesFactoryHalHidl()
-#endif
-                       ) {
+          mHidlFactory(new DevicesFactoryHalHidl()) {
 }
 
 DevicesFactoryHalHybrid::~DevicesFactoryHalHybrid() {
diff --git a/media/libaudiohal/EffectBufferHalHidl.h b/media/libaudiohal/EffectBufferHalHidl.h
index 66a81c2..d7a43ae 100644
--- a/media/libaudiohal/EffectBufferHalHidl.h
+++ b/media/libaudiohal/EffectBufferHalHidl.h
@@ -35,6 +35,8 @@
     virtual audio_buffer_t* audioBuffer();
     virtual void* externalData() const;
 
+    virtual size_t getSize() const override { return mBufferSize; }
+
     virtual void setExternalData(void* external);
     virtual void setFrameCount(size_t frameCount);
     virtual bool checkFrameCountChange();
diff --git a/media/libaudiohal/EffectBufferHalLocal.cpp b/media/libaudiohal/EffectBufferHalLocal.cpp
deleted file mode 100644
index 7951c8e..0000000
--- a/media/libaudiohal/EffectBufferHalLocal.cpp
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "EffectBufferHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-
-#include "EffectBufferHalLocal.h"
-
-namespace android {
-
-// static
-status_t EffectBufferHalInterface::allocate(
-        size_t size, sp<EffectBufferHalInterface>* buffer) {
-    *buffer = new EffectBufferHalLocal(size);
-    return OK;
-}
-
-// static
-status_t EffectBufferHalInterface::mirror(
-        void* external, size_t size, sp<EffectBufferHalInterface>* buffer) {
-    *buffer = new EffectBufferHalLocal(external, size);
-    return OK;
-}
-
-EffectBufferHalLocal::EffectBufferHalLocal(size_t size)
-        : mOwnBuffer(new uint8_t[size]),
-          mBufferSize(size), mFrameCountChanged(false),
-          mAudioBuffer{0, {mOwnBuffer.get()}} {
-}
-
-EffectBufferHalLocal::EffectBufferHalLocal(void* external, size_t size)
-        : mOwnBuffer(nullptr),
-          mBufferSize(size), mFrameCountChanged(false),
-          mAudioBuffer{0, {external}} {
-}
-
-EffectBufferHalLocal::~EffectBufferHalLocal() {
-}
-
-audio_buffer_t* EffectBufferHalLocal::audioBuffer() {
-    return &mAudioBuffer;
-}
-
-void* EffectBufferHalLocal::externalData() const {
-    return mAudioBuffer.raw;
-}
-
-void EffectBufferHalLocal::setFrameCount(size_t frameCount) {
-    mAudioBuffer.frameCount = frameCount;
-    mFrameCountChanged = true;
-}
-
-void EffectBufferHalLocal::setExternalData(void* external) {
-    ALOGE_IF(mOwnBuffer != nullptr, "Attempt to set external data for allocated buffer");
-    mAudioBuffer.raw = external;
-}
-
-bool EffectBufferHalLocal::checkFrameCountChange() {
-    bool result = mFrameCountChanged;
-    mFrameCountChanged = false;
-    return result;
-}
-
-void EffectBufferHalLocal::update() {
-}
-
-void EffectBufferHalLocal::commit() {
-}
-
-void EffectBufferHalLocal::update(size_t) {
-}
-
-void EffectBufferHalLocal::commit(size_t) {
-}
-
-} // namespace android
diff --git a/media/libaudiohal/EffectBufferHalLocal.h b/media/libaudiohal/EffectBufferHalLocal.h
deleted file mode 100644
index d2b624b..0000000
--- a/media/libaudiohal/EffectBufferHalLocal.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_EFFECT_BUFFER_HAL_LOCAL_H
-#define ANDROID_HARDWARE_EFFECT_BUFFER_HAL_LOCAL_H
-
-#include <memory>
-
-#include <media/audiohal/EffectBufferHalInterface.h>
-#include <system/audio_effect.h>
-
-namespace android {
-
-class EffectBufferHalLocal : public EffectBufferHalInterface
-{
-  public:
-    virtual audio_buffer_t* audioBuffer();
-    virtual void* externalData() const;
-
-    virtual void setExternalData(void* external);
-    virtual void setFrameCount(size_t frameCount);
-    virtual bool checkFrameCountChange();
-
-    virtual void update();
-    virtual void commit();
-    virtual void update(size_t size);
-    virtual void commit(size_t size);
-
-  private:
-    friend class EffectBufferHalInterface;
-
-    std::unique_ptr<uint8_t[]> mOwnBuffer;
-    const size_t mBufferSize;
-    bool mFrameCountChanged;
-    audio_buffer_t mAudioBuffer;
-
-    // Can not be constructed directly by clients.
-    explicit EffectBufferHalLocal(size_t size);
-    EffectBufferHalLocal(void* external, size_t size);
-
-    virtual ~EffectBufferHalLocal();
-
-    status_t init();
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_EFFECT_BUFFER_HAL_LOCAL_H
diff --git a/media/libaudiohal/EffectHalHidl.cpp b/media/libaudiohal/EffectHalHidl.cpp
index 61fb6bab..f4d1958 100644
--- a/media/libaudiohal/EffectHalHidl.cpp
+++ b/media/libaudiohal/EffectHalHidl.cpp
@@ -121,16 +121,24 @@
 }
 
 status_t EffectHalHidl::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
-    if (mInBuffer == 0 || buffer->audioBuffer() != mInBuffer->audioBuffer()) {
-        mBuffersChanged = true;
+    if (!mBuffersChanged) {
+        if (buffer.get() == nullptr || mInBuffer.get() == nullptr) {
+            mBuffersChanged = buffer.get() != mInBuffer.get();
+        } else {
+            mBuffersChanged = buffer->audioBuffer() != mInBuffer->audioBuffer();
+        }
     }
     mInBuffer = buffer;
     return OK;
 }
 
 status_t EffectHalHidl::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
-    if (mOutBuffer == 0 || buffer->audioBuffer() != mOutBuffer->audioBuffer()) {
-        mBuffersChanged = true;
+    if (!mBuffersChanged) {
+        if (buffer.get() == nullptr || mOutBuffer.get() == nullptr) {
+            mBuffersChanged = buffer.get() != mOutBuffer.get();
+        } else {
+            mBuffersChanged = buffer->audioBuffer() != mOutBuffer->audioBuffer();
+        }
     }
     mOutBuffer = buffer;
     return OK;
diff --git a/media/libaudiohal/EffectHalLocal.cpp b/media/libaudiohal/EffectHalLocal.cpp
deleted file mode 100644
index dd465c3..0000000
--- a/media/libaudiohal/EffectHalLocal.cpp
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "EffectHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <media/EffectsFactoryApi.h>
-#include <utils/Log.h>
-
-#include "EffectHalLocal.h"
-
-namespace android {
-
-EffectHalLocal::EffectHalLocal(effect_handle_t handle)
-        : mHandle(handle) {
-}
-
-EffectHalLocal::~EffectHalLocal() {
-    int status = EffectRelease(mHandle);
-    ALOGW_IF(status, "Error releasing effect %p: %s", mHandle, strerror(-status));
-    mHandle = 0;
-}
-
-status_t EffectHalLocal::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
-    mInBuffer = buffer;
-    return OK;
-}
-
-status_t EffectHalLocal::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
-    mOutBuffer = buffer;
-    return OK;
-}
-
-status_t EffectHalLocal::process() {
-    if (mInBuffer == nullptr || mOutBuffer == nullptr) {
-        ALOGE_IF(mInBuffer == nullptr, "Input buffer not set");
-        ALOGE_IF(mOutBuffer == nullptr, "Output buffer not set");
-        return NO_INIT;
-    }
-    return (*mHandle)->process(mHandle, mInBuffer->audioBuffer(), mOutBuffer->audioBuffer());
-}
-
-status_t EffectHalLocal::processReverse() {
-    if ((*mHandle)->process_reverse != NULL) {
-        if (mInBuffer == nullptr || mOutBuffer == nullptr) {
-            ALOGE_IF(mInBuffer == nullptr, "Input buffer not set");
-            ALOGE_IF(mOutBuffer == nullptr, "Output buffer not set");
-            return NO_INIT;
-        }
-        return (*mHandle)->process_reverse(
-                mHandle, mInBuffer->audioBuffer(), mOutBuffer->audioBuffer());
-    } else {
-        return INVALID_OPERATION;
-    }
-}
-
-status_t EffectHalLocal::command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
-        uint32_t *replySize, void *pReplyData) {
-    return (*mHandle)->command(mHandle, cmdCode, cmdSize, pCmdData, replySize, pReplyData);
-}
-
-status_t EffectHalLocal::getDescriptor(effect_descriptor_t *pDescriptor) {
-    return (*mHandle)->get_descriptor(mHandle, pDescriptor);
-}
-
-status_t EffectHalLocal::close() {
-    return OK;
-}
-
-} // namespace android
diff --git a/media/libaudiohal/EffectHalLocal.h b/media/libaudiohal/EffectHalLocal.h
deleted file mode 100644
index 693fb50..0000000
--- a/media/libaudiohal/EffectHalLocal.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_EFFECT_HAL_LOCAL_H
-#define ANDROID_HARDWARE_EFFECT_HAL_LOCAL_H
-
-#include <hardware/audio_effect.h>
-#include <media/audiohal/EffectHalInterface.h>
-
-namespace android {
-
-class EffectHalLocal : public EffectHalInterface
-{
-  public:
-    // Set the input buffer.
-    virtual status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer);
-
-    // Set the output buffer.
-    virtual status_t setOutBuffer(const sp<EffectBufferHalInterface>& buffer);
-
-    // Effect process function.
-    virtual status_t process();
-
-    // Process reverse stream function. This function is used to pass
-    // a reference stream to the effect engine.
-    virtual status_t processReverse();
-
-    // Send a command and receive a response to/from effect engine.
-    virtual status_t command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
-            uint32_t *replySize, void *pReplyData);
-
-    // Returns the effect descriptor.
-    virtual status_t getDescriptor(effect_descriptor_t *pDescriptor);
-
-    // Free resources on the remote side.
-    virtual status_t close();
-
-    // Whether it's a local implementation.
-    virtual bool isLocal() const { return true; }
-
-    effect_handle_t handle() const { return mHandle; }
-
-  private:
-    effect_handle_t mHandle;
-    sp<EffectBufferHalInterface> mInBuffer;
-    sp<EffectBufferHalInterface> mOutBuffer;
-
-    friend class EffectsFactoryHalLocal;
-
-    // Can not be constructed directly by clients.
-    explicit EffectHalLocal(effect_handle_t handle);
-
-    // The destructor automatically releases the effect.
-    virtual ~EffectHalLocal();
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_EFFECT_HAL_LOCAL_H
diff --git a/media/libaudiohal/EffectsFactoryHalLocal.cpp b/media/libaudiohal/EffectsFactoryHalLocal.cpp
deleted file mode 100644
index bbdef5d..0000000
--- a/media/libaudiohal/EffectsFactoryHalLocal.cpp
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/EffectsFactoryApi.h>
-
-#include "EffectHalLocal.h"
-#include "EffectsFactoryHalLocal.h"
-
-namespace android {
-
-// static
-sp<EffectsFactoryHalInterface> EffectsFactoryHalInterface::create() {
-    return new EffectsFactoryHalLocal();
-}
-
-// static
-bool EffectsFactoryHalInterface::isNullUuid(const effect_uuid_t *pEffectUuid) {
-    return EffectIsNullUuid(pEffectUuid);
-}
-
-status_t EffectsFactoryHalLocal::queryNumberEffects(uint32_t *pNumEffects) {
-    return EffectQueryNumberEffects(pNumEffects);
-}
-
-status_t EffectsFactoryHalLocal::getDescriptor(
-        uint32_t index, effect_descriptor_t *pDescriptor) {
-    return EffectQueryEffect(index, pDescriptor);
-}
-
-status_t EffectsFactoryHalLocal::getDescriptor(
-        const effect_uuid_t *pEffectUuid, effect_descriptor_t *pDescriptor) {
-    return EffectGetDescriptor(pEffectUuid, pDescriptor);
-}
-
-status_t EffectsFactoryHalLocal::createEffect(
-        const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t ioId,
-        sp<EffectHalInterface> *effect) {
-    effect_handle_t handle;
-    int result = EffectCreate(pEffectUuid, sessionId, ioId, &handle);
-    if (result == 0) {
-        *effect = new EffectHalLocal(handle);
-    }
-    return result;
-}
-
-status_t EffectsFactoryHalLocal::dumpEffects(int fd) {
-    return EffectDumpEffects(fd);
-}
-
-} // namespace android
diff --git a/media/libaudiohal/EffectsFactoryHalLocal.h b/media/libaudiohal/EffectsFactoryHalLocal.h
deleted file mode 100644
index d5b81be..0000000
--- a/media/libaudiohal/EffectsFactoryHalLocal.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_LOCAL_H
-#define ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_LOCAL_H
-
-#include <media/audiohal/EffectsFactoryHalInterface.h>
-
-namespace android {
-
-class EffectsFactoryHalLocal : public EffectsFactoryHalInterface
-{
-  public:
-    // Returns the number of different effects in all loaded libraries.
-    virtual status_t queryNumberEffects(uint32_t *pNumEffects);
-
-    // Returns a descriptor of the next available effect.
-    virtual status_t getDescriptor(uint32_t index,
-            effect_descriptor_t *pDescriptor);
-
-    virtual status_t getDescriptor(const effect_uuid_t *pEffectUuid,
-            effect_descriptor_t *pDescriptor);
-
-    // Creates an effect engine of the specified type.
-    // To release the effect engine, it is necessary to release references
-    // to the returned effect object.
-    virtual status_t createEffect(const effect_uuid_t *pEffectUuid,
-            int32_t sessionId, int32_t ioId,
-            sp<EffectHalInterface> *effect);
-
-    virtual status_t dumpEffects(int fd);
-
-  private:
-    friend class EffectsFactoryHalInterface;
-
-    // Can not be constructed directly by clients.
-    EffectsFactoryHalLocal() {}
-
-    virtual ~EffectsFactoryHalLocal() {}
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_LOCAL_H
diff --git a/media/libaudiohal/HalDeathHandlerHidl.cpp b/media/libaudiohal/HalDeathHandlerHidl.cpp
index a742671..1e3ab58 100644
--- a/media/libaudiohal/HalDeathHandlerHidl.cpp
+++ b/media/libaudiohal/HalDeathHandlerHidl.cpp
@@ -48,12 +48,13 @@
 
 void HalDeathHandler::serviceDied(uint64_t /*cookie*/, const wp<IBase>& /*who*/) {
     // No matter which of the service objects has died,
-    // we need to run all the registered handlers and crash our process.
+    // we need to run all the registered handlers and exit.
     std::lock_guard<std::mutex> guard(mHandlersLock);
     for (const auto& handler : mHandlers) {
         handler.second();
     }
-    LOG_ALWAYS_FATAL("HAL server crashed, need to restart");
+    ALOGE("HAL server crashed, audio server is restarting");
+    exit(1);
 }
 
 } // namespace android
diff --git a/media/libaudiohal/StreamHalLocal.cpp b/media/libaudiohal/StreamHalLocal.cpp
index dc17f5c..8d61e24 100644
--- a/media/libaudiohal/StreamHalLocal.cpp
+++ b/media/libaudiohal/StreamHalLocal.cpp
@@ -21,7 +21,6 @@
 #include <utils/Log.h>
 
 #include "DeviceHalLocal.h"
-#include "EffectHalLocal.h"
 #include "StreamHalLocal.h"
 
 namespace android {
@@ -86,16 +85,14 @@
     return OK;
 }
 
-status_t StreamHalLocal::addEffect(sp<EffectHalInterface> effect) {
-    LOG_ALWAYS_FATAL_IF(!effect->isLocal(), "Only local effects can be added for a local stream");
-    return mStream->add_audio_effect(mStream,
-            static_cast<EffectHalLocal*>(effect.get())->handle());
+status_t StreamHalLocal::addEffect(sp<EffectHalInterface>) {
+    LOG_ALWAYS_FATAL("Local streams can not have effects");
+    return INVALID_OPERATION;
 }
 
-status_t StreamHalLocal::removeEffect(sp<EffectHalInterface> effect) {
-    LOG_ALWAYS_FATAL_IF(!effect->isLocal(), "Only local effects can be removed for a local stream");
-    return mStream->remove_audio_effect(mStream,
-            static_cast<EffectHalLocal*>(effect.get())->handle());
+status_t StreamHalLocal::removeEffect(sp<EffectHalInterface>) {
+    LOG_ALWAYS_FATAL("Local streams can not have effects");
+    return INVALID_OPERATION;
 }
 
 status_t StreamHalLocal::standby() {
diff --git a/media/libaudiohal/include/media/audiohal/EffectBufferHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectBufferHalInterface.h
index e862f6e..1cae662 100644
--- a/media/libaudiohal/include/media/audiohal/EffectBufferHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectBufferHalInterface.h
@@ -37,6 +37,8 @@
         return externalData() != nullptr ? externalData() : audioBuffer()->raw;
     }
 
+    virtual size_t getSize() const = 0;
+
     virtual void setExternalData(void* external) = 0;
     virtual void setFrameCount(size_t frameCount) = 0;
     virtual bool checkFrameCountChange() = 0;  // returns whether frame count has been updated
diff --git a/media/libaudioprocessing/Android.mk b/media/libaudioprocessing/Android.mk
index c850984..da1ecc2 100644
--- a/media/libaudioprocessing/Android.mk
+++ b/media/libaudioprocessing/Android.mk
@@ -24,6 +24,7 @@
     libcutils \
     liblog \
     libnbaio \
+    libnblog \
     libsonic \
     libutils \
 
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 238925d..f8e05e7 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -71,12 +71,20 @@
 
 // Set kUseNewMixer to true to use the new mixer engine always. Otherwise the
 // original code will be used for stereo sinks, the new mixer for multichannel.
-static const bool kUseNewMixer = true;
+static constexpr bool kUseNewMixer = true;
 
 // Set kUseFloat to true to allow floating input into the mixer engine.
 // If kUseNewMixer is false, this is ignored or may be overridden internally
 // because of downmix/upmix support.
-static const bool kUseFloat = true;
+static constexpr bool kUseFloat = true;
+
+#ifdef FLOAT_AUX
+using TYPE_AUX = float;
+static_assert(kUseNewMixer && kUseFloat,
+        "kUseNewMixer and kUseFloat must be true for FLOAT_AUX option");
+#else
+using TYPE_AUX = int32_t; // q4.27
+#endif
 
 // Set to default copy buffer size in frames for input processing.
 static const size_t kCopyBufferFrameCount = 256;
@@ -813,7 +821,7 @@
                 mMixerInFormat, sampleRate, playbackRate);
         reconfigureBufferProviders();
     } else {
-        reinterpret_cast<TimestretchBufferProvider*>(mTimestretchBufferProvider)
+        static_cast<TimestretchBufferProvider*>(mTimestretchBufferProvider)
                 ->setPlaybackRate(playbackRate);
     }
     return true;
@@ -861,16 +869,25 @@
             }
         }
     }
-    /* TODO: aux is always integer regardless of output buffer type */
+
     if (aux) {
-        if (((auxInc>0) && (((prevAuxLevel+auxInc)>>16) >= auxLevel)) ||
-                ((auxInc<0) && (((prevAuxLevel+auxInc)>>16) <= auxLevel))) {
+#ifdef FLOAT_AUX
+        if (useFloat) {
+            if ((mAuxInc > 0.f && mPrevAuxLevel + mAuxInc >= mAuxLevel) ||
+                    (mAuxInc < 0.f && mPrevAuxLevel + mAuxInc <= mAuxLevel)) {
+                auxInc = 0;
+                prevAuxLevel = auxLevel << 16;
+                mAuxInc = 0.f;
+                mPrevAuxLevel = mAuxLevel;
+            }
+        } else
+#endif
+        if ((auxInc > 0 && ((prevAuxLevel + auxInc) >> 16) >= auxLevel) ||
+                (auxInc < 0 && ((prevAuxLevel + auxInc) >> 16) <= auxLevel)) {
             auxInc = 0;
             prevAuxLevel = auxLevel << 16;
-            mAuxInc = 0.;
+            mAuxInc = 0.f;
             mPrevAuxLevel = mAuxLevel;
-        } else {
-            //ALOGV("aux ramp: %d %d %d", auxLevel << 16, prevAuxLevel, auxInc);
         }
     }
 }
@@ -1694,7 +1711,7 @@
 /* MIXTYPE     (see AudioMixerOps.h MIXTYPE_* enumeration)
  * TO: int32_t (Q4.27) or float
  * TI: int32_t (Q4.27) or int16_t (Q0.15) or float
- * TA: int32_t (Q4.27)
+ * TA: int32_t (Q4.27) or float
  */
 template <int MIXTYPE,
         typename TO, typename TI, typename TV, typename TA, typename TAV>
@@ -1738,7 +1755,7 @@
 /* MIXTYPE     (see AudioMixerOps.h MIXTYPE_* enumeration)
  * TO: int32_t (Q4.27) or float
  * TI: int32_t (Q4.27) or int16_t (Q0.15) or float
- * TA: int32_t (Q4.27)
+ * TA: int32_t (Q4.27) or float
  */
 template <int MIXTYPE,
         typename TO, typename TI, typename TV, typename TA, typename TAV>
@@ -1778,7 +1795,7 @@
  * ADJUSTVOL   (set to true if volume ramp parameters needs adjustment afterwards)
  * TO: int32_t (Q4.27) or float
  * TI: int32_t (Q4.27) or int16_t (Q0.15) or float
- * TA: int32_t (Q4.27)
+ * TA: int32_t (Q4.27) or float
  */
 template <int MIXTYPE, bool USEFLOATVOL, bool ADJUSTVOL,
     typename TO, typename TI, typename TA>
@@ -1788,13 +1805,25 @@
     if (USEFLOATVOL) {
         if (ramp) {
             volumeRampMulti<MIXTYPE>(t->mMixerChannelCount, out, outFrames, in, aux,
-                    t->mPrevVolume, t->mVolumeInc, &t->prevAuxLevel, t->auxInc);
+                    t->mPrevVolume, t->mVolumeInc,
+#ifdef FLOAT_AUX
+                    &t->mPrevAuxLevel, t->mAuxInc
+#else
+                    &t->prevAuxLevel, t->auxInc
+#endif
+                );
             if (ADJUSTVOL) {
                 t->adjustVolumeRamp(aux != NULL, true);
             }
         } else {
             volumeMulti<MIXTYPE>(t->mMixerChannelCount, out, outFrames, in, aux,
-                    t->mVolume, t->auxLevel);
+                    t->mVolume,
+#ifdef FLOAT_AUX
+                    t->mAuxLevel
+#else
+                    t->auxLevel
+#endif
+            );
         }
     } else {
         if (ramp) {
@@ -1851,7 +1880,7 @@
         }
 
         const size_t outFrames = b.frameCount;
-        volumeMix<MIXTYPE, is_same<TI, float>::value, false> (
+        volumeMix<MIXTYPE, is_same<TI, float>::value /* USEFLOATVOL */, false /* ADJUSTVOL */> (
                 out, outFrames, in, aux, ramp, t);
 
         out += outFrames * channels;
@@ -1874,7 +1903,7 @@
  * MIXTYPE     (see AudioMixerOps.h MIXTYPE_* enumeration)
  * TO: int32_t (Q4.27) or float
  * TI: int32_t (Q4.27) or int16_t (Q0.15) or float
- * TA: int32_t (Q4.27)
+ * TA: int32_t (Q4.27) or float
  */
 template <int MIXTYPE, typename TO, typename TI, typename TA>
 void AudioMixer::track__Resample(track_t* t, TO* out, size_t outFrameCount, TO* temp, TA* aux)
@@ -1890,7 +1919,7 @@
         memset(temp, 0, outFrameCount * t->mMixerChannelCount * sizeof(TO));
         t->resampler->resample((int32_t*)temp, outFrameCount, t->bufferProvider);
 
-        volumeMix<MIXTYPE, is_same<TI, float>::value, true>(
+        volumeMix<MIXTYPE, is_same<TI, float>::value /* USEFLOATVOL */, true /* ADJUSTVOL */>(
                 out, outFrameCount, temp, aux, ramp, t);
 
     } else { // constant volume gain
@@ -1905,7 +1934,7 @@
  * MIXTYPE     (see AudioMixerOps.h MIXTYPE_* enumeration)
  * TO: int32_t (Q4.27) or float
  * TI: int32_t (Q4.27) or int16_t (Q0.15) or float
- * TA: int32_t (Q4.27)
+ * TA: int32_t (Q4.27) or float
  */
 template <int MIXTYPE, typename TO, typename TI, typename TA>
 void AudioMixer::track__NoResample(track_t* t, TO* out, size_t frameCount,
@@ -1914,7 +1943,7 @@
     ALOGVV("track__NoResample\n");
     const TI *in = static_cast<const TI *>(t->in);
 
-    volumeMix<MIXTYPE, is_same<TI, float>::value, true>(
+    volumeMix<MIXTYPE, is_same<TI, float>::value /* USEFLOATVOL */, true /* ADJUSTVOL */>(
             out, frameCount, in, aux, t->needsRamp(), t);
 
     // MIXTYPE_MONOEXPAND reads a single input channel and expands to NCHAN output channels.
@@ -1947,11 +1976,10 @@
     case AUDIO_FORMAT_PCM_16_BIT:
         switch (mixerOutFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
-            memcpy_to_float_from_q4_27((float*)out, (int32_t*)in, sampleCount);
+            memcpy_to_float_from_q4_27((float*)out, (const int32_t*)in, sampleCount);
             break;
         case AUDIO_FORMAT_PCM_16_BIT:
-            // two int16_t are produced per iteration
-            ditherAndClamp((int32_t*)out, (int32_t*)in, sampleCount >> 1);
+            memcpy_to_i16_from_q4_27((int16_t*)out, (const int32_t*)in, sampleCount);
             break;
         default:
             LOG_ALWAYS_FATAL("bad mixerOutFormat: %#x", mixerOutFormat);
@@ -1991,11 +2019,11 @@
     case TRACKTYPE_RESAMPLE:
         switch (mixerInFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
-            return (AudioMixer::hook_t)
-                    track__Resample<MIXTYPE_MULTI, float /*TO*/, float /*TI*/, int32_t /*TA*/>;
+            return (AudioMixer::hook_t)track__Resample<
+                    MIXTYPE_MULTI, float /*TO*/, float /*TI*/, TYPE_AUX>;
         case AUDIO_FORMAT_PCM_16_BIT:
-            return (AudioMixer::hook_t)\
-                    track__Resample<MIXTYPE_MULTI, int32_t, int16_t, int32_t>;
+            return (AudioMixer::hook_t)track__Resample<
+                    MIXTYPE_MULTI, int32_t /*TO*/, int16_t /*TI*/, TYPE_AUX>;
         default:
             LOG_ALWAYS_FATAL("bad mixerInFormat: %#x", mixerInFormat);
             break;
@@ -2004,11 +2032,11 @@
     case TRACKTYPE_NORESAMPLEMONO:
         switch (mixerInFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
-            return (AudioMixer::hook_t)
-                    track__NoResample<MIXTYPE_MONOEXPAND, float, float, int32_t>;
+            return (AudioMixer::hook_t)track__NoResample<
+                            MIXTYPE_MONOEXPAND, float /*TO*/, float /*TI*/, TYPE_AUX>;
         case AUDIO_FORMAT_PCM_16_BIT:
-            return (AudioMixer::hook_t)
-                    track__NoResample<MIXTYPE_MONOEXPAND, int32_t, int16_t, int32_t>;
+            return (AudioMixer::hook_t)track__NoResample<
+                            MIXTYPE_MONOEXPAND, int32_t /*TO*/, int16_t /*TI*/, TYPE_AUX>;
         default:
             LOG_ALWAYS_FATAL("bad mixerInFormat: %#x", mixerInFormat);
             break;
@@ -2017,11 +2045,11 @@
     case TRACKTYPE_NORESAMPLE:
         switch (mixerInFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
-            return (AudioMixer::hook_t)
-                    track__NoResample<MIXTYPE_MULTI, float, float, int32_t>;
+            return (AudioMixer::hook_t)track__NoResample<
+                    MIXTYPE_MULTI, float /*TO*/, float /*TI*/, TYPE_AUX>;
         case AUDIO_FORMAT_PCM_16_BIT:
-            return (AudioMixer::hook_t)
-                    track__NoResample<MIXTYPE_MULTI, int32_t, int16_t, int32_t>;
+            return (AudioMixer::hook_t)track__NoResample<
+                    MIXTYPE_MULTI, int32_t /*TO*/, int16_t /*TI*/, TYPE_AUX>;
         default:
             LOG_ALWAYS_FATAL("bad mixerInFormat: %#x", mixerInFormat);
             break;
@@ -2056,11 +2084,11 @@
     case AUDIO_FORMAT_PCM_FLOAT:
         switch (mixerOutFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
-            return process_NoResampleOneTrack<MIXTYPE_MULTI_SAVEONLY,
-                    float /*TO*/, float /*TI*/, int32_t /*TA*/>;
+            return process_NoResampleOneTrack<
+                    MIXTYPE_MULTI_SAVEONLY, float /*TO*/, float /*TI*/, TYPE_AUX>;
         case AUDIO_FORMAT_PCM_16_BIT:
-            return process_NoResampleOneTrack<MIXTYPE_MULTI_SAVEONLY,
-                    int16_t, float, int32_t>;
+            return process_NoResampleOneTrack<
+                    MIXTYPE_MULTI_SAVEONLY, int16_t /*TO*/, float /*TI*/, TYPE_AUX>;
         default:
             LOG_ALWAYS_FATAL("bad mixerOutFormat: %#x", mixerOutFormat);
             break;
@@ -2069,11 +2097,11 @@
     case AUDIO_FORMAT_PCM_16_BIT:
         switch (mixerOutFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
-            return process_NoResampleOneTrack<MIXTYPE_MULTI_SAVEONLY,
-                    float, int16_t, int32_t>;
+            return process_NoResampleOneTrack<
+                    MIXTYPE_MULTI_SAVEONLY, float /*TO*/, int16_t /*TI*/, TYPE_AUX>;
         case AUDIO_FORMAT_PCM_16_BIT:
-            return process_NoResampleOneTrack<MIXTYPE_MULTI_SAVEONLY,
-                    int16_t, int16_t, int32_t>;
+            return process_NoResampleOneTrack<
+                    MIXTYPE_MULTI_SAVEONLY, int16_t /*TO*/, int16_t /*TI*/, TYPE_AUX>;
         default:
             LOG_ALWAYS_FATAL("bad mixerOutFormat: %#x", mixerOutFormat);
             break;
diff --git a/media/libaudioprocessing/AudioMixerOps.h b/media/libaudioprocessing/AudioMixerOps.h
index 8d74024..f33e361 100644
--- a/media/libaudioprocessing/AudioMixerOps.h
+++ b/media/libaudioprocessing/AudioMixerOps.h
@@ -188,13 +188,13 @@
 
 template<>
 inline void MixAccum<float, int16_t>(float *auxaccum, int16_t value) {
-    static const float norm = 1. / (1 << 15);
+    static constexpr float norm = 1. / (1 << 15);
     *auxaccum += norm * value;
 }
 
 template<>
 inline void MixAccum<float, int32_t>(float *auxaccum, int32_t value) {
-    static const float norm = 1. / (1 << 27);
+    static constexpr float norm = 1. / (1 << 27);
     *auxaccum += norm * value;
 }
 
@@ -238,6 +238,7 @@
  *   NCHAN represents number of input and output channels.
  *   TO: int32_t (Q4.27) or float
  *   TI: int32_t (Q4.27) or int16_t (Q0.15) or float
+ *   TA: int32_t (Q4.27) or float
  *   TV: int32_t (U4.28) or int16_t (U4.12) or float
  *   vol: represents a volume array.
  *
@@ -247,7 +248,8 @@
  *   Single input channel. NCHAN represents number of output channels.
  *   TO: int32_t (Q4.27) or float
  *   TI: int32_t (Q4.27) or int16_t (Q0.15) or float
- *   TV: int32_t (U4.28) or int16_t (U4.12) or float
+ *   TA: int32_t (Q4.27) or float
+ *   TV/TAV: int32_t (U4.28) or int16_t (U4.12) or float
  *   Input channel count is 1.
  *   vol: represents volume array.
  *
@@ -257,7 +259,8 @@
  *   NCHAN represents number of input and output channels.
  *   TO: int16_t (Q.15) or float
  *   TI: int32_t (Q4.27) or int16_t (Q0.15) or float
- *   TV: int32_t (U4.28) or int16_t (U4.12) or float
+ *   TA: int32_t (Q4.27) or float
+ *   TV/TAV: int32_t (U4.28) or int16_t (U4.12) or float
  *   vol: represents a volume array.
  *
  *   MIXTYPE_MULTI_SAVEONLY does not accumulate into the out pointer.
diff --git a/media/libaudioprocessing/AudioResamplerDyn.cpp b/media/libaudioprocessing/AudioResamplerDyn.cpp
index 8f7b982..eeeecce 100644
--- a/media/libaudioprocessing/AudioResamplerDyn.cpp
+++ b/media/libaudioprocessing/AudioResamplerDyn.cpp
@@ -38,6 +38,9 @@
 
 //#define DEBUG_RESAMPLER
 
+// use this for our buffer alignment.  Should be at least 32 bytes.
+constexpr size_t CACHE_LINE_SIZE = 64;
+
 namespace android {
 
 /*
@@ -94,7 +97,10 @@
 
     // create new buffer
     TI* state = NULL;
-    (void)posix_memalign(reinterpret_cast<void**>(&state), 32, stateCount*sizeof(*state));
+    (void)posix_memalign(
+            reinterpret_cast<void **>(&state),
+            CACHE_LINE_SIZE /* alignment */,
+            stateCount * sizeof(*state));
     memset(state, 0, stateCount*sizeof(*state));
 
     // attempt to preserve state
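
The hunk above raises the alignment of the resampler state buffer from 32 bytes to a full 64-byte cache line. A minimal standalone sketch of the same posix_memalign pattern, with an illustrative element type and count rather than the resampler's actual state layout:

    #include <cstdlib>
    #include <cstring>

    constexpr size_t kCacheLineSize = 64;   // power of two and a multiple of sizeof(void*)

    // Allocate a zeroed, cache-line aligned buffer of 'count' floats; nullptr on failure.
    static float* allocateAlignedState(size_t count) {
        void* p = nullptr;
        if (posix_memalign(&p, kCacheLineSize, count * sizeof(float)) != 0) {
            return nullptr;
        }
        memset(p, 0, count * sizeof(float));
        return static_cast<float*>(p);
    }
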
@@ -185,6 +191,16 @@
     // setSampleRate() for 1:1. (May be removed if precalculated filters are used.)
     mInSampleRate = 0;
     mConstants.set(128, 8, mSampleRate, mSampleRate); // TODO: set better
+
+    // fetch property based resampling parameters
+    mPropertyEnableAtSampleRate = property_get_int32(
+            "ro.audio.resampler.psd.enable_at_samplerate", mPropertyEnableAtSampleRate);
+    mPropertyHalfFilterLength = property_get_int32(
+            "ro.audio.resampler.psd.halflength", mPropertyHalfFilterLength);
+    mPropertyStopbandAttenuation = property_get_int32(
+            "ro.audio.resampler.psd.stopband", mPropertyStopbandAttenuation);
+    mPropertyCutoffPercent = property_get_int32(
+            "ro.audio.resampler.psd.cutoff_percent", mPropertyCutoffPercent);
 }
 
 template<typename TC, typename TI, typename TO>
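
The constructor hunk above lets read-only system properties override the compiled-in resampler design defaults. A minimal sketch of that lookup pattern, assuming <cutils/properties.h> is available; the property name is one of those introduced here, and the default value is illustrative:

    #include <cstdint>
    #include <cutils/properties.h>

    // Return the property value when it is set, otherwise the compiled-in default.
    static int32_t readHalfFilterLength() {
        constexpr int32_t kDefaultHalfLength = 32;
        return property_get_int32("ro.audio.resampler.psd.halflength", kDefaultHalfLength);
    }
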
@@ -215,6 +231,8 @@
     }
 }
 
+// TODO: update to C++11
+
 template<typename T> T max(T a, T b) {return a > b ? a : b;}
 
 template<typename T> T absdiff(T a, T b) {return a > b ? a - b : b - a;}
@@ -223,37 +241,74 @@
 void AudioResamplerDyn<TC, TI, TO>::createKaiserFir(Constants &c,
         double stopBandAtten, int inSampleRate, int outSampleRate, double tbwCheat)
 {
-    TC* buf = NULL;
-    static const double atten = 0.9998;   // to avoid ripple overflow
-    double fcr;
-    double tbw = firKaiserTbw(c.mHalfNumCoefs, stopBandAtten);
+    // compute the normalized transition bandwidth
+    const double tbw = firKaiserTbw(c.mHalfNumCoefs, stopBandAtten);
+    const double halfbw = tbw / 2.;
 
-    (void)posix_memalign(reinterpret_cast<void**>(&buf), 32, (c.mL+1)*c.mHalfNumCoefs*sizeof(TC));
+    double fcr; // compute fcr, the 3 dB amplitude cut-off.
     if (inSampleRate < outSampleRate) { // upsample
-        fcr = max(0.5*tbwCheat - tbw/2, tbw/2);
+        fcr = max(0.5 * tbwCheat - halfbw, halfbw);
     } else { // downsample
-        fcr = max(0.5*tbwCheat*outSampleRate/inSampleRate - tbw/2, tbw/2);
+        fcr = max(0.5 * tbwCheat * outSampleRate / inSampleRate - halfbw, halfbw);
     }
-    // create and set filter
-    firKaiserGen(buf, c.mL, c.mHalfNumCoefs, stopBandAtten, fcr, atten);
-    c.mFirCoefs = buf;
-    if (mCoefBuffer) {
-        free(mCoefBuffer);
-    }
-    mCoefBuffer = buf;
-#ifdef DEBUG_RESAMPLER
+    createKaiserFir(c, stopBandAtten, fcr);
+}
+
+template<typename TC, typename TI, typename TO>
+void AudioResamplerDyn<TC, TI, TO>::createKaiserFir(Constants &c,
+        double stopBandAtten, double fcr) {
+    // compute the normalized transition bandwidth
+    const double tbw = firKaiserTbw(c.mHalfNumCoefs, stopBandAtten);
+    const int phases = c.mL;
+    const int halfLength = c.mHalfNumCoefs;
+
+    // create buffer
+    TC *coefs = nullptr;
+    int ret = posix_memalign(
+            reinterpret_cast<void **>(&coefs),
+            CACHE_LINE_SIZE /* alignment */,
+            (phases + 1) * halfLength * sizeof(TC));
+    LOG_ALWAYS_FATAL_IF(ret != 0, "Cannot allocate buffer memory, ret %d", ret);
+    c.mFirCoefs = coefs;
+    free(mCoefBuffer);
+    mCoefBuffer = coefs;
+
+    // square the computed minimum passband value (extra safety).
+    double attenuation =
+            computeWindowedSincMinimumPassbandValue(stopBandAtten);
+    attenuation *= attenuation;
+
+    // design filter
+    firKaiserGen(coefs, phases, halfLength, stopBandAtten, fcr, attenuation);
+
+    // update the design criteria
+    mNormalizedCutoffFrequency = fcr;
+    mNormalizedTransitionBandwidth = tbw;
+    mFilterAttenuation = attenuation;
+    mStopbandAttenuationDb = stopBandAtten;
+    mPassbandRippleDb = computeWindowedSincPassbandRippleDb(stopBandAtten);
+
+#if 0
+    // Keep this debug code in case an app causes resampler design issues.
+    const double halfbw = tbw / 2.;
     // print basic filter stats
-    printf("L:%d  hnc:%d  stopBandAtten:%lf  fcr:%lf  atten:%lf  tbw:%lf\n",
-            c.mL, c.mHalfNumCoefs, stopBandAtten, fcr, atten, tbw);
-    // test the filter and report results
-    double fp = (fcr - tbw/2)/c.mL;
-    double fs = (fcr + tbw/2)/c.mL;
+    ALOGD("L:%d  hnc:%d  stopBandAtten:%lf  fcr:%lf  atten:%lf  tbw:%lf\n",
+            c.mL, c.mHalfNumCoefs, stopBandAtten, fcr, attenuation, tbw);
+
+    // test the filter and report results.
+    // Since this is a polyphase filter, normalized fp and fs must be scaled.
+    const double fp = (fcr - halfbw) / phases;
+    const double fs = (fcr + halfbw) / phases;
+
     double passMin, passMax, passRipple;
     double stopMax, stopRipple;
-    testFir(buf, c.mL, c.mHalfNumCoefs, fp, fs, /*passSteps*/ 1000, /*stopSteps*/ 100000,
+
+    const int32_t passSteps = 1000;
+
+    testFir(coefs, c.mL, c.mHalfNumCoefs, fp, fs, passSteps, passSteps * c.mL /*stopSteps*/,
             passMin, passMax, passRipple, stopMax, stopRipple);
-    printf("passband(%lf, %lf): %.8lf %.8lf %.8lf\n", 0., fp, passMin, passMax, passRipple);
-    printf("stopband(%lf, %lf): %.8lf %.3lf\n", fs, 0.5, stopMax, stopRipple);
+    ALOGD("passband(%lf, %lf): %.8lf %.8lf %.8lf\n", 0., fp, passMin, passMax, passRipple);
+    ALOGD("stopband(%lf, %lf): %.8lf %.3lf\n", fs, 0.5, stopMax, stopRipple);
 #endif
 }
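
A worked numeric sketch of the cut-off selection above, using an illustrative transition bandwidth (tbw is normally computed by firKaiserTbw()):

    // Downsampling 48000 -> 44100 with a hypothetical tbw = 0.04 (halfbw = 0.02)
    // and tbwCheat = 1.03:
    //   fcr = max(0.5 * 1.03 * 44100 / 48000 - 0.02, 0.02)
    //       = max(0.473156 - 0.02, 0.02)
    //       = 0.453156   // normalized cut-off handed to createKaiserFir(c, stopBandAtten, fcr)
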
 
@@ -304,6 +359,11 @@
         mFilterSampleRate = inSampleRate;
         mFilterQuality = getQuality();
 
+        double stopBandAtten;
+        double tbwCheat = 1.; // how much we "cheat" into aliasing
+        int halfLength;
+        double fcr = 0.;
+
         // Begin Kaiser Filter computation
         //
         // The quantization floor for S16 is about 96db - 10*log_10(#length) + 3dB.
@@ -313,52 +373,60 @@
         // 96-98dB
         //
 
-        double stopBandAtten;
-        double tbwCheat = 1.; // how much we "cheat" into aliasing
-        int halfLength;
-        if (mFilterQuality == DYN_HIGH_QUALITY) {
-            // 32b coefficients, 64 length
+        if (mPropertyEnableAtSampleRate >= 0 && mSampleRate >= mPropertyEnableAtSampleRate) {
+            // An alternative method which allows a greater fcr
+            // at the expense of potential aliasing.
+            halfLength = mPropertyHalfFilterLength;
+            stopBandAtten = mPropertyStopbandAttenuation;
             useS32 = true;
-            stopBandAtten = 98.;
-            if (inSampleRate >= mSampleRate * 4) {
-                halfLength = 48;
-            } else if (inSampleRate >= mSampleRate * 2) {
-                halfLength = 40;
-            } else {
-                halfLength = 32;
-            }
-        } else if (mFilterQuality == DYN_LOW_QUALITY) {
-            // 16b coefficients, 16-32 length
-            useS32 = false;
-            stopBandAtten = 80.;
-            if (inSampleRate >= mSampleRate * 4) {
-                halfLength = 24;
-            } else if (inSampleRate >= mSampleRate * 2) {
-                halfLength = 16;
-            } else {
-                halfLength = 8;
-            }
-            if (inSampleRate <= mSampleRate) {
-                tbwCheat = 1.05;
-            } else {
-                tbwCheat = 1.03;
-            }
-        } else { // DYN_MED_QUALITY
-            // 16b coefficients, 32-64 length
-            // note: > 64 length filters with 16b coefs can have quantization noise problems
-            useS32 = false;
-            stopBandAtten = 84.;
-            if (inSampleRate >= mSampleRate * 4) {
-                halfLength = 32;
-            } else if (inSampleRate >= mSampleRate * 2) {
-                halfLength = 24;
-            } else {
-                halfLength = 16;
-            }
-            if (inSampleRate <= mSampleRate) {
-                tbwCheat = 1.03;
-            } else {
-                tbwCheat = 1.01;
+            fcr = mInSampleRate <= mSampleRate
+                    ? 0.5 : 0.5 * mSampleRate / mInSampleRate;
+            fcr *= mPropertyCutoffPercent / 100.;
+        } else {
+            if (mFilterQuality == DYN_HIGH_QUALITY) {
+                // 32b coefficients, 64 length
+                useS32 = true;
+                stopBandAtten = 98.;
+                if (inSampleRate >= mSampleRate * 4) {
+                    halfLength = 48;
+                } else if (inSampleRate >= mSampleRate * 2) {
+                    halfLength = 40;
+                } else {
+                    halfLength = 32;
+                }
+            } else if (mFilterQuality == DYN_LOW_QUALITY) {
+                // 16b coefficients, 16-32 length
+                useS32 = false;
+                stopBandAtten = 80.;
+                if (inSampleRate >= mSampleRate * 4) {
+                    halfLength = 24;
+                } else if (inSampleRate >= mSampleRate * 2) {
+                    halfLength = 16;
+                } else {
+                    halfLength = 8;
+                }
+                if (inSampleRate <= mSampleRate) {
+                    tbwCheat = 1.05;
+                } else {
+                    tbwCheat = 1.03;
+                }
+            } else { // DYN_MED_QUALITY
+                // 16b coefficients, 32-64 length
+                // note: > 64 length filters with 16b coefs can have quantization noise problems
+                useS32 = false;
+                stopBandAtten = 84.;
+                if (inSampleRate >= mSampleRate * 4) {
+                    halfLength = 32;
+                } else if (inSampleRate >= mSampleRate * 2) {
+                    halfLength = 24;
+                } else {
+                    halfLength = 16;
+                }
+                if (inSampleRate <= mSampleRate) {
+                    tbwCheat = 1.03;
+                } else {
+                    tbwCheat = 1.01;
+                }
             }
         }
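
A worked sketch of the property-driven branch above, assuming the header defaults (enable at 48000 Hz, cutoff at 100 percent):

    // Device mix rate mSampleRate = 48000 (>= the enable threshold), so the branch is taken:
    //   track at 44100 (upsampled):    fcr = 0.5                  * 100 / 100. = 0.5
    //   track at 96000 (downsampled):  fcr = 0.5 * 48000 / 96000  * 100 / 100. = 0.25
    // i.e. the cut-off sits at the smaller of the input and output Nyquist frequencies,
    // expressed as a fraction of the input rate and scaled by ro.audio.resampler.psd.cutoff_percent.
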
 
@@ -390,8 +458,12 @@
 
         // create the filter
         mConstants.set(phases, halfLength, inSampleRate, mSampleRate);
-        createKaiserFir(mConstants, stopBandAtten,
-                inSampleRate, mSampleRate, tbwCheat);
+        if (fcr > 0.) {
+            createKaiserFir(mConstants, stopBandAtten, fcr);
+        } else {
+            createKaiserFir(mConstants, stopBandAtten,
+                    inSampleRate, mSampleRate, tbwCheat);
+        }
     } // End Kaiser filter
 
     // update phase and state based on the new filter.
diff --git a/media/libaudioprocessing/AudioResamplerDyn.h b/media/libaudioprocessing/AudioResamplerDyn.h
index 1840fc7..92144d0 100644
--- a/media/libaudioprocessing/AudioResamplerDyn.h
+++ b/media/libaudioprocessing/AudioResamplerDyn.h
@@ -55,6 +55,39 @@
     virtual size_t resample(int32_t* out, size_t outFrameCount,
             AudioBufferProvider* provider);
 
+    // Make available key design criteria for testing
+    int getHalfLength() const {
+        return mConstants.mHalfNumCoefs;
+    }
+
+    const TC *getFilterCoefs() const {
+        return mConstants.mFirCoefs;
+    }
+
+    int getPhases() const {
+        return mConstants.mL;
+    }
+
+    double getStopbandAttenuationDb() const {
+        return mStopbandAttenuationDb;
+    }
+
+    double getPassbandRippleDb() const {
+        return mPassbandRippleDb;
+    }
+
+    double getNormalizedTransitionBandwidth() const {
+        return mNormalizedTransitionBandwidth;
+    }
+
+    double getFilterAttenuation() const {
+        return mFilterAttenuation;
+    }
+
+    double getNormalizedCutoffFrequency() const {
+        return mNormalizedCutoffFrequency;
+    }
+
 private:
 
     class Constants { // stores the filter constants.
@@ -112,6 +145,8 @@
     void createKaiserFir(Constants &c, double stopBandAtten,
             int inSampleRate, int outSampleRate, double tbwCheat);
 
+    void createKaiserFir(Constants &c, double stopBandAtten, double fcr);
+
     template<int CHANNELS, bool LOCKED, int STRIDE>
     size_t resample(TO* out, size_t outFrameCount, AudioBufferProvider* provider);
 
@@ -127,6 +162,38 @@
             int32_t mFilterSampleRate; // designed filter sample rate.
         src_quality mFilterQuality;    // designed filter quality.
               void* mCoefBuffer;       // if a filter is created, this is not null
+
+    // Property selected design parameters.
+              // This will enable fixed high quality resampling.
+
+              // 32 char PROP_NAME_MAX limit enforced before Android O
+
+              // Use for sample rates greater than or equal to this value.
+              // Set to non-negative to enable, negative to disable.
+              int32_t mPropertyEnableAtSampleRate = 48000;
+                      // "ro.audio.resampler.psd.enable_at_samplerate"
+
+              // Specify HALF the resampling filter length.
+              // Set to a value which is a multiple of 4.
+              int32_t mPropertyHalfFilterLength = 32;
+                      // "ro.audio.resampler.psd.halflength"
+
+              // Specify the stopband attenuation in positive dB.
+              // Set to a value greater or equal to 20.
+              int32_t mPropertyStopbandAttenuation = 90;
+                      // "ro.audio.resampler.psd.stopband"
+
+              // Specify the cutoff frequency as a percentage of Nyquist.
+              // Set to a value between 50 and 100.
+              int32_t mPropertyCutoffPercent = 100;
+                      // "ro.audio.resampler.psd.cutoff_percent"
+
+    // Filter creation design parameters, see setSampleRate()
+             double mStopbandAttenuationDb = 0.;
+             double mPassbandRippleDb = 0.;
+             double mNormalizedTransitionBandwidth = 0.;
+             double mFilterAttenuation = 0.;
+             double mNormalizedCutoffFrequency = 0.;
 };
 
 } // namespace android
diff --git a/media/libaudioprocessing/AudioResamplerFirGen.h b/media/libaudioprocessing/AudioResamplerFirGen.h
index ad18965..39cafeb 100644
--- a/media/libaudioprocessing/AudioResamplerFirGen.h
+++ b/media/libaudioprocessing/AudioResamplerFirGen.h
@@ -546,8 +546,9 @@
         }
         wstart += wstep;
     }
-    // renormalize - this is only needed for integer filter types
-    double norm = 1./((1ULL<<(sizeof(T)*8-1))*L);
+    // renormalize - this is needed for integer filter types, use 1 for float or double.
+    constexpr int64_t integralShift = std::is_integral<T>::value ? (sizeof(T) * 8 - 1) : 0;
+    const double norm = 1. / (L << integralShift);
 
     firMin = fmin * norm;
     firMax = fmax * norm;
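
A short numeric illustration of the renormalization above:

    // T = int16_t, L = 8 phases:  integralShift = 15, norm = 1. / (8 << 15) = 1 / 262144
    // T = float,   L = 8 phases:  integralShift = 0,  norm = 1. / (8 << 0)  = 1 / 8
    // Integer coefficient types fold the Q-format scale (2^(bits - 1)) into the
    // normalization; floating-point types only divide by the number of phases L.
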
@@ -557,9 +558,12 @@
  * evaluates the |H(f)| lowpass band characteristics.
  *
  * This function tests the lowpass characteristics for the overall polyphase filter,
- * and is used to verify the design.  For this case, fp should be set to the
+ * and is used to verify the design.
+ *
+ * For a polyphase filter (L > 1), typically fp should be set to the
  * passband normalized frequency from 0 to 0.5 for the overall filter (thus it
  * is the designed polyphase bank value / L).  Likewise for fs.
+ * Similarly the stopSteps should be L * passSteps for equivalent accuracy.
  *
  * @param coef is the designed polyphase filter banks
  *
@@ -610,6 +614,74 @@
 }
 
 /*
+ * Estimate the windowed sinc minimum passband value.
+ *
+ * This is the minimum value for a windowed sinc filter in its passband,
+ * which is identical to the scaling required not to cause overflow of a 0dBFS signal.
+ * The actual value used to attenuate the filter amplitude should be slightly
+ * smaller than this (suggest squaring) as this is just an estimate.
+ *
+ * As a windowed sinc has a passband ripple commensurate to the stopband attenuation
+ * due to the Gibbs phenomenon from truncating the sinc, we derive this value from
+ * the design stopbandAttenuationDb (a positive value).
+ */
+static inline double computeWindowedSincMinimumPassbandValue(
+        double stopBandAttenuationDb) {
+    return 1. - pow(10. /* base */, stopBandAttenuationDb * (-1. / 20.));
+}
+
+/*
+ * Compute the windowed sinc passband ripple from stopband attenuation.
+ *
+ * As a windowed sinc has a passband ripple commensurate to the stopband attenuation
+ * due to the Gibbs phenomenon from truncating the sinc, we derive this value from
+ * the design stopbandAttenuationDb (a positive value).
+ */
+static inline double computeWindowedSincPassbandRippleDb(
+        double stopBandAttenuationDb) {
+    return -20. * log10(computeWindowedSincMinimumPassbandValue(stopBandAttenuationDb));
+}
+
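
A worked example of the two estimates above for a 90 dB design (values rounded):

    // minimum passband value = 1 - 10^(-90/20) = 1 - 3.1623e-5 ≈ 0.999968
    // passband ripple        = -20 * log10(0.999968) ≈ 0.00027 dB
    // createKaiserFir() squares the passband value before using it as the filter
    // attenuation, which gives a little extra headroom beyond this estimate.
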
+/*
+ * Kaiser window Beta value
+ *
+ * Formula 3.2.5, 3.2.7, Vaidyanathan, _Multirate Systems and Filter Banks_, p. 48
+ * Formula 7.75, Oppenheim and Schafer, _Discrete-time Signal Processing, 3e_, p. 542
+ *
+ * See also: http://melodi.ee.washington.edu/courses/ee518/notes/lec17.pdf
+ *
+ * Kaiser window and beta parameter
+ *
+ *         | 0.1102*(A - 8.7)                         A > 50
+ *  Beta = | 0.5842*(A - 21)^0.4 + 0.07886*(A - 21)   21 < A <= 50
+ *         | 0.                                       A <= 21
+ *
+ * where A is the desired stop-band attenuation in positive dB
+ *
+ *    30 dB    2.210
+ *    40 dB    3.384
+ *    50 dB    4.538
+ *    60 dB    5.658
+ *    70 dB    6.764
+ *    80 dB    7.865
+ *    90 dB    8.960
+ *   100 dB   10.056
+ *
+ * For some values of stopBandAttenuationDb the function may be computed
+ * at compile time.
+ */
+static inline constexpr double computeBeta(double stopBandAttenuationDb) {
+    if (stopBandAttenuationDb > 50.) {
+        return 0.1102 * (stopBandAttenuationDb - 8.7);
+    }
+    const double offset = stopBandAttenuationDb - 21.;
+    if (offset > 0.) {
+        return 0.5842 * pow(offset, 0.4) + 0.07886 * offset;
+    }
+    return 0.;
+}
+
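
A quick numeric check of computeBeta() against the tabulated values above (the table entries are rounded approximations):

    // A = 90 dB (> 50):  beta = 0.1102 * (90 - 8.7)            = 8.959  (table: 8.960)
    // A = 50 dB (<= 50): beta = 0.5842 * 29^0.4 + 0.07886 * 29 ≈ 4.533  (table: 4.538)
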
+/*
  * Calculates the overall polyphase filter based on a windowed sinc function.
  *
  * The windowed sinc is an odd length symmetric filter of exactly L*halfNumCoef*2+1
@@ -642,31 +714,8 @@
 template <typename T>
 static inline void firKaiserGen(T* coef, int L, int halfNumCoef,
         double stopBandAtten, double fcr, double atten) {
-    //
-    // Formula 3.2.5, 3.2.7, Vaidyanathan, _Multirate Systems and Filter Banks_, p. 48
-    // Formula 7.75, Oppenheim and Schafer, _Discrete-time Signal Processing, 3e_, p. 542
-    //
-    // See also: http://melodi.ee.washington.edu/courses/ee518/notes/lec17.pdf
-    //
-    // Kaiser window and beta parameter
-    //
-    //         | 0.1102*(A - 8.7)                         A > 50
-    //  beta = | 0.5842*(A - 21)^0.4 + 0.07886*(A - 21)   21 <= A <= 50
-    //         | 0.                                       A < 21
-    //
-    // with A is the desired stop-band attenuation in dBFS
-    //
-    //    30 dB    2.210
-    //    40 dB    3.384
-    //    50 dB    4.538
-    //    60 dB    5.658
-    //    70 dB    6.764
-    //    80 dB    7.865
-    //    90 dB    8.960
-    //   100 dB   10.056
-
     const int N = L * halfNumCoef; // non-negative half
-    const double beta = 0.1102 * (stopBandAtten - 8.7); // >= 50dB always
+    const double beta = computeBeta(stopBandAtten);
     const double xstep = (2. * M_PI) * fcr / L;
     const double xfrac = 1. / N;
     const double yscale = atten * L / (I0(beta) * M_PI);
@@ -696,9 +745,9 @@
                 sg.advance();
             }
 
-            if (is_same<T, int16_t>::value) { // int16_t needs noise shaping
+            if (std::is_same<T, int16_t>::value) { // int16_t needs noise shaping
                 *coef++ = static_cast<T>(toint(y, 1ULL<<(sizeof(T)*8-1), err));
-            } else if (is_same<T, int32_t>::value) {
+            } else if (std::is_same<T, int32_t>::value) {
                 *coef++ = static_cast<T>(toint(y, 1ULL<<(sizeof(T)*8-1)));
             } else { // assumed float or double
                 *coef++ = static_cast<T>(y);
diff --git a/media/libaudioprocessing/OWNERS b/media/libaudioprocessing/OWNERS
new file mode 100644
index 0000000..96d0ea0
--- /dev/null
+++ b/media/libaudioprocessing/OWNERS
@@ -0,0 +1,3 @@
+gkasten@google.com
+hunga@google.com
+rago@google.com
diff --git a/media/libaudioprocessing/tests/build_and_run_all_unit_tests.sh b/media/libaudioprocessing/tests/build_and_run_all_unit_tests.sh
index 704d095..efef417 100755
--- a/media/libaudioprocessing/tests/build_and_run_all_unit_tests.sh
+++ b/media/libaudioprocessing/tests/build_and_run_all_unit_tests.sh
@@ -14,8 +14,8 @@
 
 echo "waiting for device"
 adb root && adb wait-for-device remount
-adb push $OUT/system/lib/libaudioresampler.so /system/lib
-adb push $OUT/system/lib64/libaudioresampler.so /system/lib64
+adb push $OUT/system/lib/libaudioprocessing.so /system/lib
+adb push $OUT/system/lib64/libaudioprocessing.so /system/lib64
 adb push $OUT/data/nativetest/resampler_tests/resampler_tests /data/nativetest/resampler_tests/resampler_tests
 adb push $OUT/data/nativetest64/resampler_tests/resampler_tests /data/nativetest64/resampler_tests/resampler_tests
 
diff --git a/media/libaudioprocessing/tests/resampler_tests.cpp b/media/libaudioprocessing/tests/resampler_tests.cpp
index a23c000..e1623f7 100644
--- a/media/libaudioprocessing/tests/resampler_tests.cpp
+++ b/media/libaudioprocessing/tests/resampler_tests.cpp
@@ -29,6 +29,7 @@
 #include <unistd.h>
 
 #include <iostream>
+#include <memory>
 #include <utility>
 #include <vector>
 
@@ -37,6 +38,8 @@
 #include <media/AudioBufferProvider.h>
 
 #include <media/AudioResampler.h>
+#include "../AudioResamplerDyn.h"
+#include "../AudioResamplerFirGen.h"
 #include "test_utils.h"
 
 template <typename T>
@@ -242,6 +245,60 @@
     delete resampler;
 }
 
+void testFilterResponse(
+        size_t channels, unsigned inputFreq, unsigned outputFreq)
+{
+    // create resampler
+    using ResamplerType = android::AudioResamplerDyn<float, float, float>;
+    std::unique_ptr<ResamplerType> rdyn(
+            static_cast<ResamplerType *>(
+                    android::AudioResampler::create(
+                            AUDIO_FORMAT_PCM_FLOAT,
+                            channels,
+                            outputFreq,
+                            android::AudioResampler::DYN_HIGH_QUALITY)));
+    rdyn->setSampleRate(inputFreq);
+
+    // get design parameters
+    const int phases = rdyn->getPhases();
+    const int halfLength = rdyn->getHalfLength();
+    const float *coefs = rdyn->getFilterCoefs();
+    const double fcr = rdyn->getNormalizedCutoffFrequency();
+    const double tbw = rdyn->getNormalizedTransitionBandwidth();
+    const double attenuation = rdyn->getFilterAttenuation();
+    const double stopbandDb = rdyn->getStopbandAttenuationDb();
+    const double passbandDb = rdyn->getPassbandRippleDb();
+    const double fp = fcr - tbw / 2;
+    const double fs = fcr + tbw / 2;
+
+    printf("inputFreq:%d outputFreq:%d design"
+            " phases:%d halfLength:%d"
+            " fcr:%lf fp:%lf fs:%lf tbw:%lf"
+            " attenuation:%lf stopRipple:%lf passRipple:%lf"
+            "\n",
+            inputFreq, outputFreq,
+            phases, halfLength,
+            fcr, fp, fs, tbw,
+            attenuation, stopbandDb, passbandDb);
+
+    // verify design parameters
+    constexpr int32_t passSteps = 1000;
+    double passMin, passMax, passRipple, stopMax, stopRipple;
+    android::testFir(coefs, phases, halfLength, fp / phases, fs / phases,
+            passSteps, phases * passSteps /* stopSteps */,
+            passMin, passMax, passRipple,
+            stopMax, stopRipple);
+    printf("inputFreq:%d outputFreq:%d verify"
+            " passMin:%lf passMax:%lf passRipple:%lf stopMax:%lf stopRipple:%lf"
+            "\n",
+            inputFreq, outputFreq,
+            passMin, passMax, passRipple, stopMax, stopRipple);
+
+    ASSERT_GT(stopRipple, 60.);  // enough stopband attenuation
+    ASSERT_LT(passRipple, 0.2);  // small passband ripple
+    ASSERT_GT(passMin, 0.99);    // we do not attenuate the signal (ideally 1.)
+}
+
 /* Buffer increment test
  *
  * We compare a reference output, where we consume and process the entire
@@ -484,3 +541,30 @@
     }
 }
 
+TEST(audioflinger_resampler, filterresponse) {
+    std::vector<int> inSampleRates{
+        8000,
+        11025,
+        12000,
+        16000,
+        22050,
+        24000,
+        32000,
+        44100,
+        48000,
+        88200,
+        96000,
+        176400,
+        192000,
+    };
+    std::vector<int> outSampleRates{
+        48000,
+        96000,
+    };
+
+    for (int outSampleRate : outSampleRates) {
+        for (int inSampleRate : inSampleRates) {
+            testFilterResponse(2 /* channels */, inSampleRate, outSampleRate);
+        }
+    }
+}
diff --git a/media/libaudioprocessing/tests/test-mixer.cpp b/media/libaudioprocessing/tests/test-mixer.cpp
index 75dbf91..b67810d 100644
--- a/media/libaudioprocessing/tests/test-mixer.cpp
+++ b/media/libaudioprocessing/tests/test-mixer.cpp
@@ -316,8 +316,7 @@
             outputSampleRate, outputChannels, outputFrames, useMixerFloat);
     if (auxFilename) {
         // Aux buffer is always in q4_27 format for now.
-        // memcpy_to_i16_from_q4_27(), but with stereo frame count (not sample count)
-        ditherAndClamp((int32_t*)auxAddr, (int32_t*)auxAddr, outputFrames >> 1);
+        memcpy_to_i16_from_q4_27((int16_t*)auxAddr, (const int32_t*)auxAddr, outputFrames);
         writeFile(auxFilename, auxAddr, outputSampleRate, 1, outputFrames, false);
     }
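
The fix above converts the mono Q4.27 aux buffer sample-by-sample with memcpy_to_i16_from_q4_27() instead of the stereo-frame ditherAndClamp() call. A conceptual sketch of that conversion (the real audio_utils routine may differ in rounding and clamping details); forward conversion also works in place, as in the call above, because the int16_t output never runs ahead of the int32_t input being read:

    #include <cstddef>
    #include <cstdint>

    // Convert one Q4.27 sample to Q0.15: drop the 12 extra fractional bits, clamp to 16 bits.
    static inline int16_t q4_27_to_i16(int32_t s) {
        const int32_t v = s >> 12;
        if (v > INT16_MAX) return INT16_MAX;
        if (v < INT16_MIN) return INT16_MIN;
        return static_cast<int16_t>(v);
    }

    static void convertAuxBuffer(int16_t* dst, const int32_t* src, size_t count) {
        for (size_t i = 0; i < count; ++i) {
            dst[i] = q4_27_to_i16(src[i]);
        }
    }
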
 
diff --git a/media/libcpustats/OWNERS b/media/libcpustats/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/media/libcpustats/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/media/libeffects/OWNERS b/media/libeffects/OWNERS
index 7e3de13..7f9ae81 100644
--- a/media/libeffects/OWNERS
+++ b/media/libeffects/OWNERS
@@ -1,3 +1,4 @@
+hunga@google.com
 krocard@google.com
 mnaganov@google.com
 rago@google.com
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index 4398a91..3e88c7c 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -5,6 +5,11 @@
 
     srcs: ["src/EffectsConfig.cpp"],
 
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
     shared_libs: [
         "liblog",
         "libtinyxml2",
diff --git a/media/libeffects/config/include/media/EffectsConfig.h b/media/libeffects/config/include/media/EffectsConfig.h
index 811730c..55b946f 100644
--- a/media/libeffects/config/include/media/EffectsConfig.h
+++ b/media/libeffects/config/include/media/EffectsConfig.h
@@ -32,8 +32,13 @@
 namespace android {
 namespace effectsConfig {
 
-/** Default path of effect configuration file. */
-constexpr char DEFAULT_PATH[] = "/vendor/etc/audio_effects.xml";
+/** Default name of the effect configuration file, resolved relative to DEFAULT_LOCATIONS. */
+constexpr const char* DEFAULT_NAME = "audio_effects.xml";
+
+/** Default locations to search for the effect configuration file, in order.
+ * The /vendor partition is the recommended one, the others are deprecated.
+ */
+constexpr const char* DEFAULT_LOCATIONS[] = {"/odm/etc", "/vendor/etc", "/system/etc"};
 
 /** Directories where the effect libraries will be search for. */
 constexpr const char* LD_EFFECT_LIBRARY_PATH[] =
@@ -91,13 +96,16 @@
     /** Parsed config, nullptr if the xml lib could not load the file */
     std::unique_ptr<Config> parsedConfig;
     size_t nbSkippedElement; //< Number of skipped invalid library, effect or processing chain
+    std::string configPath; //< Path of the loaded configuration (empty if no file was found)
 };
 
 /** Parses the provided effect configuration.
  * Parsing do not stop of first invalid element, but continues to the next.
+ * @param[in] path of the configuration file to load
+ *                 if nullptr, look for DEFAULT_NAME in DEFAULT_LOCATIONS.
  * @see ParsingResult::nbSkippedElement
  */
-ParsingResult parse(const char* path = DEFAULT_PATH);
+ParsingResult parse(const char* path = nullptr);
 
 } // namespace effectsConfig
 } // namespace android
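
A minimal usage sketch of the new default-location behaviour, assuming the header is included as <media/EffectsConfig.h>:

    #include <media/EffectsConfig.h>

    void loadEffectsConfig() {
        // With no argument, parse() tries DEFAULT_NAME under each DEFAULT_LOCATIONS entry
        // in order and returns the first configuration that parses successfully.
        auto result = android::effectsConfig::parse();
        if (result.parsedConfig == nullptr) {
            return;  // no readable audio_effects.xml was found, or none could be parsed
        }
        // result.configPath records which file was loaded; result.nbSkippedElement counts
        // the invalid libraries, effects or chains that were skipped while parsing it.
    }
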
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index 98a37ab..4ed3ba8 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -20,6 +20,7 @@
 #include <cstdint>
 #include <functional>
 #include <string>
+#include <unistd.h>
 
 #include <tinyxml2.h>
 #include <log/log.h>
@@ -85,7 +86,7 @@
 constexpr std::enable_if<false, Enum> STREAM_NAME_MAP;
 
 /** All output stream types which support effects.
- * This need to be kept in sink with the xsd streamOutputType.
+ * This needs to be kept in sync with the xsd streamOutputType.
  */
 template <>
 constexpr std::pair<audio_stream_type_t, const char*> STREAM_NAME_MAP<audio_stream_type_t>[] = {
@@ -102,7 +103,7 @@
 };
 
 /** All input stream types which support effects.
- * This need to be kept in sink with the xsd streamOutputType.
+ * This needs to be kept in sync with the xsd streamOutputType.
  */
 template <>
 constexpr std::pair<audio_source_t, const char*> STREAM_NAME_MAP<audio_source_t>[] = {
@@ -142,7 +143,7 @@
 }
 
 /** Find an element in a collection by its name.
- * @return nullptr if not found, the ellements address if found.
+ * @return nullptr if not found, the element address if found.
  */
 template <class T>
 T* findByName(const char* name, std::vector<T>& collection) {
@@ -249,15 +250,14 @@
     return true;
 }
 
-}; // namespace
-
-ParsingResult parse(const char* path) {
+/** Internal version of the public parse(const char* path) with precondition `path != nullptr`. */
+ParsingResult parseWithPath(const char* path) {
     XMLDocument doc;
     doc.LoadFile(path);
     if (doc.Error()) {
         ALOGE("Failed to parse %s: Tinyxml2 error (%d): %s", path,
               doc.ErrorID(), doc.ErrorStr());
-        return {nullptr, 0};
+        return {nullptr, 0, path};
     }
 
     auto config = std::make_unique<Config>();
@@ -295,7 +295,29 @@
             }
         }
     }
-    return {std::move(config), nbSkippedElements};
+    return {std::move(config), nbSkippedElements, path};
+}
+
+}; // namespace
+
+ParsingResult parse(const char* path) {
+    if (path != nullptr) {
+        return parseWithPath(path);
+    }
+
+    for (std::string location : DEFAULT_LOCATIONS) {
+        std::string defaultPath = location + '/' + DEFAULT_NAME;
+        if (access(defaultPath.c_str(), R_OK) != 0) {
+            continue;
+        }
+        auto result = parseWithPath(defaultPath.c_str());
+        if (result.parsedConfig != nullptr) {
+            return result;
+        }
+    }
+
+    ALOGE("Could not parse effect configuration in any of the default locations.");
+    return {nullptr, 0, ""};
 }
 
 } // namespace effectsConfig
diff --git a/media/libeffects/factory/EffectsXmlConfigLoader.cpp b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
index 438b787..7a7d431 100644
--- a/media/libeffects/factory/EffectsXmlConfigLoader.cpp
+++ b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
@@ -327,7 +327,7 @@
                                            &gSkippedEffects, &gSubEffectList);
 
     ALOGE_IF(result.nbSkippedElement != 0, "%zu errors during loading of configuration: %s",
-             result.nbSkippedElement, path ?: effectsConfig::DEFAULT_PATH);
+             result.nbSkippedElement, result.configPath.empty() ? "No config file found" : result.configPath.c_str());
 
     return result.nbSkippedElement;
 }
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
index f32ed30..4ecaf14 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
@@ -534,246 +534,246 @@
 
  /* Coefficients for centre frequency 55Hz */
 #define HPF_Fs8000_Fc55_A0                        0.958849f
-#define HPF_Fs8000_Fc55_A1                        -1.917698f
+#define HPF_Fs8000_Fc55_A1                        (-1.917698f)
 #define HPF_Fs8000_Fc55_A2                        0.958849f
-#define HPF_Fs8000_Fc55_B1                        -1.939001f
+#define HPF_Fs8000_Fc55_B1                        (-1.939001f)
 #define HPF_Fs8000_Fc55_B2                        0.940807f
 #define HPF_Fs11025_Fc55_A0                       0.966909f
-#define HPF_Fs11025_Fc55_A1                       -1.933818f
+#define HPF_Fs11025_Fc55_A1                       (-1.933818f)
 #define HPF_Fs11025_Fc55_A2                       0.966909f
-#define HPF_Fs11025_Fc55_B1                       -1.955732f
+#define HPF_Fs11025_Fc55_B1                       (-1.955732f)
 #define HPF_Fs11025_Fc55_B2                       0.956690f
 #define HPF_Fs12000_Fc55_A0                       0.968650f
-#define HPF_Fs12000_Fc55_A1                       -1.937300f
+#define HPF_Fs12000_Fc55_A1                       (-1.937300f)
 #define HPF_Fs12000_Fc55_A2                       0.968650f
-#define HPF_Fs12000_Fc55_B1                       -1.959327f
+#define HPF_Fs12000_Fc55_B1                       (-1.959327f)
 #define HPF_Fs12000_Fc55_B2                       0.960138f
 #define HPF_Fs16000_Fc55_A0                       0.973588f
-#define HPF_Fs16000_Fc55_A1                       -1.947176f
+#define HPF_Fs16000_Fc55_A1                       (-1.947176f)
 #define HPF_Fs16000_Fc55_A2                       0.973588f
-#define HPF_Fs16000_Fc55_B1                       -1.969494f
+#define HPF_Fs16000_Fc55_B1                       (-1.969494f)
 #define HPF_Fs16000_Fc55_B2                       0.969952f
 #define HPF_Fs22050_Fc55_A0                       0.977671f
-#define HPF_Fs22050_Fc55_A1                       -1.955343f
+#define HPF_Fs22050_Fc55_A1                       (-1.955343f)
 #define HPF_Fs22050_Fc55_A2                       0.977671f
-#define HPF_Fs22050_Fc55_B1                       -1.977863f
+#define HPF_Fs22050_Fc55_B1                       (-1.977863f)
 #define HPF_Fs22050_Fc55_B2                       0.978105f
 #define HPF_Fs24000_Fc55_A0                       0.978551f
-#define HPF_Fs24000_Fc55_A1                       -1.957102f
+#define HPF_Fs24000_Fc55_A1                       (-1.957102f)
 #define HPF_Fs24000_Fc55_A2                       0.978551f
-#define HPF_Fs24000_Fc55_B1                       -1.979662f
+#define HPF_Fs24000_Fc55_B1                       (-1.979662f)
 #define HPF_Fs24000_Fc55_B2                       0.979866f
 #define HPF_Fs32000_Fc55_A0                       0.981042f
-#define HPF_Fs32000_Fc55_A1                       -1.962084f
+#define HPF_Fs32000_Fc55_A1                       (-1.962084f)
 #define HPF_Fs32000_Fc55_A2                       0.981042f
-#define HPF_Fs32000_Fc55_B1                       -1.984746f
+#define HPF_Fs32000_Fc55_B1                       (-1.984746f)
 #define HPF_Fs32000_Fc55_B2                       0.984861f
 #define HPF_Fs44100_Fc55_A0                       0.983097f
-#define HPF_Fs44100_Fc55_A1                       -1.966194f
+#define HPF_Fs44100_Fc55_A1                       (-1.966194f)
 #define HPF_Fs44100_Fc55_A2                       0.983097f
-#define HPF_Fs44100_Fc55_B1                       -1.988931f
+#define HPF_Fs44100_Fc55_B1                       (-1.988931f)
 #define HPF_Fs44100_Fc55_B2                       0.988992f
 #define HPF_Fs48000_Fc55_A0                       0.983539f
-#define HPF_Fs48000_Fc55_A1                       -1.967079f
+#define HPF_Fs48000_Fc55_A1                       (-1.967079f)
 #define HPF_Fs48000_Fc55_A2                       0.983539f
-#define HPF_Fs48000_Fc55_B1                       -1.989831f
+#define HPF_Fs48000_Fc55_B1                       (-1.989831f)
 #define HPF_Fs48000_Fc55_B2                       0.989882f
 
 #ifdef HIGHER_FS
 #define HPF_Fs96000_Fc55_A0                       0.986040f
-#define HPF_Fs96000_Fc55_A1                       -1.972080f
+#define HPF_Fs96000_Fc55_A1                       (-1.972080f)
 #define HPF_Fs96000_Fc55_A2                       0.986040f
-#define HPF_Fs96000_Fc55_B1                       -1.994915f
+#define HPF_Fs96000_Fc55_B1                       (-1.994915f)
 #define HPF_Fs96000_Fc55_B2                       0.994928f
 
 #define HPF_Fs192000_Fc55_A0                      0.987294f
-#define HPF_Fs192000_Fc55_A1                      -1.974588f
+#define HPF_Fs192000_Fc55_A1                      (-1.974588f)
 #define HPF_Fs192000_Fc55_A2                      0.987294f
-#define HPF_Fs192000_Fc55_B1                      -1.997458f
+#define HPF_Fs192000_Fc55_B1                      (-1.997458f)
 #define HPF_Fs192000_Fc55_B2                      0.997461f
 #endif
 
 
  /* Coefficients for centre frequency 66Hz */
 #define HPF_Fs8000_Fc66_A0                        0.953016f
-#define HPF_Fs8000_Fc66_A1                        -1.906032f
+#define HPF_Fs8000_Fc66_A1                        (-1.906032f)
 #define HPF_Fs8000_Fc66_A2                        0.953016f
-#define HPF_Fs8000_Fc66_B1                        -1.926810f
+#define HPF_Fs8000_Fc66_B1                        (-1.926810f)
 #define HPF_Fs8000_Fc66_B2                        0.929396f
 #define HPF_Fs11025_Fc66_A0                       0.962638f
-#define HPF_Fs11025_Fc66_A1                       -1.925275f
+#define HPF_Fs11025_Fc66_A1                       (-1.925275f)
 #define HPF_Fs11025_Fc66_A2                       0.962638f
-#define HPF_Fs11025_Fc66_B1                       -1.946881f
+#define HPF_Fs11025_Fc66_B1                       (-1.946881f)
 #define HPF_Fs11025_Fc66_B2                       0.948256f
 #define HPF_Fs12000_Fc66_A0                       0.964718f
-#define HPF_Fs12000_Fc66_A1                       -1.929435f
+#define HPF_Fs12000_Fc66_A1                       (-1.929435f)
 #define HPF_Fs12000_Fc66_A2                       0.964718f
-#define HPF_Fs12000_Fc66_B1                       -1.951196f
+#define HPF_Fs12000_Fc66_B1                       (-1.951196f)
 #define HPF_Fs12000_Fc66_B2                       0.952359f
 #define HPF_Fs16000_Fc66_A0                       0.970622f
-#define HPF_Fs16000_Fc66_A1                       -1.941244f
+#define HPF_Fs16000_Fc66_A1                       (-1.941244f)
 #define HPF_Fs16000_Fc66_A2                       0.970622f
-#define HPF_Fs16000_Fc66_B1                       -1.963394f
+#define HPF_Fs16000_Fc66_B1                       (-1.963394f)
 #define HPF_Fs16000_Fc66_B2                       0.964052f
 #define HPF_Fs22050_Fc66_A0                       0.975509f
-#define HPF_Fs22050_Fc66_A1                       -1.951019f
+#define HPF_Fs22050_Fc66_A1                       (-1.951019f)
 #define HPF_Fs22050_Fc66_A2                       0.975509f
-#define HPF_Fs22050_Fc66_B1                       -1.973436f
+#define HPF_Fs22050_Fc66_B1                       (-1.973436f)
 #define HPF_Fs22050_Fc66_B2                       0.973784f
 #define HPF_Fs24000_Fc66_A0                       0.976563f
-#define HPF_Fs24000_Fc66_A1                       -1.953125f
+#define HPF_Fs24000_Fc66_A1                       (-1.953125f)
 #define HPF_Fs24000_Fc66_A2                       0.976563f
-#define HPF_Fs24000_Fc66_B1                       -1.975594f
+#define HPF_Fs24000_Fc66_B1                       (-1.975594f)
 #define HPF_Fs24000_Fc66_B2                       0.975889f
 #define HPF_Fs32000_Fc66_A0                       0.979547f
-#define HPF_Fs32000_Fc66_A1                       -1.959093f
+#define HPF_Fs32000_Fc66_A1                       (-1.959093f)
 #define HPF_Fs32000_Fc66_A2                       0.979547f
-#define HPF_Fs32000_Fc66_B1                       -1.981695f
+#define HPF_Fs32000_Fc66_B1                       (-1.981695f)
 #define HPF_Fs32000_Fc66_B2                       0.981861f
 #define HPF_Fs44100_Fc66_A0                       0.982010f
-#define HPF_Fs44100_Fc66_A1                       -1.964019f
+#define HPF_Fs44100_Fc66_A1                       (-1.964019f)
 #define HPF_Fs44100_Fc66_A2                       0.982010f
-#define HPF_Fs44100_Fc66_B1                       -1.986718f
+#define HPF_Fs44100_Fc66_B1                       (-1.986718f)
 #define HPF_Fs44100_Fc66_B2                       0.986805f
 #define HPF_Fs48000_Fc66_A0                       0.982540f
-#define HPF_Fs48000_Fc66_A1                       -1.965079f
+#define HPF_Fs48000_Fc66_A1                       (-1.965079f)
 #define HPF_Fs48000_Fc66_A2                       0.982540f
-#define HPF_Fs48000_Fc66_B1                       -1.987797f
+#define HPF_Fs48000_Fc66_B1                       (-1.987797f)
 #define HPF_Fs48000_Fc66_B2                       0.987871f
 
 #ifdef HIGHER_FS
 #define HPF_Fs96000_Fc66_A0                       0.985539f
-#define HPF_Fs96000_Fc66_A1                       -1.971077f
+#define HPF_Fs96000_Fc66_A1                       (-1.971077f)
 #define HPF_Fs96000_Fc66_A2                       0.985539f
-#define HPF_Fs96000_Fc66_B1                       -1.993898f
+#define HPF_Fs96000_Fc66_B1                       (-1.993898f)
 #define HPF_Fs96000_Fc66_B2                       0.993917f
 
 #define HPF_Fs192000_Fc66_A0                      0.987043f
-#define HPF_Fs192000_Fc66_A1                      -1.974086f
+#define HPF_Fs192000_Fc66_A1                      (-1.974086f)
 #define HPF_Fs192000_Fc66_A2                      0.987043f
-#define HPF_Fs192000_Fc66_B1                      -1.996949f
+#define HPF_Fs192000_Fc66_B1                      (-1.996949f)
 #define HPF_Fs192000_Fc66_B2                      0.996954f
 #endif
 
 /* Coefficients for centre frequency 78Hz */
 #define HPF_Fs8000_Fc78_A0                        0.946693f
-#define HPF_Fs8000_Fc78_A1                        -1.893387f
+#define HPF_Fs8000_Fc78_A1                        (-1.893387f)
 #define HPF_Fs8000_Fc78_A2                        0.946693f
-#define HPF_Fs8000_Fc78_B1                        -1.913517f
+#define HPF_Fs8000_Fc78_B1                        (-1.913517f)
 #define HPF_Fs8000_Fc78_B2                        0.917105f
 #define HPF_Fs11025_Fc78_A0                       0.957999f
-#define HPF_Fs11025_Fc78_A1                       -1.915998f
+#define HPF_Fs11025_Fc78_A1                       (-1.915998f)
 #define HPF_Fs11025_Fc78_A2                       0.957999f
-#define HPF_Fs11025_Fc78_B1                       -1.937229f
+#define HPF_Fs11025_Fc78_B1                       (-1.937229f)
 #define HPF_Fs11025_Fc78_B2                       0.939140f
 #define HPF_Fs12000_Fc78_A0                       0.960446f
-#define HPF_Fs12000_Fc78_A1                       -1.920892f
+#define HPF_Fs12000_Fc78_A1                       (-1.920892f)
 #define HPF_Fs12000_Fc78_A2                       0.960446f
-#define HPF_Fs12000_Fc78_B1                       -1.942326f
+#define HPF_Fs12000_Fc78_B1                       (-1.942326f)
 #define HPF_Fs12000_Fc78_B2                       0.943944f
 #define HPF_Fs16000_Fc78_A0                       0.967397f
-#define HPF_Fs16000_Fc78_A1                       -1.934794f
+#define HPF_Fs16000_Fc78_A1                       (-1.934794f)
 #define HPF_Fs16000_Fc78_A2                       0.967397f
-#define HPF_Fs16000_Fc78_B1                       -1.956740f
+#define HPF_Fs16000_Fc78_B1                       (-1.956740f)
 #define HPF_Fs16000_Fc78_B2                       0.957656f
 #define HPF_Fs22050_Fc78_A0                       0.973156f
-#define HPF_Fs22050_Fc78_A1                       -1.946313f
+#define HPF_Fs22050_Fc78_A1                       (-1.946313f)
 #define HPF_Fs22050_Fc78_A2                       0.973156f
-#define HPF_Fs22050_Fc78_B1                       -1.968607f
+#define HPF_Fs22050_Fc78_B1                       (-1.968607f)
 #define HPF_Fs22050_Fc78_B2                       0.969092f
 #define HPF_Fs24000_Fc78_A0                       0.974398f
-#define HPF_Fs24000_Fc78_A1                       -1.948797f
+#define HPF_Fs24000_Fc78_A1                       (-1.948797f)
 #define HPF_Fs24000_Fc78_A2                       0.974398f
-#define HPF_Fs24000_Fc78_B1                       -1.971157f
+#define HPF_Fs24000_Fc78_B1                       (-1.971157f)
 #define HPF_Fs24000_Fc78_B2                       0.971568f
 #define HPF_Fs32000_Fc78_A0                       0.977918f
-#define HPF_Fs32000_Fc78_A1                       -1.955836f
+#define HPF_Fs32000_Fc78_A1                       (-1.955836f)
 #define HPF_Fs32000_Fc78_A2                       0.977918f
-#define HPF_Fs32000_Fc78_B1                       -1.978367f
+#define HPF_Fs32000_Fc78_B1                       (-1.978367f)
 #define HPF_Fs32000_Fc78_B2                       0.978599f
 #define HPF_Fs44100_Fc78_A0                       0.980824f
-#define HPF_Fs44100_Fc78_A1                       -1.961649f
+#define HPF_Fs44100_Fc78_A1                       (-1.961649f)
 #define HPF_Fs44100_Fc78_A2                       0.980824f
-#define HPF_Fs44100_Fc78_B1                       -1.984303f
+#define HPF_Fs44100_Fc78_B1                       (-1.984303f)
 #define HPF_Fs44100_Fc78_B2                       0.984425f
 #define HPF_Fs48000_Fc78_A0                       0.981450f
-#define HPF_Fs48000_Fc78_A1                       -1.962900f
+#define HPF_Fs48000_Fc78_A1                       (-1.962900f)
 #define HPF_Fs48000_Fc78_A2                       0.981450f
-#define HPF_Fs48000_Fc78_B1                       -1.985578f
+#define HPF_Fs48000_Fc78_B1                       (-1.985578f)
 #define HPF_Fs48000_Fc78_B2                       0.985681f
 
 #ifdef HIGHER_FS
 #define HPF_Fs96000_Fc78_A0                       0.984992f
-#define HPF_Fs96000_Fc78_A1                       -1.969984f
+#define HPF_Fs96000_Fc78_A1                       (-1.969984f)
 #define HPF_Fs96000_Fc78_A2                       0.984992f
-#define HPF_Fs96000_Fc78_B1                       -1.992789f
+#define HPF_Fs96000_Fc78_B1                       (-1.992789f)
 #define HPF_Fs96000_Fc78_B2                       0.992815f
 
 #define HPF_Fs192000_Fc78_A0                      0.986769f
-#define HPF_Fs192000_Fc78_A1                      -1.973539f
+#define HPF_Fs192000_Fc78_A1                      (-1.973539f)
 #define HPF_Fs192000_Fc78_A2                      0.986769f
-#define HPF_Fs192000_Fc78_B1                      -1.996394f
+#define HPF_Fs192000_Fc78_B1                      (-1.996394f)
 #define HPF_Fs192000_Fc78_B2                      0.996401f
 #endif
 
 /* Coefficients for centre frequency 90Hz */
 #define HPF_Fs8000_Fc90_A0                       0.940412f
-#define HPF_Fs8000_Fc90_A1                       -1.880825f
+#define HPF_Fs8000_Fc90_A1                       (-1.880825f)
 #define HPF_Fs8000_Fc90_A2                       0.940412f
-#define HPF_Fs8000_Fc90_B1                       -1.900231f
+#define HPF_Fs8000_Fc90_B1                       (-1.900231f)
 #define HPF_Fs8000_Fc90_B2                       0.904977f
 #define HPF_Fs11025_Fc90_A0                      0.953383f
-#define HPF_Fs11025_Fc90_A1                      -1.906766f
+#define HPF_Fs11025_Fc90_A1                      (-1.906766f)
 #define HPF_Fs11025_Fc90_A2                      0.953383f
-#define HPF_Fs11025_Fc90_B1                      -1.927579f
+#define HPF_Fs11025_Fc90_B1                      (-1.927579f)
 #define HPF_Fs11025_Fc90_B2                      0.930111f
 #define HPF_Fs12000_Fc90_A0                      0.956193f
-#define HPF_Fs12000_Fc90_A1                      -1.912387f
+#define HPF_Fs12000_Fc90_A1                      (-1.912387f)
 #define HPF_Fs12000_Fc90_A2                      0.956193f
-#define HPF_Fs12000_Fc90_B1                      -1.933459f
+#define HPF_Fs12000_Fc90_B1                      (-1.933459f)
 #define HPF_Fs12000_Fc90_B2                      0.935603f
 #define HPF_Fs16000_Fc90_A0                      0.964183f
-#define HPF_Fs16000_Fc90_A1                      -1.928365f
+#define HPF_Fs16000_Fc90_A1                      (-1.928365f)
 #define HPF_Fs16000_Fc90_A2                      0.964183f
-#define HPF_Fs16000_Fc90_B1                      -1.950087f
+#define HPF_Fs16000_Fc90_B1                      (-1.950087f)
 #define HPF_Fs16000_Fc90_B2                      0.951303f
 #define HPF_Fs22050_Fc90_A0                      0.970809f
-#define HPF_Fs22050_Fc90_A1                      -1.941618f
+#define HPF_Fs22050_Fc90_A1                      (-1.941618f)
 #define HPF_Fs22050_Fc90_A2                      0.970809f
-#define HPF_Fs22050_Fc90_B1                      -1.963778f
+#define HPF_Fs22050_Fc90_B1                      (-1.963778f)
 #define HPF_Fs22050_Fc90_B2                      0.964423f
 #define HPF_Fs24000_Fc90_A0                      0.972239f
-#define HPF_Fs24000_Fc90_A1                      -1.944477f
+#define HPF_Fs24000_Fc90_A1                      (-1.944477f)
 #define HPF_Fs24000_Fc90_A2                      0.972239f
-#define HPF_Fs24000_Fc90_B1                      -1.966721f
+#define HPF_Fs24000_Fc90_B1                      (-1.966721f)
 #define HPF_Fs24000_Fc90_B2                      0.967266f
 #define HPF_Fs32000_Fc90_A0                      0.976292f
-#define HPF_Fs32000_Fc90_A1                      -1.952584f
+#define HPF_Fs32000_Fc90_A1                      (-1.952584f)
 #define HPF_Fs32000_Fc90_A2                      0.976292f
-#define HPF_Fs32000_Fc90_B1                      -1.975040f
+#define HPF_Fs32000_Fc90_B1                      (-1.975040f)
 #define HPF_Fs32000_Fc90_B2                      0.975347f
 #define HPF_Fs44100_Fc90_A0                      0.979641f
-#define HPF_Fs44100_Fc90_A1                      -1.959282f
+#define HPF_Fs44100_Fc90_A1                      (-1.959282f)
 #define HPF_Fs44100_Fc90_A2                      0.979641f
-#define HPF_Fs44100_Fc90_B1                      -1.981888f
+#define HPF_Fs44100_Fc90_B1                      (-1.981888f)
 #define HPF_Fs44100_Fc90_B2                      0.982050f
 #define HPF_Fs48000_Fc90_A0                      0.980362f
-#define HPF_Fs48000_Fc90_A1                      -1.960724f
+#define HPF_Fs48000_Fc90_A1                      (-1.960724f)
 #define HPF_Fs48000_Fc90_A2                      0.980362f
-#define HPF_Fs48000_Fc90_B1                      -1.983359f
+#define HPF_Fs48000_Fc90_B1                      (-1.983359f)
 #define HPF_Fs48000_Fc90_B2                      0.983497f
 
 #ifdef HIGHER_FS
 #define HPF_Fs96000_Fc90_A0                       0.984446f
-#define HPF_Fs96000_Fc90_A1                       -1.968892f
+#define HPF_Fs96000_Fc90_A1                       (-1.968892f)
 #define HPF_Fs96000_Fc90_A2                       0.984446f
-#define HPF_Fs96000_Fc90_B1                       -1.991680f
+#define HPF_Fs96000_Fc90_B1                       (-1.991680f)
 #define HPF_Fs96000_Fc90_B2                       0.991714f
 
 #define HPF_Fs192000_Fc90_A0                      0.986496f
-#define HPF_Fs192000_Fc90_A1                      -1.972992f
+#define HPF_Fs192000_Fc90_A1                      (-1.972992f)
 #define HPF_Fs192000_Fc90_A2                      0.986496f
-#define HPF_Fs192000_Fc90_B1                      -1.995840f
+#define HPF_Fs192000_Fc90_B1                      (-1.995840f)
 #define HPF_Fs192000_Fc90_B2                      0.995848f
 #endif
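
The sweep through this header wraps every negative coefficient in parentheses, following the usual macro-hygiene rule (flagged by tools such as clang-tidy's bugprone-macro-parentheses check) that an expression-like macro body should be fully parenthesized so the expansion site cannot re-associate it. A generic illustration with made-up values:

    #define BAD_GAIN   1.0f - 0.25f      // unparenthesized expression
    #define GOOD_GAIN  (1.0f - 0.25f)    // fully parenthesized

    // BAD_GAIN  * 2.0f expands to  1.0f - 0.25f * 2.0f  == 0.5f   (precedence surprise)
    // GOOD_GAIN * 2.0f expands to (1.0f - 0.25f) * 2.0f == 1.5f   (intended)

For a lone negative constant the parentheses are mainly defensive, but applying the rule uniformly keeps the header consistent and keeps static-analysis output quiet.
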
 
@@ -786,244 +786,244 @@
 /* Coefficients for centre frequency 55Hz */
 #define BPF_Fs8000_Fc55_A0                       0.009197f
 #define BPF_Fs8000_Fc55_A1                       0.000000f
-#define BPF_Fs8000_Fc55_A2                       -0.009197f
-#define BPF_Fs8000_Fc55_B1                       -1.979545f
+#define BPF_Fs8000_Fc55_A2                       (-0.009197f)
+#define BPF_Fs8000_Fc55_B1                       (-1.979545f)
 #define BPF_Fs8000_Fc55_B2                       0.981393f
 #define BPF_Fs11025_Fc55_A0                      0.006691f
 #define BPF_Fs11025_Fc55_A1                      0.000000f
-#define BPF_Fs11025_Fc55_A2                      -0.006691f
-#define BPF_Fs11025_Fc55_B1                      -1.985488f
+#define BPF_Fs11025_Fc55_A2                      (-0.006691f)
+#define BPF_Fs11025_Fc55_B1                      (-1.985488f)
 #define BPF_Fs11025_Fc55_B2                      0.986464f
 #define BPF_Fs12000_Fc55_A0                      0.006150f
 #define BPF_Fs12000_Fc55_A1                      0.000000f
-#define BPF_Fs12000_Fc55_A2                      -0.006150f
-#define BPF_Fs12000_Fc55_B1                      -1.986733f
+#define BPF_Fs12000_Fc55_A2                      (-0.006150f)
+#define BPF_Fs12000_Fc55_B1                      (-1.986733f)
 #define BPF_Fs12000_Fc55_B2                      0.987557f
 #define BPF_Fs16000_Fc55_A0                      0.004620f
 #define BPF_Fs16000_Fc55_A1                      0.000000f
-#define BPF_Fs16000_Fc55_A2                      -0.004620f
-#define BPF_Fs16000_Fc55_B1                      -1.990189f
+#define BPF_Fs16000_Fc55_A2                      (-0.004620f)
+#define BPF_Fs16000_Fc55_B1                      (-1.990189f)
 #define BPF_Fs16000_Fc55_B2                      0.990653f
 #define BPF_Fs22050_Fc55_A0                      0.003357f
 #define BPF_Fs22050_Fc55_A1                      0.000000f
-#define BPF_Fs22050_Fc55_A2                      -0.003357f
-#define BPF_Fs22050_Fc55_B1                      -1.992964f
+#define BPF_Fs22050_Fc55_A2                      (-0.003357f)
+#define BPF_Fs22050_Fc55_B1                      (-1.992964f)
 #define BPF_Fs22050_Fc55_B2                      0.993209f
 #define BPF_Fs24000_Fc55_A0                      0.003085f
 #define BPF_Fs24000_Fc55_A1                      0.000000f
-#define BPF_Fs24000_Fc55_A2                      -0.003085f
-#define BPF_Fs24000_Fc55_B1                      -1.993552f
+#define BPF_Fs24000_Fc55_A2                      (-0.003085f)
+#define BPF_Fs24000_Fc55_B1                      (-1.993552f)
 #define BPF_Fs24000_Fc55_B2                      0.993759f
 #define BPF_Fs32000_Fc55_A0                      0.002315f
 #define BPF_Fs32000_Fc55_A1                      0.000000f
-#define BPF_Fs32000_Fc55_A2                      -0.002315f
-#define BPF_Fs32000_Fc55_B1                      -1.995199f
+#define BPF_Fs32000_Fc55_A2                      (-0.002315f)
+#define BPF_Fs32000_Fc55_B1                      (-1.995199f)
 #define BPF_Fs32000_Fc55_B2                      0.995316f
 #define BPF_Fs44100_Fc55_A0                      0.001681f
 #define BPF_Fs44100_Fc55_A1                      0.000000f
-#define BPF_Fs44100_Fc55_A2                      -0.001681f
-#define BPF_Fs44100_Fc55_B1                      -1.996537f
+#define BPF_Fs44100_Fc55_A2                      (-0.001681f)
+#define BPF_Fs44100_Fc55_B1                      (-1.996537f)
 #define BPF_Fs44100_Fc55_B2                      0.996599f
 #define BPF_Fs48000_Fc55_A0                      0.001545f
 #define BPF_Fs48000_Fc55_A1                      0.000000f
-#define BPF_Fs48000_Fc55_A2                      -0.001545f
-#define BPF_Fs48000_Fc55_B1                      -1.996823f
+#define BPF_Fs48000_Fc55_A2                      (-0.001545f)
+#define BPF_Fs48000_Fc55_B1                      (-1.996823f)
 #define BPF_Fs48000_Fc55_B2                      0.996875f
 
 #ifdef HIGHER_FS
 #define BPF_Fs96000_Fc55_A0                      0.000762f
 #define BPF_Fs96000_Fc55_A1                      0.000000f
-#define BPF_Fs96000_Fc55_A2                      -0.000762f
-#define BPF_Fs96000_Fc55_B1                      -1.998461f
+#define BPF_Fs96000_Fc55_A2                      (-0.000762f)
+#define BPF_Fs96000_Fc55_B1                      (-1.998461f)
 #define BPF_Fs96000_Fc55_B2                      0.998477f
 
 #define BPF_Fs192000_Fc55_A0                     0.000381f
 #define BPF_Fs192000_Fc55_A1                     0.000000f
-#define BPF_Fs192000_Fc55_A2                     -0.000381f
-#define BPF_Fs192000_Fc55_B1                     -1.999234f
+#define BPF_Fs192000_Fc55_A2                     (-0.000381f)
+#define BPF_Fs192000_Fc55_B1                     (-1.999234f)
 #define BPF_Fs192000_Fc55_B2                     0.999238f
 #endif
 
 /* Coefficients for centre frequency 66Hz */
 #define BPF_Fs8000_Fc66_A0                      0.012648f
 #define BPF_Fs8000_Fc66_A1                      0.000000f
-#define BPF_Fs8000_Fc66_A2                      -0.012648f
-#define BPF_Fs8000_Fc66_B1                      -1.971760f
+#define BPF_Fs8000_Fc66_A2                      (-0.012648f)
+#define BPF_Fs8000_Fc66_B1                      (-1.971760f)
 #define BPF_Fs8000_Fc66_B2                      0.974412f
 #define BPF_Fs11025_Fc66_A0                     0.009209f
 #define BPF_Fs11025_Fc66_A1                     0.000000f
-#define BPF_Fs11025_Fc66_A2                     -0.009209f
-#define BPF_Fs11025_Fc66_B1                     -1.979966f
+#define BPF_Fs11025_Fc66_A2                     (-0.009209f)
+#define BPF_Fs11025_Fc66_B1                     (-1.979966f)
 #define BPF_Fs11025_Fc66_B2                     0.981368f
 #define BPF_Fs12000_Fc66_A0                     0.008468f
 #define BPF_Fs12000_Fc66_A1                     0.000000f
-#define BPF_Fs12000_Fc66_A2                     -0.008468f
-#define BPF_Fs12000_Fc66_B1                     -1.981685f
+#define BPF_Fs12000_Fc66_A2                     (-0.008468f)
+#define BPF_Fs12000_Fc66_B1                     (-1.981685f)
 #define BPF_Fs12000_Fc66_B2                     0.982869f
 #define BPF_Fs16000_Fc66_A0                     0.006364f
 #define BPF_Fs16000_Fc66_A1                     0.000000f
-#define BPF_Fs16000_Fc66_A2                     -0.006364f
-#define BPF_Fs16000_Fc66_B1                     -1.986457f
+#define BPF_Fs16000_Fc66_A2                     (-0.006364f)
+#define BPF_Fs16000_Fc66_B1                     (-1.986457f)
 #define BPF_Fs16000_Fc66_B2                     0.987124f
 #define BPF_Fs22050_Fc66_A0                     0.004626f
 #define BPF_Fs22050_Fc66_A1                     0.000000f
-#define BPF_Fs22050_Fc66_A2                     -0.004626f
-#define BPF_Fs22050_Fc66_B1                     -1.990288f
+#define BPF_Fs22050_Fc66_A2                     (-0.004626f)
+#define BPF_Fs22050_Fc66_B1                     (-1.990288f)
 #define BPF_Fs22050_Fc66_B2                     0.990641f
 #define BPF_Fs24000_Fc66_A0                     0.004252f
 #define BPF_Fs24000_Fc66_A1                     0.000000f
-#define BPF_Fs24000_Fc66_A2                     -0.004252f
-#define BPF_Fs24000_Fc66_B1                     -1.991100f
+#define BPF_Fs24000_Fc66_A2                     (-0.004252f)
+#define BPF_Fs24000_Fc66_B1                     (-1.991100f)
 #define BPF_Fs24000_Fc66_B2                     0.991398f
 #define BPF_Fs32000_Fc66_A0                     0.003192f
 #define BPF_Fs32000_Fc66_A1                     0.000000f
-#define BPF_Fs32000_Fc66_A2                     -0.003192f
-#define BPF_Fs32000_Fc66_B1                     -1.993374f
+#define BPF_Fs32000_Fc66_A2                     (-0.003192f)
+#define BPF_Fs32000_Fc66_B1                     (-1.993374f)
 #define BPF_Fs32000_Fc66_B2                     0.993541f
 #define BPF_Fs44100_Fc66_A0                     0.002318f
 #define BPF_Fs44100_Fc66_A1                     0.000000f
-#define BPF_Fs44100_Fc66_A2                     -0.002318f
-#define BPF_Fs44100_Fc66_B1                     -1.995221f
+#define BPF_Fs44100_Fc66_A2                     (-0.002318f)
+#define BPF_Fs44100_Fc66_B1                     (-1.995221f)
 #define BPF_Fs44100_Fc66_B2                     0.995309f
 #define BPF_Fs48000_Fc66_A0                     0.002131f
 #define BPF_Fs48000_Fc66_A1                     0.000000f
-#define BPF_Fs48000_Fc66_A2                     -0.002131f
-#define BPF_Fs48000_Fc66_B1                     -1.995615f
+#define BPF_Fs48000_Fc66_A2                     (-0.002131f)
+#define BPF_Fs48000_Fc66_B1                     (-1.995615f)
 #define BPF_Fs48000_Fc66_B2                     0.995690f
 
 #ifdef HIGHER_FS
 #define BPF_Fs96000_Fc66_A0                     0.001055f
 #define BPF_Fs96000_Fc66_A1                     0.000000f
-#define BPF_Fs96000_Fc66_A2                     -0.001055f
-#define BPF_Fs96000_Fc66_B1                     -1.997868f
+#define BPF_Fs96000_Fc66_A2                     (-0.001055f)
+#define BPF_Fs96000_Fc66_B1                     (-1.997868f)
 #define BPF_Fs96000_Fc66_B2                     0.997891f
 
 #define BPF_Fs192000_Fc66_A0                    0.000528f
 #define BPF_Fs192000_Fc66_A1                    0.000000f
-#define BPF_Fs192000_Fc66_A2                   -0.000528f
-#define BPF_Fs192000_Fc66_B1                   -1.998939f
+#define BPF_Fs192000_Fc66_A2                   (-0.000528f)
+#define BPF_Fs192000_Fc66_B1                   (-1.998939f)
 #define BPF_Fs192000_Fc66_B2                    0.998945f
 #endif
 
 /* Coefficients for centre frequency 78Hz */
 #define BPF_Fs8000_Fc78_A0                      0.018572f
 #define BPF_Fs8000_Fc78_A1                      0.000000f
-#define BPF_Fs8000_Fc78_A2                      -0.018572f
-#define BPF_Fs8000_Fc78_B1                      -1.958745f
+#define BPF_Fs8000_Fc78_A2                      (-0.018572f)
+#define BPF_Fs8000_Fc78_B1                      (-1.958745f)
 #define BPF_Fs8000_Fc78_B2                      0.962427f
 #define BPF_Fs11025_Fc78_A0                     0.013545f
 #define BPF_Fs11025_Fc78_A1                     0.000000f
-#define BPF_Fs11025_Fc78_A2                     -0.013545f
-#define BPF_Fs11025_Fc78_B1                     -1.970647f
+#define BPF_Fs11025_Fc78_A2                     (-0.013545f)
+#define BPF_Fs11025_Fc78_B1                     (-1.970647f)
 #define BPF_Fs11025_Fc78_B2                     0.972596f
 #define BPF_Fs12000_Fc78_A0                     0.012458f
 #define BPF_Fs12000_Fc78_A1                     0.000000f
-#define BPF_Fs12000_Fc78_A2                     -0.012458f
-#define BPF_Fs12000_Fc78_B1                     -1.973148f
+#define BPF_Fs12000_Fc78_A2                     (-0.012458f)
+#define BPF_Fs12000_Fc78_B1                     (-1.973148f)
 #define BPF_Fs12000_Fc78_B2                     0.974795f
 #define BPF_Fs16000_Fc78_A0                     0.009373f
 #define BPF_Fs16000_Fc78_A1                     0.000000f
-#define BPF_Fs16000_Fc78_A2                     -0.009373f
-#define BPF_Fs16000_Fc78_B1                     -1.980108f
+#define BPF_Fs16000_Fc78_A2                     (-0.009373f)
+#define BPF_Fs16000_Fc78_B1                     (-1.980108f)
 #define BPF_Fs16000_Fc78_B2                     0.981037f
 #define BPF_Fs22050_Fc78_A0                     0.006819f
 #define BPF_Fs22050_Fc78_A1                     0.000000f
-#define BPF_Fs22050_Fc78_A2                     -0.006819f
-#define BPF_Fs22050_Fc78_B1                     -1.985714f
+#define BPF_Fs22050_Fc78_A2                     (-0.006819f)
+#define BPF_Fs22050_Fc78_B1                     (-1.985714f)
 #define BPF_Fs22050_Fc78_B2                     0.986204f
 #define BPF_Fs24000_Fc78_A0                     0.006268f
 #define BPF_Fs24000_Fc78_A1                     0.000000f
-#define BPF_Fs24000_Fc78_A2                     -0.006268f
-#define BPF_Fs24000_Fc78_B1                     -1.986904f
+#define BPF_Fs24000_Fc78_A2                     (-0.006268f)
+#define BPF_Fs24000_Fc78_B1                     (-1.986904f)
 #define BPF_Fs24000_Fc78_B2                     0.987318f
 #define BPF_Fs32000_Fc78_A0                     0.004709f
 #define BPF_Fs32000_Fc78_A1                     0.000000f
-#define BPF_Fs32000_Fc78_A2                     -0.004709f
-#define BPF_Fs32000_Fc78_B1                     -1.990240f
+#define BPF_Fs32000_Fc78_A2                     (-0.004709f)
+#define BPF_Fs32000_Fc78_B1                     (-1.990240f)
 #define BPF_Fs32000_Fc78_B2                     0.990473f
 #define BPF_Fs44100_Fc78_A0                     0.003421f
 #define BPF_Fs44100_Fc78_A1                     0.000000f
-#define BPF_Fs44100_Fc78_A2                     -0.003421f
-#define BPF_Fs44100_Fc78_B1                     -1.992955f
+#define BPF_Fs44100_Fc78_A2                     (-0.003421f)
+#define BPF_Fs44100_Fc78_B1                     (-1.992955f)
 #define BPF_Fs44100_Fc78_B2                     0.993078f
 #define BPF_Fs48000_Fc78_A0                     0.003144f
 #define BPF_Fs48000_Fc78_A1                     0.000000f
-#define BPF_Fs48000_Fc78_A2                     -0.003144f
-#define BPF_Fs48000_Fc78_B1                     -1.993535f
+#define BPF_Fs48000_Fc78_A2                     (-0.003144f)
+#define BPF_Fs48000_Fc78_B1                     (-1.993535f)
 #define BPF_Fs48000_Fc78_B2                     0.993639f
 
 #ifdef HIGHER_FS
 #define BPF_Fs96000_Fc78_A0                     0.001555f
 #define BPF_Fs96000_Fc78_A1                     0.000000f
-#define BPF_Fs96000_Fc78_A2                    -0.0015555f
-#define BPF_Fs96000_Fc78_B1                    -1.996860f
+#define BPF_Fs96000_Fc78_A2                    (-0.0015555f)
+#define BPF_Fs96000_Fc78_B1                    (-1.996860f)
 #define BPF_Fs96000_Fc78_B2                     0.996891f
 
 #define BPF_Fs192000_Fc78_A0                    0.000778f
 #define BPF_Fs192000_Fc78_A1                    0.000000f
-#define BPF_Fs192000_Fc78_A2                   -0.000778f
-#define BPF_Fs192000_Fc78_B1                   -1.998437f
+#define BPF_Fs192000_Fc78_A2                   (-0.000778f)
+#define BPF_Fs192000_Fc78_B1                   (-1.998437f)
 #define BPF_Fs192000_Fc78_B2                    0.998444f
 #endif
 
 /* Coefficients for centre frequency 90Hz */
 #define BPF_Fs8000_Fc90_A0                       0.022760f
 #define BPF_Fs8000_Fc90_A1                       0.000000f
-#define BPF_Fs8000_Fc90_A2                       -0.022760f
-#define BPF_Fs8000_Fc90_B1                       -1.949073f
+#define BPF_Fs8000_Fc90_A2                       (-0.022760f)
+#define BPF_Fs8000_Fc90_B1                       (-1.949073f)
 #define BPF_Fs8000_Fc90_B2                       0.953953f
 #define BPF_Fs11025_Fc90_A0                      0.016619f
 #define BPF_Fs11025_Fc90_A1                      0.000000f
-#define BPF_Fs11025_Fc90_A2                      -0.016619f
-#define BPF_Fs11025_Fc90_B1                      -1.963791f
+#define BPF_Fs11025_Fc90_A2                      (-0.016619f)
+#define BPF_Fs11025_Fc90_B1                      (-1.963791f)
 #define BPF_Fs11025_Fc90_B2                      0.966377f
 #define BPF_Fs12000_Fc90_A0                      0.015289f
 #define BPF_Fs12000_Fc90_A1                      0.000000f
-#define BPF_Fs12000_Fc90_A2                      -0.015289f
-#define BPF_Fs12000_Fc90_B1                      -1.966882f
+#define BPF_Fs12000_Fc90_A2                      (-0.015289f)
+#define BPF_Fs12000_Fc90_B1                      (-1.966882f)
 #define BPF_Fs12000_Fc90_B2                      0.969067f
 #define BPF_Fs16000_Fc90_A0                      0.011511f
 #define BPF_Fs16000_Fc90_A1                      0.000000f
-#define BPF_Fs16000_Fc90_A2                      -0.011511f
-#define BPF_Fs16000_Fc90_B1                      -1.975477f
+#define BPF_Fs16000_Fc90_A2                      (-0.011511f)
+#define BPF_Fs16000_Fc90_B1                      (-1.975477f)
 #define BPF_Fs16000_Fc90_B2                      0.976711f
 #define BPF_Fs22050_Fc90_A0                      0.008379f
 #define BPF_Fs22050_Fc90_A1                      0.000000f
-#define BPF_Fs22050_Fc90_A2                      -0.008379f
-#define BPF_Fs22050_Fc90_B1                      -1.982395f
+#define BPF_Fs22050_Fc90_A2                      (-0.008379f)
+#define BPF_Fs22050_Fc90_B1                      (-1.982395f)
 #define BPF_Fs22050_Fc90_B2                      0.983047f
 #define BPF_Fs24000_Fc90_A0                      0.007704f
 #define BPF_Fs24000_Fc90_A1                      0.000000f
-#define BPF_Fs24000_Fc90_A2                      -0.007704f
-#define BPF_Fs24000_Fc90_B1                      -1.983863f
+#define BPF_Fs24000_Fc90_A2                      (-0.007704f)
+#define BPF_Fs24000_Fc90_B1                      (-1.983863f)
 #define BPF_Fs24000_Fc90_B2                      0.984414f
 #define BPF_Fs32000_Fc90_A0                      0.005789f
 #define BPF_Fs32000_Fc90_A1                      0.000000f
-#define BPF_Fs32000_Fc90_A2                      -0.005789f
-#define BPF_Fs32000_Fc90_B1                      -1.987977f
+#define BPF_Fs32000_Fc90_A2                      (-0.005789f)
+#define BPF_Fs32000_Fc90_B1                      (-1.987977f)
 #define BPF_Fs32000_Fc90_B2                      0.988288f
 #define BPF_Fs44100_Fc90_A0                      0.004207f
 #define BPF_Fs44100_Fc90_A1                      0.000000f
-#define BPF_Fs44100_Fc90_A2                      -0.004207f
-#define BPF_Fs44100_Fc90_B1                      -1.991324f
+#define BPF_Fs44100_Fc90_A2                      (-0.004207f)
+#define BPF_Fs44100_Fc90_B1                      (-1.991324f)
 #define BPF_Fs44100_Fc90_B2                      0.991488f
 #define BPF_Fs48000_Fc90_A0                      0.003867f
 #define BPF_Fs48000_Fc90_A1                      0.000000f
-#define BPF_Fs48000_Fc90_A2                      -0.003867f
-#define BPF_Fs48000_Fc90_B1                      -1.992038f
+#define BPF_Fs48000_Fc90_A2                      (-0.003867f)
+#define BPF_Fs48000_Fc90_B1                      (-1.992038f)
 #define BPF_Fs48000_Fc90_B2                      0.992177f
 
 #ifdef HIGHER_FS
 #define BPF_Fs96000_Fc90_A0                      0.001913f
 #define BPF_Fs96000_Fc90_A1                      0.000000f
-#define BPF_Fs96000_Fc90_A2                     -0.001913f
-#define BPF_Fs96000_Fc90_B1                     -1.996134f
+#define BPF_Fs96000_Fc90_A2                     (-0.001913f)
+#define BPF_Fs96000_Fc90_B1                     (-1.996134f)
 #define BPF_Fs96000_Fc90_B2                      0.996174f
 
 #define BPF_Fs192000_Fc90_A0                     0.000958f
 #define BPF_Fs192000_Fc90_A1                     0.000000f
-#define BPF_Fs192000_Fc90_A2                    -0.000958f
-#define BPF_Fs192000_Fc90_B1                    -1.998075f
+#define BPF_Fs192000_Fc90_A2                    (-0.000958f)
+#define BPF_Fs192000_Fc90_B1                    (-1.998075f)
 #define BPF_Fs192000_Fc90_B2                     0.998085f
 #endif
 
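Note on the pattern in the hunks above and below: every negative coefficient literal is wrapped in parentheses so that each macro expands to a single parenthesized expression. For plain literals like these the generated code is unchanged; the parentheses are the conventional defensive style for expression-like macros and satisfy macro-parentheses lint checks (e.g. clang-tidy's misc-macro-parentheses / bugprone-macro-parentheses). A minimal sketch of the two styles, using a hypothetical coefficient macro that is not part of this patch:

    /* paren_macro_demo.c - illustrates the parenthesization applied in this patch */
    #include <stdio.h>

    #define COEF_BARE  -0.004620f     /* style before the change            */
    #define COEF_SAFE  (-0.004620f)   /* style introduced by the change     */

    int main(void) {
        /* For a simple negative literal both forms evaluate identically;
         * the parenthesized form simply expands as one primary expression,
         * which is what the macro-parentheses checks expect. */
        float y1 = 2.0f * COEF_BARE;   /* 2.0f * -0.004620f   */
        float y2 = 2.0f * COEF_SAFE;   /* 2.0f * (-0.004620f) */
        printf("%f %f\n", y1, y2);     /* prints the same value twice */
        return 0;
    }
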
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
index 353560c..8c04847 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
@@ -69,55 +69,55 @@
 #define HPF_Fs22050_Gain6_B2                            0.000000
                                                                     /* Gain =  7.000000 dB */
 #define HPF_Fs22050_Gain7_A0                            1.390177
-#define HPF_Fs22050_Gain7_A1                            -0.020144
+#define HPF_Fs22050_Gain7_A1                            (-0.020144)
 #define HPF_Fs22050_Gain7_A2                            0.000000
 #define HPF_Fs22050_Gain7_B1                            0.370033
 #define HPF_Fs22050_Gain7_B2                            0.000000
                                                                     /* Gain =  8.000000 dB */
 #define HPF_Fs22050_Gain8_A0                            1.476219
-#define HPF_Fs22050_Gain8_A1                            -0.106187
+#define HPF_Fs22050_Gain8_A1                            (-0.106187)
 #define HPF_Fs22050_Gain8_A2                            0.000000
 #define HPF_Fs22050_Gain8_B1                            0.370033
 #define HPF_Fs22050_Gain8_B2                            0.000000
                                                                     /* Gain =  9.000000 dB */
 #define HPF_Fs22050_Gain9_A0                            1.572761
-#define HPF_Fs22050_Gain9_A1                            -0.202728
+#define HPF_Fs22050_Gain9_A1                            (-0.202728)
 #define HPF_Fs22050_Gain9_A2                            0.000000
 #define HPF_Fs22050_Gain9_B1                            0.370033
 #define HPF_Fs22050_Gain9_B2                            0.000000
                                                                     /* Gain =  10.000000 dB */
 #define HPF_Fs22050_Gain10_A0                           1.681082
-#define HPF_Fs22050_Gain10_A1                           -0.311049
+#define HPF_Fs22050_Gain10_A1                           (-0.311049)
 #define HPF_Fs22050_Gain10_A2                           0.000000
 #define HPF_Fs22050_Gain10_B1                           0.370033
 #define HPF_Fs22050_Gain10_B2                           0.000000
                                                                     /* Gain =  11.000000 dB */
 #define HPF_Fs22050_Gain11_A0                           1.802620
-#define HPF_Fs22050_Gain11_A1                           -0.432588
+#define HPF_Fs22050_Gain11_A1                           (-0.432588)
 #define HPF_Fs22050_Gain11_A2                           0.000000
 #define HPF_Fs22050_Gain11_B1                           0.370033
 #define HPF_Fs22050_Gain11_B2                           0.000000
                                                                     /* Gain =  12.000000 dB */
 #define HPF_Fs22050_Gain12_A0                           1.938989
-#define HPF_Fs22050_Gain12_A1                           -0.568956
+#define HPF_Fs22050_Gain12_A1                           (-0.568956)
 #define HPF_Fs22050_Gain12_A2                           0.000000
 #define HPF_Fs22050_Gain12_B1                           0.370033
 #define HPF_Fs22050_Gain12_B2                           0.000000
                                                                     /* Gain =  13.000000 dB */
 #define HPF_Fs22050_Gain13_A0                           2.091997
-#define HPF_Fs22050_Gain13_A1                           -0.721964
+#define HPF_Fs22050_Gain13_A1                           (-0.721964)
 #define HPF_Fs22050_Gain13_A2                           0.000000
 #define HPF_Fs22050_Gain13_B1                           0.370033
 #define HPF_Fs22050_Gain13_B2                           0.000000
                                                                     /* Gain =  14.000000 dB */
 #define HPF_Fs22050_Gain14_A0                           2.263674
-#define HPF_Fs22050_Gain14_A1                           -0.893641
+#define HPF_Fs22050_Gain14_A1                           (-0.893641)
 #define HPF_Fs22050_Gain14_A2                           0.000000
 #define HPF_Fs22050_Gain14_B1                           0.370033
 #define HPF_Fs22050_Gain14_B2                           0.000000
                                                                     /* Gain =  15.000000 dB */
 #define HPF_Fs22050_Gain15_A0                           2.456300
-#define HPF_Fs22050_Gain15_A1                           -1.086267
+#define HPF_Fs22050_Gain15_A1                           (-1.086267)
 #define HPF_Fs22050_Gain15_A2                           0.000000
 #define HPF_Fs22050_Gain15_B1                           0.370033
 #define HPF_Fs22050_Gain15_B2                           0.000000
@@ -148,342 +148,342 @@
 #define HPF_Fs24000_Gain4_B2                            0.000000
                                                                     /* Gain =  5.000000 dB */
 #define HPF_Fs24000_Gain5_A0                            1.284870
-#define HPF_Fs24000_Gain5_A1                            -0.016921
+#define HPF_Fs24000_Gain5_A1                            (-0.016921)
 #define HPF_Fs24000_Gain5_A2                            0.000000
 #define HPF_Fs24000_Gain5_B1                            0.267949
 #define HPF_Fs24000_Gain5_B2                            0.000000
                                                                     /* Gain =  6.000000 dB */
 #define HPF_Fs24000_Gain6_A0                           1.364291
-#define HPF_Fs24000_Gain6_A1                           -0.096342
+#define HPF_Fs24000_Gain6_A1                           (-0.096342)
 #define HPF_Fs24000_Gain6_A2                           0.000000
 #define HPF_Fs24000_Gain6_B1                           0.267949
 #define HPF_Fs24000_Gain6_B2                           0.000000
                                                                     /* Gain =  7.000000 dB */
 #define HPF_Fs24000_Gain7_A0                            1.453403
-#define HPF_Fs24000_Gain7_A1                            -0.185454
+#define HPF_Fs24000_Gain7_A1                            (-0.185454)
 #define HPF_Fs24000_Gain7_A2                            0.000000
 #define HPF_Fs24000_Gain7_B1                            0.267949
 #define HPF_Fs24000_Gain7_B2                            0.000000
                                                                     /* Gain =  8.000000 dB */
 #define HPF_Fs24000_Gain8_A0                            1.553389
-#define HPF_Fs24000_Gain8_A1                            -0.285440
+#define HPF_Fs24000_Gain8_A1                            (-0.285440)
 #define HPF_Fs24000_Gain8_A2                            0.000000
 #define HPF_Fs24000_Gain8_B1                            0.267949
 #define HPF_Fs24000_Gain8_B2                            0.000000
                                                                     /* Gain =  9.000000 dB */
 #define HPF_Fs24000_Gain9_A0                            1.665574
-#define HPF_Fs24000_Gain9_A1                            -0.397625
+#define HPF_Fs24000_Gain9_A1                            (-0.397625)
 #define HPF_Fs24000_Gain9_A2                            0.000000
 #define HPF_Fs24000_Gain9_B1                            0.267949
 #define HPF_Fs24000_Gain9_B2                            0.000000
                                                                     /* Gain =  10.000000 dB */
 #define HPF_Fs24000_Gain10_A0                           1.791449
-#define HPF_Fs24000_Gain10_A1                           -0.523499
+#define HPF_Fs24000_Gain10_A1                           (-0.523499)
 #define HPF_Fs24000_Gain10_A2                           0.000000
 #define HPF_Fs24000_Gain10_B1                           0.267949
 #define HPF_Fs24000_Gain10_B2                           0.000000
                                                                     /* Gain =  11.000000 dB */
 #define HPF_Fs24000_Gain11_A0                           1.932682
-#define HPF_Fs24000_Gain11_A1                           -0.664733
+#define HPF_Fs24000_Gain11_A1                           (-0.664733)
 #define HPF_Fs24000_Gain11_A2                           0.000000
 #define HPF_Fs24000_Gain11_B1                           0.267949
 #define HPF_Fs24000_Gain11_B2                           0.000000
                                                                     /* Gain =  12.000000 dB */
 #define HPF_Fs24000_Gain12_A0                           2.091148
-#define HPF_Fs24000_Gain12_A1                           -0.823199
+#define HPF_Fs24000_Gain12_A1                           (-0.823199)
 #define HPF_Fs24000_Gain12_A2                           0.000000
 #define HPF_Fs24000_Gain12_B1                           0.267949
 #define HPF_Fs24000_Gain12_B2                           0.000000
                                                                     /* Gain =  13.000000 dB */
 #define HPF_Fs24000_Gain13_A0                           2.268950
-#define HPF_Fs24000_Gain13_A1                           -1.001001
+#define HPF_Fs24000_Gain13_A1                           (-1.001001)
 #define HPF_Fs24000_Gain13_A2                           0.000000
 #define HPF_Fs24000_Gain13_B1                           0.267949
 #define HPF_Fs24000_Gain13_B2                           0.000000
                                                                     /* Gain =  14.000000 dB */
 #define HPF_Fs24000_Gain14_A0                           2.468447
-#define HPF_Fs24000_Gain14_A1                           -1.200498
+#define HPF_Fs24000_Gain14_A1                           (-1.200498)
 #define HPF_Fs24000_Gain14_A2                           0.000000
 #define HPF_Fs24000_Gain14_B1                           0.267949
 #define HPF_Fs24000_Gain14_B2                           0.000000
                                                                     /* Gain =  15.000000 dB */
 #define HPF_Fs24000_Gain15_A0                           2.692287
-#define HPF_Fs24000_Gain15_A1                           -1.424338
+#define HPF_Fs24000_Gain15_A1                           (-1.424338)
 #define HPF_Fs24000_Gain15_A2                           0.000000
 #define HPF_Fs24000_Gain15_B1                           0.267949
 #define HPF_Fs24000_Gain15_B2                           0.000000
 /* Coefficients for sample rate 32000Hz */
                                                                     /* Gain =  1.000000 dB */
 #define HPF_Fs32000_Gain1_A0                            1.061009
-#define HPF_Fs32000_Gain1_A1                            -0.061009
+#define HPF_Fs32000_Gain1_A1                            (-0.061009)
 #define HPF_Fs32000_Gain1_A2                            0.000000
-#define HPF_Fs32000_Gain1_B1                            -0.000000
+#define HPF_Fs32000_Gain1_B1                            (-0.000000)
 #define HPF_Fs32000_Gain1_B2                            0.000000
                                                                     /* Gain =  2.000000 dB */
 #define HPF_Fs32000_Gain2_A0                             1.129463
-#define HPF_Fs32000_Gain2_A1                             -0.129463
+#define HPF_Fs32000_Gain2_A1                             (-0.129463)
 #define HPF_Fs32000_Gain2_A2                             0.000000
-#define HPF_Fs32000_Gain2_B1                             -0.000000
+#define HPF_Fs32000_Gain2_B1                             (-0.000000)
 #define HPF_Fs32000_Gain2_B2                             0.000000
                                                                     /* Gain =  3.000000 dB */
 #define HPF_Fs32000_Gain3_A0                             1.206267
-#define HPF_Fs32000_Gain3_A1                             -0.206267
+#define HPF_Fs32000_Gain3_A1                             (-0.206267)
 #define HPF_Fs32000_Gain3_A2                             0.000000
-#define HPF_Fs32000_Gain3_B1                             -0.000000
+#define HPF_Fs32000_Gain3_B1                             (-0.000000)
 #define HPF_Fs32000_Gain3_B2                             0.000000
                                                                     /* Gain =  4.000000 dB */
 #define HPF_Fs32000_Gain4_A0                            1.292447
-#define HPF_Fs32000_Gain4_A1                            -0.292447
+#define HPF_Fs32000_Gain4_A1                            (-0.292447)
 #define HPF_Fs32000_Gain4_A2                            0.000000
-#define HPF_Fs32000_Gain4_B1                            -0.000000
+#define HPF_Fs32000_Gain4_B1                            (-0.000000)
 #define HPF_Fs32000_Gain4_B2                            0.000000
                                                                     /* Gain =  5.000000 dB */
 #define HPF_Fs32000_Gain5_A0                            1.389140
-#define HPF_Fs32000_Gain5_A1                            -0.389140
+#define HPF_Fs32000_Gain5_A1                            (-0.389140)
 #define HPF_Fs32000_Gain5_A2                            0.000000
-#define HPF_Fs32000_Gain5_B1                            -0.000000
+#define HPF_Fs32000_Gain5_B1                            (-0.000000)
 #define HPF_Fs32000_Gain5_B2                            0.000000
                                                                     /* Gain =  6.000000 dB */
 #define HPF_Fs32000_Gain6_A0                             1.497631
-#define HPF_Fs32000_Gain6_A1                             -0.497631
+#define HPF_Fs32000_Gain6_A1                             (-0.497631)
 #define HPF_Fs32000_Gain6_A2                             0.000000
-#define HPF_Fs32000_Gain6_B1                             -0.000000
+#define HPF_Fs32000_Gain6_B1                             (-0.000000)
 #define HPF_Fs32000_Gain6_B2                             0.000000
                                                                     /* Gain =  7.000000 dB */
 #define HPF_Fs32000_Gain7_A0                             1.619361
-#define HPF_Fs32000_Gain7_A1                             -0.619361
+#define HPF_Fs32000_Gain7_A1                             (-0.619361)
 #define HPF_Fs32000_Gain7_A2                             0.000000
-#define HPF_Fs32000_Gain7_B1                             -0.000000
+#define HPF_Fs32000_Gain7_B1                             (-0.000000)
 #define HPF_Fs32000_Gain7_B2                             0.000000
                                                                     /* Gain =  8.000000 dB */
 #define HPF_Fs32000_Gain8_A0                             1.755943
-#define HPF_Fs32000_Gain8_A1                             -0.755943
+#define HPF_Fs32000_Gain8_A1                             (-0.755943)
 #define HPF_Fs32000_Gain8_A2                             0.000000
-#define HPF_Fs32000_Gain8_B1                             -0.000000
+#define HPF_Fs32000_Gain8_B1                             (-0.000000)
 #define HPF_Fs32000_Gain8_B2                             0.000000
                                                                     /* Gain =  9.000000 dB */
 #define HPF_Fs32000_Gain9_A0                             1.909191
-#define HPF_Fs32000_Gain9_A1                             -0.909191
+#define HPF_Fs32000_Gain9_A1                             (-0.909191)
 #define HPF_Fs32000_Gain9_A2                             0.000000
-#define HPF_Fs32000_Gain9_B1                             -0.000000
+#define HPF_Fs32000_Gain9_B1                             (-0.000000)
 #define HPF_Fs32000_Gain9_B2                             0.000000
                                                                     /* Gain =  10.000000 dB */
 #define HPF_Fs32000_Gain10_A0                            2.081139
-#define HPF_Fs32000_Gain10_A1                            -1.081139
+#define HPF_Fs32000_Gain10_A1                            (-1.081139)
 #define HPF_Fs32000_Gain10_A2                            0.000000
-#define HPF_Fs32000_Gain10_B1                            -0.000000
+#define HPF_Fs32000_Gain10_B1                            (-0.000000)
 #define HPF_Fs32000_Gain10_B2                            0.000000
                                                                     /* Gain =  11.000000 dB */
 #define HPF_Fs32000_Gain11_A0                           2.274067
-#define HPF_Fs32000_Gain11_A1                           -1.274067
+#define HPF_Fs32000_Gain11_A1                           (-1.274067)
 #define HPF_Fs32000_Gain11_A2                           0.000000
-#define HPF_Fs32000_Gain11_B1                           -0.000000
+#define HPF_Fs32000_Gain11_B1                           (-0.000000)
 #define HPF_Fs32000_Gain11_B2                           0.000000
                                                                     /* Gain =  12.000000 dB */
 #define HPF_Fs32000_Gain12_A0                          2.490536
-#define HPF_Fs32000_Gain12_A1                          -1.490536
+#define HPF_Fs32000_Gain12_A1                          (-1.490536)
 #define HPF_Fs32000_Gain12_A2                          0.000000
-#define HPF_Fs32000_Gain12_B1                          -0.000000
+#define HPF_Fs32000_Gain12_B1                          (-0.000000)
 #define HPF_Fs32000_Gain12_B2                          0.000000
                                                                     /* Gain =  13.000000 dB */
 #define HPF_Fs32000_Gain13_A0                           2.733418
-#define HPF_Fs32000_Gain13_A1                           -1.733418
+#define HPF_Fs32000_Gain13_A1                           (-1.733418)
 #define HPF_Fs32000_Gain13_A2                           0.000000
-#define HPF_Fs32000_Gain13_B1                           -0.000000
+#define HPF_Fs32000_Gain13_B1                           (-0.000000)
 #define HPF_Fs32000_Gain13_B2                           0.000000
                                                                     /* Gain =  14.000000 dB */
 #define HPF_Fs32000_Gain14_A0                           3.005936
-#define HPF_Fs32000_Gain14_A1                           -2.005936
+#define HPF_Fs32000_Gain14_A1                           (-2.005936)
 #define HPF_Fs32000_Gain14_A2                           0.000000
-#define HPF_Fs32000_Gain14_B1                           -0.000000
+#define HPF_Fs32000_Gain14_B1                           (-0.000000)
 #define HPF_Fs32000_Gain14_B2                           0.000000
                                                                     /* Gain =  15.000000 dB */
 #define HPF_Fs32000_Gain15_A0                          3.311707
-#define HPF_Fs32000_Gain15_A1                          -2.311707
+#define HPF_Fs32000_Gain15_A1                          (-2.311707)
 #define HPF_Fs32000_Gain15_A2                          0.000000
-#define HPF_Fs32000_Gain15_B1                          -0.000000
+#define HPF_Fs32000_Gain15_B1                          (-0.000000)
 #define HPF_Fs32000_Gain15_B2                          0.000000
 /* Coefficients for sample rate 44100Hz */
                                                                     /* Gain =  1.000000 dB */
 #define HPF_Fs44100_Gain1_A0                            1.074364
-#define HPF_Fs44100_Gain1_A1                            -0.293257
+#define HPF_Fs44100_Gain1_A1                            (-0.293257)
 #define HPF_Fs44100_Gain1_A2                            0.000000
-#define HPF_Fs44100_Gain1_B1                            -0.218894
+#define HPF_Fs44100_Gain1_B1                            (-0.218894)
 #define HPF_Fs44100_Gain1_B2                            0.000000
                                                                     /* Gain =  2.000000 dB */
 #define HPF_Fs44100_Gain2_A0                            1.157801
-#define HPF_Fs44100_Gain2_A1                            -0.376695
+#define HPF_Fs44100_Gain2_A1                            (-0.376695)
 #define HPF_Fs44100_Gain2_A2                            0.000000
-#define HPF_Fs44100_Gain2_B1                            -0.218894
+#define HPF_Fs44100_Gain2_B1                            (-0.218894)
 #define HPF_Fs44100_Gain2_B2                            0.000000
                                                                     /* Gain =  3.000000 dB */
 #define HPF_Fs44100_Gain3_A0                           1.251420
-#define HPF_Fs44100_Gain3_A1                           -0.470313
+#define HPF_Fs44100_Gain3_A1                           (-0.470313)
 #define HPF_Fs44100_Gain3_A2                           0.000000
-#define HPF_Fs44100_Gain3_B1                           -0.218894
+#define HPF_Fs44100_Gain3_B1                           (-0.218894)
 #define HPF_Fs44100_Gain3_B2                           0.000000
                                                                     /* Gain =  4.000000 dB */
 #define HPF_Fs44100_Gain4_A0                            1.356461
-#define HPF_Fs44100_Gain4_A1                            -0.575355
+#define HPF_Fs44100_Gain4_A1                            (-0.575355)
 #define HPF_Fs44100_Gain4_A2                            0.000000
-#define HPF_Fs44100_Gain4_B1                            -0.218894
+#define HPF_Fs44100_Gain4_B1                            (-0.218894)
 #define HPF_Fs44100_Gain4_B2                            0.000000
                                                                     /* Gain =  5.000000 dB */
 #define HPF_Fs44100_Gain5_A0                            1.474320
-#define HPF_Fs44100_Gain5_A1                            -0.693213
+#define HPF_Fs44100_Gain5_A1                            (-0.693213)
 #define HPF_Fs44100_Gain5_A2                            0.000000
-#define HPF_Fs44100_Gain5_B1                            -0.218894
+#define HPF_Fs44100_Gain5_B1                            (-0.218894)
 #define HPF_Fs44100_Gain5_B2                            0.000000
                                                                     /* Gain =  6.000000 dB */
 #define HPF_Fs44100_Gain6_A0                           1.606559
-#define HPF_Fs44100_Gain6_A1                           -0.825453
+#define HPF_Fs44100_Gain6_A1                           (-0.825453)
 #define HPF_Fs44100_Gain6_A2                           0.000000
-#define HPF_Fs44100_Gain6_B1                           -0.218894
+#define HPF_Fs44100_Gain6_B1                           (-0.218894)
 #define HPF_Fs44100_Gain6_B2                           0.000000
                                                                     /* Gain =  7.000000 dB */
 #define HPF_Fs44100_Gain7_A0                           1.754935
-#define HPF_Fs44100_Gain7_A1                           -0.973828
+#define HPF_Fs44100_Gain7_A1                           (-0.973828)
 #define HPF_Fs44100_Gain7_A2                           0.000000
-#define HPF_Fs44100_Gain7_B1                           -0.218894
+#define HPF_Fs44100_Gain7_B1                           (-0.218894)
 #define HPF_Fs44100_Gain7_B2                           0.000000
                                                                     /* Gain =  8.000000 dB */
 #define HPF_Fs44100_Gain8_A0                            1.921414
-#define HPF_Fs44100_Gain8_A1                            -1.140308
+#define HPF_Fs44100_Gain8_A1                            (-1.140308)
 #define HPF_Fs44100_Gain8_A2                            0.000000
-#define HPF_Fs44100_Gain8_B1                            -0.218894
+#define HPF_Fs44100_Gain8_B1                            (-0.218894)
 #define HPF_Fs44100_Gain8_B2                            0.000000
                                                                     /* Gain =  9.000000 dB */
 #define HPF_Fs44100_Gain9_A0                            2.108208
-#define HPF_Fs44100_Gain9_A1                            -1.327101
+#define HPF_Fs44100_Gain9_A1                            (-1.327101)
 #define HPF_Fs44100_Gain9_A2                            0.000000
-#define HPF_Fs44100_Gain9_B1                            -0.218894
+#define HPF_Fs44100_Gain9_B1                            (-0.218894)
 #define HPF_Fs44100_Gain9_B2                            0.000000
                                                                     /* Gain =  10.000000 dB */
 #define HPF_Fs44100_Gain10_A0                          2.317793
-#define HPF_Fs44100_Gain10_A1                          -1.536687
+#define HPF_Fs44100_Gain10_A1                          (-1.536687)
 #define HPF_Fs44100_Gain10_A2                          0.000000
-#define HPF_Fs44100_Gain10_B1                          -0.218894
+#define HPF_Fs44100_Gain10_B1                          (-0.218894)
 #define HPF_Fs44100_Gain10_B2                          0.000000
                                                                     /* Gain =  11.000000 dB */
 #define HPF_Fs44100_Gain11_A0                          2.552952
-#define HPF_Fs44100_Gain11_A1                          -1.771846
+#define HPF_Fs44100_Gain11_A1                          (-1.771846)
 #define HPF_Fs44100_Gain11_A2                          0.000000
-#define HPF_Fs44100_Gain11_B1                          -0.218894
+#define HPF_Fs44100_Gain11_B1                          (-0.218894)
 #define HPF_Fs44100_Gain11_B2                          0.000000
                                                                     /* Gain =  12.000000 dB */
 #define HPF_Fs44100_Gain12_A0                          2.816805
-#define HPF_Fs44100_Gain12_A1                          -2.035698
+#define HPF_Fs44100_Gain12_A1                          (-2.035698)
 #define HPF_Fs44100_Gain12_A2                          0.000000
-#define HPF_Fs44100_Gain12_B1                          -0.218894
+#define HPF_Fs44100_Gain12_B1                          (-0.218894)
 #define HPF_Fs44100_Gain12_B2                          0.000000
                                                                     /* Gain =  13.000000 dB */
 #define HPF_Fs44100_Gain13_A0                           3.112852
-#define HPF_Fs44100_Gain13_A1                           -2.331746
+#define HPF_Fs44100_Gain13_A1                           (-2.331746)
 #define HPF_Fs44100_Gain13_A2                           0.000000
-#define HPF_Fs44100_Gain13_B1                           -0.218894
+#define HPF_Fs44100_Gain13_B1                           (-0.218894)
 #define HPF_Fs44100_Gain13_B2                           0.000000
                                                                     /* Gain =  14.000000 dB */
 #define HPF_Fs44100_Gain14_A0                          3.445023
-#define HPF_Fs44100_Gain14_A1                          -2.663916
+#define HPF_Fs44100_Gain14_A1                          (-2.663916)
 #define HPF_Fs44100_Gain14_A2                          0.000000
-#define HPF_Fs44100_Gain14_B1                          -0.218894
+#define HPF_Fs44100_Gain14_B1                          (-0.218894)
 #define HPF_Fs44100_Gain14_B2                          0.000000
                                                                     /* Gain =  15.000000 dB */
 #define HPF_Fs44100_Gain15_A0                          3.817724
-#define HPF_Fs44100_Gain15_A1                          -3.036618
+#define HPF_Fs44100_Gain15_A1                          (-3.036618)
 #define HPF_Fs44100_Gain15_A2                          0.000000
-#define HPF_Fs44100_Gain15_B1                          -0.218894
+#define HPF_Fs44100_Gain15_B1                          (-0.218894)
 #define HPF_Fs44100_Gain15_B2                          0.000000
 /* Coefficients for sample rate 48000Hz */
                                                                     /* Gain =  1.000000 dB */
 #define HPF_Fs48000_Gain1_A0                          1.077357
-#define HPF_Fs48000_Gain1_A1                          -0.345306
+#define HPF_Fs48000_Gain1_A1                          (-0.345306)
 #define HPF_Fs48000_Gain1_A2                          0.000000
-#define HPF_Fs48000_Gain1_B1                          -0.267949
+#define HPF_Fs48000_Gain1_B1                          (-0.267949)
 #define HPF_Fs48000_Gain1_B2                          0.000000
                                                                     /* Gain =  2.000000 dB */
 #define HPF_Fs48000_Gain2_A0                          1.164152
-#define HPF_Fs48000_Gain2_A1                          -0.432101
+#define HPF_Fs48000_Gain2_A1                          (-0.432101)
 #define HPF_Fs48000_Gain2_A2                          0.000000
-#define HPF_Fs48000_Gain2_B1                          -0.267949
+#define HPF_Fs48000_Gain2_B1                          (-0.267949)
 #define HPF_Fs48000_Gain2_B2                          0.000000
                                                                     /* Gain =  3.000000 dB */
 #define HPF_Fs48000_Gain3_A0                          1.261538
-#define HPF_Fs48000_Gain3_A1                          -0.529488
+#define HPF_Fs48000_Gain3_A1                          (-0.529488)
 #define HPF_Fs48000_Gain3_A2                          0.000000
-#define HPF_Fs48000_Gain3_B1                          -0.267949
+#define HPF_Fs48000_Gain3_B1                          (-0.267949)
 #define HPF_Fs48000_Gain3_B2                          0.000000
                                                                     /* Gain =  4.000000 dB */
 #define HPF_Fs48000_Gain4_A0                           1.370807
-#define HPF_Fs48000_Gain4_A1                           -0.638757
+#define HPF_Fs48000_Gain4_A1                           (-0.638757)
 #define HPF_Fs48000_Gain4_A2                           0.000000
-#define HPF_Fs48000_Gain4_B1                           -0.267949
+#define HPF_Fs48000_Gain4_B1                           (-0.267949)
 #define HPF_Fs48000_Gain4_B2                           0.000000
                                                                     /* Gain =  5.000000 dB */
 #define HPF_Fs48000_Gain5_A0                           1.493409
-#define HPF_Fs48000_Gain5_A1                           -0.761359
+#define HPF_Fs48000_Gain5_A1                           (-0.761359)
 #define HPF_Fs48000_Gain5_A2                           0.000000
-#define HPF_Fs48000_Gain5_B1                           -0.267949
+#define HPF_Fs48000_Gain5_B1                           (-0.267949)
 #define HPF_Fs48000_Gain5_B2                           0.000000
                                                                     /* Gain =  6.000000 dB */
 #define HPF_Fs48000_Gain6_A0                            1.630971
-#define HPF_Fs48000_Gain6_A1                            -0.898920
+#define HPF_Fs48000_Gain6_A1                            (-0.898920)
 #define HPF_Fs48000_Gain6_A2                            0.000000
-#define HPF_Fs48000_Gain6_B1                            -0.267949
+#define HPF_Fs48000_Gain6_B1                            (-0.267949)
 #define HPF_Fs48000_Gain6_B2                            0.000000
                                                                     /* Gain =  7.000000 dB */
 #define HPF_Fs48000_Gain7_A0                            1.785318
-#define HPF_Fs48000_Gain7_A1                            -1.053267
+#define HPF_Fs48000_Gain7_A1                            (-1.053267)
 #define HPF_Fs48000_Gain7_A2                            0.000000
-#define HPF_Fs48000_Gain7_B1                            -0.267949
+#define HPF_Fs48000_Gain7_B1                            (-0.267949)
 #define HPF_Fs48000_Gain7_B2                            0.000000
                                                                     /* Gain =  8.000000 dB */
 #define HPF_Fs48000_Gain8_A0                           1.958498
-#define HPF_Fs48000_Gain8_A1                           -1.226447
+#define HPF_Fs48000_Gain8_A1                           (-1.226447)
 #define HPF_Fs48000_Gain8_A2                           0.000000
-#define HPF_Fs48000_Gain8_B1                           -0.267949
+#define HPF_Fs48000_Gain8_B1                           (-0.267949)
 #define HPF_Fs48000_Gain8_B2                           0.000000
                                                                     /* Gain =  9.000000 dB */
 #define HPF_Fs48000_Gain9_A0                          2.152809
-#define HPF_Fs48000_Gain9_A1                          -1.420758
+#define HPF_Fs48000_Gain9_A1                          (-1.420758)
 #define HPF_Fs48000_Gain9_A2                          0.000000
-#define HPF_Fs48000_Gain9_B1                          -0.267949
+#define HPF_Fs48000_Gain9_B1                          (-0.267949)
 #define HPF_Fs48000_Gain9_B2                          0.000000
                                                                     /* Gain =  10.000000 dB */
 #define HPF_Fs48000_Gain10_A0                         2.370829
-#define HPF_Fs48000_Gain10_A1                         -1.638778
+#define HPF_Fs48000_Gain10_A1                         (-1.638778)
 #define HPF_Fs48000_Gain10_A2                         0.000000
-#define HPF_Fs48000_Gain10_B1                         -0.267949
+#define HPF_Fs48000_Gain10_B1                         (-0.267949)
 #define HPF_Fs48000_Gain10_B2                         0.000000
                                                                     /* Gain =  11.000000 dB */
 #define HPF_Fs48000_Gain11_A0                          2.615452
-#define HPF_Fs48000_Gain11_A1                          -1.883401
+#define HPF_Fs48000_Gain11_A1                          (-1.883401)
 #define HPF_Fs48000_Gain11_A2                          0.000000
-#define HPF_Fs48000_Gain11_B1                          -0.267949
+#define HPF_Fs48000_Gain11_B1                          (-0.267949)
 #define HPF_Fs48000_Gain11_B2                          0.000000
                                                                     /* Gain =  12.000000 dB */
 #define HPF_Fs48000_Gain12_A0                          2.889924
-#define HPF_Fs48000_Gain12_A1                          -2.157873
+#define HPF_Fs48000_Gain12_A1                          (-2.157873)
 #define HPF_Fs48000_Gain12_A2                          0.000000
-#define HPF_Fs48000_Gain12_B1                          -0.267949
+#define HPF_Fs48000_Gain12_B1                          (-0.267949)
 #define HPF_Fs48000_Gain12_B2                          0.000000
                                                                     /* Gain =  13.000000 dB */
 #define HPF_Fs48000_Gain13_A0                           3.197886
-#define HPF_Fs48000_Gain13_A1                           -2.465835
+#define HPF_Fs48000_Gain13_A1                           (-2.465835)
 #define HPF_Fs48000_Gain13_A2                           0.000000
-#define HPF_Fs48000_Gain13_B1                           -0.267949
+#define HPF_Fs48000_Gain13_B1                           (-0.267949)
 #define HPF_Fs48000_Gain13_B2                           0.000000
                                                                     /* Gain =  14.000000 dB */
 #define HPF_Fs48000_Gain14_A0                          3.543425
-#define HPF_Fs48000_Gain14_A1                          -2.811374
+#define HPF_Fs48000_Gain14_A1                          (-2.811374)
 #define HPF_Fs48000_Gain14_A2                          0.000000
-#define HPF_Fs48000_Gain14_B1                          -0.267949
+#define HPF_Fs48000_Gain14_B1                          (-0.267949)
 #define HPF_Fs48000_Gain14_B2                          0.000000
                                                                     /* Gain =  15.000000 dB */
 #define HPF_Fs48000_Gain15_A0                         3.931127
-#define HPF_Fs48000_Gain15_A1                         -3.199076
+#define HPF_Fs48000_Gain15_A1                         (-3.199076)
 #define HPF_Fs48000_Gain15_A2                         0.000000
-#define HPF_Fs48000_Gain15_B1                         -0.267949
+#define HPF_Fs48000_Gain15_B1                         (-0.267949)
 #define HPF_Fs48000_Gain15_B2                         0.000000
 
 #ifdef HIGHER_FS
@@ -491,185 +491,185 @@
 /* Coefficients for sample rate 96000Hz */
                                                                  /* Gain =  1.000000 dB */
 #define HPF_Fs96000_Gain1_A0                          1.096233
-#define HPF_Fs96000_Gain1_A1                          -0.673583
+#define HPF_Fs96000_Gain1_A1                          (-0.673583)
 #define HPF_Fs96000_Gain1_A2                          0.000000
-#define HPF_Fs96000_Gain1_B1                          -0.577350
+#define HPF_Fs96000_Gain1_B1                          (-0.577350)
 #define HPF_Fs96000_Gain1_B2                          0.000000
                                                                  /* Gain =  2.000000 dB */
 #define HPF_Fs96000_Gain2_A0                          1.204208
-#define HPF_Fs96000_Gain2_A1                          -0.781558
+#define HPF_Fs96000_Gain2_A1                          (-0.781558)
 #define HPF_Fs96000_Gain2_A2                          0.000000
-#define HPF_Fs96000_Gain2_B1                          -0.577350
+#define HPF_Fs96000_Gain2_B1                          (-0.577350)
 #define HPF_Fs96000_Gain2_B2                          0.000000
                                                                  /* Gain =  3.000000 dB */
 #define HPF_Fs96000_Gain3_A0                          1.325358
-#define HPF_Fs96000_Gain3_A1                          -0.902708
+#define HPF_Fs96000_Gain3_A1                          (-0.902708)
 #define HPF_Fs96000_Gain3_A2                          0.000000
-#define HPF_Fs96000_Gain3_B1                          -0.577350
+#define HPF_Fs96000_Gain3_B1                          (-0.577350)
 #define HPF_Fs96000_Gain3_B2                          0.000000
                                                                  /* Gain =  4.000000 dB */
 #define HPF_Fs96000_Gain4_A0                           1.461291
-#define HPF_Fs96000_Gain4_A1                           -1.038641
+#define HPF_Fs96000_Gain4_A1                           (-1.038641)
 #define HPF_Fs96000_Gain4_A2                           0.000000
-#define HPF_Fs96000_Gain4_B1                           -0.577350
+#define HPF_Fs96000_Gain4_B1                           (-0.577350)
 #define HPF_Fs96000_Gain4_B2                           0.000000
                                                                  /* Gain =  5.000000 dB */
 #define HPF_Fs96000_Gain5_A0                           1.613810
-#define HPF_Fs96000_Gain5_A1                           -1.191160
+#define HPF_Fs96000_Gain5_A1                           (-1.191160)
 #define HPF_Fs96000_Gain5_A2                           0.000000
-#define HPF_Fs96000_Gain5_B1                           -0.577350
+#define HPF_Fs96000_Gain5_B1                           (-0.577350)
 #define HPF_Fs96000_Gain5_B2                           0.000000
                                                                  /* Gain =  6.000000 dB */
 #define HPF_Fs96000_Gain6_A0                            1.784939
-#define HPF_Fs96000_Gain6_A1                            -1.362289
+#define HPF_Fs96000_Gain6_A1                            (-1.362289)
 #define HPF_Fs96000_Gain6_A2                            0.000000
-#define HPF_Fs96000_Gain6_B1                            -0.577350
+#define HPF_Fs96000_Gain6_B1                            (-0.577350)
 #define HPF_Fs96000_Gain6_B2                            0.000000
                                                                 /* Gain =  7.000000 dB */
 #define HPF_Fs96000_Gain7_A0                            1.976949
-#define HPF_Fs96000_Gain7_A1                            -1.554299
+#define HPF_Fs96000_Gain7_A1                            (-1.554299)
 #define HPF_Fs96000_Gain7_A2                            0.000000
-#define HPF_Fs96000_Gain7_B1                            -0.577350
+#define HPF_Fs96000_Gain7_B1                            (-0.577350)
 #define HPF_Fs96000_Gain7_B2                            0.000000
                                                                  /* Gain =  8.000000 dB */
 #define HPF_Fs96000_Gain8_A0                           2.192387
-#define HPF_Fs96000_Gain8_A1                           -1.769738
+#define HPF_Fs96000_Gain8_A1                           (-1.769738)
 #define HPF_Fs96000_Gain8_A2                           0.000000
-#define HPF_Fs96000_Gain8_B1                           -0.577350
+#define HPF_Fs96000_Gain8_B1                           (-0.577350)
 #define HPF_Fs96000_Gain8_B2                           0.000000
                                                                 /* Gain =  9.000000 dB */
 #define HPF_Fs96000_Gain9_A0                          2.434113
-#define HPF_Fs96000_Gain9_A1                          -2.011464
+#define HPF_Fs96000_Gain9_A1                          (-2.011464)
 #define HPF_Fs96000_Gain9_A2                          0.000000
-#define HPF_Fs96000_Gain9_B1                          -0.577350
+#define HPF_Fs96000_Gain9_B1                          (-0.577350)
 #define HPF_Fs96000_Gain9_B2                          0.000000
                                                                /* Gain =  10.000000 dB */
 #define HPF_Fs96000_Gain10_A0                        2.705335
-#define HPF_Fs96000_Gain10_A1                        -2.282685
+#define HPF_Fs96000_Gain10_A1                        (-2.282685)
 #define HPF_Fs96000_Gain10_A2                         0.000000
-#define HPF_Fs96000_Gain10_B1                         -0.577350
+#define HPF_Fs96000_Gain10_B1                         (-0.577350)
 #define HPF_Fs96000_Gain10_B2                         0.000000
                                                               /* Gain =  11.000000 dB */
 #define HPF_Fs96000_Gain11_A0                          3.009650
-#define HPF_Fs96000_Gain11_A1                          -2.587000
+#define HPF_Fs96000_Gain11_A1                          (-2.587000)
 #define HPF_Fs96000_Gain11_A2                          0.000000
-#define HPF_Fs96000_Gain11_B1                          -0.577350
+#define HPF_Fs96000_Gain11_B1                          (-0.577350)
 #define HPF_Fs96000_Gain11_B2                          0.000000
                                                                   /* Gain =  12.000000 dB */
 #define HPF_Fs96000_Gain12_A0                          3.351097
-#define HPF_Fs96000_Gain12_A1                          -2.928447
+#define HPF_Fs96000_Gain12_A1                          (-2.928447)
 #define HPF_Fs96000_Gain12_A2                          0.000000
-#define HPF_Fs96000_Gain12_B1                          -0.577350
+#define HPF_Fs96000_Gain12_B1                          (-0.577350)
 #define HPF_Fs96000_Gain12_B2                          0.000000
                                                                 /* Gain =  13.000000 dB */
 #define HPF_Fs96000_Gain13_A0                           3.734207
-#define HPF_Fs96000_Gain13_A1                           -3.311558
+#define HPF_Fs96000_Gain13_A1                           (-3.311558)
 #define HPF_Fs96000_Gain13_A2                           0.000000
-#define HPF_Fs96000_Gain13_B1                           -0.577350
+#define HPF_Fs96000_Gain13_B1                           (-0.577350)
 #define HPF_Fs96000_Gain13_B2                           0.000000
                                                                  /* Gain =  14.000000 dB */
 #define HPF_Fs96000_Gain14_A0                         4.164064
-#define HPF_Fs96000_Gain14_A1                         -3.741414
+#define HPF_Fs96000_Gain14_A1                         (-3.741414)
 #define HPF_Fs96000_Gain14_A2                          0.000000
-#define HPF_Fs96000_Gain14_B1                          -0.577350
+#define HPF_Fs96000_Gain14_B1                          (-0.577350)
 #define HPF_Fs96000_Gain14_B2                          0.000000
                                                                  /* Gain =  15.000000 dB */
 #define HPF_Fs96000_Gain15_A0                         4.646371
-#define HPF_Fs96000_Gain15_A1                         -4.223721
+#define HPF_Fs96000_Gain15_A1                         (-4.223721)
 #define HPF_Fs96000_Gain15_A2                         0.000000
-#define HPF_Fs96000_Gain15_B1                         -0.577350
+#define HPF_Fs96000_Gain15_B1                         (-0.577350)
 #define HPF_Fs96000_Gain15_B2                         0.000000
 
 /* Coefficients for sample rate 192000Hz */
                                                                   /* Gain =  1.000000 dB */
 #define HPF_Fs192000_Gain1_A0                          1.107823
-#define HPF_Fs192000_Gain1_A1                          -0.875150
+#define HPF_Fs192000_Gain1_A1                          (-0.875150)
 #define HPF_Fs192000_Gain1_A2                          0.000000
-#define HPF_Fs192000_Gain1_B1                          -0.767327
+#define HPF_Fs192000_Gain1_B1                          (-0.767327)
 #define HPF_Fs192000_Gain1_B2                          0.000000
                                                                   /* Gain =  2.000000 dB */
 #define HPF_Fs192000_Gain2_A0                          1.228803
-#define HPF_Fs192000_Gain2_A1                          -0.996130
+#define HPF_Fs192000_Gain2_A1                          (-0.996130)
 #define HPF_Fs192000_Gain2_A2                          0.000000
-#define HPF_Fs192000_Gain2_B1                          -0.767327
+#define HPF_Fs192000_Gain2_B1                          (-0.767327)
 #define HPF_Fs192000_Gain2_B2                          0.000000
                                                                    /* Gain =  3.000000 dB */
 #define HPF_Fs192000_Gain3_A0                          1.364544
-#define HPF_Fs192000_Gain3_A1                          -1.131871
+#define HPF_Fs192000_Gain3_A1                          (-1.131871)
 #define HPF_Fs192000_Gain3_A2                          0.000000
-#define HPF_Fs192000_Gain3_B1                          -0.767327
+#define HPF_Fs192000_Gain3_B1                          (-0.767327)
 #define HPF_Fs192000_Gain3_B2                          0.000000
                                                                    /* Gain =  4.000000 dB */
 #define HPF_Fs192000_Gain4_A0                          1.516849
-#define HPF_Fs192000_Gain4_A1                          -1.284176
+#define HPF_Fs192000_Gain4_A1                          (-1.284176)
 #define HPF_Fs192000_Gain4_A2                           0.000000
-#define HPF_Fs192000_Gain4_B1                           -0.767327
+#define HPF_Fs192000_Gain4_B1                           (-0.767327)
 #define HPF_Fs192000_Gain4_B2                           0.000000
                                                                    /* Gain =  5.000000 dB */
 #define HPF_Fs192000_Gain5_A0                           1.687737
-#define HPF_Fs192000_Gain5_A1                           -1.455064
+#define HPF_Fs192000_Gain5_A1                           (-1.455064)
 #define HPF_Fs192000_Gain5_A2                           0.000000
-#define HPF_Fs192000_Gain5_B1                           -0.767327
+#define HPF_Fs192000_Gain5_B1                           (-0.767327)
 #define HPF_Fs192000_Gain5_B2                           0.000000
                                                                    /* Gain =  6.000000 dB */
 #define HPF_Fs192000_Gain6_A0                            1.879477
-#define HPF_Fs192000_Gain6_A1                            -1.646804
+#define HPF_Fs192000_Gain6_A1                            (-1.646804)
 #define HPF_Fs192000_Gain6_A2                            0.000000
-#define HPF_Fs192000_Gain6_B1                            -0.767327
+#define HPF_Fs192000_Gain6_B1                            (-0.767327)
 #define HPF_Fs192000_Gain6_B2                            0.000000
                                                                  /* Gain =  7.000000 dB */
 #define HPF_Fs192000_Gain7_A0                            2.094613
-#define HPF_Fs192000_Gain7_A1                            -1.861940
+#define HPF_Fs192000_Gain7_A1                            (-1.861940)
 #define HPF_Fs192000_Gain7_A2                            0.000000
-#define HPF_Fs192000_Gain7_B1                            -0.767327
+#define HPF_Fs192000_Gain7_B1                            (-0.767327)
 #define HPF_Fs192000_Gain7_B2                            0.000000
                                                                    /* Gain =  8.000000 dB */
 #define HPF_Fs192000_Gain8_A0                           2.335999
-#define HPF_Fs192000_Gain8_A1                           -2.103326
+#define HPF_Fs192000_Gain8_A1                           (-2.103326)
 #define HPF_Fs192000_Gain8_A2                           0.000000
-#define HPF_Fs192000_Gain8_B1                           -0.767327
+#define HPF_Fs192000_Gain8_B1                           (-0.767327)
 #define HPF_Fs192000_Gain8_B2                           0.000000
                                                                    /* Gain =  9.000000 dB */
 #define HPF_Fs192000_Gain9_A0                          2.606839
-#define HPF_Fs192000_Gain9_A1                          -2.374166
+#define HPF_Fs192000_Gain9_A1                          (-2.374166)
 #define HPF_Fs192000_Gain9_A2                          0.000000
-#define HPF_Fs192000_Gain9_B1                          -0.767327
+#define HPF_Fs192000_Gain9_B1                          (-0.767327)
 #define HPF_Fs192000_Gain9_B2                          0.000000
                                                                  /* Gain =  10.000000 dB */
 #define HPF_Fs192000_Gain10_A0                        2.910726
-#define HPF_Fs192000_Gain10_A1                        -2.678053
+#define HPF_Fs192000_Gain10_A1                        (-2.678053)
 #define HPF_Fs192000_Gain10_A2                         0.000000
-#define HPF_Fs192000_Gain10_B1                         -0.767327
+#define HPF_Fs192000_Gain10_B1                         (-0.767327)
 #define HPF_Fs192000_Gain10_B2                         0.000000
                                                                   /* Gain =  11.000000 dB */
 #define HPF_Fs192000_Gain11_A0                          3.251693
-#define HPF_Fs192000_Gain11_A1                          -3.019020
+#define HPF_Fs192000_Gain11_A1                          (-3.019020)
 #define HPF_Fs192000_Gain11_A2                          0.000000
-#define HPF_Fs192000_Gain11_B1                          -0.767327
+#define HPF_Fs192000_Gain11_B1                          (-0.767327)
 #define HPF_Fs192000_Gain11_B2                          0.000000
                                                                   /* Gain =  12.000000 dB */
 #define HPF_Fs192000_Gain12_A0                          3.634264
-#define HPF_Fs192000_Gain12_A1                          -3.401591
+#define HPF_Fs192000_Gain12_A1                          (-3.401591)
 #define HPF_Fs192000_Gain12_A2                          0.000000
-#define HPF_Fs192000_Gain12_B1                          -0.767327
+#define HPF_Fs192000_Gain12_B1                          (-0.767327)
 #define HPF_Fs192000_Gain12_B2                          0.000000
                                                                 /* Gain =  13.000000 dB */
 #define HPF_Fs192000_Gain13_A0                           4.063516
-#define HPF_Fs192000_Gain13_A1                           -3.830843
+#define HPF_Fs192000_Gain13_A1                           (-3.830843)
 #define HPF_Fs192000_Gain13_A2                           0.000000
-#define HPF_Fs192000_Gain13_B1                           -0.767327
+#define HPF_Fs192000_Gain13_B1                           (-0.767327)
 #define HPF_Fs192000_Gain13_B2                           0.000000
                                                                 /* Gain =  14.000000 dB */
 #define HPF_Fs192000_Gain14_A0                          4.545145
-#define HPF_Fs192000_Gain14_A1                          -4.312472
+#define HPF_Fs192000_Gain14_A1                          (-4.312472)
 #define HPF_Fs192000_Gain14_A2                          0.000000
-#define HPF_Fs192000_Gain14_B1                          -0.767327
+#define HPF_Fs192000_Gain14_B1                          (-0.767327)
 #define HPF_Fs192000_Gain14_B2                          0.000000
                                                                   /* Gain =  15.000000 dB */
 #define HPF_Fs192000_Gain15_A0                         5.085542
-#define HPF_Fs192000_Gain15_A1                         -4.852868
+#define HPF_Fs192000_Gain15_A1                         (-4.852868)
 #define HPF_Fs192000_Gain15_A2                         0.000000
-#define HPF_Fs192000_Gain15_B1                         -0.767327
+#define HPF_Fs192000_Gain15_B1                         (-0.767327)
 #define HPF_Fs192000_Gain15_B2                         0.000000
 
 #endif
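
The coefficient headers above only wrap the existing negative literals in parentheses; none of the numeric values change. The patch does not spell out the motivation, but this is standard macro hygiene (the pattern flagged by checks such as clang-tidy's bugprone-macro-parentheses): for a lone negative literal the parentheses are inert, yet they become load-bearing the moment a macro body grows to more than one token. A small hypothetical sketch of that hazard, using invented ATTEN_* macros rather than anything from these headers:

/* Hypothetical illustration (not part of the patch): for a single negative
 * literal the parentheses do not change evaluation, but they matter as soon
 * as a macro body contains more than one token. */
#include <stdio.h>

#define ATTEN_RAW   -3 + 1      /* unparenthesized multi-token body      */
#define ATTEN_SAFE  (-3 + 1)    /* parenthesized, as in the patch style  */

int main(void) {
    printf("%d\n", 2 * ATTEN_RAW);   /* expands to 2 * -3 + 1  -> -5 */
    printf("%d\n", 2 * ATTEN_SAFE);  /* expands to 2 * (-3 + 1) -> -4 */
    return 0;
}
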
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
index cb15b60..ea16072 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
@@ -44,9 +44,6 @@
 
 #define LVM_MAXINT_8            127                 /* Maximum positive integer size */
 #define LVM_MAXINT_16           32767
-#ifdef BUILD_FLOAT
-#define LVM_MAXFLOAT            1.0f
-#endif
 #define LVM_MAXINT_32           2147483647
 #define LVM_MAXENUM             2147483647
 
@@ -99,8 +96,32 @@
 typedef     uint32_t            LVM_UINT32;         /* Unsigned 32-bit word */
 
 #ifdef BUILD_FLOAT
-typedef     float               LVM_FLOAT;          /* single precission floating point*/
-#endif
+
+#define LVM_MAXFLOAT            1.f
+
+typedef     float               LVM_FLOAT;          /* single precision floating point */
+
+// If NATIVE_FLOAT_BUFFER is defined, we expose effects as floating point format;
+// otherwise we expose as integer 16 bit and translate to float for the effect libraries.
+// Hence, NATIVE_FLOAT_BUFFER should only be enabled under BUILD_FLOAT compilation.
+
+#define NATIVE_FLOAT_BUFFER
+
+#endif // BUILD_FLOAT
+
+// Select whether we expose int16_t or float buffers.
+#ifdef NATIVE_FLOAT_BUFFER
+
+#define    EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
+typedef     float               effect_buffer_t;
+
+#else // NATIVE_FLOAT_BUFFER
+
+#define    EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_16_BIT
+typedef     int16_t             effect_buffer_t;
+
+#endif // NATIVE_FLOAT_BUFFER
+
 /****************************************************************************************/
 /*                                                                                      */
 /*  Standard Enumerated types                                                           */
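
With the LVM_Types.h change above, NATIVE_FLOAT_BUFFER is only ever defined under BUILD_FLOAT, and the exposed sample type and format are selected in one place through effect_buffer_t and EFFECT_BUFFER_FORMAT. A minimal sketch (not taken from the patch) of how wrapper code can stay format-agnostic by coding against effect_buffer_t; the local typedef below mirrors the hunk purely to keep the example self-contained:

/* Minimal sketch (not from the patch): wrapper code written against
 * effect_buffer_t works unchanged whether the effect exposes float or
 * 16-bit PCM, assuming the definitions from the LVM_Types.h hunk above. */
#include <stdint.h>
#include <string.h>

#ifdef NATIVE_FLOAT_BUFFER
typedef float   effect_buffer_t;   /* effect exposes float samples      */
#else
typedef int16_t effect_buffer_t;   /* effect exposes 16-bit PCM samples */
#endif

/* Copy one stereo buffer; the element size tracks the configured format. */
static void copy_stereo(effect_buffer_t *dst, const effect_buffer_t *src,
                        size_t frameCount) {
    memcpy(dst, src, frameCount * 2 /* channels */ * sizeof(effect_buffer_t));
}
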
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
index f0deb6c..42ea46f 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
@@ -26,21 +26,21 @@
 /*                                                                                  */
 /************************************************************************************/
 #ifdef BUILD_FLOAT
-#define LVEQNB_Gain_Neg15_dB                             -0.822172f
-#define LVEQNB_Gain_Neg14_dB                             -0.800474f
-#define LVEQNB_Gain_Neg13_dB                             -0.776128f
-#define LVEQNB_Gain_Neg12_dB                             -0.748811f
-#define LVEQNB_Gain_Neg11_dB                             -0.718162f
-#define LVEQNB_Gain_Neg10_dB                             -0.683772f
-#define LVEQNB_Gain_Neg9_dB                              -0.645187f
-#define LVEQNB_Gain_Neg8_dB                              -0.601893f
-#define LVEQNB_Gain_Neg7_dB                              -0.553316f
-#define LVEQNB_Gain_Neg6_dB                              -0.498813f
-#define LVEQNB_Gain_Neg5_dB                              -0.437659f
-#define LVEQNB_Gain_Neg4_dB                              -0.369043f
-#define LVEQNB_Gain_Neg3_dB                              -0.292054f
-#define LVEQNB_Gain_Neg2_dB                              -0.205672f
-#define LVEQNB_Gain_Neg1_dB                              -0.108749f
+#define LVEQNB_Gain_Neg15_dB                             (-0.822172f)
+#define LVEQNB_Gain_Neg14_dB                             (-0.800474f)
+#define LVEQNB_Gain_Neg13_dB                             (-0.776128f)
+#define LVEQNB_Gain_Neg12_dB                             (-0.748811f)
+#define LVEQNB_Gain_Neg11_dB                             (-0.718162f)
+#define LVEQNB_Gain_Neg10_dB                             (-0.683772f)
+#define LVEQNB_Gain_Neg9_dB                              (-0.645187f)
+#define LVEQNB_Gain_Neg8_dB                              (-0.601893f)
+#define LVEQNB_Gain_Neg7_dB                              (-0.553316f)
+#define LVEQNB_Gain_Neg6_dB                              (-0.498813f)
+#define LVEQNB_Gain_Neg5_dB                              (-0.437659f)
+#define LVEQNB_Gain_Neg4_dB                              (-0.369043f)
+#define LVEQNB_Gain_Neg3_dB                              (-0.292054f)
+#define LVEQNB_Gain_Neg2_dB                              (-0.205672f)
+#define LVEQNB_Gain_Neg1_dB                              (-0.108749f)
 #define LVEQNB_Gain_0_dB                                  0.000000f
 #define LVEQNB_Gain_1_dB                                  0.122018f
 #define LVEQNB_Gain_2_dB                                  0.258925f
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.c b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.c
index c290aec..7b0f341 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.c
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.c
@@ -430,7 +430,15 @@
     }
 
 
-    if(bChange){
+    // During operating mode transition, there is a race condition where the mode
+    // is still LVEQNB_ON, but the effect is considered disabled in the upper layers.
+    // modeChange handles this special race condition.
+    const int /* bool */ modeChange = pParams->OperatingMode != OperatingModeSave
+            || (OperatingModeSave == LVEQNB_ON
+                    && pInstance->bInOperatingModeTransition
+                    && LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0);
+
+    if (bChange || modeChange) {
 
         /*
          * If the sample rate has changed clear the history
@@ -462,8 +470,7 @@
             LVEQNB_SetCoefficients(pInstance);                  /* Instance pointer */
         }
 
-        if(pParams->OperatingMode != OperatingModeSave)
-        {
+        if (modeChange) {
             if(pParams->OperatingMode == LVEQNB_ON)
             {
 #ifdef BUILD_FLOAT
@@ -479,6 +486,8 @@
             else
             {
                 /* Stay on the ON operating mode until the transition is done */
+                // This may introduce a state race condition if the effect is enabled again
+                // while in transition.  This is fixed in the modeChange logic.
                 pInstance->Params.OperatingMode = LVEQNB_ON;
 #ifdef BUILD_FLOAT
                 LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[0], 0.0f);
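
The modeChange predicate introduced above re-runs the ON/OFF switching path when the requested mode differs from the saved one, or when the saved mode is still LVEQNB_ON but a disable transition is in flight and the bypass mixer has already been targeted to zero. A hedged sketch of that decision in isolation; the struct and fields below are simplified stand-ins for the real LVEQNB instance state, not the library's types:

/* Sketch of the modeChange decision; EqState is an invented stand-in for
 * pInstance, not the real LVEQNB instance structure. */
#include <stdbool.h>

typedef enum { LVEQNB_OFF = 0, LVEQNB_ON = 1 } OpMode;

typedef struct {
    OpMode savedMode;                 /* OperatingModeSave                     */
    bool   inOperatingModeTransition; /* pInstance->bInOperatingModeTransition */
    float  bypassMixerTarget;         /* LVC_Mixer_GetTarget(...) result       */
} EqState;

/* Returns true when the coefficient/mixer switch must be re-applied even
 * though the requested mode may equal the saved one. */
static bool needs_mode_change(const EqState *s, OpMode requestedMode) {
    return requestedMode != s->savedMode
            || (s->savedMode == LVEQNB_ON
                    && s->inOperatingModeTransition
                    && s->bypassMixerTarget == 0.0f);
}
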
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
index 4f5221a..0c2fe53 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
@@ -27,127 +27,127 @@
 #ifdef BUILD_FLOAT
 /* Stereo Enhancer coefficients for 8000 Hz sample rate, scaled with 0.161258 */
 #define CS_MIDDLE_8000_A0                           0.227720
-#define CS_MIDDLE_8000_A1                          -0.215125
+#define CS_MIDDLE_8000_A1                          (-0.215125)
 #define CS_MIDDLE_8000_A2                           0.000000
-#define CS_MIDDLE_8000_B1                          -0.921899
+#define CS_MIDDLE_8000_B1                          (-0.921899)
 #define CS_MIDDLE_8000_B2                           0.000000
 #define CS_MIDDLE_8000_SCALE                        15
 #define CS_SIDE_8000_A0                             0.611441
-#define CS_SIDE_8000_A1                            -0.380344
-#define CS_SIDE_8000_A2                            -0.231097
-#define CS_SIDE_8000_B1                            -0.622470
-#define CS_SIDE_8000_B2                            -0.130759
+#define CS_SIDE_8000_A1                            (-0.380344)
+#define CS_SIDE_8000_A2                            (-0.231097)
+#define CS_SIDE_8000_B1                            (-0.622470)
+#define CS_SIDE_8000_B2                            (-0.130759)
 #define CS_SIDE_8000_SCALE                         15
 
 /* Stereo Enhancer coefficients for 11025Hz sample rate, scaled with 0.162943 */
 #define CS_MIDDLE_11025_A0                       0.230838
-#define CS_MIDDLE_11025_A1                      -0.221559
+#define CS_MIDDLE_11025_A1                      (-0.221559)
 #define CS_MIDDLE_11025_A2                       0.000000
-#define CS_MIDDLE_11025_B1                      -0.943056
+#define CS_MIDDLE_11025_B1                      (-0.943056)
 #define CS_MIDDLE_11025_B2                       0.000000
 #define CS_MIDDLE_11025_SCALE                    15
 #define CS_SIDE_11025_A0                         0.557372
-#define CS_SIDE_11025_A1                        -0.391490
-#define CS_SIDE_11025_A2                        -0.165881
-#define CS_SIDE_11025_B1                        -0.880608
+#define CS_SIDE_11025_A1                        (-0.391490)
+#define CS_SIDE_11025_A2                        (-0.165881)
+#define CS_SIDE_11025_B1                        (-0.880608)
 #define CS_SIDE_11025_B2                         0.032397
 #define CS_SIDE_11025_SCALE                      15
 
 /* Stereo Enhancer coefficients for 12000Hz sample rate, scaled with 0.162191 */
 #define CS_MIDDLE_12000_A0                        0.229932
-#define CS_MIDDLE_12000_A1                       -0.221436
+#define CS_MIDDLE_12000_A1                       (-0.221436)
 #define CS_MIDDLE_12000_A2                        0.000000
-#define CS_MIDDLE_12000_B1                       -0.947616
+#define CS_MIDDLE_12000_B1                       (-0.947616)
 #define CS_MIDDLE_12000_B2                        0.000000
 #define CS_MIDDLE_12000_SCALE                        15
 #define CS_SIDE_12000_A0                         0.558398
-#define CS_SIDE_12000_A1                        -0.392211
-#define CS_SIDE_12000_A2                        -0.166187
-#define CS_SIDE_12000_B1                        -0.892550
+#define CS_SIDE_12000_A1                        (-0.392211)
+#define CS_SIDE_12000_A2                        (-0.166187)
+#define CS_SIDE_12000_B1                        (-0.892550)
 #define CS_SIDE_12000_B2                         0.032856
 #define CS_SIDE_12000_SCALE                          15
 
 /* Stereo Enhancer coefficients for 16000Hz sample rate, scaled with 0.162371 */
 #define CS_MIDDLE_16000_A0                       0.230638
-#define CS_MIDDLE_16000_A1                      -0.224232
+#define CS_MIDDLE_16000_A1                      (-0.224232)
 #define CS_MIDDLE_16000_A2                       0.000000
-#define CS_MIDDLE_16000_B1                      -0.960550
+#define CS_MIDDLE_16000_B1                      (-0.960550)
 #define CS_MIDDLE_16000_B2                       0.000000
 #define CS_MIDDLE_16000_SCALE                        15
 #define CS_SIDE_16000_A0                         0.499695
-#define CS_SIDE_16000_A1                        -0.355543
-#define CS_SIDE_16000_A2                        -0.144152
-#define CS_SIDE_16000_B1                        -1.050788
+#define CS_SIDE_16000_A1                        (-0.355543)
+#define CS_SIDE_16000_A2                        (-0.144152)
+#define CS_SIDE_16000_B1                        (-1.050788)
 #define CS_SIDE_16000_B2                         0.144104
 #define CS_SIDE_16000_SCALE                          14
 
 /* Stereo Enhancer coefficients for 22050Hz sample rate, scaled with 0.160781 */
 #define CS_MIDDLE_22050_A0                       0.228749
-#define CS_MIDDLE_22050_A1                      -0.224128
+#define CS_MIDDLE_22050_A1                      (-0.224128)
 #define CS_MIDDLE_22050_A2                       0.000000
-#define CS_MIDDLE_22050_B1                      -0.971262
+#define CS_MIDDLE_22050_B1                      (-0.971262)
 #define CS_MIDDLE_22050_B2                       0.000000
 #define CS_MIDDLE_22050_SCALE                        15
 #define CS_SIDE_22050_A0                          0.440112
-#define CS_SIDE_22050_A1                         -0.261096
-#define CS_SIDE_22050_A2                         -0.179016
-#define CS_SIDE_22050_B1                         -1.116786
+#define CS_SIDE_22050_A1                         (-0.261096)
+#define CS_SIDE_22050_A2                         (-0.179016)
+#define CS_SIDE_22050_B1                         (-1.116786)
 #define CS_SIDE_22050_B2                          0.182507
 #define CS_SIDE_22050_SCALE                          14
 
 /* Stereo Enhancer coefficients for 24000Hz sample rate, scaled with 0.161882 */
 #define CS_MIDDLE_24000_A0                         0.230395
-#define CS_MIDDLE_24000_A1                        -0.226117
+#define CS_MIDDLE_24000_A1                        (-0.226117)
 #define CS_MIDDLE_24000_A2                         0.000000
-#define CS_MIDDLE_24000_B1                        -0.973573
+#define CS_MIDDLE_24000_B1                        (-0.973573)
 #define CS_MIDDLE_24000_B2                         0.000000
 #define CS_MIDDLE_24000_SCALE                        15
 #define CS_SIDE_24000_A0                           0.414770
-#define CS_SIDE_24000_A1                          -0.287182
-#define CS_SIDE_24000_A2                          -0.127588
-#define CS_SIDE_24000_B1                          -1.229648
+#define CS_SIDE_24000_A1                          (-0.287182)
+#define CS_SIDE_24000_A2                          (-0.127588)
+#define CS_SIDE_24000_B1                          (-1.229648)
 #define CS_SIDE_24000_B2                           0.282177
 #define CS_SIDE_24000_SCALE                          14
 
 /* Stereo Enhancer coefficients for 32000Hz sample rate, scaled with 0.160322 */
 #define CS_MIDDLE_32000_A0                          0.228400
-#define CS_MIDDLE_32000_A1                         -0.225214
+#define CS_MIDDLE_32000_A1                         (-0.225214)
 #define CS_MIDDLE_32000_A2                          0.000000
-#define CS_MIDDLE_32000_B1                         -0.980126
+#define CS_MIDDLE_32000_B1                         (-0.980126)
 #define CS_MIDDLE_32000_B2                          0.000000
 #define CS_MIDDLE_32000_SCALE                        15
 #define CS_SIDE_32000_A0                            0.364579
-#define CS_SIDE_32000_A1                           -0.207355
-#define CS_SIDE_32000_A2                           -0.157224
-#define CS_SIDE_32000_B1                           -1.274231
+#define CS_SIDE_32000_A1                           (-0.207355)
+#define CS_SIDE_32000_A2                           (-0.157224)
+#define CS_SIDE_32000_B1                           (-1.274231)
 #define CS_SIDE_32000_B2                            0.312495
 #define CS_SIDE_32000_SCALE                          14
 
 /* Stereo Enhancer coefficients for 44100Hz sample rate, scaled with 0.163834 */
 #define CS_MIDDLE_44100_A0                     0.233593
-#define CS_MIDDLE_44100_A1                    -0.231225
+#define CS_MIDDLE_44100_A1                    (-0.231225)
 #define CS_MIDDLE_44100_A2                     0.000000
-#define CS_MIDDLE_44100_B1                    -0.985545
+#define CS_MIDDLE_44100_B1                    (-0.985545)
 #define CS_MIDDLE_44100_B2                     0.000000
 #define CS_MIDDLE_44100_SCALE                        15
 #define CS_SIDE_44100_A0                       0.284573
-#define CS_SIDE_44100_A1                      -0.258910
-#define CS_SIDE_44100_A2                      -0.025662
-#define CS_SIDE_44100_B1                      -1.572248
+#define CS_SIDE_44100_A1                      (-0.258910)
+#define CS_SIDE_44100_A2                      (-0.025662)
+#define CS_SIDE_44100_B1                      (-1.572248)
 #define CS_SIDE_44100_B2                       0.588399
 #define CS_SIDE_44100_SCALE                  14
 
 /* Stereo Enhancer coefficients for 48000Hz sample rate, scaled with 0.164402 */
 #define CS_MIDDLE_48000_A0                     0.234445
-#define CS_MIDDLE_48000_A1                    -0.232261
+#define CS_MIDDLE_48000_A1                    (-0.232261)
 #define CS_MIDDLE_48000_A2                     0.000000
-#define CS_MIDDLE_48000_B1                    -0.986713
+#define CS_MIDDLE_48000_B1                    (-0.986713)
 #define CS_MIDDLE_48000_B2                     0.000000
 #define CS_MIDDLE_48000_SCALE                        15
 #define CS_SIDE_48000_A0                     0.272606
-#define CS_SIDE_48000_A1                    -0.266952
-#define CS_SIDE_48000_A2                    -0.005654
-#define CS_SIDE_48000_B1                    -1.617141
+#define CS_SIDE_48000_A1                    (-0.266952)
+#define CS_SIDE_48000_A2                    (-0.005654)
+#define CS_SIDE_48000_B1                    (-1.617141)
 #define CS_SIDE_48000_B2                     0.630405
 #define CS_SIDE_48000_SCALE                          14
 
@@ -155,31 +155,31 @@
 /* Stereo Enhancer coefficients for 96000Hz sample rate, scaled with  0.165*/
 /* high pass filter with cutoff frequency 102.18 Hz*/
 #define CS_MIDDLE_96000_A0                     0.235532
-#define CS_MIDDLE_96000_A1                    -0.234432
+#define CS_MIDDLE_96000_A1                    (-0.234432)
 #define CS_MIDDLE_96000_A2                     0.000000
-#define CS_MIDDLE_96000_B1                    -0.993334
+#define CS_MIDDLE_96000_B1                    (-0.993334)
 #define CS_MIDDLE_96000_B2                     0.000000
 #define CS_MIDDLE_96000_SCALE                        15
 /* bandpass filter with fc1 270 and fc2 3703, designed using 2nd order butterworth */
 #define CS_SIDE_96000_A0                     0.016727
 #define CS_SIDE_96000_A1                     0.000000
-#define CS_SIDE_96000_A2                    -0.016727
-#define CS_SIDE_96000_B1                    -1.793372
+#define CS_SIDE_96000_A2                    (-0.016727)
+#define CS_SIDE_96000_B1                    (-1.793372)
 #define CS_SIDE_96000_B2                     0.797236
 #define CS_SIDE_96000_SCALE                        14
 
 /* Stereo Enhancer coefficients for 192000Hz sample rate, scaled with  0.1689*/
 #define CS_MIDDLE_192000_A0                     0.241219
-#define CS_MIDDLE_192000_A1                    -0.240656
+#define CS_MIDDLE_192000_A1                    (-0.240656)
 #define CS_MIDDLE_192000_A2                     0.000000
-#define CS_MIDDLE_192000_B1                    -0.996661
+#define CS_MIDDLE_192000_B1                    (-0.996661)
 #define CS_MIDDLE_192000_B2                     0.000000
 #define CS_MIDDLE_192000_SCALE                        15
 /* bandpass filter with fc1 270 and fc2 3703, designed using 2nd order butterworth */
 #define CS_SIDE_192000_A0                     0.008991
-#define CS_SIDE_192000_A1                    -0.000000
-#define CS_SIDE_192000_A2                    -0.008991
-#define CS_SIDE_192000_B1                    -1.892509
+#define CS_SIDE_192000_A1                    (-0.000000)
+#define CS_SIDE_192000_A2                    (-0.008991)
+#define CS_SIDE_192000_B1                    (-1.892509)
 #define CS_SIDE_192000_B2                     0.893524
 #define CS_SIDE_192000_SCALE                       14
 #endif
@@ -203,74 +203,74 @@
 
 /* Reverb coefficients for 8000 Hz sample rate, scaled with 1.038030 */
 #define CS_REVERB_8000_A0                          0.667271
-#define CS_REVERB_8000_A1                         -0.667271
+#define CS_REVERB_8000_A1                         (-0.667271)
 #define CS_REVERB_8000_A2                          0.000000
-#define CS_REVERB_8000_B1                         -0.668179
+#define CS_REVERB_8000_B1                         (-0.668179)
 #define CS_REVERB_8000_B2                          0.000000
 #define CS_REVERB_8000_SCALE                         15
 
 /* Reverb coefficients for 11025Hz sample rate, scaled with 1.038030 */
 #define CS_REVERB_11025_A0                     0.699638
-#define CS_REVERB_11025_A1                    -0.699638
+#define CS_REVERB_11025_A1                    (-0.699638)
 #define CS_REVERB_11025_A2                     0.000000
-#define CS_REVERB_11025_B1                    -0.749096
+#define CS_REVERB_11025_B1                    (-0.749096)
 #define CS_REVERB_11025_B2                     0.000000
 #define CS_REVERB_11025_SCALE                  15
 
 /* Reverb coefficients for 12000Hz sample rate, scaled with 1.038030 */
 #define CS_REVERB_12000_A0                   0.706931
-#define CS_REVERB_12000_A1                  -0.706931
+#define CS_REVERB_12000_A1                  (-0.706931)
 #define CS_REVERB_12000_A2                   0.000000
-#define CS_REVERB_12000_B1                  -0.767327
+#define CS_REVERB_12000_B1                  (-0.767327)
 #define CS_REVERB_12000_B2                   0.000000
 #define CS_REVERB_12000_SCALE                15
 
 /* Reverb coefficients for 16000Hz sample rate, scaled with 1.038030 */
 #define CS_REVERB_16000_A0                      0.728272
-#define CS_REVERB_16000_A1                     -0.728272
+#define CS_REVERB_16000_A1                     (-0.728272)
 #define CS_REVERB_16000_A2                      0.000000
-#define CS_REVERB_16000_B1                     -0.820679
+#define CS_REVERB_16000_B1                     (-0.820679)
 #define CS_REVERB_16000_B2                      0.000000
 #define CS_REVERB_16000_SCALE                        15
 
 /* Reverb coefficients for 22050Hz sample rate, scaled with 1.038030 */
 #define CS_REVERB_22050_A0                     0.516396
 #define CS_REVERB_22050_A1                     0.000000
-#define CS_REVERB_22050_A2                    -0.516396
-#define CS_REVERB_22050_B1                    -0.518512
-#define CS_REVERB_22050_B2                    -0.290990
+#define CS_REVERB_22050_A2                    (-0.516396)
+#define CS_REVERB_22050_B1                    (-0.518512)
+#define CS_REVERB_22050_B2                    (-0.290990)
 #define CS_REVERB_22050_SCALE                        15
 
 
 /* Reverb coefficients for 24000Hz sample rate, scaled with 1.038030 */
 #define CS_REVERB_24000_A0                       0.479565
 #define CS_REVERB_24000_A1                       0.000000
-#define CS_REVERB_24000_A2                      -0.479565
-#define CS_REVERB_24000_B1                      -0.637745
-#define CS_REVERB_24000_B2                      -0.198912
+#define CS_REVERB_24000_A2                      (-0.479565)
+#define CS_REVERB_24000_B1                      (-0.637745)
+#define CS_REVERB_24000_B2                      (-0.198912)
 #define CS_REVERB_24000_SCALE                        15
 
 /* Reverb coefficients for 32000Hz sample rate, scaled with 1.038030 */
 #define CS_REVERB_32000_A0                      0.380349
 #define CS_REVERB_32000_A1                      0.000000
-#define CS_REVERB_32000_A2                     -0.380349
-#define CS_REVERB_32000_B1                     -0.950873
+#define CS_REVERB_32000_A2                     (-0.380349)
+#define CS_REVERB_32000_B1                     (-0.950873)
 #define CS_REVERB_32000_B2                      0.049127
 #define CS_REVERB_32000_SCALE                        15
 
 /* Reverb coefficients for 44100Hz sample rate, scaled with 1.038030 */
 #define CS_REVERB_44100_A0                         0.297389
 #define CS_REVERB_44100_A1                         0.000000
-#define CS_REVERB_44100_A2                        -0.297389
-#define CS_REVERB_44100_B1                        -1.200423
+#define CS_REVERB_44100_A2                        (-0.297389)
+#define CS_REVERB_44100_B1                        (-1.200423)
 #define CS_REVERB_44100_B2                         0.256529
 #define CS_REVERB_44100_SCALE                        14
 
 /* Reverb coefficients for 48000Hz sample rate, scaled with 1.038030 */
 #define CS_REVERB_48000_A0                       0.278661
 #define CS_REVERB_48000_A1                       0.000000
-#define CS_REVERB_48000_A2                      -0.278661
-#define CS_REVERB_48000_B1                      -1.254993
+#define CS_REVERB_48000_A2                      (-0.278661)
+#define CS_REVERB_48000_B1                      (-1.254993)
 #define CS_REVERB_48000_B2                       0.303347
 #define CS_REVERB_48000_SCALE                        14
 
@@ -279,8 +279,8 @@
 /* Band pass filter with fc1=500 and fc2=8000*/
 #define CS_REVERB_96000_A0                       0.1602488
 #define CS_REVERB_96000_A1                       0.000000
-#define CS_REVERB_96000_A2                      -0.1602488
-#define CS_REVERB_96000_B1                      -1.585413
+#define CS_REVERB_96000_A2                      (-0.1602488)
+#define CS_REVERB_96000_B1                      (-1.585413)
 #define CS_REVERB_96000_B2                       0.599377
 #define CS_REVERB_96000_SCALE                        14
 
@@ -288,8 +288,8 @@
 /* Band pass filter with fc1=500 and fc2=8000*/
 #define CS_REVERB_192000_A0                       0.0878369
 #define CS_REVERB_192000_A1                       0.000000
-#define CS_REVERB_192000_A2                      -0.0878369
-#define CS_REVERB_192000_B1                      -1.7765764
+#define CS_REVERB_192000_A2                      (-0.0878369)
+#define CS_REVERB_192000_B1                      (-1.7765764)
 #define CS_REVERB_192000_B2                       0.7804076
 #define CS_REVERB_192000_SCALE                        14
 
@@ -312,163 +312,163 @@
 /* Equaliser coefficients for 8000 Hz sample rate, \
    CS scaled with 1.038497 and CSEX scaled with 0.775480 */
 #define CS_EQUALISER_8000_A0                     1.263312
-#define CS_EQUALISER_8000_A1                    -0.601748
-#define CS_EQUALISER_8000_A2                    -0.280681
-#define CS_EQUALISER_8000_B1                    -0.475865
-#define CS_EQUALISER_8000_B2                    -0.408154
+#define CS_EQUALISER_8000_A1                    (-0.601748)
+#define CS_EQUALISER_8000_A2                    (-0.280681)
+#define CS_EQUALISER_8000_B1                    (-0.475865)
+#define CS_EQUALISER_8000_B2                    (-0.408154)
 #define CS_EQUALISER_8000_SCALE                      14
 #define CSEX_EQUALISER_8000_A0                    0.943357
-#define CSEX_EQUALISER_8000_A1                   -0.449345
-#define CSEX_EQUALISER_8000_A2                   -0.209594
-#define CSEX_EQUALISER_8000_B1                   -0.475865
-#define CSEX_EQUALISER_8000_B2                   -0.408154
+#define CSEX_EQUALISER_8000_A1                   (-0.449345)
+#define CSEX_EQUALISER_8000_A2                   (-0.209594)
+#define CSEX_EQUALISER_8000_B1                   (-0.475865)
+#define CSEX_EQUALISER_8000_B2                   (-0.408154)
 #define CSEX_EQUALISER_8000_SCALE                    15
 
 /* Equaliser coefficients for 11025Hz sample rate, \
    CS scaled with 1.027761 and CSEX scaled with 0.767463 */
 #define CS_EQUALISER_11025_A0                    1.101145
 #define CS_EQUALISER_11025_A1                    0.139020
-#define CS_EQUALISER_11025_A2                   -0.864423
+#define CS_EQUALISER_11025_A2                   (-0.864423)
 #define CS_EQUALISER_11025_B1                    0.024541
-#define CS_EQUALISER_11025_B2                   -0.908930
+#define CS_EQUALISER_11025_B2                   (-0.908930)
 #define CS_EQUALISER_11025_SCALE                     14
 #define CSEX_EQUALISER_11025_A0                    0.976058
-#define CSEX_EQUALISER_11025_A1                   -0.695326
-#define CSEX_EQUALISER_11025_A2                   -0.090809
-#define CSEX_EQUALISER_11025_B1                   -0.610594
-#define CSEX_EQUALISER_11025_B2                   -0.311149
+#define CSEX_EQUALISER_11025_A1                   (-0.695326)
+#define CSEX_EQUALISER_11025_A2                   (-0.090809)
+#define CSEX_EQUALISER_11025_B1                   (-0.610594)
+#define CSEX_EQUALISER_11025_B2                   (-0.311149)
 #define CSEX_EQUALISER_11025_SCALE                   15
 
 /* Equaliser coefficients for 12000Hz sample rate, \
    CS scaled with 1.032521 and CSEX scaled with 0.771017 */
 #define CS_EQUALISER_12000_A0                      1.276661
-#define CS_EQUALISER_12000_A1                     -1.017519
-#define CS_EQUALISER_12000_A2                     -0.044128
-#define CS_EQUALISER_12000_B1                     -0.729616
-#define CS_EQUALISER_12000_B2                     -0.204532
+#define CS_EQUALISER_12000_A1                     (-1.017519)
+#define CS_EQUALISER_12000_A2                     (-0.044128)
+#define CS_EQUALISER_12000_B1                     (-0.729616)
+#define CS_EQUALISER_12000_B2                     (-0.204532)
 #define CS_EQUALISER_12000_SCALE                     14
 #define CSEX_EQUALISER_12000_A0                 1.007095
-#define CSEX_EQUALISER_12000_A1                -0.871912
+#define CSEX_EQUALISER_12000_A1                (-0.871912)
 #define CSEX_EQUALISER_12000_A2                 0.023232
-#define CSEX_EQUALISER_12000_B1                -0.745857
-#define CSEX_EQUALISER_12000_B2                -0.189171
+#define CSEX_EQUALISER_12000_B1                (-0.745857)
+#define CSEX_EQUALISER_12000_B2                (-0.189171)
 #define CSEX_EQUALISER_12000_SCALE                   14
 
 /* Equaliser coefficients for 16000Hz sample rate, \
    CS scaled with 1.031378 and CSEX scaled with 0.770164 */
 #define CS_EQUALISER_16000_A0                     1.281629
-#define CS_EQUALISER_16000_A1                    -1.075872
-#define CS_EQUALISER_16000_A2                    -0.041365
-#define CS_EQUALISER_16000_B1                    -0.725239
-#define CS_EQUALISER_16000_B2                    -0.224358
+#define CS_EQUALISER_16000_A1                    (-1.075872)
+#define CS_EQUALISER_16000_A2                    (-0.041365)
+#define CS_EQUALISER_16000_B1                    (-0.725239)
+#define CS_EQUALISER_16000_B2                    (-0.224358)
 #define CS_EQUALISER_16000_SCALE                     14
 #define CSEX_EQUALISER_16000_A0                  1.081091
-#define CSEX_EQUALISER_16000_A1                 -0.867183
-#define CSEX_EQUALISER_16000_A2                 -0.070247
-#define CSEX_EQUALISER_16000_B1                 -0.515121
-#define CSEX_EQUALISER_16000_B2                 -0.425893
+#define CSEX_EQUALISER_16000_A1                 (-0.867183)
+#define CSEX_EQUALISER_16000_A2                 (-0.070247)
+#define CSEX_EQUALISER_16000_B1                 (-0.515121)
+#define CSEX_EQUALISER_16000_B2                 (-0.425893)
 #define CSEX_EQUALISER_16000_SCALE                   14
 
 /* Equaliser coefficients for 22050Hz sample rate, \
    CS scaled with 1.041576 and CSEX scaled with 0.777779 */
 #define CS_EQUALISER_22050_A0                   1.388605
-#define CS_EQUALISER_22050_A1                  -1.305799
+#define CS_EQUALISER_22050_A1                  (-1.305799)
 #define CS_EQUALISER_22050_A2                   0.039922
-#define CS_EQUALISER_22050_B1                  -0.719494
-#define CS_EQUALISER_22050_B2                  -0.243245
+#define CS_EQUALISER_22050_B1                  (-0.719494)
+#define CS_EQUALISER_22050_B2                  (-0.243245)
 #define CS_EQUALISER_22050_SCALE                     14
 #define CSEX_EQUALISER_22050_A0                   1.272910
-#define CSEX_EQUALISER_22050_A1                  -1.341014
+#define CSEX_EQUALISER_22050_A1                  (-1.341014)
 #define CSEX_EQUALISER_22050_A2                   0.167462
-#define CSEX_EQUALISER_22050_B1                  -0.614219
-#define CSEX_EQUALISER_22050_B2                  -0.345384
+#define CSEX_EQUALISER_22050_B1                  (-0.614219)
+#define CSEX_EQUALISER_22050_B2                  (-0.345384)
 #define CSEX_EQUALISER_22050_SCALE                   14
 
 /* Equaliser coefficients for 24000Hz sample rate, \
    CS scaled with 1.034495 and CSEX scaled with 0.772491 */
 #define CS_EQUALISER_24000_A0                    1.409832
-#define CS_EQUALISER_24000_A1                   -1.456506
+#define CS_EQUALISER_24000_A1                   (-1.456506)
 #define CS_EQUALISER_24000_A2                    0.151410
-#define CS_EQUALISER_24000_B1                   -0.804201
-#define CS_EQUALISER_24000_B2                   -0.163783
+#define CS_EQUALISER_24000_B1                   (-0.804201)
+#define CS_EQUALISER_24000_B2                   (-0.163783)
 #define CS_EQUALISER_24000_SCALE                     14
 #define CSEX_EQUALISER_24000_A0                  1.299198
-#define CSEX_EQUALISER_24000_A1                 -1.452447
+#define CSEX_EQUALISER_24000_A1                 (-1.452447)
 #define CSEX_EQUALISER_24000_A2                  0.240489
-#define CSEX_EQUALISER_24000_B1                 -0.669303
-#define CSEX_EQUALISER_24000_B2                 -0.294984
+#define CSEX_EQUALISER_24000_B1                 (-0.669303)
+#define CSEX_EQUALISER_24000_B2                 (-0.294984)
 #define CSEX_EQUALISER_24000_SCALE                   14
 
 /* Equaliser coefficients for 32000Hz sample rate, \
    CS scaled with 1.044559 and CSEX scaled with 0.780006 */
 #define CS_EQUALISER_32000_A0                     1.560988
-#define CS_EQUALISER_32000_A1                    -1.877724
+#define CS_EQUALISER_32000_A1                    (-1.877724)
 #define CS_EQUALISER_32000_A2                     0.389741
-#define CS_EQUALISER_32000_B1                    -0.907410
-#define CS_EQUALISER_32000_B2                    -0.070489
+#define CS_EQUALISER_32000_B1                    (-0.907410)
+#define CS_EQUALISER_32000_B2                    (-0.070489)
 #define CS_EQUALISER_32000_SCALE                     14
 #define CSEX_EQUALISER_32000_A0                  1.785049
-#define CSEX_EQUALISER_32000_A1                 -2.233497
+#define CSEX_EQUALISER_32000_A1                 (-2.233497)
 #define CSEX_EQUALISER_32000_A2                  0.526431
-#define CSEX_EQUALISER_32000_B1                 -0.445939
-#define CSEX_EQUALISER_32000_B2                 -0.522446
+#define CSEX_EQUALISER_32000_B1                 (-0.445939)
+#define CSEX_EQUALISER_32000_B2                 (-0.522446)
 #define CSEX_EQUALISER_32000_SCALE                   13
 
 /* Equaliser coefficients for 44100Hz sample rate, \
    CS scaled with 1.022170 and CSEX scaled with 0.763288 */
 #define CS_EQUALISER_44100_A0                  1.623993
-#define CS_EQUALISER_44100_A1                 -2.270743
+#define CS_EQUALISER_44100_A1                 (-2.270743)
 #define CS_EQUALISER_44100_A2                  0.688829
-#define CS_EQUALISER_44100_B1                 -1.117190
+#define CS_EQUALISER_44100_B1                 (-1.117190)
 #define CS_EQUALISER_44100_B2                  0.130208
 #define CS_EQUALISER_44100_SCALE                     13
 #define CSEX_EQUALISER_44100_A0                   2.028315
-#define CSEX_EQUALISER_44100_A1                  -2.882459
+#define CSEX_EQUALISER_44100_A1                  (-2.882459)
 #define CSEX_EQUALISER_44100_A2                   0.904535
-#define CSEX_EQUALISER_44100_B1                  -0.593308
-#define CSEX_EQUALISER_44100_B2                  -0.385816
+#define CSEX_EQUALISER_44100_B1                  (-0.593308)
+#define CSEX_EQUALISER_44100_B2                  (-0.385816)
 #define CSEX_EQUALISER_44100_SCALE                   13
 
 /* Equaliser coefficients for 48000Hz sample rate, \
    CS scaled with 1.018635 and CSEX scaled with 0.760648 */
 #define CS_EQUALISER_48000_A0                    1.641177
-#define CS_EQUALISER_48000_A1                   -2.364687
+#define CS_EQUALISER_48000_A1                   (-2.364687)
 #define CS_EQUALISER_48000_A2                    0.759910
-#define CS_EQUALISER_48000_B1                   -1.166774
+#define CS_EQUALISER_48000_B1                   (-1.166774)
 #define CS_EQUALISER_48000_B2                    0.178074
 #define CS_EQUALISER_48000_SCALE                     13
 #define CSEX_EQUALISER_48000_A0                  2.099655
-#define CSEX_EQUALISER_48000_A1                 -3.065220
+#define CSEX_EQUALISER_48000_A1                 (-3.065220)
 #define CSEX_EQUALISER_48000_A2                  1.010417
-#define CSEX_EQUALISER_48000_B1                 -0.634021
-#define CSEX_EQUALISER_48000_B2                 -0.347332
+#define CSEX_EQUALISER_48000_B1                 (-0.634021)
+#define CSEX_EQUALISER_48000_B2                 (-0.347332)
 #define CSEX_EQUALISER_48000_SCALE                   13
 
 
 #ifdef HIGHER_FS
 #define CS_EQUALISER_96000_A0                    1.784497
-#define CS_EQUALISER_96000_A1                   -3.001435
+#define CS_EQUALISER_96000_A1                   (-3.001435)
 #define CS_EQUALISER_96000_A2                    1.228422
-#define CS_EQUALISER_96000_B1                   -1.477804
+#define CS_EQUALISER_96000_B1                   (-1.477804)
 #define CS_EQUALISER_96000_B2                    0.481369
 #define CS_EQUALISER_96000_SCALE                     13
 #define CSEX_EQUALISER_96000_A0                  2.7573
-#define CSEX_EQUALISER_96000_A1                 -4.6721
+#define CSEX_EQUALISER_96000_A1                 (-4.6721)
 #define CSEX_EQUALISER_96000_A2                  1.9317
-#define CSEX_EQUALISER_96000_B1                 -0.971718
-#define CSEX_EQUALISER_96000_B2                 -0.021216
+#define CSEX_EQUALISER_96000_B1                 (-0.971718)
+#define CSEX_EQUALISER_96000_B2                 (-0.021216)
 #define CSEX_EQUALISER_96000_SCALE                   13
 
 #define CS_EQUALISER_192000_A0                    1.889582
-#define CS_EQUALISER_192000_A1                   -3.456140
+#define CS_EQUALISER_192000_A1                   (-3.456140)
 #define CS_EQUALISER_192000_A2                    1.569864
-#define CS_EQUALISER_192000_B1                   -1.700798
+#define CS_EQUALISER_192000_B1                   (-1.700798)
 #define CS_EQUALISER_192000_B2                    0.701824
 #define CS_EQUALISER_192000_SCALE                     13
 #define CSEX_EQUALISER_192000_A0                  3.4273
-#define CSEX_EQUALISER_192000_A1                 -6.2936
+#define CSEX_EQUALISER_192000_A1                 (-6.2936)
 #define CSEX_EQUALISER_192000_A2                  2.8720
-#define CSEX_EQUALISER_192000_B1                 -1.31074
+#define CSEX_EQUALISER_192000_B1                 (-1.31074)
 #define CSEX_EQUALISER_192000_B2                 0.31312
 #define CSEX_EQUALISER_192000_SCALE                   13
 #endif
diff --git a/media/libeffects/lvm/wrapper/Android.mk b/media/libeffects/lvm/wrapper/Android.mk
index f106aae..341dbc2 100644
--- a/media/libeffects/lvm/wrapper/Android.mk
+++ b/media/libeffects/lvm/wrapper/Android.mk
@@ -1,5 +1,8 @@
 LOCAL_PATH:= $(call my-dir)
 
+# The wrapper -DBUILD_FLOAT needs to match
+# the lvm library -DBUILD_FLOAT.
+
 # music bundle wrapper
 LOCAL_PATH:= $(call my-dir)
 include $(CLEAR_VARS)
@@ -20,15 +23,17 @@
 LOCAL_STATIC_LIBRARIES += libmusicbundle
 
 LOCAL_SHARED_LIBRARIES := \
-     liblog \
+     libaudioutils \
      libcutils \
-     libdl
+     libdl \
+     liblog \
 
 LOCAL_C_INCLUDES += \
 	$(LOCAL_PATH)/Bundle \
 	$(LOCAL_PATH)/../lib/Common/lib/ \
 	$(LOCAL_PATH)/../lib/Bundle/lib/ \
-	$(call include-path-for, audio-effects)
+	$(call include-path-for, audio-effects) \
+	$(call include-path-for, audio-utils) \
 
 LOCAL_HEADER_LIBRARIES += libhardware_headers
 include $(BUILD_SHARED_LIBRARY)
@@ -53,15 +58,20 @@
 LOCAL_STATIC_LIBRARIES += libreverb
 
 LOCAL_SHARED_LIBRARIES := \
-     liblog \
+     libaudioutils \
      libcutils \
-     libdl
+     libdl \
+     liblog \
 
 LOCAL_C_INCLUDES += \
     $(LOCAL_PATH)/Reverb \
     $(LOCAL_PATH)/../lib/Common/lib/ \
     $(LOCAL_PATH)/../lib/Reverb/lib/ \
-    $(call include-path-for, audio-effects)
+    $(call include-path-for, audio-effects) \
+    $(call include-path-for, audio-utils) \
 
 LOCAL_HEADER_LIBRARIES += libhardware_headers
+
+LOCAL_SANITIZE := integer_overflow
+
 include $(BUILD_SHARED_LIBRARY)
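
The makefile comment above records a build constraint: the wrapper's -DBUILD_FLOAT must match the lvm library's, since LVM_FLOAT and the buffer handling depend on it. A purely hypothetical compile-time guard (not in the patch, with invented macro names) that a shared header could use to fail fast on a mismatch:

/* Hypothetical guard (not in the patch): if the wrapper and the lvm library
 * each recorded their float configuration in a distinct macro, a shared
 * header could reject a mismatched build at compile time. The macro names
 * below are invented for illustration. */
#if defined(LVM_WRAPPER_BUILT_FLOAT) != defined(LVM_LIB_BUILT_FLOAT)
#error "BUILD_FLOAT must be set consistently for the lvm wrapper and library"
#endif
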
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 94d4516..04c2692 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -27,6 +27,7 @@
 #include <stdlib.h>
 #include <string.h>
 
+#include <audio_utils/primitives.h>
 #include <log/log.h>
 
 #include "EffectBundle.h"
@@ -63,16 +64,6 @@
         }\
     }
 
-
-static inline int16_t clamp16(int32_t sample)
-{
-    // check overflow for both positive and negative values:
-    // all bits above short range must me equal to sign bit
-    if ((sample>>15) ^ (sample>>31))
-        sample = 0x7FFF ^ (sample>>31);
-    return sample;
-}
-
 // Namespaces
 namespace android {
 namespace {
@@ -304,7 +295,7 @@
         pContext->pBundledContext->SamplesToExitCountVirt   = 0;
         pContext->pBundledContext->SamplesToExitCountBb     = 0;
         pContext->pBundledContext->SamplesToExitCountEq     = 0;
-#ifdef BUILD_FLOAT
+#if defined(BUILD_FLOAT) && !defined(NATIVE_FLOAT_BUFFER)
         pContext->pBundledContext->pInputBuffer             = NULL;
         pContext->pBundledContext->pOutputBuffer            = NULL;
 #endif
@@ -475,13 +466,9 @@
         if (pContext->pBundledContext->workBuffer != NULL) {
             free(pContext->pBundledContext->workBuffer);
         }
-#ifdef BUILD_FLOAT
-        if (pContext->pBundledContext->pInputBuffer != NULL) {
-            free(pContext->pBundledContext->pInputBuffer);
-        }
-        if (pContext->pBundledContext->pOutputBuffer != NULL) {
-            free(pContext->pBundledContext->pOutputBuffer);
-        }
+#if defined(BUILD_FLOAT) && !defined(NATIVE_FLOAT_BUFFER)
+        free(pContext->pBundledContext->pInputBuffer);
+        free(pContext->pBundledContext->pOutputBuffer);
 #endif
         delete pContext->pBundledContext;
         pContext->pBundledContext = LVM_NULL;
@@ -554,7 +541,7 @@
 
     pContext->config.inputCfg.accessMode                    = EFFECT_BUFFER_ACCESS_READ;
     pContext->config.inputCfg.channels                      = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.inputCfg.format                        = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->config.inputCfg.format                        = EFFECT_BUFFER_FORMAT;
     pContext->config.inputCfg.samplingRate                  = 44100;
     pContext->config.inputCfg.bufferProvider.getBuffer      = NULL;
     pContext->config.inputCfg.bufferProvider.releaseBuffer  = NULL;
@@ -562,7 +549,7 @@
     pContext->config.inputCfg.mask                          = EFFECT_CONFIG_ALL;
     pContext->config.outputCfg.accessMode                   = EFFECT_BUFFER_ACCESS_ACCUMULATE;
     pContext->config.outputCfg.channels                     = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.outputCfg.format                       = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->config.outputCfg.format                       = EFFECT_BUFFER_FORMAT;
     pContext->config.outputCfg.samplingRate                 = 44100;
     pContext->config.outputCfg.bufferProvider.getBuffer     = NULL;
     pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
@@ -739,47 +726,6 @@
     return 0;
 }   /* end LvmBundle_init */
 
-#ifdef BUILD_FLOAT
-/**********************************************************************************
-   FUNCTION INT16LTOFLOAT
-***********************************************************************************/
-// Todo: need to write function descriptor
-static void Int16ToFloat(const LVM_INT16 *src, LVM_FLOAT *dst, size_t n) {
-    size_t ii;
-    src += n-1;
-    dst += n-1;
-    for (ii = n; ii != 0; ii--) {
-        *dst = ((LVM_FLOAT)((LVM_INT16)*src)) / 32768.0f;
-        src--;
-        dst--;
-    }
-    return;
-}
-/**********************************************************************************
-   FUNCTION FLOATTOINT16_SAT
-***********************************************************************************/
-// Todo : Need to write function descriptor
-static void FloatToInt16_SAT(const LVM_FLOAT *src, LVM_INT16 *dst, size_t n) {
-    size_t ii;
-    LVM_INT32 temp;
-
-    src += n-1;
-    dst += n-1;
-    for (ii = n; ii != 0; ii--) {
-        temp = (LVM_INT32)((*src) * 32768.0f);
-        if (temp >= 32767) {
-            *dst = 32767;
-        } else if (temp <= -32768) {
-            *dst = -32768;
-        } else {
-            *dst = (LVM_INT16)temp;
-        }
-        src--;
-        dst--;
-    }
-    return;
-}
-#endif
 //----------------------------------------------------------------------------
 // LvmBundle_process()
 //----------------------------------------------------------------------------
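
The hunk above removes the hand-rolled Int16ToFloat and FloatToInt16_SAT helpers; the later hunks replace them with the audio_utils primitives memcpy_to_float_from_i16 and memcpy_to_i16_from_float, called with (dst, src, count) as shown there. A minimal sketch (not from the patch) of the resulting int16 <-> float round trip on the non-NATIVE_FLOAT_BUFFER path, assuming only those two primitives from <audio_utils/primitives.h>:

/* Minimal sketch (not from the patch) of the conversion round trip that
 * replaces the removed helpers; the effect processing itself is elided. */
#include <stdint.h>
#include <stdlib.h>
#include <audio_utils/primitives.h>

#define FCC_2 2  /* stereo: two channels per frame, as in the wrapper */

static int process_i16_via_float(const int16_t *in, int16_t *out,
                                 size_t frameCount) {
    float *tmpIn  = calloc(frameCount * FCC_2, sizeof(float));
    float *tmpOut = calloc(frameCount * FCC_2, sizeof(float));
    if (tmpIn == NULL || tmpOut == NULL) {
        free(tmpIn);
        free(tmpOut);
        return -1;
    }
    memcpy_to_float_from_i16(tmpIn, in, frameCount * FCC_2);   /* Q15 -> float */
    /* ... run the float effect chain on tmpIn, producing tmpOut ... */
    memcpy_to_i16_from_float(out, tmpOut, frameCount * FCC_2); /* float -> Q15,
                                                                  clamped like the
                                                                  removed helper */
    free(tmpIn);
    free(tmpOut);
    return 0;
}
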
@@ -787,8 +733,8 @@
 // Apply LVM Bundle effects
 //
 // Inputs:
-//  pIn:        pointer to stereo 16 bit input data
-//  pOut:       pointer to stereo 16 bit output data
+//  pIn:        pointer to stereo float or 16 bit input data
+//  pOut:       pointer to stereo float or 16 bit output data
 //  frameCount: Frames to process
 //  pContext:   effect engine context
 //  strength    strength to be applied
@@ -798,44 +744,37 @@
 //
 //----------------------------------------------------------------------------
 #ifdef BUILD_FLOAT
-int LvmBundle_process(LVM_INT16        *pIn,
-                      LVM_INT16        *pOut,
+int LvmBundle_process(effect_buffer_t  *pIn,
+                      effect_buffer_t  *pOut,
                       int              frameCount,
                       EffectContext    *pContext){
 
-
-    //LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
     LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
-    LVM_INT16               *pOutTmp;
-    LVM_FLOAT               *pInputBuff;
-    LVM_FLOAT               *pOutputBuff;
-
-    if (pContext->pBundledContext->pInputBuffer == NULL ||
+    effect_buffer_t         *pOutTmp;
+#ifndef NATIVE_FLOAT_BUFFER
+    if (pContext->pBundledContext->pInputBuffer == nullptr ||
             pContext->pBundledContext->frameCount < frameCount) {
-        if (pContext->pBundledContext->pInputBuffer != NULL) {
-            free(pContext->pBundledContext->pInputBuffer);
-        }
-        pContext->pBundledContext->pInputBuffer = (LVM_FLOAT *)malloc(frameCount * \
-                                                                      sizeof(LVM_FLOAT) * FCC_2);
+        free(pContext->pBundledContext->pInputBuffer);
+        pContext->pBundledContext->pInputBuffer =
+                (LVM_FLOAT *)calloc(frameCount, sizeof(LVM_FLOAT) * FCC_2);
     }
 
-    if (pContext->pBundledContext->pOutputBuffer == NULL ||
+    if (pContext->pBundledContext->pOutputBuffer == nullptr ||
             pContext->pBundledContext->frameCount < frameCount) {
-        if (pContext->pBundledContext->pOutputBuffer != NULL) {
-            free(pContext->pBundledContext->pOutputBuffer);
-        }
-        pContext->pBundledContext->pOutputBuffer = (LVM_FLOAT *)malloc(frameCount * \
-                                                                       sizeof(LVM_FLOAT) * FCC_2);
+        free(pContext->pBundledContext->pOutputBuffer);
+        pContext->pBundledContext->pOutputBuffer =
+                (LVM_FLOAT *)calloc(frameCount, sizeof(LVM_FLOAT) * FCC_2);
     }
 
-    if ((pContext->pBundledContext->pInputBuffer == NULL) ||
-                                    (pContext->pBundledContext->pOutputBuffer == NULL)) {
-        ALOGV("LVM_ERROR : LvmBundle_process memory allocation for float buffer's failed");
+    if (pContext->pBundledContext->pInputBuffer == nullptr ||
+            pContext->pBundledContext->pOutputBuffer == nullptr) {
+        ALOGE("LVM_ERROR : LvmBundle_process memory allocation for float buffer's failed");
         return -EINVAL;
     }
 
-    pInputBuff = pContext->pBundledContext->pInputBuffer;
-    pOutputBuff = pContext->pBundledContext->pOutputBuffer;
+    LVM_FLOAT * const pInputBuff = pContext->pBundledContext->pInputBuffer;
+    LVM_FLOAT * const pOutputBuff = pContext->pBundledContext->pOutputBuffer;
+#endif
 
     if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE){
         pOutTmp = pOut;
@@ -845,7 +784,7 @@
                 free(pContext->pBundledContext->workBuffer);
             }
             pContext->pBundledContext->workBuffer =
-                    (LVM_INT16 *)calloc(frameCount, sizeof(LVM_INT16) * FCC_2);
+                    (effect_buffer_t *)calloc(frameCount, sizeof(effect_buffer_t) * FCC_2);
             if (pContext->pBundledContext->workBuffer == NULL) {
                 return -ENOMEM;
             }
@@ -857,43 +796,61 @@
         return -EINVAL;
     }
 
-    #ifdef LVM_PCM
-    fwrite(pIn, frameCount*sizeof(LVM_INT16) * FCC_2, 1, pContext->pBundledContext->PcmInPtr);
+#ifdef LVM_PCM
+    fwrite(pIn,
+            frameCount*sizeof(effect_buffer_t) * FCC_2, 1, pContext->pBundledContext->PcmInPtr);
     fflush(pContext->pBundledContext->PcmInPtr);
-    #endif
+#endif
 
+#ifndef NATIVE_FLOAT_BUFFER
     /* Converting input data from fixed point to float point */
-    Int16ToFloat(pIn, pInputBuff, frameCount * 2);
+    memcpy_to_float_from_i16(pInputBuff, pIn, frameCount * FCC_2);
 
     /* Process the samples */
     LvmStatus = LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
                             pInputBuff,                           /* Input buffer */
                             pOutputBuff,                          /* Output buffer */
                             (LVM_UINT16)frameCount,               /* Number of samples to read */
-                            0);                                   /* Audo Time */
+                            0);                                   /* Audio Time */
 
+    /* Converting output data from float point to fixed point */
+    memcpy_to_i16_from_float(pOutTmp, pOutputBuff, frameCount * FCC_2);
+
+#else
+    /* Process the samples */
+    LvmStatus = LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
+                            pIn,                                  /* Input buffer */
+                            pOutTmp,                              /* Output buffer */
+                            (LVM_UINT16)frameCount,               /* Number of samples to read */
+                            0);                                   /* Audio Time */
+#endif
     LVM_ERROR_CHECK(LvmStatus, "LVM_Process", "LvmBundle_process")
     if(LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    /* Converting output data from float point to fixed point */
-    FloatToInt16_SAT(pOutputBuff, pOutTmp, (LVM_UINT16)frameCount * 2);
-    #ifdef LVM_PCM
-    fwrite(pOutTmp, frameCount*sizeof(LVM_INT16) * FCC_2, 1, pContext->pBundledContext->PcmOutPtr);
+#ifdef LVM_PCM
+    fwrite(pOutTmp,
+            frameCount*sizeof(effect_buffer_t) * FCC_2, 1, pContext->pBundledContext->PcmOutPtr);
     fflush(pContext->pBundledContext->PcmOutPtr);
-    #endif
+#endif
 
     if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
-        for (int i = 0; i < frameCount * 2; i++){
+        for (int i = 0; i < frameCount * FCC_2; i++) {
+#ifndef NATIVE_FLOAT_BUFFER
             pOut[i] = clamp16((LVM_INT32)pOut[i] + (LVM_INT32)pOutTmp[i]);
+#else
+            pOut[i] = pOut[i] + pOutTmp[i];
+#endif
         }
     }
     return 0;
 }    /* end LvmBundle_process */
-#else
+
+#else // BUILD_FLOAT
+
 int LvmBundle_process(LVM_INT16        *pIn,
                       LVM_INT16        *pOut,
                       int              frameCount,
-                      EffectContext    *pContext){
+                      EffectContext    *pContext) {
 
     LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
     LVM_INT16               *pOutTmp;
@@ -906,7 +863,7 @@
                 free(pContext->pBundledContext->workBuffer);
             }
             pContext->pBundledContext->workBuffer =
-                    (LVM_INT16 *)calloc(frameCount, sizeof(LVM_INT16) * 2);
+                    (effect_buffer_t *)calloc(frameCount, sizeof(effect_buffer_t) * FCC_2);
             if (pContext->pBundledContext->workBuffer == NULL) {
                 return -ENOMEM;
             }
@@ -918,10 +875,11 @@
         return -EINVAL;
     }
 
-    #ifdef LVM_PCM
-    fwrite(pIn, frameCount*sizeof(LVM_INT16)*2, 1, pContext->pBundledContext->PcmInPtr);
+#ifdef LVM_PCM
+    fwrite(pIn, frameCount * sizeof(*pIn) * FCC_2,
+            1 /* nmemb */, pContext->pBundledContext->PcmInPtr);
     fflush(pContext->pBundledContext->PcmInPtr);
-    #endif
+#endif
 
     //ALOGV("Calling LVM_Process");
 
@@ -930,15 +888,16 @@
                             pIn,                                  /* Input buffer */
                             pOutTmp,                              /* Output buffer */
                             (LVM_UINT16)frameCount,               /* Number of samples to read */
-                            0);                                   /* Audo Time */
+                            0);                                   /* Audio Time */
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_Process", "LvmBundle_process")
     if(LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    #ifdef LVM_PCM
-    fwrite(pOutTmp, frameCount*sizeof(LVM_INT16)*2, 1, pContext->pBundledContext->PcmOutPtr);
+#ifdef LVM_PCM
+    fwrite(pOutTmp, frameCount * sizeof(*pOutTmp) * FCC_2,
+            1 /* nmemb */, pContext->pBundledContext->PcmOutPtr);
     fflush(pContext->pBundledContext->PcmOutPtr);
-    #endif
+#endif
 
     if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
         for (int i=0; i<frameCount*2; i++){
@@ -947,7 +906,8 @@
     }
     return 0;
 }    /* end LvmBundle_process */
-#endif
+
+#endif // BUILD_FLOAT
 
 //----------------------------------------------------------------------------
 // EqualizerUpdateActiveParams()
@@ -1281,8 +1241,7 @@
     CHECK_ARG(pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
     CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
               || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
-    CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT);
-
+    CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
     pContext->config = *pConfig;
 
     switch (pConfig->inputCfg.samplingRate) {
@@ -3354,10 +3313,17 @@
         pContext->pBundledContext->NumberEffectsCalled = 0;
         /* Process all the available frames, block processing is
           handled internally by the LVM bundle */
-        processStatus = android::LvmBundle_process(    (LVM_INT16 *)inBuffer->raw,
-                                                (LVM_INT16 *)outBuffer->raw,
-                                                outBuffer->frameCount,
-                                                pContext);
+#ifdef NATIVE_FLOAT_BUFFER
+        processStatus = android::LvmBundle_process(inBuffer->f32,
+                                                   outBuffer->f32,
+                                                   outBuffer->frameCount,
+                                                   pContext);
+#else
+        processStatus = android::LvmBundle_process(inBuffer->s16,
+                                                   outBuffer->s16,
+                                                   outBuffer->frameCount,
+                                                   pContext);
+#endif
         if (processStatus != 0){
             ALOGV("\tLVM_ERROR : LvmBundle_process returned error %d", processStatus);
             if (status == 0) {
@@ -3369,14 +3335,19 @@
         //ALOGV("\tEffect_process Not Calling process with %d effects enabled, %d called: Effect %d",
         //pContext->pBundledContext->NumberEffectsEnabled,
         //pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
-        // 2 is for stereo input
+
         if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-            for (size_t i=0; i < outBuffer->frameCount*2; i++){
-                outBuffer->s16[i] =
-                        clamp16((LVM_INT32)outBuffer->s16[i] + (LVM_INT32)inBuffer->s16[i]);
+            for (size_t i = 0; i < outBuffer->frameCount * FCC_2; ++i){
+#ifdef NATIVE_FLOAT_BUFFER
+                outBuffer->f32[i] += inBuffer->f32[i];
+#else
+                outBuffer->s16[i] = clamp16((LVM_INT32)outBuffer->s16[i] + inBuffer->s16[i]);
+#endif
             }
         } else if (outBuffer->raw != inBuffer->raw) {
-            memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount*sizeof(LVM_INT16)*2);
+            memcpy(outBuffer->raw,
+                    inBuffer->raw,
+                    outBuffer->frameCount * sizeof(effect_buffer_t) * FCC_2);
         }
     }
 
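For reference, the hand-rolled Int16ToFloat()/FloatToInt16_SAT() helpers removed above are replaced by the libaudioutils primitives memcpy_to_float_from_i16() and memcpy_to_i16_from_float(). Roughly, the conversion is a Q15 scale, with saturation on the way back to int16; a minimal self-contained sketch with hypothetical local names (not the library implementation):

#include <cstddef>
#include <cstdint>

// Illustrative only: approximates what the <audio_utils/primitives.h>
// conversion helpers do; floatFromI16/i16FromFloat are local names.
static void floatFromI16(float *dst, const int16_t *src, size_t count) {
    for (size_t i = 0; i < count; ++i) {
        dst[i] = src[i] / 32768.0f;              // scale Q15 into [-1.0, 1.0)
    }
}

static void i16FromFloat(int16_t *dst, const float *src, size_t count) {
    for (size_t i = 0; i < count; ++i) {
        float s = src[i] * 32768.0f;
        if (s > 32767.0f) s = 32767.0f;          // saturate positive overflow
        if (s < -32768.0f) s = -32768.0f;        // saturate negative overflow
        dst[i] = static_cast<int16_t>(s);
    }
}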
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index 291383a..6bf045d 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
@@ -95,7 +95,7 @@
     int                             SamplesToExitCountEq;
     int                             SamplesToExitCountBb;
     int                             SamplesToExitCountVirt;
-    LVM_INT16                       *workBuffer;
+    effect_buffer_t                 *workBuffer;
     int                             frameCount;
     int32_t                         bandGaindB[FIVEBAND_NUMBANDS];
     int                             volume;
@@ -103,10 +103,10 @@
     FILE                            *PcmInPtr;
     FILE                            *PcmOutPtr;
     #endif
-    #ifdef BUILD_FLOAT
+#if defined(BUILD_FLOAT) && !defined(NATIVE_FLOAT_BUFFER)
     LVM_FLOAT                       *pInputBuffer;
     LVM_FLOAT                       *pOutputBuffer;
-    #endif
+#endif
 };
 
 /* SessionContext : One session */
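The LVM_INT16 -> effect_buffer_t change above relies on the buffer typedef tracking the build flags. The declaration itself is not part of this hunk; presumably it reads along these lines (assumed sketch, the actual typedef lives elsewhere in EffectBundle.h):

#include <stdint.h>
#include <system/audio.h>   // AUDIO_FORMAT_PCM_FLOAT / AUDIO_FORMAT_PCM_16_BIT

// Assumed declaration, shown only to explain EFFECT_BUFFER_FORMAT above:
// the wrapper handles float samples natively when NATIVE_FLOAT_BUFFER is
// defined, and Q15 int16 samples otherwise.
#ifdef NATIVE_FLOAT_BUFFER
typedef float   effect_buffer_t;
#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
#else
typedef int16_t effect_buffer_t;
#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_16_BIT
#endif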
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 3d8e982..0630285 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -27,6 +27,7 @@
 #include <stdlib.h>
 #include <string.h>
 
+#include <audio_utils/primitives.h>
 #include <log/log.h>
 
 #include "EffectReverb.h"
@@ -135,6 +136,12 @@
         &gInsertPresetReverbDescriptor
 };
 
+#ifdef BUILD_FLOAT
+typedef     float               process_buffer_t; // process in float
+#else
+typedef     int32_t             process_buffer_t; // process in Q4_27
+#endif // BUILD_FLOAT
+
 struct ReverbContext{
     const struct effect_interface_s *itfe;
     effect_config_t                 config;
@@ -152,8 +159,8 @@
     FILE                            *PcmOutPtr;
     #endif
     LVM_Fs_en                       SampleRate;
-    LVM_INT32                       *InFrames32;
-    LVM_INT32                       *OutFrames32;
+    process_buffer_t                *InFrames;
+    process_buffer_t                *OutFrames;
     size_t                          bufferSizeIn;
     size_t                          bufferSizeOut;
     bool                            auxiliary;
@@ -262,7 +269,7 @@
 
     *pHandle = (effect_handle_t)pContext;
 
-    #ifdef LVM_PCM
+#ifdef LVM_PCM
     pContext->PcmInPtr = NULL;
     pContext->PcmOutPtr = NULL;
 
@@ -273,19 +280,15 @@
        (pContext->PcmOutPtr == NULL)){
        return -EINVAL;
     }
-    #endif
+#endif
 
+    int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
 
     // Allocate memory for reverb process (*2 is for STEREO)
-#ifdef BUILD_FLOAT
-    pContext->bufferSizeIn = LVREV_MAX_FRAME_SIZE * sizeof(float) * 2;
-    pContext->bufferSizeOut = pContext->bufferSizeIn;
-#else
-    pContext->bufferSizeIn = LVREV_MAX_FRAME_SIZE * sizeof(LVM_INT32) * 2;
-    pContext->bufferSizeOut = pContext->bufferSizeIn;
-#endif
-    pContext->InFrames32  = (LVM_INT32 *)malloc(pContext->bufferSizeIn);
-    pContext->OutFrames32 = (LVM_INT32 *)malloc(pContext->bufferSizeOut);
+    pContext->bufferSizeIn = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * channels;
+    pContext->bufferSizeOut = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * FCC_2;
+    pContext->InFrames  = (process_buffer_t *)calloc(pContext->bufferSizeIn, 1 /* size */);
+    pContext->OutFrames = (process_buffer_t *)calloc(pContext->bufferSizeOut, 1 /* size */);
 
     ALOGV("\tEffectCreate %p, size %zu", pContext, sizeof(ReverbContext));
     ALOGV("\tEffectCreate end\n");
@@ -305,8 +308,8 @@
     fclose(pContext->PcmInPtr);
     fclose(pContext->PcmOutPtr);
     #endif
-    free(pContext->InFrames32);
-    free(pContext->OutFrames32);
+    free(pContext->InFrames);
+    free(pContext->OutFrames);
     pContext->bufferSizeIn = 0;
     pContext->bufferSizeOut = 0;
     Reverb_free(pContext);
@@ -344,114 +347,6 @@
     }                                         \
 }
 
-#if 0
-//----------------------------------------------------------------------------
-// MonoTo2I_32()
-//----------------------------------------------------------------------------
-// Purpose:
-//  Convert MONO to STEREO
-//
-//----------------------------------------------------------------------------
-
-void MonoTo2I_32( const LVM_INT32  *src,
-                        LVM_INT32  *dst,
-                        LVM_INT16 n)
-{
-   LVM_INT16 ii;
-   src += (n-1);
-   dst += ((n*2)-1);
-
-   for (ii = n; ii != 0; ii--)
-   {
-       *dst = *src;
-       dst--;
-
-       *dst = *src;
-       dst--;
-       src--;
-   }
-
-   return;
-}
-
-//----------------------------------------------------------------------------
-// From2iToMono_32()
-//----------------------------------------------------------------------------
-// Purpose:
-//  Convert STEREO to MONO
-//
-//----------------------------------------------------------------------------
-
-void From2iToMono_32( const LVM_INT32 *src,
-                            LVM_INT32 *dst,
-                            LVM_INT16 n)
-{
-   LVM_INT16 ii;
-   LVM_INT32 Temp;
-
-   for (ii = n; ii != 0; ii--)
-   {
-       Temp = (*src>>1);
-       src++;
-
-       Temp +=(*src>>1);
-       src++;
-
-       *dst = Temp;
-       dst++;
-   }
-
-   return;
-}
-#endif
-
-#ifdef BUILD_FLOAT
-/**********************************************************************************
-   FUNCTION INT16LTOFLOAT
-***********************************************************************************/
-// Todo: need to write function descriptor
-static void Int16ToFloat(const LVM_INT16 *src, LVM_FLOAT *dst, size_t n) {
-    size_t ii;
-    src += n-1;
-    dst += n-1;
-    for (ii = n; ii != 0; ii--) {
-        *dst = ((LVM_FLOAT)((LVM_INT16)*src)) / 32768.0f;
-        src--;
-        dst--;
-    }
-    return;
-}
-/**********************************************************************************
-   FUNCTION FLOATTOINT16_SAT
-***********************************************************************************/
-// Todo : Need to write function descriptor
-static void FloatToInt16_SAT(const LVM_FLOAT *src, LVM_INT16 *dst, size_t n) {
-    size_t ii;
-    LVM_INT32 temp;
-
-    for (ii = 0; ii < n; ii++) {
-        temp = (LVM_INT32)((*src) * 32768.0f);
-        if (temp >= 32767) {
-            *dst = 32767;
-        } else if (temp <= -32768) {
-            *dst = -32768;
-        } else {
-            *dst = (LVM_INT16)temp;
-        }
-        src++;
-        dst++;
-    }
-    return;
-}
-#endif
-
-static inline int16_t clamp16(int32_t sample)
-{
-    if ((sample>>15) ^ (sample>>31))
-        sample = 0x7FFF ^ (sample>>31);
-    return sample;
-}
-
 //----------------------------------------------------------------------------
 // process()
 //----------------------------------------------------------------------------
@@ -459,8 +354,8 @@
 // Apply the Reverb
 //
 // Inputs:
-//  pIn:        pointer to stereo/mono 16 bit input data
-//  pOut:       pointer to stereo 16 bit output data
+//  pIn:        pointer to stereo/mono float or 16 bit input data
+//  pOut:       pointer to stereo float or 16 bit output data
 //  frameCount: Frames to process
 //  pContext:   effect engine context
 //  strength    strength to be applied
@@ -469,116 +364,107 @@
 //  pOut:       pointer to updated stereo 16 bit output data
 //
 //----------------------------------------------------------------------------
-
-int process( LVM_INT16     *pIn,
-             LVM_INT16     *pOut,
+int process( effect_buffer_t   *pIn,
+             effect_buffer_t   *pOut,
              int           frameCount,
              ReverbContext *pContext){
 
-    LVM_INT16               samplesPerFrame = 1;
+    int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
     LVREV_ReturnStatus_en   LvmStatus = LVREV_SUCCESS;              /* Function call status */
-    LVM_INT16 *OutFrames16;
-#ifdef BUILD_FLOAT
-    LVM_FLOAT               *pInputBuff;
-    LVM_FLOAT               *pOutputBuff;
-#endif
 
-#ifdef BUILD_FLOAT
-    if (pContext->InFrames32 == NULL ||
-            pContext->bufferSizeIn < frameCount * sizeof(float) * 2) {
-        if (pContext->InFrames32 != NULL) {
-            free(pContext->InFrames32);
-        }
-        pContext->bufferSizeIn = frameCount * sizeof(float) * 2;
-        pContext->InFrames32 = (LVM_INT32 *)malloc(pContext->bufferSizeIn);
-    }
-    if (pContext->OutFrames32 == NULL ||
-            pContext->bufferSizeOut < frameCount * sizeof(float) * 2) {
-        if (pContext->OutFrames32 != NULL) {
-            free(pContext->OutFrames32);
-        }
-        pContext->bufferSizeOut = frameCount * sizeof(float) * 2;
-        pContext->OutFrames32 = (LVM_INT32 *)malloc(pContext->bufferSizeOut);
-    }
-    pInputBuff = (float *)pContext->InFrames32;
-    pOutputBuff = (float *)pContext->OutFrames32;
-#endif
     // Check that the input is either mono or stereo
-    if (pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO) {
-        samplesPerFrame = 2;
-    } else if (pContext->config.inputCfg.channels != AUDIO_CHANNEL_OUT_MONO) {
-        ALOGV("\tLVREV_ERROR : process invalid PCM format");
+    if (!(channels == 1 || channels == FCC_2) ) {
+        ALOGE("\tLVREV_ERROR : process invalid PCM format");
         return -EINVAL;
     }
 
-    OutFrames16 = (LVM_INT16 *)pContext->OutFrames32;
+#ifdef BUILD_FLOAT
+    size_t inSize = frameCount * sizeof(process_buffer_t) * channels;
+    size_t outSize = frameCount * sizeof(process_buffer_t) * FCC_2;
+    if (pContext->InFrames == NULL ||
+            pContext->bufferSizeIn < inSize) {
+        free(pContext->InFrames);
+        pContext->bufferSizeIn = inSize;
+        pContext->InFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeIn);
+    }
+    if (pContext->OutFrames == NULL ||
+            pContext->bufferSizeOut < outSize) {
+        free(pContext->OutFrames);
+        pContext->bufferSizeOut = outSize;
+        pContext->OutFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeOut);
+    }
+
+#ifndef NATIVE_FLOAT_BUFFER
+    effect_buffer_t * const OutFrames16 = (effect_buffer_t *)pContext->OutFrames;
+#endif
+#endif
 
     // Check for NULL pointers
-    if((pContext->InFrames32 == NULL)||(pContext->OutFrames32 == NULL)){
-        ALOGV("\tLVREV_ERROR : process failed to allocate memory for temporary buffers ");
+    if ((pContext->InFrames == NULL) || (pContext->OutFrames == NULL)) {
+        ALOGE("\tLVREV_ERROR : process failed to allocate memory for temporary buffers ");
         return -EINVAL;
     }
 
-    #ifdef LVM_PCM
-    fwrite(pIn, frameCount*sizeof(LVM_INT16)*samplesPerFrame, 1, pContext->PcmInPtr);
+#ifdef LVM_PCM
+    fwrite(pIn, frameCount * sizeof(*pIn) * channels, 1 /* nmemb */, pContext->PcmInPtr);
     fflush(pContext->PcmInPtr);
-    #endif
+#endif
 
     if (pContext->preset && pContext->nextPreset != pContext->curPreset) {
         Reverb_LoadPreset(pContext);
     }
 
-    // Convert to Input 32 bits
     if (pContext->auxiliary) {
 #ifdef BUILD_FLOAT
-        Int16ToFloat(pIn, pInputBuff, frameCount * samplesPerFrame);
+#ifdef NATIVE_FLOAT_BUFFER
+        static_assert(std::is_same<decltype(*pIn), decltype(*pContext->InFrames)>::value,
+                "pIn and InFrames must be same type");
+        memcpy(pContext->InFrames, pIn, frameCount * channels * sizeof(*pIn));
 #else
-        for(int i=0; i<frameCount*samplesPerFrame; i++){
-            pContext->InFrames32[i] = (LVM_INT32)pIn[i]<<8;
+        memcpy_to_float_from_i16(
+                pContext->InFrames, pIn, frameCount * channels);
+#endif
+#else //no BUILD_FLOAT
+        for (int i = 0; i < frameCount * channels; i++) {
+            pContext->InFrames[i] = (process_buffer_t)pIn[i]<<8;
         }
 #endif
         } else {
         // insert reverb input is always stereo
         for (int i = 0; i < frameCount; i++) {
-#ifndef BUILD_FLOAT
-            pContext->InFrames32[2*i] = (pIn[2*i] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12
-            pContext->InFrames32[2*i+1] = (pIn[2*i+1] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12
+#ifdef BUILD_FLOAT
+#ifdef NATIVE_FLOAT_BUFFER
+            pContext->InFrames[2 * i] = (process_buffer_t)pIn[2 * i] * REVERB_SEND_LEVEL;
+            pContext->InFrames[2 * i + 1] = (process_buffer_t)pIn[2 * i + 1] * REVERB_SEND_LEVEL;
 #else
-            pInputBuff[2 * i] = (LVM_FLOAT)pIn[2 * i] * REVERB_SEND_LEVEL / 32768.0f;
-            pInputBuff[2 * i + 1] = (LVM_FLOAT)pIn[2 * i + 1] * REVERB_SEND_LEVEL / 32768.0f;
+            pContext->InFrames[2 * i] =
+                    (process_buffer_t)pIn[2 * i] * REVERB_SEND_LEVEL / 32768.0f;
+            pContext->InFrames[2 * i + 1] =
+                    (process_buffer_t)pIn[2 * i + 1] * REVERB_SEND_LEVEL / 32768.0f;
+#endif
+#else
+            pContext->InFrames[2*i] = (pIn[2*i] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12
+            pContext->InFrames[2*i+1] = (pIn[2*i+1] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12
 #endif
         }
     }
 
     if (pContext->preset && pContext->curPreset == REVERB_PRESET_NONE) {
-#ifdef BUILD_FLOAT
-        memset(pOutputBuff, 0, frameCount * sizeof(LVM_FLOAT) * 2); //always stereo here
-#else
-        memset(pContext->OutFrames32, 0, frameCount * sizeof(LVM_INT32) * 2); //always stereo here
-#endif
+        memset(pContext->OutFrames, 0,
+                frameCount * sizeof(*pContext->OutFrames) * FCC_2); //always stereo here
     } else {
         if(pContext->bEnabled == LVM_FALSE && pContext->SamplesToExitCount > 0) {
-#ifdef BUILD_FLOAT
-            memset(pInputBuff, 0, frameCount * sizeof(LVM_FLOAT) * samplesPerFrame);
-#else
-            memset(pContext->InFrames32,0,frameCount * sizeof(LVM_INT32) * samplesPerFrame);
-#endif
-            ALOGV("\tZeroing %d samples per frame at the end of call", samplesPerFrame);
+            memset(pContext->InFrames, 0,
+                    frameCount * sizeof(*pContext->OutFrames) * channels);
+            ALOGV("\tZeroing %d samples per frame at the end of call", channels);
         }
 
         /* Process the samples, producing a stereo output */
-#ifdef BUILD_FLOAT
         LvmStatus = LVREV_Process(pContext->hInstance,      /* Instance handle */
-                                  pInputBuff,     /* Input buffer */
-                                  pOutputBuff,    /* Output buffer */
+                                  pContext->InFrames,     /* Input buffer */
+                                  pContext->OutFrames,    /* Output buffer */
                                   frameCount);              /* Number of samples to read */
-#else
-        LvmStatus = LVREV_Process(pContext->hInstance,      /* Instance handle */
-                                  pContext->InFrames32,     /* Input buffer */
-                                  pContext->OutFrames32,    /* Output buffer */
-                                  frameCount);              /* Number of samples to read */
-#endif
-        }
+    }
 
     LVM_ERROR_CHECK(LvmStatus, "LVREV_Process", "process")
     if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
@@ -586,55 +472,87 @@
     // Convert to 16 bits
     if (pContext->auxiliary) {
 #ifdef BUILD_FLOAT
-        FloatToInt16_SAT(pOutputBuff, OutFrames16, (size_t)frameCount * 2);
-#else
-        for (int i=0; i < frameCount*2; i++) { //always stereo here
-            OutFrames16[i] = clamp16(pContext->OutFrames32[i]>>8);
-        }
+        // nothing to do here
+#ifndef NATIVE_FLOAT_BUFFER
+        // pContext->OutFrames and OutFrames16 point to the same buffer
+        // make sure the float to int conversion happens in the right order.
+        memcpy_to_i16_from_float(OutFrames16, pContext->OutFrames,
+                (size_t)frameCount * FCC_2);
 #endif
-        } else {
-#ifdef BUILD_FLOAT
-            for (int i = 0; i < frameCount * 2; i++) {//always stereo here
-                //pOutputBuff and OutFrames16 point to the same buffer, so better to
-                //accumulate in pInputBuff, which is available
-                pInputBuff[i] = pOutputBuff[i] + (LVM_FLOAT)pIn[i] / 32768.0f;
-            }
-
-            FloatToInt16_SAT(pInputBuff, OutFrames16, (size_t)frameCount * 2);
 #else
-            for (int i=0; i < frameCount*2; i++) { //always stereo here
-                OutFrames16[i] = clamp16((pContext->OutFrames32[i]>>8) + (LVM_INT32)pIn[i]);
-            }
+        memcpy_to_i16_from_q4_27(OutFrames16, pContext->OutFrames, (size_t)frameCount * FCC_2);
+#endif
+    } else {
+#ifdef BUILD_FLOAT
+#ifdef NATIVE_FLOAT_BUFFER
+        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
+            // Mix with dry input
+            pContext->OutFrames[i] += pIn[i];
+        }
+#else
+        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
+            // pOutputBuff and OutFrames16 point to the same buffer
+            // make sure the float to int conversion happens in the right order.
+            pContext->OutFrames[i] += (process_buffer_t)pIn[i] / 32768.0f;
+        }
+        memcpy_to_i16_from_float(OutFrames16, pContext->OutFrames,
+                (size_t)frameCount * FCC_2);
+#endif
+#else
+        for (int i=0; i < frameCount * FCC_2; i++) { // always stereo here
+            OutFrames16[i] = clamp16((pContext->OutFrames[i]>>8) + (process_buffer_t)pIn[i]);
+        }
 #endif
         // apply volume with ramp if needed
         if ((pContext->leftVolume != pContext->prevLeftVolume ||
                 pContext->rightVolume != pContext->prevRightVolume) &&
                 pContext->volumeMode == REVERB_VOLUME_RAMP) {
+#if defined (BUILD_FLOAT) && defined (NATIVE_FLOAT_BUFFER)
+            // FIXME: still using int16 volumes.
+            // For reference: REVERB_UNIT_VOLUME  (0x1000) // 1.0 in 4.12 format
+            float vl = (float)pContext->prevLeftVolume / 4096;
+            float incl = (((float)pContext->leftVolume / 4096) - vl) / frameCount;
+            float vr = (float)pContext->prevRightVolume / 4096;
+            float incr = (((float)pContext->rightVolume / 4096) - vr) / frameCount;
+
+            for (int i = 0; i < frameCount; i++) {
+                pContext->OutFrames[FCC_2 * i] *= vl;
+                pContext->OutFrames[FCC_2 * i + 1] *= vr;
+
+                vl += incl;
+                vr += incr;
+            }
+#else
             LVM_INT32 vl = (LVM_INT32)pContext->prevLeftVolume << 16;
             LVM_INT32 incl = (((LVM_INT32)pContext->leftVolume << 16) - vl) / frameCount;
             LVM_INT32 vr = (LVM_INT32)pContext->prevRightVolume << 16;
             LVM_INT32 incr = (((LVM_INT32)pContext->rightVolume << 16) - vr) / frameCount;
 
             for (int i = 0; i < frameCount; i++) {
-                OutFrames16[2*i] =
+                OutFrames16[FCC_2 * i] =
                         clamp16((LVM_INT32)((vl >> 16) * OutFrames16[2*i]) >> 12);
-                OutFrames16[2*i+1] =
+                OutFrames16[FCC_2 * i + 1] =
                         clamp16((LVM_INT32)((vr >> 16) * OutFrames16[2*i+1]) >> 12);
 
                 vl += incl;
                 vr += incr;
             }
-
+#endif
             pContext->prevLeftVolume = pContext->leftVolume;
             pContext->prevRightVolume = pContext->rightVolume;
         } else if (pContext->volumeMode != REVERB_VOLUME_OFF) {
             if (pContext->leftVolume != REVERB_UNIT_VOLUME ||
                 pContext->rightVolume != REVERB_UNIT_VOLUME) {
                 for (int i = 0; i < frameCount; i++) {
-                    OutFrames16[2*i] =
+#if defined(BUILD_FLOAT) && defined(NATIVE_FLOAT_BUFFER)
+                    pContext->OutFrames[FCC_2 * i] *= ((float)pContext->leftVolume / 4096);
+                    pContext->OutFrames[FCC_2 * i + 1] *= ((float)pContext->rightVolume / 4096);
+#else
+                    OutFrames16[FCC_2 * i] =
                             clamp16((LVM_INT32)(pContext->leftVolume * OutFrames16[2*i]) >> 12);
-                    OutFrames16[2*i+1] =
+                    OutFrames16[FCC_2 * i + 1] =
                             clamp16((LVM_INT32)(pContext->rightVolume * OutFrames16[2*i+1]) >> 12);
+#endif
                 }
             }
             pContext->prevLeftVolume = pContext->leftVolume;
@@ -643,20 +561,25 @@
         }
     }
 
-    #ifdef LVM_PCM
-    fwrite(OutFrames16, frameCount*sizeof(LVM_INT16)*2, 1, pContext->PcmOutPtr);
+#ifdef LVM_PCM
+    fwrite(pContext->OutFrames, frameCount * sizeof(*pContext->OutFrames) * FCC_2,
+            1 /* nmemb */, pContext->PcmOutPtr);
     fflush(pContext->PcmOutPtr);
-    #endif
+#endif
 
     // Accumulate if required
     if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
         //ALOGV("\tBuffer access is ACCUMULATE");
-        for (int i=0; i<frameCount*2; i++){ //always stereo here
+        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
+#ifndef NATIVE_FLOAT_BUFFER
             pOut[i] = clamp16((int32_t)pOut[i] + (int32_t)OutFrames16[i]);
+#else
+            pOut[i] += pContext->OutFrames[i];
+#endif
         }
     }else{
         //ALOGV("\tBuffer access is WRITE");
-        memcpy(pOut, OutFrames16, frameCount*sizeof(LVM_INT16)*2);
+        memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
     }
 
     return 0;
@@ -733,8 +656,7 @@
     CHECK_ARG(pConfig->outputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
     CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
               || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
-    CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT);
-
+    CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
     //ALOGV("\tReverb_setConfig calling memcpy");
     pContext->config = *pConfig;
 
@@ -847,8 +769,7 @@
     } else {
         pContext->config.inputCfg.channels                  = AUDIO_CHANNEL_OUT_STEREO;
     }
-
-    pContext->config.inputCfg.format                        = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->config.inputCfg.format                        = EFFECT_BUFFER_FORMAT;
     pContext->config.inputCfg.samplingRate                  = 44100;
     pContext->config.inputCfg.bufferProvider.getBuffer      = NULL;
     pContext->config.inputCfg.bufferProvider.releaseBuffer  = NULL;
@@ -856,7 +777,7 @@
     pContext->config.inputCfg.mask                          = EFFECT_CONFIG_ALL;
     pContext->config.outputCfg.accessMode                   = EFFECT_BUFFER_ACCESS_ACCUMULATE;
     pContext->config.outputCfg.channels                     = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.outputCfg.format                       = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->config.outputCfg.format                       = EFFECT_BUFFER_FORMAT;
     pContext->config.outputCfg.samplingRate                 = 44100;
     pContext->config.outputCfg.bufferProvider.getBuffer     = NULL;
     pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
@@ -2031,10 +1952,17 @@
     }
     //ALOGV("\tReverb_process() Calling process with %d frames", outBuffer->frameCount);
     /* Process all the available frames, block processing is handled internally by the LVM bundle */
-    status = process(    (LVM_INT16 *)inBuffer->raw,
-                         (LVM_INT16 *)outBuffer->raw,
-                                      outBuffer->frameCount,
-                                      pContext);
+#if defined (BUILD_FLOAT) && defined (NATIVE_FLOAT_BUFFER)
+    status = process(    inBuffer->f32,
+                         outBuffer->f32,
+                         outBuffer->frameCount,
+                         pContext);
+#else
+    status = process(    inBuffer->s16,
+                         outBuffer->s16,
+                         outBuffer->frameCount,
+                         pContext);
+#endif
 
     if (pContext->bEnabled == LVM_FALSE) {
         if (pContext->SamplesToExitCount > 0) {
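The float volume ramp added in process() above (the FIXME block) converts the 4.12 fixed-point volumes to float (REVERB_UNIT_VOLUME, 0x1000, maps to 1.0) and interpolates them linearly across the buffer to avoid zipper noise. The technique in isolation, as a hypothetical standalone helper (assumes frameCount > 0 and interleaved stereo):

#include <cstddef>
#include <cstdint>

static void rampStereoVolume(float *stereo, size_t frameCount,
                             int16_t prevLeftQ12, int16_t leftQ12,
                             int16_t prevRightQ12, int16_t rightQ12) {
    float vl = prevLeftQ12 / 4096.0f;                        // 4.12 -> float
    float vr = prevRightQ12 / 4096.0f;
    const float incl = (leftQ12 / 4096.0f - vl) / frameCount;
    const float incr = (rightQ12 / 4096.0f - vr) / frameCount;
    for (size_t i = 0; i < frameCount; ++i) {
        stereo[2 * i]     *= vl;                             // left sample
        stereo[2 * i + 1] *= vr;                             // right sample
        vl += incl;
        vr += incr;
    }
}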
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index c33f9f5..807f24d 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -594,7 +594,9 @@
                     deltaSmpl = CAPTURE_BUF_SIZE;
                 }
 
-                int32_t capturePoint = (int32_t)pContext->mCaptureIdx - deltaSmpl;
+                int32_t capturePoint;
+                //capturePoint = (int32_t)pContext->mCaptureIdx - deltaSmpl;
+                __builtin_sub_overflow((int32_t)pContext->mCaptureIdx, deltaSmpl, &capturePoint);
                 // a negative capturePoint means we wrap the buffer.
                 if (capturePoint < 0) {
                     uint32_t size = -capturePoint;
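The change above swaps a plain subtraction for __builtin_sub_overflow() (a GCC/Clang builtin), presumably so the signed-integer-overflow sanitizer enabled for this library cannot abort if the operands ever wrap: the builtin stores the two's-complement result and reports whether overflow occurred instead of invoking undefined behavior. Standalone illustration (not part of the patch):

#include <cstdint>
#include <cstdio>

int main() {
    int32_t result;
    // Returns true on overflow; result still receives the wrapped value.
    bool overflowed = __builtin_sub_overflow(INT32_MIN, 1, &result);
    std::printf("result=%d overflowed=%d\n", result, overflowed ? 1 : 0);
    return 0;
}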
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 4b131a7..a63a2df 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -25,8 +25,8 @@
 #include <drm/drm_framework_common.h>
 #include <media/IDataSource.h>
 #include <media/mediametadataretriever.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaSource.h>
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
 #include <utils/RefBase.h>
@@ -270,7 +270,9 @@
     // it's not, default to HAL_PIXEL_FORMAT_RGB_565.
     mOutputColor(HAL_PIXEL_FORMAT_RGB_565),
     mCurScanline(0),
-    mFrameDecoded(false) {
+    mFrameDecoded(false),
+    mHasImage(false),
+    mHasVideo(false) {
 }
 
 HeifDecoderImpl::~HeifDecoderImpl() {
@@ -278,6 +280,8 @@
 
 bool HeifDecoderImpl::init(HeifStream* stream, HeifFrameInfo* frameInfo) {
     mFrameDecoded = false;
+    mFrameMemory.clear();
+
     sp<HeifDataSource> dataSource = new HeifDataSource(stream);
     if (!dataSource->init()) {
         return false;
@@ -285,7 +289,7 @@
     mDataSource = dataSource;
 
     mRetriever = new MediaMetadataRetriever();
-    status_t err = mRetriever->setDataSource(mDataSource, "video/mp4");
+    status_t err = mRetriever->setDataSource(mDataSource, "image/heif");
     if (err != OK) {
         ALOGE("failed to set data source!");
 
@@ -295,15 +299,21 @@
     }
     ALOGV("successfully set data source.");
 
+    const char* hasImage = mRetriever->extractMetadata(METADATA_KEY_HAS_IMAGE);
     const char* hasVideo = mRetriever->extractMetadata(METADATA_KEY_HAS_VIDEO);
-    if (!hasVideo || strcasecmp(hasVideo, "yes")) {
-        ALOGE("no video: %s", hasVideo ? hasVideo : "null");
-        return false;
+
+    mHasImage = hasImage && !strcasecmp(hasImage, "yes");
+    mHasVideo = hasVideo && !strcasecmp(hasVideo, "yes");
+    if (mHasImage) {
+        // image index < 0 to retrieve primary image
+        mFrameMemory = mRetriever->getImageAtIndex(
+                -1, mOutputColor, true /*metaOnly*/);
+    } else if (mHasVideo) {
+        mFrameMemory = mRetriever->getFrameAtTime(0,
+                MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
+                mOutputColor, true /*metaOnly*/);
     }
 
-    mFrameMemory = mRetriever->getFrameAtTime(0,
-            IMediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
-            mOutputColor, true /*metaOnly*/);
     if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
         ALOGE("getFrameAtTime: videoFrame is a nullptr");
         return false;
@@ -368,8 +378,14 @@
         return true;
     }
 
-    mFrameMemory = mRetriever->getFrameAtTime(0,
-            IMediaSource::ReadOptions::SEEK_PREVIOUS_SYNC, mOutputColor);
+    if (mHasImage) {
+        // image index < 0 to retrieve primary image
+        mFrameMemory = mRetriever->getImageAtIndex(-1, mOutputColor);
+    } else if (mHasVideo) {
+        mFrameMemory = mRetriever->getFrameAtTime(0,
+                MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC, mOutputColor);
+    }
+
     if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
         ALOGE("getFrameAtTime: videoFrame is a nullptr");
         return false;
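The new image/video branching in HeifDecoderImpl boils down to a null-safe, case-insensitive check for the literal "yes" on each metadata key, then choosing getImageAtIndex(-1, ...) for still images and getFrameAtTime(0, ...) for video sequences. The check pattern, as a small hypothetical helper (not in the patch):

#include <strings.h>   // strcasecmp

// Hypothetical helper: true only for a non-null, case-insensitive "yes".
static inline bool metadataIsYes(const char *value) {
    return value != nullptr && strcasecmp(value, "yes") == 0;
}

// e.g. mHasImage = metadataIsYes(mRetriever->extractMetadata(METADATA_KEY_HAS_IMAGE));
// e.g. mHasVideo = metadataIsYes(mRetriever->extractMetadata(METADATA_KEY_HAS_VIDEO));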
diff --git a/media/libheif/HeifDecoderImpl.h b/media/libheif/HeifDecoderImpl.h
index c2e4ff3..406c2c1 100644
--- a/media/libheif/HeifDecoderImpl.h
+++ b/media/libheif/HeifDecoderImpl.h
@@ -55,6 +55,8 @@
     android_pixel_format_t mOutputColor;
     size_t mCurScanline;
     bool mFrameDecoded;
+    bool mHasImage;
+    bool mHasVideo;
 };
 
 } // namespace android
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index a462f3a..0b4fd25 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -24,7 +24,7 @@
         "-Wno-error=deprecated-declarations",
         "-Wall",
     ],
-    shared_libs: ["libutils", "liblog", "libgui"],
+    shared_libs: ["libutils", "liblog"],
     header_libs: [
         "libmedia_headers",
         "libaudioclient_headers",
@@ -33,22 +33,21 @@
     clang: true,
 }
 
-// TODO(b/35449087): merge back with libmedia when OMX implementatoins
-// no longer use aidl wrappers (or remove OMX component form libmedia)
-cc_defaults {
-    name: "libmedia_omx_defaults",
+cc_library_shared {
+    name: "libmedia_omx",
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
 
     srcs: [
         "aidl/android/IGraphicBufferSource.aidl",
         "aidl/android/IOMXBufferSource.aidl",
 
         "IMediaCodecList.cpp",
-        "IMediaCodecService.cpp",
         "IOMX.cpp",
-        "IOMXStore.cpp",
         "MediaCodecBuffer.cpp",
         "MediaCodecInfo.cpp",
-        "MediaDefs.cpp",
         "OMXBuffer.cpp",
         "omx/1.0/WGraphicBufferSource.cpp",
         "omx/1.0/WOmx.cpp",
@@ -63,18 +62,13 @@
     },
 
     shared_libs: [
-        "android.hidl.memory@1.0",
         "android.hidl.token@1.0-utils",
         "android.hardware.media.omx@1.0",
-        "android.hardware.media@1.0",
-        "libbase",
         "libbinder",
         "libcutils",
         "libgui",
         "libhidlbase",
-        "libhidlmemory",
         "libhidltransport",
-        "libhwbinder",
         "liblog",
         "libstagefright_foundation",
         "libui",
@@ -82,11 +76,8 @@
     ],
 
     export_shared_lib_headers: [
-        "android.hidl.memory@1.0",
         "android.hidl.token@1.0-utils",
         "android.hardware.media.omx@1.0",
-        "android.hardware.media@1.0",
-        "libhidlmemory",
         "libstagefright_foundation",
         "libui",
     ],
@@ -121,23 +112,38 @@
     },
 }
 
-cc_library_shared {
-    name: "libmedia_omx",
-    vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
+cc_library_static {
+    name: "libmedia_midiiowrapper",
 
-    defaults: ["libmedia_omx_defaults"],
+    srcs: ["MidiIoWrapper.cpp"],
+
+    static_libs: [
+        "libsonivox",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
 }
 
 cc_library_shared {
     name: "libmedia",
-    defaults: ["libmedia_omx_defaults"],
 
     srcs: [
         "IDataSource.cpp",
-        "IHDCP.cpp",
         "BufferingSettings.cpp",
         "mediaplayer.cpp",
         "IMediaHTTPConnection.cpp",
@@ -161,7 +167,6 @@
         "IMediaMetadataRetriever.cpp",
         "mediametadataretriever.cpp",
         "MidiDeviceInfo.cpp",
-        "MidiIoWrapper.cpp",
         "JetPlayer.cpp",
         "MediaScanner.cpp",
         "MediaScannerClient.cpp",
@@ -175,7 +180,6 @@
     ],
 
     shared_libs: [
-        "libui",
         "liblog",
         "libcutils",
         "libutils",
@@ -186,36 +190,170 @@
         "libexpat",
         "libcamera_client",
         "libstagefright_foundation",
+        "libmediaextractor",
         "libgui",
         "libdl",
         "libaudioutils",
         "libaudioclient",
-        "libmedia_helper",
-        "libmediadrm",
-        "libmediametrics",
-        "libbase",
-        "libhidlbase",
-        "libhidltransport",
-        "libhwbinder",
-        "libhidlmemory",
-        "android.hidl.memory@1.0",
-        "android.hardware.graphics.common@1.0",
-        "android.hardware.graphics.bufferqueue@1.0",
+        "libmedia_omx",
     ],
 
     export_shared_lib_headers: [
+        "libaudioclient",
         "libbinder",
         "libicuuc",
         "libicui18n",
         "libsonivox",
-        "libmediadrm",
-        "libmedia_helper",
-        "android.hidl.memory@1.0",
+        "libmedia_omx",
     ],
 
-    // for memory heap analysis
     static_libs: [
-        "libc_malloc_debug_backtrace",
+        "libc_malloc_debug_backtrace",  // for memory heap analysis
+        "libmedia_midiiowrapper",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+}
+
+cc_library_shared {
+    name: "libmedia_player2_util",
+
+    srcs: [
+        "BufferingSettings.cpp",
+        "IDataSource.cpp",
+        "IMediaExtractor.cpp",
+        "IMediaExtractorService.cpp",
+        "IMediaSource.cpp",
+        "IStreamSource.cpp",
+        "MediaUtils.cpp",
+        "Metadata.cpp",
+        "NdkWrapper.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libmediaextractor",
+        "libmediandk",
+        "libnativewindow",
+        "libstagefright_foundation",
+        "libui",
+        "libutils",
+    ],
+
+    export_shared_lib_headers: [
+        "libbinder",
+        "libmediandk",
+    ],
+
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    static_libs: [
+        "libc_malloc_debug_backtrace",  // for memory heap analysis
+
+        "libstagefright_nuplayer2",
+        "libstagefright_rtsp",
+        "libstagefright_timedtext",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+}
+
+cc_library_shared {
+    name: "libmedia_player2",
+
+    srcs: [
+        "AudioParameter.cpp",
+        "MediaPlayer2Factory.cpp",
+        "MediaPlayer2Manager.cpp",
+        "TestPlayerStub.cpp",
+        "TypeConverter.cpp",
+        "mediaplayer2.cpp",
+    ],
+
+    shared_libs: [
+        "libaudioclient",
+        "libbinder",
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libmedia_omx",
+        "libmedia_player2_util",
+        "libmediaextractor",
+        "libstagefright_foundation",
+        "libui",
+        "libutils",
+
+        "libcrypto",
+        "libmediadrm",
+        "libmediametrics",
+        "libmediandk",
+        "libmediautils",
+        "libmemunreachable",
+        "libnativewindow",
+        "libpowermanager",
+        "libstagefright_httplive",
+        "libstagefright_player2",
+    ],
+
+    export_shared_lib_headers: [
+        "libaudioclient",
+        "libbinder",
+        "libmedia_omx",
+    ],
+
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    static_libs: [
+        "libc_malloc_debug_backtrace",  // for memory heap analysis
+
+        "libstagefright_nuplayer2",
+        "libstagefright_rtsp",
+        "libstagefright_timedtext",
     ],
 
     export_include_dirs: [
diff --git a/media/libmedia/BufferingSettings.cpp b/media/libmedia/BufferingSettings.cpp
index a69497e..271a238 100644
--- a/media/libmedia/BufferingSettings.cpp
+++ b/media/libmedia/BufferingSettings.cpp
@@ -23,43 +23,16 @@
 
 namespace android {
 
-// static
-bool BufferingSettings::IsValidBufferingMode(int mode) {
-    return (mode >= BUFFERING_MODE_NONE && mode < BUFFERING_MODE_COUNT);
-}
-
-// static
-bool BufferingSettings::IsTimeBasedBufferingMode(int mode) {
-    return (mode == BUFFERING_MODE_TIME_ONLY || mode == BUFFERING_MODE_TIME_THEN_SIZE);
-}
-
-// static
-bool BufferingSettings::IsSizeBasedBufferingMode(int mode) {
-    return (mode == BUFFERING_MODE_SIZE_ONLY || mode == BUFFERING_MODE_TIME_THEN_SIZE);
-}
-
 BufferingSettings::BufferingSettings()
-        : mInitialBufferingMode(BUFFERING_MODE_NONE),
-          mRebufferingMode(BUFFERING_MODE_NONE),
-          mInitialWatermarkMs(kNoWatermark),
-          mInitialWatermarkKB(kNoWatermark),
-          mRebufferingWatermarkLowMs(kNoWatermark),
-          mRebufferingWatermarkHighMs(kNoWatermark),
-          mRebufferingWatermarkLowKB(kNoWatermark),
-          mRebufferingWatermarkHighKB(kNoWatermark) { }
+        : mInitialMarkMs(kNoMark),
+          mResumePlaybackMarkMs(kNoMark) { }
 
 status_t BufferingSettings::readFromParcel(const Parcel* parcel) {
     if (parcel == nullptr) {
         return BAD_VALUE;
     }
-    mInitialBufferingMode = (BufferingMode)parcel->readInt32();
-    mRebufferingMode = (BufferingMode)parcel->readInt32();
-    mInitialWatermarkMs = parcel->readInt32();
-    mInitialWatermarkKB = parcel->readInt32();
-    mRebufferingWatermarkLowMs = parcel->readInt32();
-    mRebufferingWatermarkHighMs = parcel->readInt32();
-    mRebufferingWatermarkLowKB = parcel->readInt32();
-    mRebufferingWatermarkHighKB = parcel->readInt32();
+    mInitialMarkMs = parcel->readInt32();
+    mResumePlaybackMarkMs = parcel->readInt32();
 
     return OK;
 }
@@ -68,26 +41,17 @@
     if (parcel == nullptr) {
         return BAD_VALUE;
     }
-    parcel->writeInt32(mInitialBufferingMode);
-    parcel->writeInt32(mRebufferingMode);
-    parcel->writeInt32(mInitialWatermarkMs);
-    parcel->writeInt32(mInitialWatermarkKB);
-    parcel->writeInt32(mRebufferingWatermarkLowMs);
-    parcel->writeInt32(mRebufferingWatermarkHighMs);
-    parcel->writeInt32(mRebufferingWatermarkLowKB);
-    parcel->writeInt32(mRebufferingWatermarkHighKB);
+    parcel->writeInt32(mInitialMarkMs);
+    parcel->writeInt32(mResumePlaybackMarkMs);
 
     return OK;
 }
 
 String8 BufferingSettings::toString() const {
     String8 s;
-    s.appendFormat("initialMode(%d), rebufferingMode(%d), "
-            "initialMarks(%d ms, %d KB), rebufferingMarks(%d, %d)ms, (%d, %d)KB",
-            mInitialBufferingMode, mRebufferingMode,
-            mInitialWatermarkMs, mInitialWatermarkKB,
-            mRebufferingWatermarkLowMs, mRebufferingWatermarkHighMs,
-            mRebufferingWatermarkLowKB, mRebufferingWatermarkHighKB);
+    s.appendFormat(
+            "initialMarks(%d ms), resumePlaybackMarks(%d ms)",
+            mInitialMarkMs, mResumePlaybackMarkMs);
     return s;
 }
 
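With the watermark fields collapsed into two marks, the BufferingSettings parcel layout is just two int32 values that must be read back in the order they were written. A sketch of a round trip (hypothetical test code, assuming the usual libbinder and libmedia headers are available):

#include <binder/Parcel.h>
#include <media/BufferingSettings.h>

// Hypothetical check, not part of the change: serialize and deserialize.
static bool bufferingSettingsRoundTrip(const android::BufferingSettings &in,
                                       android::BufferingSettings *out) {
    android::Parcel parcel;
    if (in.writeToParcel(&parcel) != android::OK) return false;
    parcel.setDataPosition(0);             // rewind before reading back
    return out->readFromParcel(&parcel) == android::OK;
}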
diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp
deleted file mode 100644
index a46017f..0000000
--- a/media/libmedia/IHDCP.cpp
+++ /dev/null
@@ -1,359 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IHDCP"
-#include <utils/Log.h>
-
-#include <binder/Parcel.h>
-#include <media/IHDCP.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/foundation/ADebug.h>
-
-namespace android {
-
-enum {
-    OBSERVER_NOTIFY = IBinder::FIRST_CALL_TRANSACTION,
-    HDCP_SET_OBSERVER,
-    HDCP_INIT_ASYNC,
-    HDCP_SHUTDOWN_ASYNC,
-    HDCP_GET_CAPS,
-    HDCP_ENCRYPT,
-    HDCP_ENCRYPT_NATIVE,
-    HDCP_DECRYPT,
-};
-
-struct BpHDCPObserver : public BpInterface<IHDCPObserver> {
-    explicit BpHDCPObserver(const sp<IBinder> &impl)
-        : BpInterface<IHDCPObserver>(impl) {
-    }
-
-    virtual void notify(
-            int msg, int ext1, int ext2, const Parcel *obj) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IHDCPObserver::getInterfaceDescriptor());
-        data.writeInt32(msg);
-        data.writeInt32(ext1);
-        data.writeInt32(ext2);
-        if (obj && obj->dataSize() > 0) {
-            data.appendFrom(const_cast<Parcel *>(obj), 0, obj->dataSize());
-        }
-        remote()->transact(OBSERVER_NOTIFY, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-};
-
-IMPLEMENT_META_INTERFACE(HDCPObserver, "android.hardware.IHDCPObserver");
-
-struct BpHDCP : public BpInterface<IHDCP> {
-    explicit BpHDCP(const sp<IBinder> &impl)
-        : BpInterface<IHDCP>(impl) {
-    }
-
-    virtual status_t setObserver(const sp<IHDCPObserver> &observer) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(observer));
-        remote()->transact(HDCP_SET_OBSERVER, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t initAsync(const char *host, unsigned port) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
-        data.writeCString(host);
-        data.writeInt32(port);
-        remote()->transact(HDCP_INIT_ASYNC, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t shutdownAsync() {
-        Parcel data, reply;
-        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
-        remote()->transact(HDCP_SHUTDOWN_ASYNC, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual uint32_t getCaps() {
-        Parcel data, reply;
-        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
-        remote()->transact(HDCP_GET_CAPS, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t encrypt(
-            const void *inData, size_t size, uint32_t streamCTR,
-            uint64_t *outInputCTR, void *outData) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
-        data.writeInt32(size);
-        data.write(inData, size);
-        data.writeInt32(streamCTR);
-        remote()->transact(HDCP_ENCRYPT, data, &reply);
-
-        status_t err = reply.readInt32();
-
-        if (err != OK) {
-            *outInputCTR = 0;
-
-            return err;
-        }
-
-        *outInputCTR = reply.readInt64();
-        reply.read(outData, size);
-
-        return err;
-    }
-
-    virtual status_t encryptNative(
-            const sp<GraphicBuffer> &graphicBuffer,
-            size_t offset, size_t size, uint32_t streamCTR,
-            uint64_t *outInputCTR, void *outData) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
-        data.write(*graphicBuffer);
-        data.writeInt32(offset);
-        data.writeInt32(size);
-        data.writeInt32(streamCTR);
-        remote()->transact(HDCP_ENCRYPT_NATIVE, data, &reply);
-
-        status_t err = reply.readInt32();
-
-        if (err != OK) {
-            *outInputCTR = 0;
-            return err;
-        }
-
-        *outInputCTR = reply.readInt64();
-        reply.read(outData, size);
-
-        return err;
-    }
-
-    virtual status_t decrypt(
-            const void *inData, size_t size,
-            uint32_t streamCTR, uint64_t inputCTR,
-            void *outData) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
-        data.writeInt32(size);
-        data.write(inData, size);
-        data.writeInt32(streamCTR);
-        data.writeInt64(inputCTR);
-        remote()->transact(HDCP_DECRYPT, data, &reply);
-
-        status_t err = reply.readInt32();
-
-        if (err != OK) {
-            return err;
-        }
-
-        reply.read(outData, size);
-
-        return err;
-    }
-};
-
-IMPLEMENT_META_INTERFACE(HDCP, "android.hardware.IHDCP");
-
-status_t BnHDCPObserver::onTransact(
-        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
-    switch (code) {
-        case OBSERVER_NOTIFY:
-        {
-            CHECK_INTERFACE(IHDCPObserver, data, reply);
-
-            int msg = data.readInt32();
-            int ext1 = data.readInt32();
-            int ext2 = data.readInt32();
-
-            Parcel obj;
-            if (data.dataAvail() > 0) {
-                obj.appendFrom(
-                        const_cast<Parcel *>(&data),
-                        data.dataPosition(),
-                        data.dataAvail());
-            }
-
-            notify(msg, ext1, ext2, &obj);
-
-            return OK;
-        }
-
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-status_t BnHDCP::onTransact(
-        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
-    switch (code) {
-        case HDCP_SET_OBSERVER:
-        {
-            CHECK_INTERFACE(IHDCP, data, reply);
-
-            sp<IHDCPObserver> observer =
-                interface_cast<IHDCPObserver>(data.readStrongBinder());
-
-            reply->writeInt32(setObserver(observer));
-            return OK;
-        }
-
-        case HDCP_INIT_ASYNC:
-        {
-            CHECK_INTERFACE(IHDCP, data, reply);
-
-            const char *host = data.readCString();
-            unsigned port = data.readInt32();
-
-            reply->writeInt32(initAsync(host, port));
-            return OK;
-        }
-
-        case HDCP_SHUTDOWN_ASYNC:
-        {
-            CHECK_INTERFACE(IHDCP, data, reply);
-
-            reply->writeInt32(shutdownAsync());
-            return OK;
-        }
-
-        case HDCP_GET_CAPS:
-        {
-            CHECK_INTERFACE(IHDCP, data, reply);
-
-            reply->writeInt32(getCaps());
-            return OK;
-        }
-
-        case HDCP_ENCRYPT:
-        {
-            CHECK_INTERFACE(IHDCP, data, reply);
-
-            size_t size = data.readInt32();
-            void *inData = NULL;
-            // watch out for overflow
-            if (size <= SIZE_MAX / 2) {
-                inData = malloc(2 * size);
-            }
-            if (inData == NULL) {
-                reply->writeInt32(ERROR_OUT_OF_RANGE);
-                return OK;
-            }
-
-            void *outData = (uint8_t *)inData + size;
-
-            status_t err = data.read(inData, size);
-            if (err != OK) {
-                free(inData);
-                reply->writeInt32(err);
-                return OK;
-            }
-
-            uint32_t streamCTR = data.readInt32();
-            uint64_t inputCTR;
-            err = encrypt(inData, size, streamCTR, &inputCTR, outData);
-
-            reply->writeInt32(err);
-
-            if (err == OK) {
-                reply->writeInt64(inputCTR);
-                reply->write(outData, size);
-            }
-
-            free(inData);
-            inData = outData = NULL;
-
-            return OK;
-        }
-
-        case HDCP_ENCRYPT_NATIVE:
-        {
-            CHECK_INTERFACE(IHDCP, data, reply);
-
-            sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
-            data.read(*graphicBuffer);
-            size_t offset = data.readInt32();
-            size_t size = data.readInt32();
-            uint32_t streamCTR = data.readInt32();
-            void *outData = NULL;
-            uint64_t inputCTR;
-
-            status_t err = ERROR_OUT_OF_RANGE;
-
-            outData = malloc(size);
-
-            if (outData != NULL) {
-                err = encryptNative(graphicBuffer, offset, size,
-                                             streamCTR, &inputCTR, outData);
-            }
-
-            reply->writeInt32(err);
-
-            if (err == OK) {
-                reply->writeInt64(inputCTR);
-                reply->write(outData, size);
-            }
-
-            free(outData);
-            outData = NULL;
-
-            return OK;
-        }
-
-        case HDCP_DECRYPT:
-        {
-            CHECK_INTERFACE(IHDCP, data, reply);
-
-            size_t size = data.readInt32();
-            size_t bufSize = 2 * size;
-
-            // watch out for overflow
-            void *inData = NULL;
-            if (bufSize > size) {
-                inData = malloc(bufSize);
-            }
-
-            if (inData == NULL) {
-                reply->writeInt32(ERROR_OUT_OF_RANGE);
-                return OK;
-            }
-
-            void *outData = (uint8_t *)inData + size;
-
-            data.read(inData, size);
-
-            uint32_t streamCTR = data.readInt32();
-            uint64_t inputCTR = data.readInt64();
-            status_t err = decrypt(inData, size, streamCTR, inputCTR, outData);
-
-            reply->writeInt32(err);
-
-            if (err == OK) {
-                reply->write(outData, size);
-            }
-
-            free(inData);
-            inData = outData = NULL;
-
-            return OK;
-        }
-
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-}  // namespace android
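The removed BnHDCP encrypt/decrypt handlers above allocate a single 2 * size buffer for both input and output, so they refuse any caller-supplied size whose doubling would overflow. A standalone sketch of that guard, with illustrative names (not part of this patch):

    // Overflow-safe in/out buffer allocation, mirroring the check the removed
    // HDCP_ENCRYPT/HDCP_DECRYPT handlers performed before trusting the parcel size.
    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    static void *allocInOutBuffer(size_t size) {
        if (size > SIZE_MAX / 2) {
            return nullptr;        // caller maps this to ERROR_OUT_OF_RANGE in the reply
        }
        return malloc(2 * size);   // first half holds the input, second half the output
    }

    int main() {
        void *buf = allocInOutBuffer(4096);
        std::printf("allocation %s\n", buf != nullptr ? "succeeded" : "rejected");
        std::free(buf);
        return 0;
    }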
diff --git a/media/libmedia/IMediaCodecService.cpp b/media/libmedia/IMediaCodecService.cpp
deleted file mode 100644
index adfa93d..0000000
--- a/media/libmedia/IMediaCodecService.cpp
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-**
-** Copyright 2015, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#define LOG_TAG "IMediaCodecService"
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <media/IMediaCodecService.h>
-
-namespace android {
-
-enum {
-    GET_OMX = IBinder::FIRST_CALL_TRANSACTION,
-    GET_OMX_STORE
-};
-
-class BpMediaCodecService : public BpInterface<IMediaCodecService>
-{
-public:
-    explicit BpMediaCodecService(const sp<IBinder>& impl)
-        : BpInterface<IMediaCodecService>(impl)
-    {
-    }
-
-    virtual sp<IOMX> getOMX() {
-        Parcel data, reply;
-        data.writeInterfaceToken(IMediaCodecService::getInterfaceDescriptor());
-        remote()->transact(GET_OMX, data, &reply);
-        return interface_cast<IOMX>(reply.readStrongBinder());
-    }
-
-    virtual sp<IOMXStore> getOMXStore() {
-        Parcel data, reply;
-        data.writeInterfaceToken(IMediaCodecService::getInterfaceDescriptor());
-        remote()->transact(GET_OMX_STORE, data, &reply);
-        return interface_cast<IOMXStore>(reply.readStrongBinder());
-    }
-
-};
-
-IMPLEMENT_META_INTERFACE(MediaCodecService, "android.media.IMediaCodecService");
-
-// ----------------------------------------------------------------------
-
-status_t BnMediaCodecService::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-
-        case GET_OMX: {
-            CHECK_INTERFACE(IMediaCodecService, data, reply);
-            sp<IOMX> omx = getOMX();
-            reply->writeStrongBinder(IInterface::asBinder(omx));
-            return NO_ERROR;
-        }
-        case GET_OMX_STORE: {
-            CHECK_INTERFACE(IMediaCodecService, data, reply);
-            sp<IOMXStore> omxStore = getOMXStore();
-            reply->writeStrongBinder(IInterface::asBinder(omxStore));
-            return NO_ERROR;
-        }
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
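Both removed proxy methods (getOMX, getOMXStore) follow the same interface-token / transact / readStrongBinder shape. A hedged sketch of that proxy-side pattern, assuming the in-tree binder headers; the function and parameter names are illustrative:

    #include <binder/IBinder.h>
    #include <binder/Parcel.h>
    #include <utils/String16.h>
    #include <utils/StrongPointer.h>

    using namespace android;

    // Generic getter call as used by the removed BpMediaCodecService methods:
    // write the interface token, transact, then unflatten the returned binder.
    sp<IBinder> callRemoteGetter(const sp<IBinder>& remote,
                                 const String16& descriptor, uint32_t code) {
        Parcel data, reply;
        data.writeInterfaceToken(descriptor);  // verified by CHECK_INTERFACE on the Bn side
        remote->transact(code, data, &reply);  // blocking binder transaction
        return reply.readStrongBinder();       // nullptr if no binder was written back
    }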
diff --git a/media/libmedia/IMediaExtractorService.cpp b/media/libmedia/IMediaExtractorService.cpp
index 7c0d08d..d7533ca 100644
--- a/media/libmedia/IMediaExtractorService.cpp
+++ b/media/libmedia/IMediaExtractorService.cpp
@@ -23,7 +23,7 @@
 #include <sys/types.h>
 #include <binder/Parcel.h>
 #include <media/IMediaExtractorService.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/MediaExtractor.h>
 
 namespace android {
 
diff --git a/media/libmedia/IMediaHTTPService.cpp b/media/libmedia/IMediaHTTPService.cpp
index 062a07a..74d8ee8 100644
--- a/media/libmedia/IMediaHTTPService.cpp
+++ b/media/libmedia/IMediaHTTPService.cpp
@@ -34,7 +34,7 @@
         : BpInterface<IMediaHTTPService>(impl) {
     }
 
-    virtual sp<IMediaHTTPConnection> makeHTTPConnection() {
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() {
         Parcel data, reply;
         data.writeInterfaceToken(
                 IMediaHTTPService::getInterfaceDescriptor());
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index 5ea2e8b..f725c97 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -68,6 +68,8 @@
     SET_DATA_SOURCE_FD,
     SET_DATA_SOURCE_CALLBACK,
     GET_FRAME_AT_TIME,
+    GET_IMAGE_AT_INDEX,
+    GET_FRAME_AT_INDEX,
     EXTRACT_ALBUM_ART,
     EXTRACT_METADATA,
 };
@@ -164,6 +166,55 @@
         return interface_cast<IMemory>(reply.readStrongBinder());
     }
 
+    sp<IMemory> getImageAtIndex(int index, int colorFormat, bool metaOnly)
+    {
+        ALOGV("getImageAtIndex: index %d, colorFormat(%d) metaOnly(%d)",
+                index, colorFormat, metaOnly);
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
+        data.writeInt32(index);
+        data.writeInt32(colorFormat);
+        data.writeInt32(metaOnly);
+#ifndef DISABLE_GROUP_SCHEDULE_HACK
+        sendSchedPolicy(data);
+#endif
+        remote()->transact(GET_IMAGE_AT_INDEX, data, &reply);
+        status_t ret = reply.readInt32();
+        if (ret != NO_ERROR) {
+            return NULL;
+        }
+        return interface_cast<IMemory>(reply.readStrongBinder());
+    }
+
+    status_t getFrameAtIndex(std::vector<sp<IMemory> > *frames,
+            int frameIndex, int numFrames, int colorFormat, bool metaOnly)
+    {
+        ALOGV("getFrameAtIndex: frameIndex(%d), numFrames(%d), colorFormat(%d) metaOnly(%d)",
+                frameIndex, numFrames, colorFormat, metaOnly);
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
+        data.writeInt32(frameIndex);
+        data.writeInt32(numFrames);
+        data.writeInt32(colorFormat);
+        data.writeInt32(metaOnly);
+#ifndef DISABLE_GROUP_SCHEDULE_HACK
+        sendSchedPolicy(data);
+#endif
+        remote()->transact(GET_FRAME_AT_INDEX, data, &reply);
+        status_t ret = reply.readInt32();
+        if (ret != NO_ERROR) {
+            return ret;
+        }
+        int retNumFrames = reply.readInt32();
+        if (retNumFrames < numFrames) {
+            numFrames = retNumFrames;
+        }
+        for (int i = 0; i < numFrames; i++) {
+            frames->push_back(interface_cast<IMemory>(reply.readStrongBinder()));
+        }
+        return OK;
+    }
+
     sp<IMemory> extractAlbumArt()
     {
         Parcel data, reply;
@@ -300,6 +351,54 @@
 #endif
             return NO_ERROR;
         } break;
+        case GET_IMAGE_AT_INDEX: {
+            CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
+            int index = data.readInt32();
+            int colorFormat = data.readInt32();
+            bool metaOnly = (data.readInt32() != 0);
+            ALOGV("getImageAtIndex: index(%d), colorFormat(%d), metaOnly(%d)",
+                    index, colorFormat, metaOnly);
+#ifndef DISABLE_GROUP_SCHEDULE_HACK
+            setSchedPolicy(data);
+#endif
+            sp<IMemory> bitmap = getImageAtIndex(index, colorFormat, metaOnly);
+            if (bitmap != 0) {  // Don't send NULL across the binder interface
+                reply->writeInt32(NO_ERROR);
+                reply->writeStrongBinder(IInterface::asBinder(bitmap));
+            } else {
+                reply->writeInt32(UNKNOWN_ERROR);
+            }
+#ifndef DISABLE_GROUP_SCHEDULE_HACK
+            restoreSchedPolicy();
+#endif
+            return NO_ERROR;
+        } break;
+        case GET_FRAME_AT_INDEX: {
+            CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
+            int frameIndex = data.readInt32();
+            int numFrames = data.readInt32();
+            int colorFormat = data.readInt32();
+            bool metaOnly = (data.readInt32() != 0);
+            ALOGV("getFrameAtIndex: frameIndex(%d), numFrames(%d), colorFormat(%d), metaOnly(%d)",
+                    frameIndex, numFrames, colorFormat, metaOnly);
+#ifndef DISABLE_GROUP_SCHEDULE_HACK
+            setSchedPolicy(data);
+#endif
+            std::vector<sp<IMemory> > frames;
+            status_t err = getFrameAtIndex(
+                    &frames, frameIndex, numFrames, colorFormat, metaOnly);
+            reply->writeInt32(err);
+            if (OK == err) {
+                reply->writeInt32(frames.size());
+                for (size_t i = 0; i < frames.size(); i++) {
+                    reply->writeStrongBinder(IInterface::asBinder(frames[i]));
+                }
+            }
+#ifndef DISABLE_GROUP_SCHEDULE_HACK
+            restoreSchedPolicy();
+#endif
+            return NO_ERROR;
+        } break;
         case EXTRACT_ALBUM_ART: {
             CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
 #ifndef DISABLE_GROUP_SCHEDULE_HACK
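For the new GET_FRAME_AT_INDEX transaction, the proxy writes frameIndex, numFrames, colorFormat and metaOnly, then reads back a status, a frame count and that many IMemory binders, clamping to the count the service actually returned. A hedged caller sketch, assuming an IMediaMetadataRetriever obtained elsewhere; the colorFormat value is illustrative:

    #include <vector>
    #include <binder/IMemory.h>
    #include <media/IMediaMetadataRetriever.h>

    using namespace android;

    // Pull a run of video frames through the new binder call. The service may
    // return fewer frames than requested, so the vector size is the
    // authoritative result.
    status_t fetchFrames(const sp<IMediaMetadataRetriever>& retriever,
                         int firstIndex, int count,
                         std::vector<sp<IMemory>>* outFrames) {
        const int colorFormat = 0;  // illustrative; real callers pass a pixel format constant
        return retriever->getFrameAtIndex(outFrames, firstIndex, count,
                                          colorFormat, /*metaOnly*/ false);
    }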
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 3996227..e2eccdd 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -35,6 +35,8 @@
 
 namespace android {
 
+using media::VolumeShaper;
+
 enum {
     DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
     SET_DATA_SOURCE_URL,
@@ -42,7 +44,7 @@
     SET_DATA_SOURCE_STREAM,
     SET_DATA_SOURCE_CALLBACK,
     SET_BUFFERING_SETTINGS,
-    GET_DEFAULT_BUFFERING_SETTINGS,
+    GET_BUFFERING_SETTINGS,
     PREPARE_ASYNC,
     START,
     STOP,
@@ -56,6 +58,7 @@
     GET_CURRENT_POSITION,
     GET_DURATION,
     RESET,
+    NOTIFY_AT,
     SET_AUDIO_STREAM_TYPE,
     SET_LOOPING,
     SET_VOLUME,
@@ -75,6 +78,10 @@
     // Modular DRM
     PREPARE_DRM,
     RELEASE_DRM,
+    // AudioRouting
+    SET_OUTPUT_DEVICE,
+    GET_ROUTED_DEVICE_ID,
+    ENABLE_AUDIO_DEVICE_CALLBACK,
 };
 
 // ModDrm helpers
@@ -177,14 +184,14 @@
         return reply.readInt32();
     }
 
-    status_t getDefaultBufferingSettings(BufferingSettings* buffering /* nonnull */)
+    status_t getBufferingSettings(BufferingSettings* buffering /* nonnull */)
     {
         if (buffering == nullptr) {
             return BAD_VALUE;
         }
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-        remote()->transact(GET_DEFAULT_BUFFERING_SETTINGS, data, &reply);
+        remote()->transact(GET_BUFFERING_SETTINGS, data, &reply);
         status_t err = reply.readInt32();
         if (err == OK) {
             err = buffering->readFromParcel(&reply);
@@ -326,6 +333,15 @@
         return reply.readInt32();
     }
 
+    status_t notifyAt(int64_t mediaTimeUs)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+        data.writeInt64(mediaTimeUs);
+        remote()->transact(NOTIFY_AT, data, &reply);
+        return reply.readInt32();
+    }
+
     status_t setAudioStreamType(audio_stream_type_t stream)
     {
         Parcel data, reply;
@@ -509,7 +525,7 @@
             return nullptr;
         }
         sp<VolumeShaper::State> state = new VolumeShaper::State();
-        status = state->readFromParcel(reply);
+        status = state->readFromParcel(&reply);
         if (status != NO_ERROR) {
             return nullptr;
         }
@@ -547,6 +563,59 @@
 
         return reply.readInt32();
     }
+
+    status_t setOutputDevice(audio_port_handle_t deviceId)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+
+        data.writeInt32(deviceId);
+
+        status_t status = remote()->transact(SET_OUTPUT_DEVICE, data, &reply);
+        if (status != OK) {
+            ALOGE("setOutputDevice: binder call failed: %d", status);
+            return status;
+        }
+
+        return reply.readInt32();
+    }
+
+    status_t getRoutedDeviceId(audio_port_handle_t* deviceId)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+
+        status_t status = remote()->transact(GET_ROUTED_DEVICE_ID, data, &reply);
+        if (status != OK) {
+            ALOGE("getRoutedDeviceid: binder call failed: %d", status);
+            *deviceId = AUDIO_PORT_HANDLE_NONE;
+            return status;
+        }
+
+        status = reply.readInt32();
+        if (status != NO_ERROR) {
+            *deviceId = AUDIO_PORT_HANDLE_NONE;
+        } else {
+            *deviceId = reply.readInt32();
+        }
+        return status;
+    }
+
+    status_t enableAudioDeviceCallback(bool enabled)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+
+        data.writeBool(enabled);
+
+        status_t status = remote()->transact(ENABLE_AUDIO_DEVICE_CALLBACK, data, &reply);
+        if (status != OK) {
+            ALOGE("enableAudioDeviceCallback: binder call failed: %d, %d", enabled, status);
+            return status;
+        }
+
+        return reply.readInt32();
+    }
 };
 
 IMPLEMENT_META_INTERFACE(MediaPlayer, "android.media.IMediaPlayer");
@@ -631,10 +700,10 @@
             reply->writeInt32(setBufferingSettings(buffering));
             return NO_ERROR;
         } break;
-        case GET_DEFAULT_BUFFERING_SETTINGS: {
+        case GET_BUFFERING_SETTINGS: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             BufferingSettings buffering;
-            status_t err = getDefaultBufferingSettings(&buffering);
+            status_t err = getBufferingSettings(&buffering);
             reply->writeInt32(err);
             if (err == OK) {
                 buffering.writeToParcel(reply);
@@ -744,6 +813,11 @@
             reply->writeInt32(reset());
             return NO_ERROR;
         } break;
+        case NOTIFY_AT: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            reply->writeInt32(notifyAt(data.readInt64()));
+            return NO_ERROR;
+        } break;
         case SET_AUDIO_STREAM_TYPE: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             reply->writeInt32(setAudioStreamType((audio_stream_type_t) data.readInt32()));
@@ -851,14 +925,14 @@
             status_t status = data.readInt32(&present);
             if (status == NO_ERROR && present != 0) {
                 configuration = new VolumeShaper::Configuration();
-                status = configuration->readFromParcel(data);
+                status = configuration->readFromParcel(&data);
             }
             if (status == NO_ERROR) {
                 status = data.readInt32(&present);
             }
             if (status == NO_ERROR && present != 0) {
                 operation = new VolumeShaper::Operation();
-                status = operation->readFromParcel(data);
+                status = operation->readFromParcel(&data);
             }
             if (status == NO_ERROR) {
                 status = (status_t)applyVolumeShaper(configuration, operation);
@@ -899,6 +973,41 @@
             reply->writeInt32(result);
             return OK;
         }
+
+        // AudioRouting
+        case SET_OUTPUT_DEVICE: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            int deviceId;
+            status_t status = data.readInt32(&deviceId);
+            if (status == NO_ERROR) {
+                reply->writeInt32(setOutputDevice(deviceId));
+            } else {
+                reply->writeInt32(BAD_VALUE);
+            }
+            return NO_ERROR;
+        }
+        case GET_ROUTED_DEVICE_ID: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            audio_port_handle_t deviceId;
+            status_t ret = getRoutedDeviceId(&deviceId);
+            reply->writeInt32(ret);
+            if (ret == NO_ERROR) {
+                reply->writeInt32(deviceId);
+            }
+            return NO_ERROR;
+        } break;
+        case ENABLE_AUDIO_DEVICE_CALLBACK: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            bool enabled;
+            status_t status = data.readBool(&enabled);
+            if (status == NO_ERROR) {
+                reply->writeInt32(enableAudioDeviceCallback(enabled));
+            } else {
+                reply->writeInt32(BAD_VALUE);
+            }
+            return NO_ERROR;
+        } break;
+
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
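The new routing calls compose naturally: request a device with setOutputDevice(), confirm it with getRoutedDeviceId() (which reports AUDIO_PORT_HANDLE_NONE on failure), and subscribe to changes with enableAudioDeviceCallback(). A hedged sketch of such a caller, assuming a connected IMediaPlayer and a device id obtained from audio policy elsewhere:

    #include <media/IMediaPlayer.h>
    #include <system/audio.h>
    #include <utils/Errors.h>

    using namespace android;

    status_t routeToDevice(const sp<IMediaPlayer>& player, audio_port_handle_t deviceId) {
        status_t err = player->setOutputDevice(deviceId);   // request explicit routing
        if (err != NO_ERROR) {
            return err;
        }
        audio_port_handle_t routed = AUDIO_PORT_HANDLE_NONE;
        err = player->getRoutedDeviceId(&routed);            // NONE is reported on failure
        if (err == NO_ERROR && routed != deviceId) {
            // The requested device may not be active yet; ask for device-change
            // callbacks so the app can react when routing settles.
            err = player->enableAudioDeviceCallback(true);
        }
        return err;
    }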
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index a01852c..aca7ad9 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -20,7 +20,6 @@
 
 #include <binder/Parcel.h>
 #include <binder/IMemory.h>
-#include <media/IHDCP.h>
 #include <media/IMediaCodecList.h>
 #include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
@@ -39,8 +38,6 @@
     CREATE = IBinder::FIRST_CALL_TRANSACTION,
     CREATE_MEDIA_RECORDER,
     CREATE_METADATA_RETRIEVER,
-    GET_OMX,
-    MAKE_HDCP,
     ADD_BATTERY_DATA,
     PULL_BATTERY_DATA,
     LISTEN_FOR_REMOTE_DISPLAY,
@@ -83,21 +80,6 @@
         return interface_cast<IMediaRecorder>(reply.readStrongBinder());
     }
 
-    virtual sp<IOMX> getOMX() {
-        Parcel data, reply;
-        data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
-        remote()->transact(GET_OMX, data, &reply);
-        return interface_cast<IOMX>(reply.readStrongBinder());
-    }
-
-    virtual sp<IHDCP> makeHDCP(bool createEncryptionModule) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
-        data.writeInt32(createEncryptionModule);
-        remote()->transact(MAKE_HDCP, data, &reply);
-        return interface_cast<IHDCP>(reply.readStrongBinder());
-    }
-
     virtual void addBatteryData(uint32_t params) {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
@@ -161,19 +143,6 @@
             reply->writeStrongBinder(IInterface::asBinder(retriever));
             return NO_ERROR;
         } break;
-        case GET_OMX: {
-            CHECK_INTERFACE(IMediaPlayerService, data, reply);
-            sp<IOMX> omx = getOMX();
-            reply->writeStrongBinder(IInterface::asBinder(omx));
-            return NO_ERROR;
-        } break;
-        case MAKE_HDCP: {
-            CHECK_INTERFACE(IMediaPlayerService, data, reply);
-            bool createEncryptionModule = data.readInt32();
-            sp<IHDCP> hdcp = makeHDCP(createEncryptionModule);
-            reply->writeStrongBinder(IInterface::asBinder(hdcp));
-            return NO_ERROR;
-        } break;
         case ADD_BATTERY_DATA: {
             CHECK_INTERFACE(IMediaPlayerService, data, reply);
             uint32_t params = data.readInt32();
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 5282352..72f5f58 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -61,6 +61,9 @@
     PAUSE,
     RESUME,
     GET_METRICS,
+    SET_INPUT_DEVICE,
+    GET_ROUTED_DEVICE_ID,
+    ENABLE_AUDIO_DEVICE_CALLBACK,
 
 };
 
@@ -337,6 +340,57 @@
         remote()->transact(RELEASE, data, &reply);
         return reply.readInt32();
     }
+
+    status_t setInputDevice(audio_port_handle_t deviceId)
+    {
+        ALOGV("setInputDevice");
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+        data.writeInt32(deviceId);
+
+        status_t status = remote()->transact(SET_INPUT_DEVICE, data, &reply);
+        if (status != OK) {
+            ALOGE("setInputDevice binder call failed: %d", status);
+            return status;
+        }
+        return reply.readInt32();
+    }
+
+    audio_port_handle_t getRoutedDeviceId(audio_port_handle_t *deviceId)
+    {
+        ALOGV("getRoutedDeviceId");
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+
+        status_t status = remote()->transact(GET_ROUTED_DEVICE_ID, data, &reply);
+        if (status != OK) {
+            ALOGE("getRoutedDeviceid binder call failed: %d", status);
+            *deviceId = AUDIO_PORT_HANDLE_NONE;
+            return status;
+        }
+
+        status = reply.readInt32();
+        if (status != NO_ERROR) {
+            *deviceId = AUDIO_PORT_HANDLE_NONE;
+        } else {
+            *deviceId = reply.readInt32();
+        }
+        return status;
+    }
+
+    status_t enableAudioDeviceCallback(bool enabled)
+    {
+        ALOGV("enableAudioDeviceCallback");
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+        data.writeBool(enabled);
+        status_t status = remote()->transact(ENABLE_AUDIO_DEVICE_CALLBACK, data, &reply);
+        if (status != OK) {
+            ALOGE("enableAudioDeviceCallback binder call failed: %d, %d", enabled, status);
+            return status;
+        }
+        return reply.readInt32();
+    }
 };
 
 IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder");
@@ -543,6 +597,41 @@
             }
             return NO_ERROR;
         } break;
+        case SET_INPUT_DEVICE: {
+            ALOGV("SET_INPUT_DEVICE");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            audio_port_handle_t deviceId;
+            status_t status = data.readInt32(&deviceId);
+            if (status == NO_ERROR) {
+                reply->writeInt32(setInputDevice(deviceId));
+            } else {
+                reply->writeInt32(BAD_VALUE);
+            }
+            return NO_ERROR;
+        } break;
+        case GET_ROUTED_DEVICE_ID: {
+            ALOGV("GET_ROUTED_DEVICE_ID");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            audio_port_handle_t deviceId;
+            status_t status = getRoutedDeviceId(&deviceId);
+            reply->writeInt32(status);
+            if (status == NO_ERROR) {
+                reply->writeInt32(deviceId);
+            }
+            return NO_ERROR;
+        } break;
+        case ENABLE_AUDIO_DEVICE_CALLBACK: {
+            ALOGV("ENABLE_AUDIO_DEVICE_CALLBACK");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            bool enabled;
+            status_t status = data.readBool(&enabled);
+            if (status == NO_ERROR) {
+                reply->writeInt32(enableAudioDeviceCallback(enabled));
+            } else {
+                reply->writeInt32(BAD_VALUE);
+            }
+            return NO_ERROR;
+        }
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
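The Bn-side handlers added to IMediaPlayer and IMediaRecorder share one convention: parse the argument, and reply with BAD_VALUE when the parcel is malformed instead of dropping the transaction, so the proxy's single reply.readInt32() always finds a status word. A minimal sketch of that shape (illustrative names, not part of the patch):

    #include <binder/Parcel.h>
    #include <utils/Errors.h>

    using namespace android;

    // Shared shape of the new SET_OUTPUT_DEVICE / SET_INPUT_DEVICE handlers.
    status_t replyWithStatus(const Parcel& data, Parcel* reply) {
        int32_t deviceId = 0;
        status_t status = data.readInt32(&deviceId);
        if (status == NO_ERROR) {
            // A real handler would call setOutputDevice(deviceId) or
            // setInputDevice(deviceId) here and write its return value;
            // NO_ERROR stands in for that result.
            reply->writeInt32(NO_ERROR);
        } else {
            reply->writeInt32(BAD_VALUE);
        }
        return NO_ERROR;  // the transaction itself was handled
    }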
diff --git a/media/libmedia/IMediaSource.cpp b/media/libmedia/IMediaSource.cpp
index 724b3a0..0d5127c 100644
--- a/media/libmedia/IMediaSource.cpp
+++ b/media/libmedia/IMediaSource.cpp
@@ -26,7 +26,7 @@
 #include <media/IMediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 
 namespace android {
@@ -113,7 +113,8 @@
         return NULL;
     }
 
-    virtual status_t read(MediaBuffer **buffer, const ReadOptions *options) {
+    virtual status_t read(MediaBuffer **buffer,
+            const MediaSource::ReadOptions *options) {
         Vector<MediaBuffer *> buffers;
         status_t ret = readMultiple(&buffers, 1 /* maxNumBuffers */, options);
         *buffer = buffers.size() == 0 ? nullptr : buffers[0];
@@ -123,7 +124,8 @@
     }
 
     virtual status_t readMultiple(
-            Vector<MediaBuffer *> *buffers, uint32_t maxNumBuffers, const ReadOptions *options) {
+            Vector<MediaBuffer *> *buffers, uint32_t maxNumBuffers,
+            const MediaSource::ReadOptions *options) {
         ALOGV("readMultiple");
         if (buffers == NULL || !buffers->isEmpty()) {
             return BAD_VALUE;
@@ -210,11 +212,6 @@
         return remote()->transact(PAUSE, data, &reply);
     }
 
-    virtual status_t setBuffers(const Vector<MediaBuffer *> & buffers __unused) {
-        ALOGV("setBuffers NOT IMPLEMENTED");
-        return ERROR_UNSUPPORTED; // default
-    }
-
 private:
 
     uint32_t mBuffersSinceStop; // Buffer tracking variable
@@ -330,7 +327,7 @@
             }
 
             // Get read options, if any.
-            ReadOptions opts;
+            MediaSource::ReadOptions opts;
             uint32_t len;
             const bool useOptions =
                     data.readUint32(&len) == NO_ERROR
@@ -449,58 +446,5 @@
     }
 }
 
-////////////////////////////////////////////////////////////////////////////////
-
-IMediaSource::ReadOptions::ReadOptions() {
-    reset();
-}
-
-void IMediaSource::ReadOptions::reset() {
-    mOptions = 0;
-    mSeekTimeUs = 0;
-    mLatenessUs = 0;
-    mNonBlocking = false;
-}
-
-void IMediaSource::ReadOptions::setNonBlocking() {
-    mNonBlocking = true;
-}
-
-void IMediaSource::ReadOptions::clearNonBlocking() {
-    mNonBlocking = false;
-}
-
-bool IMediaSource::ReadOptions::getNonBlocking() const {
-    return mNonBlocking;
-}
-
-void IMediaSource::ReadOptions::setSeekTo(int64_t time_us, SeekMode mode) {
-    mOptions |= kSeekTo_Option;
-    mSeekTimeUs = time_us;
-    mSeekMode = mode;
-}
-
-void IMediaSource::ReadOptions::clearSeekTo() {
-    mOptions &= ~kSeekTo_Option;
-    mSeekTimeUs = 0;
-    mSeekMode = SEEK_CLOSEST_SYNC;
-}
-
-bool IMediaSource::ReadOptions::getSeekTo(
-        int64_t *time_us, SeekMode *mode) const {
-    *time_us = mSeekTimeUs;
-    *mode = mSeekMode;
-    return (mOptions & kSeekTo_Option) != 0;
-}
-
-void IMediaSource::ReadOptions::setLateBy(int64_t lateness_us) {
-    mLatenessUs = lateness_us;
-}
-
-int64_t IMediaSource::ReadOptions::getLateBy() const {
-    return mLatenessUs;
-}
-
-
 }  // namespace android
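With ReadOptions now living on MediaSource rather than IMediaSource, callers construct MediaSource::ReadOptions and pass it to read()/readMultiple() unchanged. A hedged caller sketch, assuming a remote IMediaSource obtained from an extractor; the seek time and buffer count are arbitrary:

    #include <media/IMediaSource.h>
    #include <media/MediaSource.h>
    #include <media/stagefright/MediaBuffer.h>
    #include <utils/Vector.h>

    using namespace android;

    status_t readAfterSeek(const sp<IMediaSource>& source, int64_t seekTimeUs) {
        MediaSource::ReadOptions options;   // formerly IMediaSource::ReadOptions
        options.setSeekTo(seekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);

        Vector<MediaBuffer *> buffers;
        status_t err = source->readMultiple(&buffers, 4 /* maxNumBuffers */, &options);
        for (MediaBuffer *buffer : buffers) {
            buffer->release();              // the caller owns returned buffers
        }
        return err;
    }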
 
diff --git a/media/libmedia/IOMXStore.cpp b/media/libmedia/IOMXStore.cpp
deleted file mode 100644
index 4948f1a..0000000
--- a/media/libmedia/IOMXStore.cpp
+++ /dev/null
@@ -1,367 +0,0 @@
-/*
- * Copyright (c) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IOMXStore"
-
-#include <utils/Log.h>
-
-#include <media/IOMX.h>
-#include <media/IOMXStore.h>
-#include <android/hardware/media/omx/1.0/IOmxStore.h>
-
-#include <binder/IInterface.h>
-#include <binder/IBinder.h>
-#include <binder/Parcel.h>
-
-#include <vector>
-#include <string>
-
-namespace android {
-
-namespace {
-
-enum {
-    CONNECT = IBinder::FIRST_CALL_TRANSACTION,
-    LIST_SERVICE_ATTRIBUTES,
-    GET_NODE_PREFIX,
-    LIST_ROLES,
-    GET_OMX,
-};
-
-// Forward declarations of std::vector<T> <-> Parcel conversion funcitons that
-// depend on writeToParcel() and readToParcel() for T <-> Parcel.
-
-template <typename T>
-status_t writeToParcel(const std::vector<T>& v, Parcel* p);
-
-template <typename T>
-status_t readFromParcel(std::vector<T>* v, const Parcel& p);
-
-// std::string <-> Parcel
-
-status_t writeToParcel(const std::string& s, Parcel* p) {
-    if (s.size() > INT32_MAX) {
-        return BAD_VALUE;
-    }
-    return p->writeByteArray(
-            s.size(), reinterpret_cast<const uint8_t*>(s.c_str()));
-}
-
-status_t readFromParcel(std::string* s, const Parcel& p) {
-    int32_t len;
-    status_t status = p.readInt32(&len);
-    if (status != NO_ERROR) {
-        return status;
-    } else if ((len < 0) || (static_cast<uint64_t>(len) > SIZE_MAX)) {
-        return BAD_VALUE;
-    }
-    s->resize(len);
-    if (len == 0) {
-        return NO_ERROR;
-    }
-    return p.read(static_cast<void*>(&s->front()), static_cast<size_t>(len));
-}
-
-// IOMXStore::Attribute <-> Parcel
-
-status_t writeToParcel(const IOMXStore::Attribute& a, Parcel* p) {
-    status_t status = writeToParcel(a.key, p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    return writeToParcel(a.value, p);
-}
-
-status_t readFromParcel(IOMXStore::Attribute* a, const Parcel& p) {
-    status_t status = readFromParcel(&(a->key), p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    return readFromParcel(&(a->value), p);
-}
-
-// IOMXStore::NodeInfo <-> Parcel
-
-status_t writeToParcel(const IOMXStore::NodeInfo& n, Parcel* p) {
-    status_t status = writeToParcel(n.name, p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    status = writeToParcel(n.owner, p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    return writeToParcel(n.attributes, p);
-}
-
-status_t readFromParcel(IOMXStore::NodeInfo* n, const Parcel& p) {
-    status_t status = readFromParcel(&(n->name), p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    status = readFromParcel(&(n->owner), p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    return readFromParcel(&(n->attributes), p);
-}
-
-// IOMXStore::RoleInfo <-> Parcel
-
-status_t writeToParcel(const IOMXStore::RoleInfo& r, Parcel* p) {
-    status_t status = writeToParcel(r.role, p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    status = writeToParcel(r.type, p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    status = p->writeBool(r.isEncoder);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    status = p->writeBool(r.preferPlatformNodes);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    return writeToParcel(r.nodes, p);
-}
-
-status_t readFromParcel(IOMXStore::RoleInfo* r, const Parcel& p) {
-    status_t status = readFromParcel(&(r->role), p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    status = readFromParcel(&(r->type), p);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    status = p.readBool(&(r->isEncoder));
-    if (status != NO_ERROR) {
-        return status;
-    }
-    status = p.readBool(&(r->preferPlatformNodes));
-    if (status != NO_ERROR) {
-        return status;
-    }
-    return readFromParcel(&(r->nodes), p);
-}
-
-// std::vector<NodeInfo> <-> Parcel
-// std::vector<RoleInfo> <-> Parcel
-
-template <typename T>
-status_t writeToParcel(const std::vector<T>& v, Parcel* p) {
-    status_t status = p->writeVectorSize(v);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    for (const T& x : v) {
-        status = writeToParcel(x, p);
-        if (status != NO_ERROR) {
-            return status;
-        }
-    }
-    return NO_ERROR;
-}
-
-template <typename T>
-status_t readFromParcel(std::vector<T>* v, const Parcel& p) {
-    status_t status = p.resizeOutVector(v);
-    if (status != NO_ERROR) {
-        return status;
-    }
-    for (T& x : *v) {
-        status = readFromParcel(&x, p);
-        if (status != NO_ERROR) {
-            return status;
-        }
-    }
-    return NO_ERROR;
-}
-
-} // unnamed namespace
-
-////////////////////////////////////////////////////////////////////////////////
-
-class BpOMXStore : public BpInterface<IOMXStore> {
-public:
-    explicit BpOMXStore(const sp<IBinder> &impl)
-        : BpInterface<IOMXStore>(impl) {
-    }
-
-    status_t listServiceAttributes(
-            std::vector<Attribute>* attributes) override {
-        Parcel data, reply;
-        status_t status;
-        status = data.writeInterfaceToken(IOMXStore::getInterfaceDescriptor());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = remote()->transact(LIST_SERVICE_ATTRIBUTES, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return readFromParcel(attributes, reply);
-    }
-
-    status_t getNodePrefix(std::string* prefix) override {
-        Parcel data, reply;
-        status_t status;
-        status = data.writeInterfaceToken(IOMXStore::getInterfaceDescriptor());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = remote()->transact(GET_NODE_PREFIX, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return readFromParcel(prefix, reply);
-    }
-
-    status_t listRoles(std::vector<RoleInfo>* roleList) override {
-        Parcel data, reply;
-        status_t status;
-        status = data.writeInterfaceToken(IOMXStore::getInterfaceDescriptor());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = remote()->transact(LIST_ROLES, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return readFromParcel(roleList, reply);
-    }
-
-    status_t getOmx(const std::string& name, sp<IOMX>* omx) override {
-        Parcel data, reply;
-        status_t status;
-        status = data.writeInterfaceToken(IOMXStore::getInterfaceDescriptor());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = writeToParcel(name, &data);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = remote()->transact(GET_OMX, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return reply.readStrongBinder(omx);
-    }
-
-};
-
-IMPLEMENT_META_INTERFACE(OMXStore, "android.hardware.IOMXStore");
-
-////////////////////////////////////////////////////////////////////////////////
-
-#define CHECK_OMX_INTERFACE(interface, data, reply) \
-        do { if (!(data).enforceInterface(interface::getInterfaceDescriptor())) { \
-            ALOGW("Call incorrectly routed to " #interface); \
-            return PERMISSION_DENIED; \
-        } } while (0)
-
-status_t BnOMXStore::onTransact(
-    uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
-    switch (code) {
-        case LIST_SERVICE_ATTRIBUTES: {
-            CHECK_OMX_INTERFACE(IOMXStore, data, reply);
-            status_t status;
-            std::vector<Attribute> attributes;
-
-            status = listServiceAttributes(&attributes);
-            if (status != NO_ERROR) {
-                ALOGE("listServiceAttributes() fails with status %d",
-                        static_cast<int>(status));
-                return NO_ERROR;
-            }
-            status = writeToParcel(attributes, reply);
-            if (status != NO_ERROR) {
-                ALOGE("listServiceAttributes() fails to send reply");
-                return NO_ERROR;
-            }
-            return NO_ERROR;
-        }
-        case GET_NODE_PREFIX: {
-            CHECK_OMX_INTERFACE(IOMXStore, data, reply);
-            status_t status;
-            std::string prefix;
-
-            status = getNodePrefix(&prefix);
-            if (status != NO_ERROR) {
-                ALOGE("getNodePrefix() fails with status %d",
-                        static_cast<int>(status));
-                return NO_ERROR;
-            }
-            status = writeToParcel(prefix, reply);
-            if (status != NO_ERROR) {
-                ALOGE("getNodePrefix() fails to send reply");
-                return NO_ERROR;
-            }
-            return NO_ERROR;
-        }
-        case LIST_ROLES: {
-            CHECK_OMX_INTERFACE(IOMXStore, data, reply);
-            status_t status;
-            std::vector<RoleInfo> roleList;
-
-            status = listRoles(&roleList);
-            if (status != NO_ERROR) {
-                ALOGE("listRoles() fails with status %d",
-                        static_cast<int>(status));
-                return NO_ERROR;
-            }
-            status = writeToParcel(roleList, reply);
-            if (status != NO_ERROR) {
-                ALOGE("listRoles() fails to send reply");
-                return NO_ERROR;
-            }
-            return NO_ERROR;
-        }
-        case GET_OMX: {
-            CHECK_OMX_INTERFACE(IOMXStore, data, reply);
-            status_t status;
-            std::string name;
-            sp<IOMX> omx;
-
-            status = readFromParcel(&name, data);
-            if (status != NO_ERROR) {
-                ALOGE("getOmx() fails to retrieve name");
-                return NO_ERROR;
-            }
-            status = getOmx(name, &omx);
-            if (status != NO_ERROR) {
-                ALOGE("getOmx() fails with status %d",
-                        static_cast<int>(status));
-                return NO_ERROR;
-            }
-            status = reply->writeStrongBinder(IInterface::asBinder(omx));
-            if (status != NO_ERROR) {
-                ALOGE("getOmx() fails to send reply");
-                return NO_ERROR;
-            }
-            return NO_ERROR;
-        }
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-}  // namespace android
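The removed IOMXStore serialization encoded each std::string as a 32-bit length followed by the raw bytes, and rejected negative or oversized lengths when reading back. A standalone sketch of that validation idea (this is not the exact Parcel wire format, which also aligns payloads):

    #include <cassert>
    #include <cstdint>
    #include <cstring>
    #include <string>
    #include <vector>

    static void writeString(std::vector<uint8_t> *out, const std::string &s) {
        const int32_t len = static_cast<int32_t>(s.size());
        const uint8_t *lenBytes = reinterpret_cast<const uint8_t *>(&len);
        out->insert(out->end(), lenBytes, lenBytes + sizeof(len));
        out->insert(out->end(), s.begin(), s.end());
    }

    static bool readString(const std::vector<uint8_t> &in, size_t *pos, std::string *s) {
        if (in.size() < *pos + sizeof(int32_t)) return false;
        int32_t len;
        std::memcpy(&len, in.data() + *pos, sizeof(len));
        *pos += sizeof(len);
        if (len < 0 || static_cast<size_t>(len) > in.size() - *pos) return false;  // bad length
        s->assign(reinterpret_cast<const char *>(in.data() + *pos), static_cast<size_t>(len));
        *pos += static_cast<size_t>(len);
        return true;
    }

    int main() {
        std::vector<uint8_t> buf;
        writeString(&buf, "video_decoder.avc");
        size_t pos = 0;
        std::string s;
        assert(readString(buf, &pos, &s) && s == "video_decoder.avc");
        return 0;
    }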
diff --git a/media/libmedia/IStreamSource.cpp b/media/libmedia/IStreamSource.cpp
index ba0a272..e11bc74 100644
--- a/media/libmedia/IStreamSource.cpp
+++ b/media/libmedia/IStreamSource.cpp
@@ -20,24 +20,13 @@
 
 #include <media/IStreamSource.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaKeys.h>
 
 #include <binder/IMemory.h>
 #include <binder/Parcel.h>
 
 namespace android {
 
-// static
-const char *const IStreamListener::kKeyResumeAtPTS = "resume-at-PTS";
-
-// static
-const char *const IStreamListener::kKeyDiscontinuityMask = "discontinuity-mask";
-
-// static
-const char *const IStreamListener::kKeyMediaTimeUs = "media-time-us";
-
-// static
-const char *const IStreamListener::kKeyRecentMediaTimeUs = "recent-media-time-us";
-
 enum {
     // IStreamSource
     SET_LISTENER = IBinder::FIRST_CALL_TRANSACTION,
diff --git a/media/libmedia/MediaPlayer2Factory.cpp b/media/libmedia/MediaPlayer2Factory.cpp
new file mode 100644
index 0000000..d6aab70
--- /dev/null
+++ b/media/libmedia/MediaPlayer2Factory.cpp
@@ -0,0 +1,262 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaPlayer2Factory"
+#include <utils/Log.h>
+
+#include <cutils/properties.h>
+#include <media/DataSource.h>
+#include <media/MediaPlayer2Engine.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/Errors.h>
+#include <utils/misc.h>
+
+#include "MediaPlayer2Factory.h"
+
+#include "TestPlayerStub.h"
+#include "nuplayer2/NuPlayer2Driver.h"
+
+namespace android {
+
+Mutex MediaPlayer2Factory::sLock;
+MediaPlayer2Factory::tFactoryMap MediaPlayer2Factory::sFactoryMap;
+bool MediaPlayer2Factory::sInitComplete = false;
+
+status_t MediaPlayer2Factory::registerFactory_l(IFactory* factory,
+                                                player2_type type) {
+    if (NULL == factory) {
+        ALOGE("Failed to register MediaPlayer2Factory of type %d, factory is"
+              " NULL.", type);
+        return BAD_VALUE;
+    }
+
+    if (sFactoryMap.indexOfKey(type) >= 0) {
+        ALOGE("Failed to register MediaPlayer2Factory of type %d, type is"
+              " already registered.", type);
+        return ALREADY_EXISTS;
+    }
+
+    if (sFactoryMap.add(type, factory) < 0) {
+        ALOGE("Failed to register MediaPlayer2Factory of type %d, failed to add"
+              " to map.", type);
+        return UNKNOWN_ERROR;
+    }
+
+    return OK;
+}
+
+static player2_type getDefaultPlayerType() {
+    return PLAYER2_NU_PLAYER2;
+}
+
+status_t MediaPlayer2Factory::registerFactory(IFactory* factory,
+                                              player2_type type) {
+    Mutex::Autolock lock_(&sLock);
+    return registerFactory_l(factory, type);
+}
+
+void MediaPlayer2Factory::unregisterFactory(player2_type type) {
+    Mutex::Autolock lock_(&sLock);
+    sFactoryMap.removeItem(type);
+}
+
+#define GET_PLAYER_TYPE_IMPL(a...)                      \
+    Mutex::Autolock lock_(&sLock);                      \
+                                                        \
+    player2_type ret = PLAYER2_STAGEFRIGHT_PLAYER;      \
+    float bestScore = 0.0;                              \
+                                                        \
+    for (size_t i = 0; i < sFactoryMap.size(); ++i) {   \
+                                                        \
+        IFactory* v = sFactoryMap.valueAt(i);           \
+        float thisScore;                                \
+        CHECK(v != NULL);                               \
+        thisScore = v->scoreFactory(a, bestScore);      \
+        if (thisScore > bestScore) {                    \
+            ret = sFactoryMap.keyAt(i);                 \
+            bestScore = thisScore;                      \
+        }                                               \
+    }                                                   \
+                                                        \
+    if (0.0 == bestScore) {                             \
+        ret = getDefaultPlayerType();                   \
+    }                                                   \
+                                                        \
+    return ret;
+
+player2_type MediaPlayer2Factory::getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                               const char* url) {
+    GET_PLAYER_TYPE_IMPL(client, url);
+}
+
+player2_type MediaPlayer2Factory::getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                                int fd,
+                                                int64_t offset,
+                                                int64_t length) {
+    GET_PLAYER_TYPE_IMPL(client, fd, offset, length);
+}
+
+player2_type MediaPlayer2Factory::getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                                const sp<IStreamSource> &source) {
+    GET_PLAYER_TYPE_IMPL(client, source);
+}
+
+player2_type MediaPlayer2Factory::getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                                const sp<DataSource> &source) {
+    GET_PLAYER_TYPE_IMPL(client, source);
+}
+
+#undef GET_PLAYER_TYPE_IMPL
+
+sp<MediaPlayer2Base> MediaPlayer2Factory::createPlayer(
+        player2_type playerType,
+        const wp<MediaPlayer2Engine> &client,
+        MediaPlayer2Base::NotifyCallback notifyFunc,
+        pid_t pid) {
+    sp<MediaPlayer2Base> p;
+    IFactory* factory;
+    status_t init_result;
+    Mutex::Autolock lock_(&sLock);
+
+    if (sFactoryMap.indexOfKey(playerType) < 0) {
+        ALOGE("Failed to create player object of type %d, no registered"
+              " factory", playerType);
+        return p;
+    }
+
+    factory = sFactoryMap.valueFor(playerType);
+    CHECK(NULL != factory);
+    p = factory->createPlayer(pid);
+
+    if (p == NULL) {
+        ALOGE("Failed to create player object of type %d, create failed",
+              playerType);
+        return p;
+    }
+
+    init_result = p->initCheck();
+    if (init_result == NO_ERROR) {
+        p->setNotifyCallback(client, notifyFunc);
+    } else {
+        ALOGE("Failed to create player object of type %d, initCheck failed"
+              " (res = %d)", playerType, init_result);
+        p.clear();
+    }
+
+    return p;
+}
+
+/*****************************************************************************
+ *                                                                           *
+ *                     Built-In Factory Implementations                      *
+ *                                                                           *
+ *****************************************************************************/
+
+class NuPlayer2Factory : public MediaPlayer2Factory::IFactory {
+  public:
+    virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                               const char* url,
+                               float curScore) {
+        static const float kOurScore = 0.8;
+
+        if (kOurScore <= curScore) {
+            return 0.0;
+        }
+
+        if (!strncasecmp("http://", url, 7)
+                || !strncasecmp("https://", url, 8)
+                || !strncasecmp("file://", url, 7)) {
+            size_t len = strlen(url);
+            if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
+                return kOurScore;
+            }
+
+            if (strstr(url,"m3u8")) {
+                return kOurScore;
+            }
+
+            if ((len >= 4 && !strcasecmp(".sdp", &url[len - 4])) || strstr(url, ".sdp?")) {
+                return kOurScore;
+            }
+        }
+
+        if (!strncasecmp("rtsp://", url, 7)) {
+            return kOurScore;
+        }
+
+        return 0.0;
+    }
+
+    virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                               const sp<IStreamSource>& /*source*/,
+                               float /*curScore*/) {
+        return 1.0;
+    }
+
+    virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                               const sp<DataSource>& /*source*/,
+                               float /*curScore*/) {
+        // Only NuPlayer2 supports setting a DataSource source directly.
+        return 1.0;
+    }
+
+    virtual sp<MediaPlayer2Base> createPlayer(pid_t pid) {
+        ALOGV(" create NuPlayer2");
+        return new NuPlayer2Driver(pid);
+    }
+};
+
+class TestPlayerFactory : public MediaPlayer2Factory::IFactory {
+  public:
+    virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                               const char* url,
+                               float /*curScore*/) {
+        if (TestPlayerStub::canBeUsed(url)) {
+            return 1.0;
+        }
+
+        return 0.0;
+    }
+
+    virtual sp<MediaPlayer2Base> createPlayer(pid_t /* pid */) {
+        ALOGV("Create Test Player stub");
+        return new TestPlayerStub();
+    }
+};
+
+void MediaPlayer2Factory::registerBuiltinFactories() {
+    Mutex::Autolock lock_(&sLock);
+
+    if (sInitComplete) {
+        return;
+    }
+
+    IFactory* factory = new NuPlayer2Factory();
+    if (registerFactory_l(factory, PLAYER2_NU_PLAYER2) != OK) {
+        delete factory;
+    }
+    factory = new TestPlayerFactory();
+    if (registerFactory_l(factory, PLAYER2_TEST_PLAYER) != OK) {
+        delete factory;
+    }
+
+    sInitComplete = true;
+}
+
+}  // namespace android
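New players plug into MediaPlayer2Factory by implementing IFactory and registering for a player2_type; getPlayerType() then picks the highest-scoring factory and falls back to getDefaultPlayerType() when every score is 0.0. A hypothetical out-of-tree factory sketch; the ".myx" extension and the MyFactory name are made up:

    #include <cstring>
    #include <strings.h>

    #include <media/MediaPlayer2Interface.h>
    #include "MediaPlayer2Factory.h"

    using namespace android;

    class MyFactory : public MediaPlayer2Factory::IFactory {
      public:
        // Claim URLs ending in ".myx"; returning 0.0 lets other factories win.
        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
                                   const char *url, float curScore) {
            static const float kOurScore = 0.9f;
            if (kOurScore <= curScore) {
                return 0.0f;
            }
            size_t len = strlen(url);
            return (len >= 4 && !strcasecmp(".myx", &url[len - 4])) ? kOurScore : 0.0f;
        }

        virtual sp<MediaPlayer2Base> createPlayer(pid_t /*pid*/) {
            return nullptr;  // a real factory constructs its MediaPlayer2Base here
        }
    };

Registration would go through MediaPlayer2Factory::registerFactory(new MyFactory(), <some unused player2_type>); registerFactory_l() rejects a type that is already taken with ALREADY_EXISTS.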
diff --git a/media/libmedia/MediaPlayer2Factory.h b/media/libmedia/MediaPlayer2Factory.h
new file mode 100644
index 0000000..799b5f3
--- /dev/null
+++ b/media/libmedia/MediaPlayer2Factory.h
@@ -0,0 +1,90 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIAPLAYER2FACTORY_H
+#define ANDROID_MEDIAPLAYER2FACTORY_H
+
+#include <media/MediaPlayer2Interface.h>
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+class MediaPlayer2Factory {
+  public:
+    class IFactory {
+      public:
+        virtual ~IFactory() { }
+
+        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                                   const char* /*url*/,
+                                   float /*curScore*/) { return 0.0; }
+
+        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                                   int /*fd*/,
+                                   int64_t /*offset*/,
+                                   int64_t /*length*/,
+                                   float /*curScore*/) { return 0.0; }
+
+        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                                   const sp<IStreamSource> &/*source*/,
+                                   float /*curScore*/) { return 0.0; }
+
+        virtual float scoreFactory(const sp<MediaPlayer2Engine>& /*client*/,
+                                   const sp<DataSource> &/*source*/,
+                                   float /*curScore*/) { return 0.0; }
+
+        virtual sp<MediaPlayer2Base> createPlayer(pid_t pid) = 0;
+    };
+
+    static status_t registerFactory(IFactory* factory,
+                                    player2_type type);
+    static void unregisterFactory(player2_type type);
+    static player2_type getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                      const char* url);
+    static player2_type getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                      int fd,
+                                      int64_t offset,
+                                      int64_t length);
+    static player2_type getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                      const sp<IStreamSource> &source);
+    static player2_type getPlayerType(const sp<MediaPlayer2Engine>& client,
+                                      const sp<DataSource> &source);
+
+    static sp<MediaPlayer2Base> createPlayer(player2_type playerType,
+                                             const wp<MediaPlayer2Engine> &client,
+                                             MediaPlayer2Base::NotifyCallback notifyFunc,
+                                             pid_t pid);
+
+    static void registerBuiltinFactories();
+
+  private:
+    typedef KeyedVector<player2_type, IFactory*> tFactoryMap;
+
+    MediaPlayer2Factory() { }
+
+    static status_t registerFactory_l(IFactory* factory,
+                                      player2_type type);
+
+    static Mutex       sLock;
+    static tFactoryMap sFactoryMap;
+    static bool        sInitComplete;
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaPlayer2Factory);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIAPLAYER2FACTORY_H
diff --git a/media/libmedia/MediaPlayer2Manager.cpp b/media/libmedia/MediaPlayer2Manager.cpp
new file mode 100644
index 0000000..720c1e3
--- /dev/null
+++ b/media/libmedia/MediaPlayer2Manager.cpp
@@ -0,0 +1,2392 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+// Proxy for media player implementations
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaPlayer2Manager"
+#include <utils/Log.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <dirent.h>
+#include <unistd.h>
+
+#include <string.h>
+
+#include <cutils/atomic.h>
+#include <cutils/properties.h> // for property_get
+
+#include <utils/misc.h>
+
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryBase.h>
+#include <utils/Errors.h>  // for status_t
+#include <utils/String8.h>
+#include <utils/SystemClock.h>
+#include <utils/Timers.h>
+#include <utils/Vector.h>
+
+#include <media/AudioPolicyHelper.h>
+#include <media/MediaHTTPService.h>
+#include <media/MediaPlayer2EngineClient.h>
+#include <media/MediaPlayer2Interface.h>
+#include <media/Metadata.h>
+#include <media/AudioTrack.h>
+#include <media/MemoryLeakTrackUtil.h>
+#include <media/NdkWrapper.h>
+
+#include <media/stagefright/InterfaceUtils.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooperRoster.h>
+#include <media/stagefright/SurfaceUtils.h>
+#include <mediautils/BatteryNotifier.h>
+
+#include <memunreachable/memunreachable.h>
+#include <system/audio.h>
+
+#include <private/android_filesystem_config.h>
+
+#include "MediaPlayer2Manager.h"
+#include "MediaPlayer2Factory.h"
+
+static const int kDumpLockRetries = 50;
+static const int kDumpLockSleepUs = 20000;
+
+namespace {
+using android::media::Metadata;
+using android::status_t;
+using android::OK;
+using android::BAD_VALUE;
+using android::NOT_ENOUGH_DATA;
+using android::Parcel;
+using android::media::VolumeShaper;
+
+// Max number of entries in the filter.
+const int kMaxFilterSize = 64;  // I pulled that out of thin air.
+
+const float kMaxRequiredSpeed = 8.0f; // for PCM tracks allow up to 8x speedup.
+
+// FIXME: Move all the metadata-related functions into Metadata.cpp.
+
+
+// Unmarshall a filter from a Parcel.
+// Filter format in a parcel:
+//
+//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       number of entries (n)                   |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       metadata type 1                         |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       metadata type 2                         |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//  ....
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       metadata type n                         |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// @param p Parcel that should start with a filter.
+// @param[out] filter On exit contains the list of metadata types to be
+//                    filtered.
+// @param[out] status On exit contains the status code to be returned.
+// @return true if the parcel starts with a valid filter.
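+//
+// Example (illustrative): a filter allowing only metadata types 3 and 7 is
+// marshalled as the three int32 values {2, 3, 7}.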
+bool unmarshallFilter(const Parcel& p,
+                      Metadata::Filter *filter,
+                      status_t *status)
+{
+    int32_t val;
+    if (p.readInt32(&val) != OK)
+    {
+        ALOGE("Failed to read filter's length");
+        *status = NOT_ENOUGH_DATA;
+        return false;
+    }
+
+    if (val > kMaxFilterSize || val < 0)
+    {
+        ALOGE("Invalid filter len %d", val);
+        *status = BAD_VALUE;
+        return false;
+    }
+
+    const size_t num = val;
+
+    filter->clear();
+    filter->setCapacity(num);
+
+    size_t size = num * sizeof(Metadata::Type);
+
+
+    if (p.dataAvail() < size)
+    {
+        ALOGE("Filter too short expected %zu but got %zu", size, p.dataAvail());
+        *status = NOT_ENOUGH_DATA;
+        return false;
+    }
+
+    const Metadata::Type *data =
+            static_cast<const Metadata::Type*>(p.readInplace(size));
+
+    if (NULL == data)
+    {
+        ALOGE("Filter had no data");
+        *status = BAD_VALUE;
+        return false;
+    }
+
+    // TODO: The STL implementation of vector would be more efficient here
+    // because it degenerates into a memcpy on POD types. Try to
+    // replace it later, or use std::set.
+    for (size_t i = 0; i < num; ++i)
+    {
+        filter->add(*data);
+        ++data;
+    }
+    *status = OK;
+    return true;
+}
+
+// @param filter Of metadata type.
+// @param val To be searched.
+// @return true if a match was found.
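+//
+// Example (illustrative): an empty filter matches nothing, while a filter
+// whose first entry is Metadata::kAny matches every type.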
+bool findMetadata(const Metadata::Filter& filter, const int32_t val)
+{
+    // Deal with empty and ANY right away
+    if (filter.isEmpty()) return false;
+    if (filter[0] == Metadata::kAny) return true;
+
+    return filter.indexOf(val) >= 0;
+}
+
+}  // anonymous namespace
+
+
+namespace {
+using android::Parcel;
+using android::String16;
+
+// marshalling tag indicating flattened utf16 tags
+// keep in sync with frameworks/base/media/java/android/media/AudioAttributes.java
+const int32_t kAudioAttributesMarshallTagFlattenTags = 1;
+
+// Audio attributes format in a parcel:
+//
+//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       usage                                   |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       content_type                            |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       source                                  |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       flags                                   |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       kAudioAttributesMarshallTagFlattenTags  | // ignore tags if not found
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |                       flattened tags in UTF16                 |
+// |                         ...                                   |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// @param parcel Parcel that contains the audio attributes.
+// @param[out] attributes On exit points to an initialized audio_attributes_t structure.
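+//
+// Reading behavior (illustrative): four int32 values (usage, content_type,
+// source, flags) are consumed first, then one marker int32; only when the
+// marker equals kAudioAttributesMarshallTagFlattenTags is a flattened UTF-16
+// tag string read, otherwise attributes->tags is left empty.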
+void unmarshallAudioAttributes(const Parcel& parcel, audio_attributes_t *attributes)
+{
+    attributes->usage = (audio_usage_t) parcel.readInt32();
+    attributes->content_type = (audio_content_type_t) parcel.readInt32();
+    attributes->source = (audio_source_t) parcel.readInt32();
+    attributes->flags = (audio_flags_mask_t) parcel.readInt32();
+    const bool hasFlattenedTag = (parcel.readInt32() == kAudioAttributesMarshallTagFlattenTags);
+    if (hasFlattenedTag) {
+        // the tags are UTF16, convert to UTF8
+        String16 tags = parcel.readString16();
+        ssize_t realTagSize = utf16_to_utf8_length(tags.string(), tags.size());
+        if (realTagSize <= 0) {
+            strcpy(attributes->tags, "");
+        } else {
+            // Copy the flattened string into the attributes as the destination for the
+            // conversion, copying at most (array size - 1) chars; the tags array was
+            // calloc'd, so it is already NUL-terminated.
+            size_t tagSize = realTagSize > AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1 ?
+                    AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1 : realTagSize;
+            utf16_to_utf8(tags.string(), tagSize, attributes->tags,
+                    sizeof(attributes->tags) / sizeof(attributes->tags[0]));
+        }
+    } else {
+        ALOGE("unmarshallAudioAttributes() received unflattened tags, ignoring tag values");
+        strcpy(attributes->tags, "");
+    }
+}
+} // anonymous namespace
+
+
+namespace android {
+
+extern ALooperRoster gLooperRoster;
+
+MediaPlayer2Manager gMediaPlayer2Manager;
+
+static bool checkPermission(const char* permissionString) {
+    if (getpid() == IPCThreadState::self()->getCallingPid()) return true;
+    bool ok = checkCallingPermission(String16(permissionString));
+    if (!ok) ALOGE("Request requires %s", permissionString);
+    return ok;
+}
+
+// TODO: Find real cause of Audio/Video delay in PV framework and remove this workaround
+/* static */ int MediaPlayer2Manager::AudioOutput::mMinBufferCount = 4;
+/* static */ bool MediaPlayer2Manager::AudioOutput::mIsOnEmulator = false;
+
+// static
+MediaPlayer2Manager& MediaPlayer2Manager::get() {
+    return gMediaPlayer2Manager;
+}
+
+MediaPlayer2Manager::MediaPlayer2Manager() {
+    ALOGV("MediaPlayer2Manager created");
+    // TODO: remove all unnecessary pid/uid handling.
+    mPid = IPCThreadState::self()->getCallingPid();
+    mUid = IPCThreadState::self()->getCallingUid();
+    mNextConnId = 1;
+
+    MediaPlayer2Factory::registerBuiltinFactories();
+}
+
+MediaPlayer2Manager::~MediaPlayer2Manager() {
+    ALOGV("MediaPlayer2Manager destroyed");
+}
+
+sp<MediaPlayer2Engine> MediaPlayer2Manager::create(
+        const sp<MediaPlayer2EngineClient>& client,
+        audio_session_t audioSessionId)
+{
+    int32_t connId = android_atomic_inc(&mNextConnId);
+
+    sp<Client> c = new Client(
+            mPid, connId, client, audioSessionId, mUid);
+
+    ALOGV("Create new client(%d) from pid %d, uid %d, ", connId, mPid, mUid);
+
+    wp<Client> w = c;
+    {
+        Mutex::Autolock lock(mLock);
+        mClients.add(w);
+    }
+    return c;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::dump(int fd, const Vector<String16>& args) const
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+
+    result.append(" AudioOutput\n");
+    snprintf(buffer, 255, "  stream type(%d), left - right volume(%f, %f)\n",
+            mStreamType, mLeftVolume, mRightVolume);
+    result.append(buffer);
+    snprintf(buffer, 255, "  msec per frame(%f), latency (%d)\n",
+            mMsecsPerFrame, (mTrack != 0) ? mTrack->latency() : -1);
+    result.append(buffer);
+    snprintf(buffer, 255, "  aux effect id(%d), send level (%f)\n",
+            mAuxEffectId, mSendLevel);
+    result.append(buffer);
+
+    ::write(fd, result.string(), result.size());
+    if (mTrack != 0) {
+        mTrack->dump(fd, args);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::dump(int fd, const Vector<String16>& args)
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+    result.append(" Client\n");
+    snprintf(buffer, 255, "  pid(%d), connId(%d), status(%d), looping(%s)\n",
+            mPid, mConnId, mStatus, mLoop ? "true" : "false");
+    result.append(buffer);
+
+    sp<MediaPlayer2Base> p;
+    sp<AudioOutput> audioOutput;
+    bool locked = false;
+    for (int i = 0; i < kDumpLockRetries; ++i) {
+        if (mLock.tryLock() == NO_ERROR) {
+            locked = true;
+            break;
+        }
+        usleep(kDumpLockSleepUs);
+    }
+
+    if (locked) {
+        p = mPlayer;
+        audioOutput = mAudioOutput;
+        mLock.unlock();
+    } else {
+        result.append("  lock is taken, no dump from player and audio output\n");
+    }
+    write(fd, result.string(), result.size());
+
+    if (p != NULL) {
+        p->dump(fd, args);
+    }
+    if (audioOutput != 0) {
+        audioOutput->dump(fd, args);
+    }
+    write(fd, "\n", 1);
+    return NO_ERROR;
+}
+
+/**
+ * The only arguments this understands right now are -c, -von and -voff,
+ * which are parsed by ALooperRoster::dump()
+ */
+status_t MediaPlayer2Manager::dump(int fd, const Vector<String16>& args)
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+    SortedVector< sp<Client> > clients; // to serialize the mutex unlock & client destruction.
+
+    if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
+        snprintf(buffer, SIZE, "Permission Denial: "
+                "can't dump MediaPlayer2Manager from pid=%d, uid=%d\n",
+                mPid, mUid);
+        result.append(buffer);
+    } else {
+        Mutex::Autolock lock(mLock);
+        for (int i = 0, n = mClients.size(); i < n; ++i) {
+            sp<Client> c = mClients[i].promote();
+            if (c != 0) c->dump(fd, args);
+            clients.add(c);
+        }
+
+        result.append(" Files opened and/or mapped:\n");
+        snprintf(buffer, SIZE, "/proc/%d/maps", getpid());
+        FILE *f = fopen(buffer, "r");
+        if (f) {
+            while (fgets(buffer, SIZE, f) != NULL) {
+                if (strstr(buffer, " /storage/") ||
+                    strstr(buffer, " /system/sounds/") ||
+                    strstr(buffer, " /data/") ||
+                    strstr(buffer, " /system/media/")) {
+                    result.append("  ");
+                    result.append(buffer);
+                }
+            }
+            fclose(f);
+        } else {
+            result.append("couldn't open ");
+            result.append(buffer);
+            result.append("\n");
+        }
+
+        snprintf(buffer, SIZE, "/proc/%d/fd", getpid());
+        DIR *d = opendir(buffer);
+        if (d) {
+            struct dirent *ent;
+            while ((ent = readdir(d)) != NULL) {
+                if (strcmp(ent->d_name,".") && strcmp(ent->d_name,"..")) {
+                    snprintf(buffer, SIZE, "/proc/%d/fd/%s", getpid(), ent->d_name);
+                    struct stat s;
+                    if (lstat(buffer, &s) == 0) {
+                        if ((s.st_mode & S_IFMT) == S_IFLNK) {
+                            char linkto[256];
+                            int len = readlink(buffer, linkto, sizeof(linkto));
+                            if (len > 0) {
+                                if (len > 255) {
+                                    linkto[252] = '.';
+                                    linkto[253] = '.';
+                                    linkto[254] = '.';
+                                    linkto[255] = 0;
+                                } else {
+                                    linkto[len] = 0;
+                                }
+                                if (strstr(linkto, "/storage/") == linkto ||
+                                    strstr(linkto, "/system/sounds/") == linkto ||
+                                    strstr(linkto, "/data/") == linkto ||
+                                    strstr(linkto, "/system/media/") == linkto) {
+                                    result.append("  ");
+                                    result.append(buffer);
+                                    result.append(" -> ");
+                                    result.append(linkto);
+                                    result.append("\n");
+                                }
+                            }
+                        } else {
+                            result.append("  unexpected type for ");
+                            result.append(buffer);
+                            result.append("\n");
+                        }
+                    }
+                }
+            }
+            closedir(d);
+        } else {
+            result.append("couldn't open ");
+            result.append(buffer);
+            result.append("\n");
+        }
+
+        gLooperRoster.dump(fd, args);
+
+        bool dumpMem = false;
+        bool unreachableMemory = false;
+        for (size_t i = 0; i < args.size(); i++) {
+            if (args[i] == String16("-m")) {
+                dumpMem = true;
+            } else if (args[i] == String16("--unreachable")) {
+                unreachableMemory = true;
+            }
+        }
+        if (dumpMem) {
+            result.append("\nDumping memory:\n");
+            std::string s = dumpMemoryAddresses(100 /* limit */);
+            result.append(s.c_str(), s.size());
+        }
+        if (unreachableMemory) {
+            result.append("\nDumping unreachable memory:\n");
+            // TODO - should limit be an argument parameter?
+            std::string s = GetUnreachableMemoryString(true /* contents */, 10000 /* limit */);
+            result.append(s.c_str(), s.size());
+        }
+    }
+    write(fd, result.string(), result.size());
+    return NO_ERROR;
+}
+
+void MediaPlayer2Manager::removeClient(const wp<Client>& client)
+{
+    Mutex::Autolock lock(mLock);
+    mClients.remove(client);
+}
+
+bool MediaPlayer2Manager::hasClient(wp<Client> client)
+{
+    Mutex::Autolock lock(mLock);
+    return mClients.indexOf(client) != NAME_NOT_FOUND;
+}
+
+MediaPlayer2Manager::Client::Client(
+        pid_t pid,
+        int32_t connId,
+        const sp<MediaPlayer2EngineClient>& client,
+        audio_session_t audioSessionId,
+        uid_t uid)
+{
+    ALOGV("Client(%d) constructor", connId);
+    mPid = pid;
+    mConnId = connId;
+    mClient = client;
+    mLoop = false;
+    mStatus = NO_INIT;
+    mAudioSessionId = audioSessionId;
+    mUid = uid;
+    mRetransmitEndpointValid = false;
+    mAudioAttributes = NULL;
+
+#if CALLBACK_ANTAGONIZER
+    ALOGD("create Antagonizer");
+    mAntagonizer = new Antagonizer(notify, this);
+#endif
+}
+
+MediaPlayer2Manager::Client::~Client()
+{
+    ALOGV("Client(%d) destructor pid = %d", mConnId, mPid);
+    mAudioOutput.clear();
+    wp<Client> client(this);
+    disconnect();
+    gMediaPlayer2Manager.removeClient(client);
+    if (mAudioAttributes != NULL) {
+        free(mAudioAttributes);
+    }
+    mAudioDeviceUpdatedListener.clear();
+}
+
+void MediaPlayer2Manager::Client::disconnect()
+{
+    ALOGV("disconnect(%d) from pid %d", mConnId, mPid);
+    // grab local reference and clear main reference to prevent future
+    // access to object
+    sp<MediaPlayer2Base> p;
+    {
+        Mutex::Autolock l(mLock);
+        p = mPlayer;
+        mClient.clear();
+        mPlayer.clear();
+    }
+
+    // clear the notification to prevent callbacks to dead client
+    // and reset the player. We assume the player will serialize
+    // access to itself if necessary.
+    if (p != 0) {
+        p->setNotifyCallback(0, 0);
+#if CALLBACK_ANTAGONIZER
+        ALOGD("kill Antagonizer");
+        mAntagonizer->kill();
+#endif
+        p->reset();
+    }
+
+    {
+        Mutex::Autolock l(mLock);
+        disconnectNativeWindow_l();
+    }
+
+    IPCThreadState::self()->flushCommands();
+}
+
+sp<MediaPlayer2Base> MediaPlayer2Manager::Client::createPlayer(player2_type playerType)
+{
+    // determine if we have the right player type
+    sp<MediaPlayer2Base> p = getPlayer();
+    if ((p != NULL) && (p->playerType() != playerType)) {
+        ALOGV("delete player");
+        p.clear();
+    }
+    if (p == NULL) {
+        p = MediaPlayer2Factory::createPlayer(playerType, this, notify, mPid);
+    }
+
+    if (p != NULL) {
+        p->setUID(mUid);
+    }
+
+    return p;
+}
+
+void MediaPlayer2Manager::Client::AudioDeviceUpdatedNotifier::onAudioDeviceUpdate(
+        audio_io_handle_t audioIo,
+        audio_port_handle_t deviceId) {
+    sp<MediaPlayer2Base> listener = mListener.promote();
+    if (listener != NULL) {
+        listener->sendEvent(MEDIA2_AUDIO_ROUTING_CHANGED, audioIo, deviceId);
+    } else {
+        ALOGW("listener for process %d death is gone", MEDIA2_AUDIO_ROUTING_CHANGED);
+    }
+}
+
+sp<MediaPlayer2Base> MediaPlayer2Manager::Client::setDataSource_pre(
+        player2_type playerType)
+{
+    ALOGV("player type = %d", playerType);
+
+    // create the right type of player
+    sp<MediaPlayer2Base> p = createPlayer(playerType);
+    if (p == NULL) {
+        return p;
+    }
+
+    Mutex::Autolock lock(mLock);
+
+    mAudioDeviceUpdatedListener = new AudioDeviceUpdatedNotifier(p);
+
+    if (!p->hardwareOutput()) {
+        mAudioOutput = new AudioOutput(mAudioSessionId, mUid,
+                mPid, mAudioAttributes, mAudioDeviceUpdatedListener);
+        static_cast<MediaPlayer2Interface*>(p.get())->setAudioSink(mAudioOutput);
+    }
+
+    return p;
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource_post(
+        const sp<MediaPlayer2Base>& p,
+        status_t status)
+{
+    ALOGV(" setDataSource");
+    if (status != OK) {
+        ALOGE("  error: %d", status);
+        return status;
+    }
+
+    // Set the re-transmission endpoint if one was chosen.
+    if (mRetransmitEndpointValid) {
+        status = p->setRetransmitEndpoint(&mRetransmitEndpoint);
+        if (status != NO_ERROR) {
+            ALOGE("setRetransmitEndpoint error: %d", status);
+        }
+    }
+
+    if (status == OK) {
+        Mutex::Autolock lock(mLock);
+        mPlayer = p;
+    }
+    return status;
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers)
+{
+    ALOGV("setDataSource(%s)", url);
+    if (url == NULL)
+        return UNKNOWN_ERROR;
+
+    if ((strncmp(url, "http://", 7) == 0) ||
+        (strncmp(url, "https://", 8) == 0) ||
+        (strncmp(url, "rtsp://", 7) == 0)) {
+        if (!checkPermission("android.permission.INTERNET")) {
+            return PERMISSION_DENIED;
+        }
+    }
+
+    if (strncmp(url, "content://", 10) == 0) {
+        ALOGE("setDataSource: content scheme is not supported here");
+        mStatus = UNKNOWN_ERROR;
+        return mStatus;
+    } else {
+        player2_type playerType = MediaPlayer2Factory::getPlayerType(this, url);
+        sp<MediaPlayer2Base> p = setDataSource_pre(playerType);
+        if (p == NULL) {
+            return NO_INIT;
+        }
+
+        return mStatus =
+                setDataSource_post(
+                p, p->setDataSource(httpService, url, headers));
+    }
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource(int fd, int64_t offset, int64_t length)
+{
+    ALOGV("setDataSource fd=%d (%s), offset=%lld, length=%lld",
+            fd, nameForFd(fd).c_str(), (long long) offset, (long long) length);
+    struct stat sb;
+    int ret = fstat(fd, &sb);
+    if (ret != 0) {
+        ALOGE("fstat(%d) failed: %d, %s", fd, ret, strerror(errno));
+        return UNKNOWN_ERROR;
+    }
+
+    ALOGV("st_dev  = %llu", static_cast<unsigned long long>(sb.st_dev));
+    ALOGV("st_mode = %u", sb.st_mode);
+    ALOGV("st_uid  = %lu", static_cast<unsigned long>(sb.st_uid));
+    ALOGV("st_gid  = %lu", static_cast<unsigned long>(sb.st_gid));
+    ALOGV("st_size = %llu", static_cast<unsigned long long>(sb.st_size));
+
+    if (offset >= sb.st_size) {
+        ALOGE("offset error");
+        return UNKNOWN_ERROR;
+    }
+    if (offset + length > sb.st_size) {
+        length = sb.st_size - offset;
+        ALOGV("calculated length = %lld", (long long)length);
+    }
+
+    player2_type playerType = MediaPlayer2Factory::getPlayerType(this,
+                                                               fd,
+                                                               offset,
+                                                               length);
+    sp<MediaPlayer2Base> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(fd, offset, length));
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource(
+        const sp<IStreamSource> &source) {
+    // create the right type of player
+    player2_type playerType = MediaPlayer2Factory::getPlayerType(this, source);
+    sp<MediaPlayer2Base> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(source));
+}
+
+status_t MediaPlayer2Manager::Client::setDataSource(
+        const sp<DataSource> &source) {
+    player2_type playerType = MediaPlayer2Factory::getPlayerType(this, source);
+    sp<MediaPlayer2Base> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(source));
+}
+
+void MediaPlayer2Manager::Client::disconnectNativeWindow_l() {
+    if (mConnectedWindow != NULL && mConnectedWindow->getANativeWindow() != NULL) {
+        status_t err = nativeWindowDisconnect(
+                mConnectedWindow->getANativeWindow(), "disconnectNativeWindow");
+
+        if (err != OK) {
+            ALOGW("nativeWindowDisconnect returned an error: %s (%d)",
+                    strerror(-err), err);
+        }
+    }
+    mConnectedWindow.clear();
+}
+
+status_t MediaPlayer2Manager::Client::setVideoSurfaceTexture(
+        const sp<ANativeWindowWrapper>& nww)
+{
+    ALOGV("[%d] setVideoSurfaceTexture(%p)",
+          mConnId,
+          (nww == NULL ? NULL : nww->getANativeWindow()));
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+
+    if (nww != NULL && nww->getANativeWindow() != NULL) {
+        if (mConnectedWindow != NULL
+            && mConnectedWindow->getANativeWindow() == nww->getANativeWindow()) {
+            return OK;
+        }
+        status_t err = nativeWindowConnect(nww->getANativeWindow(), "setVideoSurfaceTexture");
+
+        if (err != OK) {
+            ALOGE("setVideoSurfaceTexture failed: %d", err);
+            // Note that we must do the reset before disconnecting from the ANW.
+            // Otherwise queue/dequeue calls could be made on the disconnected
+            // ANW, which may result in errors.
+            reset();
+
+            Mutex::Autolock lock(mLock);
+            disconnectNativeWindow_l();
+
+            return err;
+        }
+    }
+
+    // Note that we must set the player's new GraphicBufferProducer before
+    // disconnecting the old one.  Otherwise queue/dequeue calls could be made
+    // on the disconnected ANW, which may result in errors.
+    status_t err = p->setVideoSurfaceTexture(nww);
+
+    mLock.lock();
+    disconnectNativeWindow_l();
+
+    if (err == OK) {
+        mConnectedWindow = nww;
+        mLock.unlock();
+    } else {
+        // Release the lock on the error path as well, even when there is no
+        // native window to disconnect.
+        mLock.unlock();
+        if (nww != NULL) {
+            status_t disconnectErr = nativeWindowDisconnect(
+                    nww->getANativeWindow(), "disconnectNativeWindow");
+            if (disconnectErr != OK) {
+                ALOGW("nativeWindowDisconnect returned an error: %s (%d)",
+                        strerror(-disconnectErr), disconnectErr);
+            }
+        }
+    }
+
+    return err;
+}
+
+status_t MediaPlayer2Manager::Client::invoke(const Parcel& request,
+                                            Parcel *reply)
+{
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == NULL) return UNKNOWN_ERROR;
+    return p->invoke(request, reply);
+}
+
+// This call doesn't need to access the native player.
+status_t MediaPlayer2Manager::Client::setMetadataFilter(const Parcel& filter)
+{
+    status_t status;
+    media::Metadata::Filter allow, drop;
+
+    if (unmarshallFilter(filter, &allow, &status) &&
+        unmarshallFilter(filter, &drop, &status)) {
+        Mutex::Autolock lock(mLock);
+
+        mMetadataAllow = allow;
+        mMetadataDrop = drop;
+    }
+    return status;
+}
+
+status_t MediaPlayer2Manager::Client::getMetadata(
+        bool update_only, bool /*apply_filter*/, Parcel *reply)
+{
+    sp<MediaPlayer2Base> player = getPlayer();
+    if (player == 0) return UNKNOWN_ERROR;
+
+    status_t status;
+    // Placeholder for the return code, updated by the caller.
+    reply->writeInt32(-1);
+
+    media::Metadata::Filter ids;
+
+    // We don't block notifications while we fetch the data. We clear
+    // mMetadataUpdated first so we don't lose notifications happening
+    // during the rest of this call.
+    {
+        Mutex::Autolock lock(mLock);
+        if (update_only) {
+            ids = mMetadataUpdated;
+        }
+        mMetadataUpdated.clear();
+    }
+
+    media::Metadata metadata(reply);
+
+    metadata.appendHeader();
+    status = player->getMetadata(ids, reply);
+
+    if (status != OK) {
+        metadata.resetParcel();
+        ALOGE("getMetadata failed %d", status);
+        return status;
+    }
+
+    // FIXME: Implement filtering on the result. Not critical since
+    // filtering already takes place on the update notifications. This
+    // would matter once all the metadata are fetched and a filter is set.
+
+    // Everything is fine, update the metadata length.
+    metadata.updateLength();
+    return OK;
+}
+
+status_t MediaPlayer2Manager::Client::setBufferingSettings(
+        const BufferingSettings& buffering)
+{
+    ALOGV("[%d] setBufferingSettings{%s}",
+            mConnId, buffering.toString().string());
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->setBufferingSettings(buffering);
+}
+
+status_t MediaPlayer2Manager::Client::getBufferingSettings(
+        BufferingSettings* buffering /* nonnull */)
+{
+    sp<MediaPlayer2Base> p = getPlayer();
+    // TODO: create mPlayer on demand.
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getBufferingSettings(buffering);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getBufferingSettings{%s}",
+                mConnId, buffering->toString().string());
+    } else {
+        ALOGE("[%d] getBufferingSettings returned %d", mConnId, ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::prepareAsync()
+{
+    ALOGV("[%d] prepareAsync", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->prepareAsync();
+#if CALLBACK_ANTAGONIZER
+    ALOGD("start Antagonizer");
+    if (ret == NO_ERROR) mAntagonizer->start();
+#endif
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::start()
+{
+    ALOGV("[%d] start", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    p->setLooping(mLoop);
+    return p->start();
+}
+
+status_t MediaPlayer2Manager::Client::stop()
+{
+    ALOGV("[%d] stop", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->stop();
+}
+
+status_t MediaPlayer2Manager::Client::pause()
+{
+    ALOGV("[%d] pause", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->pause();
+}
+
+status_t MediaPlayer2Manager::Client::isPlaying(bool* state)
+{
+    *state = false;
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    *state = p->isPlaying();
+    ALOGV("[%d] isPlaying: %d", mConnId, *state);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setPlaybackSettings(const AudioPlaybackRate& rate)
+{
+    ALOGV("[%d] setPlaybackSettings(%f, %f, %d, %d)",
+            mConnId, rate.mSpeed, rate.mPitch, rate.mFallbackMode, rate.mStretchMode);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->setPlaybackSettings(rate);
+}
+
+status_t MediaPlayer2Manager::Client::getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */)
+{
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getPlaybackSettings(rate);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getPlaybackSettings(%f, %f, %d, %d)",
+                mConnId, rate->mSpeed, rate->mPitch, rate->mFallbackMode, rate->mStretchMode);
+    } else {
+        ALOGV("[%d] getPlaybackSettings returned %d", mConnId, ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::setSyncSettings(
+        const AVSyncSettings& sync, float videoFpsHint)
+{
+    ALOGV("[%d] setSyncSettings(%u, %u, %f, %f)",
+            mConnId, sync.mSource, sync.mAudioAdjustMode, sync.mTolerance, videoFpsHint);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->setSyncSettings(sync, videoFpsHint);
+}
+
+status_t MediaPlayer2Manager::Client::getSyncSettings(
+        AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */)
+{
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getSyncSettings(sync, videoFps);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getSyncSettings(%u, %u, %f, %f)",
+                mConnId, sync->mSource, sync->mAudioAdjustMode, sync->mTolerance, *videoFps);
+    } else {
+        ALOGV("[%d] getSyncSettings returned %d", mConnId, ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::getCurrentPosition(int *msec)
+{
+    ALOGV("getCurrentPosition");
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getCurrentPosition(msec);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getCurrentPosition = %d", mConnId, *msec);
+    } else {
+        ALOGE("getCurrentPosition returned %d", ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::getDuration(int *msec)
+{
+    ALOGV("getDuration");
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    status_t ret = p->getDuration(msec);
+    if (ret == NO_ERROR) {
+        ALOGV("[%d] getDuration = %d", mConnId, *msec);
+    } else {
+        ALOGE("getDuration returned %d", ret);
+    }
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::setNextPlayer(const sp<MediaPlayer2Engine>& player) {
+    ALOGV("setNextPlayer");
+    Mutex::Autolock l(mLock);
+    sp<Client> c = static_cast<Client*>(player.get());
+    if (c != NULL && !gMediaPlayer2Manager.hasClient(c)) {
+      return BAD_VALUE;
+    }
+
+    mNextClient = c;
+
+    if (c != NULL) {
+        if (mAudioOutput != NULL) {
+            mAudioOutput->setNextOutput(c->mAudioOutput);
+        } else if ((mPlayer != NULL) && !mPlayer->hardwareOutput()) {
+            ALOGE("no current audio output");
+        }
+
+        if ((mPlayer != NULL) && (mNextClient->getPlayer() != NULL)) {
+            mPlayer->setNextPlayer(mNextClient->getPlayer());
+        }
+    }
+
+    return OK;
+}
+
+VolumeShaper::Status MediaPlayer2Manager::Client::applyVolumeShaper(
+        const sp<VolumeShaper::Configuration>& configuration,
+        const sp<VolumeShaper::Operation>& operation) {
+    // for hardware output, call player instead
+    ALOGV("Client::applyVolumeShaper(%p)", this);
+    sp<MediaPlayer2Base> p = getPlayer();
+    {
+        Mutex::Autolock l(mLock);
+        if (p != 0 && p->hardwareOutput()) {
+            // TODO: investigate internal implementation
+            return VolumeShaper::Status(INVALID_OPERATION);
+        }
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->applyVolumeShaper(configuration, operation);
+        }
+    }
+    return VolumeShaper::Status(INVALID_OPERATION);
+}
+
+sp<VolumeShaper::State> MediaPlayer2Manager::Client::getVolumeShaperState(int id) {
+    // for hardware output, call player instead
+    ALOGV("Client::getVolumeShaperState(%p)", this);
+    sp<MediaPlayer2Base> p = getPlayer();
+    {
+        Mutex::Autolock l(mLock);
+        if (p != 0 && p->hardwareOutput()) {
+            // TODO: investigate internal implementation.
+            return nullptr;
+        }
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->getVolumeShaperState(id);
+        }
+    }
+    return nullptr;
+}
+
+status_t MediaPlayer2Manager::Client::seekTo(int msec, MediaPlayer2SeekMode mode)
+{
+    ALOGV("[%d] seekTo(%d, %d)", mConnId, msec, mode);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->seekTo(msec, mode);
+}
+
+status_t MediaPlayer2Manager::Client::reset()
+{
+    ALOGV("[%d] reset", mConnId);
+    mRetransmitEndpointValid = false;
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->reset();
+}
+
+status_t MediaPlayer2Manager::Client::notifyAt(int64_t mediaTimeUs)
+{
+    ALOGV("[%d] notifyAt(%lld)", mConnId, (long long)mediaTimeUs);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->notifyAt(mediaTimeUs);
+}
+
+status_t MediaPlayer2Manager::Client::setAudioStreamType(audio_stream_type_t type)
+{
+    ALOGV("[%d] setAudioStreamType(%d)", mConnId, type);
+    // TODO: for hardware output, call player instead
+    Mutex::Autolock l(mLock);
+    if (mAudioOutput != 0) mAudioOutput->setAudioStreamType(type);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setAudioAttributes_l(const Parcel &parcel)
+{
+    if (mAudioAttributes != NULL) { free(mAudioAttributes); }
+    mAudioAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
+    if (mAudioAttributes == NULL) {
+        return NO_MEMORY;
+    }
+    unmarshallAudioAttributes(parcel, mAudioAttributes);
+
+    ALOGV("setAudioAttributes_l() usage=%d content=%d flags=0x%x tags=%s",
+            mAudioAttributes->usage, mAudioAttributes->content_type, mAudioAttributes->flags,
+            mAudioAttributes->tags);
+
+    if (mAudioOutput != 0) {
+        mAudioOutput->setAudioAttributes(mAudioAttributes);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setLooping(int loop)
+{
+    ALOGV("[%d] setLooping(%d)", mConnId, loop);
+    mLoop = loop;
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p != 0) return p->setLooping(loop);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setVolume(float leftVolume, float rightVolume)
+{
+    ALOGV("[%d] setVolume(%f, %f)", mConnId, leftVolume, rightVolume);
+
+    // for hardware output, call player instead
+    sp<MediaPlayer2Base> p = getPlayer();
+    {
+        Mutex::Autolock l(mLock);
+        if (p != 0 && p->hardwareOutput()) {
+            MediaPlayerHWInterface* hwp =
+                    reinterpret_cast<MediaPlayerHWInterface*>(p.get());
+            return hwp->setVolume(leftVolume, rightVolume);
+        } else {
+            if (mAudioOutput != 0) mAudioOutput->setVolume(leftVolume, rightVolume);
+            return NO_ERROR;
+        }
+    }
+
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setAuxEffectSendLevel(float level)
+{
+    ALOGV("[%d] setAuxEffectSendLevel(%f)", mConnId, level);
+    Mutex::Autolock l(mLock);
+    if (mAudioOutput != 0) return mAudioOutput->setAuxEffectSendLevel(level);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::attachAuxEffect(int effectId)
+{
+    ALOGV("[%d] attachAuxEffect(%d)", mConnId, effectId);
+    Mutex::Autolock l(mLock);
+    if (mAudioOutput != 0) return mAudioOutput->attachAuxEffect(effectId);
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::setParameter(int key, const Parcel &request) {
+    ALOGV("[%d] setParameter(%d)", mConnId, key);
+    switch (key) {
+    case MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES:
+    {
+        Mutex::Autolock l(mLock);
+        return setAudioAttributes_l(request);
+    }
+    default:
+        sp<MediaPlayer2Base> p = getPlayer();
+        if (p == 0) { return UNKNOWN_ERROR; }
+        return p->setParameter(key, request);
+    }
+}
+
+status_t MediaPlayer2Manager::Client::getParameter(int key, Parcel *reply) {
+    ALOGV("[%d] getParameter(%d)", mConnId, key);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->getParameter(key, reply);
+}
+
+status_t MediaPlayer2Manager::Client::setRetransmitEndpoint(
+        const struct sockaddr_in* endpoint) {
+
+    if (NULL != endpoint) {
+        uint32_t a = ntohl(endpoint->sin_addr.s_addr);
+        uint16_t p = ntohs(endpoint->sin_port);
+        ALOGV("[%d] setRetransmitEndpoint(%u.%u.%u.%u:%hu)", mConnId,
+                (a >> 24), (a >> 16) & 0xFF, (a >> 8) & 0xFF, (a & 0xFF), p);
+    } else {
+        ALOGV("[%d] setRetransmitEndpoint = <none>", mConnId);
+    }
+
+    sp<MediaPlayer2Base> p = getPlayer();
+
+    // Right now, the only valid time to set a retransmit endpoint is before
+    // player selection has been made (since the presence or absence of a
+    // retransmit endpoint is going to determine which player is selected during
+    // setDataSource).
+    if (p != 0) return INVALID_OPERATION;
+
+    if (NULL != endpoint) {
+        Mutex::Autolock lock(mLock);
+        mRetransmitEndpoint = *endpoint;
+        mRetransmitEndpointValid = true;
+    } else {
+        Mutex::Autolock lock(mLock);
+        mRetransmitEndpointValid = false;
+    }
+
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::Client::getRetransmitEndpoint(
+        struct sockaddr_in* endpoint)
+{
+    if (NULL == endpoint)
+        return BAD_VALUE;
+
+    sp<MediaPlayer2Base> p = getPlayer();
+
+    if (p != NULL)
+        return p->getRetransmitEndpoint(endpoint);
+
+    Mutex::Autolock lock(mLock);
+    if (!mRetransmitEndpointValid)
+        return NO_INIT;
+
+    *endpoint = mRetransmitEndpoint;
+
+    return NO_ERROR;
+}
+
+void MediaPlayer2Manager::Client::notify(
+        const wp<MediaPlayer2Engine> &listener, int msg, int ext1, int ext2, const Parcel *obj)
+{
+    sp<MediaPlayer2Engine> spListener = listener.promote();
+    if (spListener == NULL) {
+        return;
+    }
+    Client* client = static_cast<Client*>(spListener.get());
+
+    sp<MediaPlayer2EngineClient> c;
+    sp<Client> nextClient;
+    status_t errStartNext = NO_ERROR;
+    {
+        Mutex::Autolock l(client->mLock);
+        c = client->mClient;
+        if (msg == MEDIA2_PLAYBACK_COMPLETE && client->mNextClient != NULL) {
+            nextClient = client->mNextClient;
+
+            if (client->mAudioOutput != NULL)
+                client->mAudioOutput->switchToNextOutput();
+
+            errStartNext = nextClient->start();
+        }
+    }
+
+    if (nextClient != NULL) {
+        sp<MediaPlayer2EngineClient> nc;
+        {
+            Mutex::Autolock l(nextClient->mLock);
+            nc = nextClient->mClient;
+        }
+        if (nc != NULL) {
+            if (errStartNext == NO_ERROR) {
+                nc->notify(MEDIA2_INFO, MEDIA2_INFO_STARTED_AS_NEXT, 0, obj);
+            } else {
+                nc->notify(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, 0, obj);
+                ALOGE("gapless: start playback for next track failed, err(%d)", errStartNext);
+            }
+        }
+    }
+
+    if (MEDIA2_INFO == msg &&
+        MEDIA2_INFO_METADATA_UPDATE == ext1) {
+        const media::Metadata::Type metadata_type = ext2;
+
+        if (client->shouldDropMetadata(metadata_type)) {
+            return;
+        }
+
+        // Update the list of metadata that have changed. getMetadata
+        // also accesses mMetadataUpdated and clears it.
+        client->addNewMetadataUpdate(metadata_type);
+    }
+
+    if (c != NULL) {
+        ALOGV("[%d] notify (%p, %d, %d, %d)", client->mConnId, spListener.get(), msg, ext1, ext2);
+        c->notify(msg, ext1, ext2, obj);
+    }
+}
+
+
+bool MediaPlayer2Manager::Client::shouldDropMetadata(media::Metadata::Type code) const
+{
+    Mutex::Autolock lock(mLock);
+
+    if (findMetadata(mMetadataDrop, code)) {
+        return true;
+    }
+
+    if (mMetadataAllow.isEmpty() || findMetadata(mMetadataAllow, code)) {
+        return false;
+    } else {
+        return true;
+    }
+}
+
+
+void MediaPlayer2Manager::Client::addNewMetadataUpdate(media::Metadata::Type metadata_type) {
+    Mutex::Autolock lock(mLock);
+    if (mMetadataUpdated.indexOf(metadata_type) < 0) {
+        mMetadataUpdated.add(metadata_type);
+    }
+}
+
+// Modular DRM
+status_t MediaPlayer2Manager::Client::prepareDrm(const uint8_t uuid[16],
+        const Vector<uint8_t>& drmSessionId)
+{
+    ALOGV("[%d] prepareDrm", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+
+    status_t ret = p->prepareDrm(uuid, drmSessionId);
+    ALOGV("prepareDrm ret: %d", ret);
+
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::releaseDrm()
+{
+    ALOGV("[%d] releaseDrm", mConnId);
+    sp<MediaPlayer2Base> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+
+    status_t ret = p->releaseDrm();
+    ALOGV("releaseDrm ret: %d", ret);
+
+    return ret;
+}
+
+status_t MediaPlayer2Manager::Client::setOutputDevice(audio_port_handle_t deviceId)
+{
+    ALOGV("[%d] setOutputDevice", mConnId);
+    {
+        Mutex::Autolock l(mLock);
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->setOutputDevice(deviceId);
+        }
+    }
+    return NO_INIT;
+}
+
+status_t MediaPlayer2Manager::Client::getRoutedDeviceId(audio_port_handle_t* deviceId)
+{
+    ALOGV("[%d] getRoutedDeviceId", mConnId);
+    {
+        Mutex::Autolock l(mLock);
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->getRoutedDeviceId(deviceId);
+        }
+    }
+    return NO_INIT;
+}
+
+status_t MediaPlayer2Manager::Client::enableAudioDeviceCallback(bool enabled)
+{
+    ALOGV("[%d] enableAudioDeviceCallback, %d", mConnId, enabled);
+    {
+        Mutex::Autolock l(mLock);
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->enableAudioDeviceCallback(enabled);
+        }
+    }
+    return NO_INIT;
+}
+
+#if CALLBACK_ANTAGONIZER
+const int Antagonizer::interval = 10000; // 10 msecs
+
+Antagonizer::Antagonizer(
+        MediaPlayer2Manager::NotifyCallback cb,
+        const wp<MediaPlayer2Engine> &client) :
+    mExit(false), mActive(false), mClient(client), mCb(cb)
+{
+    createThread(callbackThread, this);
+}
+
+void Antagonizer::kill()
+{
+    Mutex::Autolock _l(mLock);
+    mActive = false;
+    mExit = true;
+    mCondition.wait(mLock);
+}
+
+int Antagonizer::callbackThread(void* user)
+{
+    ALOGD("Antagonizer started");
+    Antagonizer* p = reinterpret_cast<Antagonizer*>(user);
+    while (!p->mExit) {
+        if (p->mActive) {
+            ALOGV("send event");
+            p->mCb(p->mClient, 0, 0, 0);
+        }
+        usleep(interval);
+    }
+    Mutex::Autolock _l(p->mLock);
+    p->mCondition.signal();
+    ALOGD("Antagonizer stopped");
+    return 0;
+}
+#endif
+
+#undef LOG_TAG
+#define LOG_TAG "AudioSink"
+MediaPlayer2Manager::AudioOutput::AudioOutput(audio_session_t sessionId, uid_t uid, int pid,
+        const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
+    : mCallback(NULL),
+      mCallbackCookie(NULL),
+      mCallbackData(NULL),
+      mStreamType(AUDIO_STREAM_MUSIC),
+      mLeftVolume(1.0),
+      mRightVolume(1.0),
+      mPlaybackRate(AUDIO_PLAYBACK_RATE_DEFAULT),
+      mSampleRateHz(0),
+      mMsecsPerFrame(0),
+      mFrameSize(0),
+      mSessionId(sessionId),
+      mUid(uid),
+      mPid(pid),
+      mSendLevel(0.0),
+      mAuxEffectId(0),
+      mFlags(AUDIO_OUTPUT_FLAG_NONE),
+      mVolumeHandler(new media::VolumeHandler()),
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mDeviceCallbackEnabled(false),
+      mDeviceCallback(deviceCallback)
+{
+    ALOGV("AudioOutput(%d)", sessionId);
+    if (attr != NULL) {
+        mAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
+        if (mAttributes != NULL) {
+            memcpy(mAttributes, attr, sizeof(audio_attributes_t));
+            mStreamType = audio_attributes_to_stream_type(attr);
+        }
+    } else {
+        mAttributes = NULL;
+    }
+
+    setMinBufferCount();
+}
+
+MediaPlayer2Manager::AudioOutput::~AudioOutput()
+{
+    close();
+    free(mAttributes);
+    delete mCallbackData;
+}
+
+//static
+void MediaPlayer2Manager::AudioOutput::setMinBufferCount()
+{
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("ro.kernel.qemu", value, 0)) {
+        mIsOnEmulator = true;
+        mMinBufferCount = 12;  // to prevent systematic buffer underrun for emulator
+    }
+}
+
+// static
+bool MediaPlayer2Manager::AudioOutput::isOnEmulator()
+{
+    setMinBufferCount(); // benign race wrt other threads
+    return mIsOnEmulator;
+}
+
+// static
+int MediaPlayer2Manager::AudioOutput::getMinBufferCount()
+{
+    setMinBufferCount(); // benign race wrt other threads
+    return mMinBufferCount;
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::bufferSize() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->frameCount() * mFrameSize;
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::frameCount() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->frameCount();
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::channelCount() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->channelCount();
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::frameSize() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mFrameSize;
+}
+
+uint32_t MediaPlayer2Manager::AudioOutput::latency () const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return 0;
+    return mTrack->latency();
+}
+
+float MediaPlayer2Manager::AudioOutput::msecsPerFrame() const
+{
+    Mutex::Autolock lock(mLock);
+    return mMsecsPerFrame;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::getPosition(uint32_t *position) const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->getPosition(position);
+}
+
+status_t MediaPlayer2Manager::AudioOutput::getTimestamp(AudioTimestamp &ts) const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->getTimestamp(ts);
+}
+
+// TODO: Remove unnecessary calls to getPlayedOutDurationUs()
+// as it acquires locks and may query the audio driver.
+//
+// Some calls could conceivably retrieve extrapolated data instead of
+// accessing getTimestamp() or getPosition() every time a data buffer with
+// a media time is received.
+//
+// Calculate duration of played samples if played at normal rate (i.e., 1.0).
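+//
+// Illustrative example (values assumed): a track reporting 96000 frames
+// played at 48000 Hz with a timestamp taken 5 ms before nowUs yields
+// 96000 * 1e6 / 48000 + 5000 = 2,005,000 us of played-out duration.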
+int64_t MediaPlayer2Manager::AudioOutput::getPlayedOutDurationUs(int64_t nowUs) const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0 || mSampleRateHz == 0) {
+        return 0;
+    }
+
+    uint32_t numFramesPlayed;
+    int64_t numFramesPlayedAtUs;
+    AudioTimestamp ts;
+
+    status_t res = mTrack->getTimestamp(ts);
+    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
+        numFramesPlayed = ts.mPosition;
+        numFramesPlayedAtUs = ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
+        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAtUs);
+    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
+        numFramesPlayed = 0;
+        numFramesPlayedAtUs = nowUs;
+        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
+        //        numFramesPlayed, (long long)numFramesPlayedAtUs);
+    } else {                         // case 3: transitory at new track or audio fast tracks.
+        res = mTrack->getPosition(&numFramesPlayed);
+        CHECK_EQ(res, (status_t)OK);
+        numFramesPlayedAtUs = nowUs;
+        numFramesPlayedAtUs += 1000LL * mTrack->latency() / 2; /* XXX */
+        //ALOGD("getPosition: %u %lld", numFramesPlayed, (long long)numFramesPlayedAtUs);
+    }
+
+    // CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
+    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
+    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000000LL / mSampleRateHz)
+            + nowUs - numFramesPlayedAtUs;
+    if (durationUs < 0) {
+        // Occurs when numFramesPlayed position is very small and the following:
+        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
+        //     numFramesPlayedAtUs is greater than nowUs by time more than numFramesPlayed.
+        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
+        //     numFramesPlayedAtUs, by a time amount greater than numFramesPlayed.
+        //
+        // Both of these are transitory conditions.
+        ALOGV("getPlayedOutDurationUs: negative duration %lld set to zero", (long long)durationUs);
+        durationUs = 0;
+    }
+    ALOGV("getPlayedOutDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
+            (long long)durationUs, (long long)nowUs,
+            numFramesPlayed, (long long)numFramesPlayedAtUs);
+    return durationUs;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::getFramesWritten(uint32_t *frameswritten) const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    ExtendedTimestamp ets;
+    status_t status = mTrack->getTimestamp(&ets);
+    if (status == OK || status == WOULD_BLOCK) {
+        *frameswritten = (uint32_t)ets.mPosition[ExtendedTimestamp::LOCATION_CLIENT];
+    }
+    return status;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::setParameters(const String8& keyValuePairs)
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return NO_INIT;
+    return mTrack->setParameters(keyValuePairs);
+}
+
+String8  MediaPlayer2Manager::AudioOutput::getParameters(const String8& keys)
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return String8::empty();
+    return mTrack->getParameters(keys);
+}
+
+void MediaPlayer2Manager::AudioOutput::setAudioAttributes(const audio_attributes_t * attributes) {
+    Mutex::Autolock lock(mLock);
+    if (attributes == NULL) {
+        free(mAttributes);
+        mAttributes = NULL;
+    } else {
+        if (mAttributes == NULL) {
+            mAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
+        }
+        memcpy(mAttributes, attributes, sizeof(audio_attributes_t));
+        mStreamType = audio_attributes_to_stream_type(attributes);
+    }
+}
+
+void MediaPlayer2Manager::AudioOutput::setAudioStreamType(audio_stream_type_t streamType)
+{
+    Mutex::Autolock lock(mLock);
+    // do not allow direct stream type modification if attributes have been set
+    if (mAttributes == NULL) {
+        mStreamType = streamType;
+    }
+}
+
+void MediaPlayer2Manager::AudioOutput::deleteRecycledTrack_l()
+{
+    ALOGV("deleteRecycledTrack_l");
+    if (mRecycledTrack != 0) {
+
+        if (mCallbackData != NULL) {
+            mCallbackData->setOutput(NULL);
+            mCallbackData->endTrackSwitch();
+        }
+
+        if ((mRecycledTrack->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) {
+            int32_t msec = 0;
+            if (!mRecycledTrack->stopped()) { // check if active
+                 (void)mRecycledTrack->pendingDuration(&msec);
+            }
+            mRecycledTrack->stop(); // ensure full data drain
+            ALOGD("deleting recycled track, waiting for data drain (%d msec)", msec);
+            if (msec > 0) {
+                static const int32_t WAIT_LIMIT_MS = 3000;
+                if (msec > WAIT_LIMIT_MS) {
+                    msec = WAIT_LIMIT_MS;
+                }
+                usleep(msec * 1000LL);
+            }
+        }
+        // An offloaded track isn't flushed because the STREAM_END is reported
+        // slightly prematurely to allow time for the gapless track switch
+        // but this means that if we decide not to recycle the track there
+        // could be a small amount of residual data still playing. We leave
+        // AudioFlinger to drain the track.
+
+        mRecycledTrack.clear();
+        close_l();
+        delete mCallbackData;
+        mCallbackData = NULL;
+    }
+}
+
+void MediaPlayer2Manager::AudioOutput::close_l()
+{
+    mTrack.clear();
+}
+
+status_t MediaPlayer2Manager::AudioOutput::open(
+        uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
+        audio_format_t format, int bufferCount,
+        AudioCallback cb, void *cookie,
+        audio_output_flags_t flags,
+        const audio_offload_info_t *offloadInfo,
+        bool doNotReconnect,
+        uint32_t suggestedFrameCount)
+{
+    ALOGV("open(%u, %d, 0x%x, 0x%x, %d, %d 0x%x)", sampleRate, channelCount, channelMask,
+                format, bufferCount, mSessionId, flags);
+
+    // offloading is only supported in callback mode for now.
+    // offloadInfo must be present if offload flag is set
+    if (((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) &&
+            ((cb == NULL) || (offloadInfo == NULL))) {
+        return BAD_VALUE;
+    }
+
+    // compute frame count for the AudioTrack internal buffer
+    size_t frameCount;
+    if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+        frameCount = 0; // AudioTrack will get frame count from AudioFlinger
+    } else {
+        // try to estimate the buffer processing fetch size from AudioFlinger.
+        // framesPerBuffer is approximate and generally correct, except when it's not :-).
+        uint32_t afSampleRate;
+        size_t afFrameCount;
+        if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) != NO_ERROR) {
+            return NO_INIT;
+        }
+        if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) != NO_ERROR) {
+            return NO_INIT;
+        }
+        const size_t framesPerBuffer =
+                (unsigned long long)sampleRate * afFrameCount / afSampleRate;
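+        // Illustrative example (values assumed): a 44100 Hz track with an
+        // AudioFlinger buffer of 960 frames at 48000 Hz gives
+        // framesPerBuffer = 44100 * 960 / 48000 = 882 frames.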
+
+        if (bufferCount == 0) {
+            // use suggestedFrameCount
+            bufferCount = (suggestedFrameCount + framesPerBuffer - 1) / framesPerBuffer;
+        }
+        // Check argument bufferCount against the minimum buffer count
+        if (bufferCount != 0 && bufferCount < mMinBufferCount) {
+            ALOGV("bufferCount (%d) increased to %d", bufferCount, mMinBufferCount);
+            bufferCount = mMinBufferCount;
+        }
+        // if frameCount is 0, then AudioTrack will get frame count from AudioFlinger
+        // which will be the minimum size permitted.
+        frameCount = bufferCount * framesPerBuffer;
+    }
+
+    if (channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
+        channelMask = audio_channel_out_mask_from_count(channelCount);
+        if (0 == channelMask) {
+            ALOGE("open() error, can\'t derive mask for %d audio channels", channelCount);
+            return NO_INIT;
+        }
+    }
+
+    Mutex::Autolock lock(mLock);
+    mCallback = cb;
+    mCallbackCookie = cookie;
+
+    // Check whether we can recycle the track
+    bool reuse = false;
+    bool bothOffloaded = false;
+
+    if (mRecycledTrack != 0) {
+        // check whether we are switching between two offloaded tracks
+        bothOffloaded = (flags & mRecycledTrack->getFlags()
+                                & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0;
+
+        // check if the existing track can be reused as-is, or if a new track needs to be created.
+        reuse = true;
+
+        if ((mCallbackData == NULL && mCallback != NULL) ||
+                (mCallbackData != NULL && mCallback == NULL)) {
+            // recycled track uses callbacks but the caller wants to use writes, or vice versa
+            ALOGV("can't chain callback and write");
+            reuse = false;
+        } else if ((mRecycledTrack->getSampleRate() != sampleRate) ||
+                (mRecycledTrack->channelCount() != (uint32_t)channelCount) ) {
+            ALOGV("samplerate, channelcount differ: %u/%u Hz, %u/%d ch",
+                  mRecycledTrack->getSampleRate(), sampleRate,
+                  mRecycledTrack->channelCount(), channelCount);
+            reuse = false;
+        } else if (flags != mFlags) {
+            ALOGV("output flags differ %08x/%08x", flags, mFlags);
+            reuse = false;
+        } else if (mRecycledTrack->format() != format) {
+            reuse = false;
+        }
+    } else {
+        ALOGV("no track available to recycle");
+    }
+
+    ALOGV_IF(bothOffloaded, "both tracks offloaded");
+
+    // If we can't recycle and both tracks are offloaded
+    // we must close the previous output before opening a new one
+    if (bothOffloaded && !reuse) {
+        ALOGV("both offloaded and not recycling");
+        deleteRecycledTrack_l();
+    }
+
+    sp<AudioTrack> t;
+    CallbackData *newcbd = NULL;
+
+    // We don't attempt to create a new track if we are recycling an
+    // offloaded track. But if we are recycling a non-offloaded track, or
+    // switching between an offloaded and a non-offloaded track, we create
+    // the new track in advance so that we can read additional stream info.
+
+    if (!(reuse && bothOffloaded)) {
+        ALOGV("creating new AudioTrack");
+
+        if (mCallback != NULL) {
+            newcbd = new CallbackData(this);
+            t = new AudioTrack(
+                    mStreamType,
+                    sampleRate,
+                    format,
+                    channelMask,
+                    frameCount,
+                    flags,
+                    CallbackWrapper,
+                    newcbd,
+                    0,  // notification frames
+                    mSessionId,
+                    AudioTrack::TRANSFER_CALLBACK,
+                    offloadInfo,
+                    mUid,
+                    mPid,
+                    mAttributes,
+                    doNotReconnect,
+                    1.0f,  // default value for maxRequiredSpeed
+                    mSelectedDeviceId);
+        } else {
+            // TODO: Due to buffer memory concerns, we use a max target playback speed
+            // based on mPlaybackRate at the time of open (instead of kMaxRequiredSpeed),
+            // also clamping the target speed to 1.0 <= targetSpeed <= kMaxRequiredSpeed.
+            const float targetSpeed =
+                    std::min(std::max(mPlaybackRate.mSpeed, 1.0f), kMaxRequiredSpeed);
+            ALOGW_IF(targetSpeed != mPlaybackRate.mSpeed,
+                    "track target speed:%f clamped from playback speed:%f",
+                    targetSpeed, mPlaybackRate.mSpeed);
+            t = new AudioTrack(
+                    mStreamType,
+                    sampleRate,
+                    format,
+                    channelMask,
+                    frameCount,
+                    flags,
+                    NULL, // callback
+                    NULL, // user data
+                    0, // notification frames
+                    mSessionId,
+                    AudioTrack::TRANSFER_DEFAULT,
+                    NULL, // offload info
+                    mUid,
+                    mPid,
+                    mAttributes,
+                    doNotReconnect,
+                    targetSpeed,
+                    mSelectedDeviceId);
+        }
+
+        if ((t == 0) || (t->initCheck() != NO_ERROR)) {
+            ALOGE("Unable to create audio track");
+            delete newcbd;
+            // t goes out of scope, so reference count drops to zero
+            return NO_INIT;
+        } else {
+            // successful AudioTrack initialization implies a legacy stream type was generated
+            // from the audio attributes
+            mStreamType = t->streamType();
+        }
+    }
+
+    if (reuse) {
+        CHECK(mRecycledTrack != NULL);
+
+        if (!bothOffloaded) {
+            if (mRecycledTrack->frameCount() != t->frameCount()) {
+                ALOGV("framecount differs: %zu/%zu frames",
+                      mRecycledTrack->frameCount(), t->frameCount());
+                reuse = false;
+            }
+        }
+
+        if (reuse) {
+            ALOGV("chaining to next output and recycling track");
+            close_l();
+            mTrack = mRecycledTrack;
+            mRecycledTrack.clear();
+            if (mCallbackData != NULL) {
+                mCallbackData->setOutput(this);
+            }
+            delete newcbd;
+            return updateTrack();
+        }
+    }
+
+    // we're not going to reuse the track, unblock and flush it
+    // this was done earlier if both tracks are offloaded
+    if (!bothOffloaded) {
+        deleteRecycledTrack_l();
+    }
+
+    CHECK((t != NULL) && ((mCallback == NULL) || (newcbd != NULL)));
+
+    mCallbackData = newcbd;
+    ALOGV("setVolume");
+    t->setVolume(mLeftVolume, mRightVolume);
+
+    // Restore VolumeShapers for the MediaPlayer2 in case the track was recreated
+    // due to an output sink error (e.g. offload to non-offload switch).
+    mVolumeHandler->forall([&t](const VolumeShaper &shaper) -> VolumeShaper::Status {
+        sp<VolumeShaper::Operation> operationToEnd =
+                new VolumeShaper::Operation(shaper.mOperation);
+        // TODO: Ideally we would restore to the exact xOffset position
+        // as returned by getVolumeShaperState(), but we don't have that
+        // information when restoring at the client unless we periodically poll
+        // the server or create shared memory state.
+        //
+        // For now, we simply advance to the end of the VolumeShaper effect
+        // if it has been started.
+        if (shaper.isStarted()) {
+            operationToEnd->setNormalizedTime(1.f);
+        }
+        return t->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
+    });
+
+    mSampleRateHz = sampleRate;
+    mFlags = flags;
+    mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate);
+    mFrameSize = t->frameSize();
+    mTrack = t;
+
+    return updateTrack();
+}
+
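+// Re-applies the stored playback rate and aux-effect settings (skipped for offloaded or
+// direct tracks), the selected output device and, if enabled, the device callback to the
+// current AudioTrack. Called after open() creates or recycles a track.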
+status_t MediaPlayer2Manager::AudioOutput::updateTrack() {
+    if (mTrack == NULL) {
+        return NO_ERROR;
+    }
+
+    status_t res = NO_ERROR;
+    // Note some output devices may give us a direct track even though we don't specify it.
+    // Example: Line application b/17459982.
+    if ((mTrack->getFlags()
+            & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) {
+        res = mTrack->setPlaybackRate(mPlaybackRate);
+        if (res == NO_ERROR) {
+            mTrack->setAuxEffectSendLevel(mSendLevel);
+            res = mTrack->attachAuxEffect(mAuxEffectId);
+        }
+    }
+    mTrack->setOutputDevice(mSelectedDeviceId);
+    if (mDeviceCallbackEnabled) {
+        mTrack->addAudioDeviceCallback(mDeviceCallback.promote());
+    }
+    ALOGV("updateTrack() DONE status %d", res);
+    return res;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::start()
+{
+    ALOGV("start");
+    Mutex::Autolock lock(mLock);
+    if (mCallbackData != NULL) {
+        mCallbackData->endTrackSwitch();
+    }
+    if (mTrack != 0) {
+        mTrack->setVolume(mLeftVolume, mRightVolume);
+        mTrack->setAuxEffectSendLevel(mSendLevel);
+        status_t status = mTrack->start();
+        if (status == NO_ERROR) {
+            mVolumeHandler->setStarted();
+        }
+        return status;
+    }
+    return NO_INIT;
+}
+
+void MediaPlayer2Manager::AudioOutput::setNextOutput(const sp<AudioOutput>& nextOutput) {
+    Mutex::Autolock lock(mLock);
+    mNextOutput = nextOutput;
+}
+
+void MediaPlayer2Manager::AudioOutput::switchToNextOutput() {
+    ALOGV("switchToNextOutput");
+
+    // Try to acquire the callback lock before moving track (without incurring deadlock).
+    const unsigned kMaxSwitchTries = 100;
+    Mutex::Autolock lock(mLock);
+    for (unsigned tries = 0;;) {
+        if (mTrack == 0) {
+            return;
+        }
+        if (mNextOutput != NULL && mNextOutput != this) {
+            if (mCallbackData != NULL) {
+                // two alternative approaches
+#if 1
+                CallbackData *callbackData = mCallbackData;
+                mLock.unlock();
+                // proper acquisition sequence
+                callbackData->lock();
+                mLock.lock();
+                // Caution: while mLock was released another thread may have deleted our
+                // callback or changed our target; re-check before continuing.
+                if (callbackData != mCallbackData || mNextOutput == NULL || mNextOutput == this) {
+                    // fatal if we are starved out.
+                    LOG_ALWAYS_FATAL_IF(++tries > kMaxSwitchTries,
+                            "switchToNextOutput() cannot obtain correct lock sequence");
+                    callbackData->unlock();
+                    continue;
+                }
+                callbackData->mSwitching = true; // begin track switch
+                callbackData->setOutput(NULL);
+#else
+                // tryBeginTrackSwitch() returns false if the callback has the lock.
+                if (!mCallbackData->tryBeginTrackSwitch()) {
+                    // fatal if we are starved out.
+                    LOG_ALWAYS_FATAL_IF(++tries > kMaxSwitchTries,
+                            "switchToNextOutput() cannot obtain callback lock");
+                    mLock.unlock();
+                    usleep(5 * 1000 /* usec */); // allow callback to use AudioOutput
+                    mLock.lock();
+                    continue;
+                }
+#endif
+            }
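+            // If callback data exists, both mLock and the callback lock are now held, so the
+            // audio callback cannot use this output while the track is handed over below.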
+
+            Mutex::Autolock nextLock(mNextOutput->mLock);
+
+            // If the next output track is not NULL, then it has been
+            // opened already for playback.
+            // This is possible even without the next player being started,
+            // for example, the next player could be prepared and seeked.
+            //
+            // Presuming it isn't advisable to force the track over.
+            if (mNextOutput->mTrack == NULL) {
+                ALOGD("Recycling track for gapless playback");
+                delete mNextOutput->mCallbackData;
+                mNextOutput->mCallbackData = mCallbackData;
+                mNextOutput->mRecycledTrack = mTrack;
+                mNextOutput->mSampleRateHz = mSampleRateHz;
+                mNextOutput->mMsecsPerFrame = mMsecsPerFrame;
+                mNextOutput->mFlags = mFlags;
+                mNextOutput->mFrameSize = mFrameSize;
+                close_l();
+                mCallbackData = NULL;  // destruction handled by mNextOutput
+            } else {
+                ALOGW("Ignoring gapless playback because next player has already started");
+                // remove track in case resource needed for future players.
+                if (mCallbackData != NULL) {
+                    mCallbackData->endTrackSwitch();  // release lock for callbacks before close.
+                }
+                close_l();
+            }
+        }
+        break;
+    }
+}
+
+ssize_t MediaPlayer2Manager::AudioOutput::write(const void* buffer, size_t size, bool blocking)
+{
+    Mutex::Autolock lock(mLock);
+    LOG_ALWAYS_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
+
+    //ALOGV("write(%p, %u)", buffer, size);
+    if (mTrack != 0) {
+        return mTrack->write(buffer, size, blocking);
+    }
+    return NO_INIT;
+}
+
+void MediaPlayer2Manager::AudioOutput::stop()
+{
+    ALOGV("stop");
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) mTrack->stop();
+}
+
+void MediaPlayer2Manager::AudioOutput::flush()
+{
+    ALOGV("flush");
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) mTrack->flush();
+}
+
+void MediaPlayer2Manager::AudioOutput::pause()
+{
+    ALOGV("pause");
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) mTrack->pause();
+}
+
+void MediaPlayer2Manager::AudioOutput::close()
+{
+    ALOGV("close");
+    sp<AudioTrack> track;
+    {
+        Mutex::Autolock lock(mLock);
+        track = mTrack;
+        close_l(); // clears mTrack
+    }
+    // destruction of the track occurs outside of mutex.
+}
+
+void MediaPlayer2Manager::AudioOutput::setVolume(float left, float right)
+{
+    ALOGV("setVolume(%f, %f)", left, right);
+    Mutex::Autolock lock(mLock);
+    mLeftVolume = left;
+    mRightVolume = right;
+    if (mTrack != 0) {
+        mTrack->setVolume(left, right);
+    }
+}
+
+status_t MediaPlayer2Manager::AudioOutput::setPlaybackRate(const AudioPlaybackRate &rate)
+{
+    ALOGV("setPlaybackRate(%f %f %d %d)",
+                rate.mSpeed, rate.mPitch, rate.mFallbackMode, rate.mStretchMode);
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) {
+        // remember rate so that we can set it when the track is opened
+        mPlaybackRate = rate;
+        return OK;
+    }
+    status_t res = mTrack->setPlaybackRate(rate);
+    if (res != NO_ERROR) {
+        return res;
+    }
+    // rate.mSpeed is always greater than 0 if setPlaybackRate succeeded
+    CHECK_GT(rate.mSpeed, 0.f);
+    mPlaybackRate = rate;
+    if (mSampleRateHz != 0) {
+        mMsecsPerFrame = 1E3f / (rate.mSpeed * mSampleRateHz);
+    }
+    return res;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::getPlaybackRate(AudioPlaybackRate *rate)
+{
+    ALOGV("setPlaybackRate");
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) {
+        return NO_INIT;
+    }
+    *rate = mTrack->getPlaybackRate();
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::setAuxEffectSendLevel(float level)
+{
+    ALOGV("setAuxEffectSendLevel(%f)", level);
+    Mutex::Autolock lock(mLock);
+    mSendLevel = level;
+    if (mTrack != 0) {
+        return mTrack->setAuxEffectSendLevel(level);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::attachAuxEffect(int effectId)
+{
+    ALOGV("attachAuxEffect(%d)", effectId);
+    Mutex::Autolock lock(mLock);
+    mAuxEffectId = effectId;
+    if (mTrack != 0) {
+        return mTrack->attachAuxEffect(effectId);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::setOutputDevice(audio_port_handle_t deviceId)
+{
+    ALOGV("setOutputDevice(%d)", deviceId);
+    Mutex::Autolock lock(mLock);
+    mSelectedDeviceId = deviceId;
+    if (mTrack != 0) {
+        return mTrack->setOutputDevice(deviceId);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::getRoutedDeviceId(audio_port_handle_t* deviceId)
+{
+    ALOGV("getRoutedDeviceId");
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) {
+        mRoutedDeviceId = mTrack->getRoutedDeviceId();
+    }
+    *deviceId = mRoutedDeviceId;
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2Manager::AudioOutput::enableAudioDeviceCallback(bool enabled)
+{
+    ALOGV("enableAudioDeviceCallback, %d", enabled);
+    Mutex::Autolock lock(mLock);
+    mDeviceCallbackEnabled = enabled;
+    if (mTrack != 0) {
+        status_t status;
+        if (enabled) {
+            status = mTrack->addAudioDeviceCallback(mDeviceCallback.promote());
+        } else {
+            status = mTrack->removeAudioDeviceCallback(mDeviceCallback.promote());
+        }
+        return status;
+    }
+    return NO_ERROR;
+}
+
+VolumeShaper::Status MediaPlayer2Manager::AudioOutput::applyVolumeShaper(
+                const sp<VolumeShaper::Configuration>& configuration,
+                const sp<VolumeShaper::Operation>& operation)
+{
+    Mutex::Autolock lock(mLock);
+    ALOGV("AudioOutput::applyVolumeShaper");
+
+    mVolumeHandler->setIdIfNecessary(configuration);
+
+    VolumeShaper::Status status;
+    if (mTrack != 0) {
+        status = mTrack->applyVolumeShaper(configuration, operation);
+        if (status >= 0) {
+            (void)mVolumeHandler->applyVolumeShaper(configuration, operation);
+            if (mTrack->isPlaying()) { // match local AudioTrack to properly restore.
+                mVolumeHandler->setStarted();
+            }
+        }
+    } else {
+        // VolumeShapers are not affected when a track moves between players for
+        // gapless playback (setNextMediaPlayer).
+        // We forward VolumeShaper operations that do not change configuration
+        // to the new player so that unducking may occur as expected.
+        // Unducking is an idempotent operation, same if applied back-to-back.
+        if (configuration->getType() == VolumeShaper::Configuration::TYPE_ID
+                && mNextOutput != nullptr) {
+            ALOGV("applyVolumeShaper: Attempting to forward missed operation: %s %s",
+                    configuration->toString().c_str(), operation->toString().c_str());
+            Mutex::Autolock nextLock(mNextOutput->mLock);
+
+            // recycled track should be forwarded from this AudioSink by switchToNextOutput
+            sp<AudioTrack> track = mNextOutput->mRecycledTrack;
+            if (track != nullptr) {
+                ALOGD("Forward VolumeShaper operation to recycled track %p", track.get());
+                (void)track->applyVolumeShaper(configuration, operation);
+            } else {
+                // There is a small chance that the unduck occurs after the next
+                // player has already started, but before it is registered to receive
+                // the unduck command.
+                track = mNextOutput->mTrack;
+                if (track != nullptr) {
+                    ALOGD("Forward VolumeShaper operation to track %p", track.get());
+                    (void)track->applyVolumeShaper(configuration, operation);
+                }
+            }
+        }
+        status = mVolumeHandler->applyVolumeShaper(configuration, operation);
+    }
+    return status;
+}
+
+sp<VolumeShaper::State> MediaPlayer2Manager::AudioOutput::getVolumeShaperState(int id)
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) {
+        return mTrack->getVolumeShaperState(id);
+    } else {
+        return mVolumeHandler->getVolumeShaperState(id);
+    }
+}
+
+// static
+void MediaPlayer2Manager::AudioOutput::CallbackWrapper(
+        int event, void *cookie, void *info) {
+    //ALOGV("callbackwrapper");
+    CallbackData *data = (CallbackData*)cookie;
+    // lock to ensure we aren't caught in the middle of a track switch.
+    data->lock();
+    AudioOutput *me = data->getOutput();
+    AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
+    if (me == NULL) {
+        // no output set, likely because the track was scheduled to be reused
+        // by another player, but the format turned out to be incompatible.
+        data->unlock();
+        if (buffer != NULL) {
+            buffer->size = 0;
+        }
+        return;
+    }
+
+    switch(event) {
+    case AudioTrack::EVENT_MORE_DATA: {
+        size_t actualSize = (*me->mCallback)(
+                me, buffer->raw, buffer->size, me->mCallbackCookie,
+                CB_EVENT_FILL_BUFFER);
+
+        // Log when no data is returned from the callback.
+        // (1) We may have no data (especially with network streaming sources).
+        // (2) We may have reached the EOS and the audio track is not stopped yet.
+        // Note that AwesomePlayer/AudioPlayer will only return zero size when it reaches the EOS.
+        // NuPlayer2Renderer will return zero when it doesn't have data (it doesn't block to fill).
+        //
+        // This is a benign busy-wait, with the next data request generated 10 ms or more later;
+        // nevertheless for power reasons, we don't want to see too many of these.
+
+        ALOGV_IF(actualSize == 0 && buffer->size > 0, "callbackwrapper: empty buffer returned");
+
+        buffer->size = actualSize;
+        } break;
+
+    case AudioTrack::EVENT_STREAM_END:
+        // currently only occurs for offloaded callbacks
+        ALOGV("callbackwrapper: deliver EVENT_STREAM_END");
+        (*me->mCallback)(me, NULL /* buffer */, 0 /* size */,
+                me->mCallbackCookie, CB_EVENT_STREAM_END);
+        break;
+
+    case AudioTrack::EVENT_NEW_IAUDIOTRACK :
+        ALOGV("callbackwrapper: deliver EVENT_TEAR_DOWN");
+        (*me->mCallback)(me, NULL /* buffer */, 0 /* size */,
+                me->mCallbackCookie, CB_EVENT_TEAR_DOWN);
+        break;
+
+    case AudioTrack::EVENT_UNDERRUN:
+        // This occurs when there is no data available, typically
+        // when there is a failure to supply data to the AudioTrack.  It can also
+        // occur in non-offloaded mode when the audio device comes out of standby.
+        //
+        // If an AudioTrack underruns it outputs silence. Since this happens suddenly
+        // it may sound like an audible pop or glitch.
+        //
+        // The underrun event is sent once per track underrun; the condition is reset
+        // when more data is sent to the AudioTrack.
+        ALOGD("callbackwrapper: EVENT_UNDERRUN (discarded)");
+        break;
+
+    default:
+        ALOGE("received unknown event type: %d inside CallbackWrapper !", event);
+    }
+
+    data->unlock();
+}
+
+audio_session_t MediaPlayer2Manager::AudioOutput::getSessionId() const
+{
+    Mutex::Autolock lock(mLock);
+    return mSessionId;
+}
+
+uint32_t MediaPlayer2Manager::AudioOutput::getSampleRate() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) return 0;
+    return mTrack->getSampleRate();
+}
+
+int64_t MediaPlayer2Manager::AudioOutput::getBufferDurationInUs() const
+{
+    Mutex::Autolock lock(mLock);
+    if (mTrack == 0) {
+        return 0;
+    }
+    int64_t duration;
+    if (mTrack->getBufferDurationInUs(&duration) != OK) {
+        return 0;
+    }
+    return duration;
+}
+
+} // namespace android
diff --git a/media/libmedia/MediaPlayer2Manager.h b/media/libmedia/MediaPlayer2Manager.h
new file mode 100644
index 0000000..b42cbbb
--- /dev/null
+++ b/media/libmedia/MediaPlayer2Manager.h
@@ -0,0 +1,414 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIAPLAYER2MANAGER_H
+#define ANDROID_MEDIAPLAYER2MANAGER_H
+
+#include <arpa/inet.h>
+
+#include <utils/threads.h>
+#include <utils/Errors.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+#include <media/MediaPlayer2Engine.h>
+#include <media/MediaPlayer2Interface.h>
+#include <media/Metadata.h>
+#include <media/stagefright/foundation/ABase.h>
+
+#include <system/audio.h>
+
+namespace android {
+
+struct ANativeWindowWrapper;
+struct AudioPlaybackRate;
+class AudioTrack;
+struct AVSyncSettings;
+class DataSource;
+struct MediaHTTPService;
+class MediaPlayer2EngineClient;
+
+#define CALLBACK_ANTAGONIZER 0
+#if CALLBACK_ANTAGONIZER
+class Antagonizer {
+public:
+    Antagonizer(
+            MediaPlayer2Base::NotifyCallback cb,
+            const wp<MediaPlayer2Engine> &client);
+    void start() { mActive = true; }
+    void stop() { mActive = false; }
+    void kill();
+private:
+    static const int interval;
+    Antagonizer();
+    static int callbackThread(void *cookie);
+    Mutex                            mLock;
+    Condition                        mCondition;
+    bool                             mExit;
+    bool                             mActive;
+    wp<MediaPlayer2Engine>           mClient;
+    MediaPlayer2Base::NotifyCallback mCb;
+};
+#endif
+
+class MediaPlayer2Manager {
+    class Client;
+
+    class AudioOutput : public MediaPlayer2Base::AudioSink
+    {
+        class CallbackData;
+
+     public:
+                                AudioOutput(
+                                        audio_session_t sessionId,
+                                        uid_t uid,
+                                        int pid,
+                                        const audio_attributes_t * attr,
+                                        const sp<AudioSystem::AudioDeviceCallback>& deviceCallback);
+        virtual                 ~AudioOutput();
+
+        virtual bool            ready() const { return mTrack != 0; }
+        virtual ssize_t         bufferSize() const;
+        virtual ssize_t         frameCount() const;
+        virtual ssize_t         channelCount() const;
+        virtual ssize_t         frameSize() const;
+        virtual uint32_t        latency() const;
+        virtual float           msecsPerFrame() const;
+        virtual status_t        getPosition(uint32_t *position) const;
+        virtual status_t        getTimestamp(AudioTimestamp &ts) const;
+        virtual int64_t         getPlayedOutDurationUs(int64_t nowUs) const;
+        virtual status_t        getFramesWritten(uint32_t *frameswritten) const;
+        virtual audio_session_t getSessionId() const;
+        virtual uint32_t        getSampleRate() const;
+        virtual int64_t         getBufferDurationInUs() const;
+
+        virtual status_t        open(
+                uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
+                audio_format_t format, int bufferCount,
+                AudioCallback cb, void *cookie,
+                audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                const audio_offload_info_t *offloadInfo = NULL,
+                bool doNotReconnect = false,
+                uint32_t suggestedFrameCount = 0);
+
+        virtual status_t        start();
+        virtual ssize_t         write(const void* buffer, size_t size, bool blocking = true);
+        virtual void            stop();
+        virtual void            flush();
+        virtual void            pause();
+        virtual void            close();
+                void            setAudioStreamType(audio_stream_type_t streamType);
+        virtual audio_stream_type_t getAudioStreamType() const { return mStreamType; }
+                void            setAudioAttributes(const audio_attributes_t * attributes);
+
+                void            setVolume(float left, float right);
+        virtual status_t        setPlaybackRate(const AudioPlaybackRate& rate);
+        virtual status_t        getPlaybackRate(AudioPlaybackRate* rate /* nonnull */);
+
+                status_t        setAuxEffectSendLevel(float level);
+                status_t        attachAuxEffect(int effectId);
+        virtual status_t        dump(int fd, const Vector<String16>& args) const;
+
+        static bool             isOnEmulator();
+        static int              getMinBufferCount();
+                void            setNextOutput(const sp<AudioOutput>& nextOutput);
+                void            switchToNextOutput();
+        virtual bool            needsTrailingPadding() { return mNextOutput == NULL; }
+        virtual status_t        setParameters(const String8& keyValuePairs);
+        virtual String8         getParameters(const String8& keys);
+
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                                        const sp<media::VolumeShaper::Configuration>& configuration,
+                                        const sp<media::VolumeShaper::Operation>& operation) override;
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) override;
+
+        // AudioRouting
+        virtual status_t        setOutputDevice(audio_port_handle_t deviceId);
+        virtual status_t        getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t        enableAudioDeviceCallback(bool enabled);
+
+    private:
+        static void             setMinBufferCount();
+        static void             CallbackWrapper(
+                int event, void *me, void *info);
+               void             deleteRecycledTrack_l();
+               void             close_l();
+           status_t             updateTrack();
+
+        sp<AudioTrack>          mTrack;
+        sp<AudioTrack>          mRecycledTrack;
+        sp<AudioOutput>         mNextOutput;
+        AudioCallback           mCallback;
+        void *                  mCallbackCookie;
+        CallbackData *          mCallbackData;
+        audio_stream_type_t     mStreamType;
+        audio_attributes_t *    mAttributes;
+        float                   mLeftVolume;
+        float                   mRightVolume;
+        AudioPlaybackRate       mPlaybackRate;
+        uint32_t                mSampleRateHz; // sample rate of the content, as set in open()
+        float                   mMsecsPerFrame;
+        size_t                  mFrameSize;
+        audio_session_t         mSessionId;
+        uid_t                   mUid;
+        int                     mPid;
+        float                   mSendLevel;
+        int                     mAuxEffectId;
+        audio_output_flags_t    mFlags;
+        sp<media::VolumeHandler>       mVolumeHandler;
+        audio_port_handle_t     mSelectedDeviceId;
+        audio_port_handle_t     mRoutedDeviceId;
+        bool                    mDeviceCallbackEnabled;
+        wp<AudioSystem::AudioDeviceCallback>        mDeviceCallback;
+        mutable Mutex           mLock;
+
+        // static variables below not protected by mutex
+        static bool             mIsOnEmulator;
+        static int              mMinBufferCount;  // 12 for emulator; otherwise 4
+
+        // CallbackData is what is passed to the AudioTrack as the "user" data.
+        // We need to be able to target this to a different Output on the fly,
+        // so we can't use the Output itself for this.
+        class CallbackData {
+            friend AudioOutput;
+        public:
+            explicit CallbackData(AudioOutput *cookie) {
+                mData = cookie;
+                mSwitching = false;
+            }
+            AudioOutput *   getOutput() const { return mData; }
+            void            setOutput(AudioOutput* newcookie) { mData = newcookie; }
+            // lock/unlock are used by the callback before accessing the payload of this object
+            void            lock() const { mLock.lock(); }
+            void            unlock() const { mLock.unlock(); }
+
+            // tryBeginTrackSwitch/endTrackSwitch are used when the CallbackData is handed over
+            // to the next sink.
+
+            // tryBeginTrackSwitch() returns true only if it obtains the lock.
+            bool            tryBeginTrackSwitch() {
+                LOG_ALWAYS_FATAL_IF(mSwitching, "tryBeginTrackSwitch() already called");
+                if (mLock.tryLock() != OK) {
+                    return false;
+                }
+                mSwitching = true;
+                return true;
+            }
+            void            endTrackSwitch() {
+                if (mSwitching) {
+                    mLock.unlock();
+                }
+                mSwitching = false;
+            }
+        private:
+            AudioOutput *   mData;
+            mutable Mutex   mLock; // a recursive mutex might make this unnecessary.
+            bool            mSwitching;
+            DISALLOW_EVIL_CONSTRUCTORS(CallbackData);
+        };
+
+    }; // AudioOutput
+
+
+public:
+    MediaPlayer2Manager();
+    virtual ~MediaPlayer2Manager();
+
+    static MediaPlayer2Manager& get();
+
+    // MediaPlayer2Manager interface
+    virtual sp<MediaPlayer2Engine> create(const sp<MediaPlayer2EngineClient>& client,
+                                          audio_session_t audioSessionId);
+
+    virtual status_t            dump(int fd, const Vector<String16>& args);
+
+            void                removeClient(const wp<Client>& client);
+            bool                hasClient(wp<Client> client);
+
+private:
+    class Client : public MediaPlayer2Engine {
+        // MediaPlayer2Engine interface
+        virtual void            disconnect();
+        virtual status_t        setVideoSurfaceTexture(
+                const sp<ANativeWindowWrapper>& nww) override;
+        virtual status_t        setBufferingSettings(const BufferingSettings& buffering) override;
+        virtual status_t        getBufferingSettings(
+                                        BufferingSettings* buffering /* nonnull */) override;
+        virtual status_t        prepareAsync();
+        virtual status_t        start();
+        virtual status_t        stop();
+        virtual status_t        pause();
+        virtual status_t        isPlaying(bool* state);
+        virtual status_t        setPlaybackSettings(const AudioPlaybackRate& rate);
+        virtual status_t        getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */);
+        virtual status_t        setSyncSettings(const AVSyncSettings& rate, float videoFpsHint);
+        virtual status_t        getSyncSettings(AVSyncSettings* rate /* nonnull */,
+                                                float* videoFps /* nonnull */);
+        virtual status_t        seekTo(
+                int msec,
+                MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC);
+        virtual status_t        getCurrentPosition(int* msec);
+        virtual status_t        getDuration(int* msec);
+        virtual status_t        reset();
+        virtual status_t        notifyAt(int64_t mediaTimeUs);
+        virtual status_t        setAudioStreamType(audio_stream_type_t type);
+        virtual status_t        setLooping(int loop);
+        virtual status_t        setVolume(float leftVolume, float rightVolume);
+        virtual status_t        invoke(const Parcel& request, Parcel *reply);
+        virtual status_t        setMetadataFilter(const Parcel& filter);
+        virtual status_t        getMetadata(bool update_only,
+                                            bool apply_filter,
+                                            Parcel *reply);
+        virtual status_t        setAuxEffectSendLevel(float level);
+        virtual status_t        attachAuxEffect(int effectId);
+        virtual status_t        setParameter(int key, const Parcel &request);
+        virtual status_t        getParameter(int key, Parcel *reply);
+        virtual status_t        setRetransmitEndpoint(const struct sockaddr_in* endpoint);
+        virtual status_t        getRetransmitEndpoint(struct sockaddr_in* endpoint);
+        virtual status_t        setNextPlayer(const sp<MediaPlayer2Engine>& player);
+
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                                        const sp<media::VolumeShaper::Configuration>& configuration,
+                                        const sp<media::VolumeShaper::Operation>& operation) override;
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) override;
+
+        sp<MediaPlayer2Base>    createPlayer(player2_type playerType);
+
+        virtual status_t        setDataSource(
+                        const sp<MediaHTTPService> &httpService,
+                        const char *url,
+                        const KeyedVector<String8, String8> *headers);
+
+        virtual status_t        setDataSource(int fd, int64_t offset, int64_t length);
+
+        virtual status_t        setDataSource(const sp<IStreamSource> &source);
+        virtual status_t        setDataSource(const sp<DataSource> &source);
+
+
+        sp<MediaPlayer2Base>    setDataSource_pre(player2_type playerType);
+        status_t                setDataSource_post(const sp<MediaPlayer2Base>& p,
+                                                   status_t status);
+
+        static  void            notify(const wp<MediaPlayer2Engine> &listener, int msg,
+                                       int ext1, int ext2, const Parcel *obj);
+
+                pid_t           pid() const { return mPid; }
+        virtual status_t        dump(int fd, const Vector<String16>& args);
+
+                audio_session_t getAudioSessionId() { return mAudioSessionId; }
+        // Modular DRM
+        virtual status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t>& drmSessionId);
+        virtual status_t releaseDrm();
+        // AudioRouting
+        virtual status_t setOutputDevice(audio_port_handle_t deviceId);
+        virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t enableAudioDeviceCallback(bool enabled);
+
+    private:
+        class AudioDeviceUpdatedNotifier: public AudioSystem::AudioDeviceCallback
+        {
+        public:
+            AudioDeviceUpdatedNotifier(const sp<MediaPlayer2Base>& listener) {
+                mListener = listener;
+            }
+            ~AudioDeviceUpdatedNotifier() {}
+
+            virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+                                             audio_port_handle_t deviceId);
+
+        private:
+            wp<MediaPlayer2Base> mListener;
+        };
+
+        friend class MediaPlayer2Manager;
+                                Client(pid_t pid,
+                                       int32_t connId,
+                                       const sp<MediaPlayer2EngineClient>& client,
+                                       audio_session_t audioSessionId,
+                                       uid_t uid);
+                                Client();
+        virtual                 ~Client();
+
+                void            deletePlayer();
+
+        sp<MediaPlayer2Base>     getPlayer() const { Mutex::Autolock lock(mLock); return mPlayer; }
+
+        // @param type Of the metadata to be tested.
+        // @return true if the metadata should be dropped according to
+        //              the filters.
+        bool shouldDropMetadata(media::Metadata::Type type) const;
+
+        // Add a new element to the set of metadata updated. Noop if
+        // the element exists already.
+        // @param type Of the metadata to be recorded.
+        void addNewMetadataUpdate(media::Metadata::Type type);
+
+        // Disconnect from the currently connected ANativeWindow.
+        void disconnectNativeWindow_l();
+
+        status_t setAudioAttributes_l(const Parcel &request);
+
+        mutable     Mutex                        mLock;
+                    sp<MediaPlayer2Base>         mPlayer;
+                    sp<MediaPlayer2EngineClient> mClient;
+                    sp<AudioOutput>              mAudioOutput;
+                    pid_t                        mPid;
+                    status_t                     mStatus;
+                    bool                         mLoop;
+                    int32_t                      mConnId;
+                    audio_session_t              mAudioSessionId;
+                    audio_attributes_t *         mAudioAttributes;
+                    uid_t                        mUid;
+                    sp<ANativeWindowWrapper>     mConnectedWindow;
+                    struct sockaddr_in           mRetransmitEndpoint;
+                    bool                         mRetransmitEndpointValid;
+                    sp<Client>                   mNextClient;
+
+        // Metadata filters.
+        media::Metadata::Filter mMetadataAllow;  // protected by mLock
+        media::Metadata::Filter mMetadataDrop;  // protected by mLock
+
+        // Metadata updated. For each MEDIA_INFO_METADATA_UPDATE
+        // notification we add the updated type to mMetadataUpdated,
+        // which is a set (no duplicates).
+        // getMetadata clears this set.
+        media::Metadata::Filter mMetadataUpdated;  // protected by mLock
+
+        sp<AudioDeviceUpdatedNotifier> mAudioDeviceUpdatedListener;
+#if CALLBACK_ANTAGONIZER
+                    Antagonizer*                mAntagonizer;
+#endif
+    }; // Client
+
+// ----------------------------------------------------------------------------
+
+    pid_t mPid;
+    uid_t mUid;
+
+    mutable Mutex mLock;
+    SortedVector< wp<Client> > mClients;
+    int32_t mNextConnId;
+};
+
+// ----------------------------------------------------------------------------
+
+}; // namespace android
+
+#endif // ANDROID_MEDIAPLAYER2MANAGER_H
diff --git a/media/libmedia/NdkWrapper.cpp b/media/libmedia/NdkWrapper.cpp
new file mode 100644
index 0000000..942393d
--- /dev/null
+++ b/media/libmedia/NdkWrapper.cpp
@@ -0,0 +1,1144 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkWrapper"
+
+#include <media/NdkWrapper.h>
+
+#include <android/native_window.h>
+#include <log/log.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaCrypto.h>
+#include <media/NdkMediaDrm.h>
+#include <media/NdkMediaFormat.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <utils/Errors.h>
+
+namespace android {
+
+static const size_t kAESBlockSize = 16;  // AES_BLOCK_SIZE
+
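+// Key tables used by AMediaFormatWrapper::toAMessage() to translate AMediaFormat entries
+// into AMessage entries, grouped by value type.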
+static const char *AMediaFormatKeyGroupInt32[] = {
+    AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR,
+    AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR,
+    AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION,
+    AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL,
+    AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL,
+    AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT,
+    AMEDIAFORMAT_KEY_AAC_PROFILE,
+    AMEDIAFORMAT_KEY_AAC_SBR_MODE,
+    AMEDIAFORMAT_KEY_AUDIO_SESSION_ID,
+    AMEDIAFORMAT_KEY_BITRATE_MODE,
+    AMEDIAFORMAT_KEY_BIT_RATE,
+    AMEDIAFORMAT_KEY_CAPTURE_RATE,
+    AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+    AMEDIAFORMAT_KEY_CHANNEL_MASK,
+    AMEDIAFORMAT_KEY_COLOR_FORMAT,
+    AMEDIAFORMAT_KEY_COLOR_RANGE,
+    AMEDIAFORMAT_KEY_COLOR_STANDARD,
+    AMEDIAFORMAT_KEY_COLOR_TRANSFER,
+    AMEDIAFORMAT_KEY_COMPLEXITY,
+    AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL,
+    AMEDIAFORMAT_KEY_GRID_COLS,
+    AMEDIAFORMAT_KEY_GRID_HEIGHT,
+    AMEDIAFORMAT_KEY_GRID_ROWS,
+    AMEDIAFORMAT_KEY_GRID_WIDTH,
+    AMEDIAFORMAT_KEY_HEIGHT,
+    AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD,
+    AMEDIAFORMAT_KEY_IS_ADTS,
+    AMEDIAFORMAT_KEY_IS_AUTOSELECT,
+    AMEDIAFORMAT_KEY_IS_DEFAULT,
+    AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE,
+    AMEDIAFORMAT_KEY_LATENCY,
+    AMEDIAFORMAT_KEY_LEVEL,
+    AMEDIAFORMAT_KEY_MAX_HEIGHT,
+    AMEDIAFORMAT_KEY_MAX_INPUT_SIZE,
+    AMEDIAFORMAT_KEY_MAX_WIDTH,
+    AMEDIAFORMAT_KEY_PCM_ENCODING,
+    AMEDIAFORMAT_KEY_PRIORITY,
+    AMEDIAFORMAT_KEY_PROFILE,
+    AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP,
+    AMEDIAFORMAT_KEY_ROTATION,
+    AMEDIAFORMAT_KEY_SAMPLE_RATE,
+    AMEDIAFORMAT_KEY_SLICE_HEIGHT,
+    AMEDIAFORMAT_KEY_STRIDE,
+    AMEDIAFORMAT_KEY_TRACK_ID,
+    AMEDIAFORMAT_KEY_WIDTH,
+};
+
+static const char *AMediaFormatKeyGroupInt64[] = {
+    AMEDIAFORMAT_KEY_DURATION,
+    AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER,
+};
+
+static const char *AMediaFormatKeyGroupString[] = {
+    AMEDIAFORMAT_KEY_LANGUAGE,
+    AMEDIAFORMAT_KEY_MIME,
+    AMEDIAFORMAT_KEY_TEMPORAL_LAYERING,
+};
+
+static const char *AMediaFormatKeyGroupBuffer[] = {
+    AMEDIAFORMAT_KEY_HDR_STATIC_INFO,
+};
+
+static const char *AMediaFormatKeyGroupRect[] = {
+    AMEDIAFORMAT_KEY_DISPLAY_CROP,
+};
+
+static const char *AMediaFormatKeyGroupFloatInt32[] = {
+    AMEDIAFORMAT_KEY_FRAME_RATE,
+    AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+    AMEDIAFORMAT_KEY_OPERATING_RATE,
+};
+
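+// Maps an NDK media_status_t onto a framework status_t; unrecognized codes are logged and
+// reported as UNKNOWN_ERROR.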
+static status_t translateErrorCode(media_status_t err) {
+    if (err == AMEDIA_OK) {
+        return OK;
+    } else if (err == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
+        return -EAGAIN;
+    }
+
+    ALOGE("ndk error code: %d", err);
+    return UNKNOWN_ERROR;
+}
+
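+// Classifies an NDK codec action code as transient, recoverable or fatal for the framework.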
+static int32_t translateActionCode(int32_t actionCode) {
+    if (AMediaCodecActionCode_isTransient(actionCode)) {
+        return ACTION_CODE_TRANSIENT;
+    } else if (AMediaCodecActionCode_isRecoverable(actionCode)) {
+        return ACTION_CODE_RECOVERABLE;
+    }
+    return ACTION_CODE_FATAL;
+}
+
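+// Converts an NDK cryptoinfo_mode_t to the corresponding CryptoPlugin::Mode,
+// defaulting to unencrypted for unknown modes.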
+static CryptoPlugin::Mode translateToCryptoPluginMode(cryptoinfo_mode_t mode) {
+    CryptoPlugin::Mode ret = CryptoPlugin::kMode_Unencrypted;
+    switch (mode) {
+        case AMEDIACODECRYPTOINFO_MODE_AES_CTR: {
+            ret = CryptoPlugin::kMode_AES_CTR;
+            break;
+        }
+
+        case AMEDIACODECRYPTOINFO_MODE_AES_WV: {
+            ret = CryptoPlugin::kMode_AES_WV;
+            break;
+        }
+
+        case AMEDIACODECRYPTOINFO_MODE_AES_CBC: {
+            ret = CryptoPlugin::kMode_AES_CBC;
+            break;
+        }
+
+        default:
+            break;
+    }
+
+    return ret;
+}
+
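+// Converts a CryptoPlugin::Mode to the corresponding NDK cryptoinfo_mode_t,
+// defaulting to clear for unknown modes.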
+static cryptoinfo_mode_t translateToCryptoInfoMode(CryptoPlugin::Mode mode) {
+    cryptoinfo_mode_t ret = AMEDIACODECRYPTOINFO_MODE_CLEAR;
+    switch (mode) {
+        case CryptoPlugin::kMode_AES_CTR: {
+            ret = AMEDIACODECRYPTOINFO_MODE_AES_CTR;
+            break;
+        }
+
+        case CryptoPlugin::kMode_AES_WV: {
+            ret = AMEDIACODECRYPTOINFO_MODE_AES_WV;
+            break;
+        }
+
+        case CryptoPlugin::kMode_AES_CBC: {
+            ret = AMEDIACODECRYPTOINFO_MODE_AES_CBC;
+            break;
+        }
+
+        default:
+            break;
+    }
+
+    return ret;
+}
+
+//////////// AMediaFormatWrapper
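+// Builds an AMediaFormatWrapper by copying every entry of the given AMessage,
+// dispatching on the entry's value type; entries that fail to read are skipped.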
+// static
+sp<AMediaFormatWrapper> AMediaFormatWrapper::Create(const sp<AMessage> &message) {
+    sp<AMediaFormatWrapper> aMediaFormat = new AMediaFormatWrapper();
+
+    for (size_t i = 0; i < message->countEntries(); ++i) {
+        AMessage::Type valueType;
+        const char *key = message->getEntryNameAt(i, &valueType);
+
+        switch (valueType) {
+            case AMessage::kTypeInt32: {
+                int32_t val;
+                if (!message->findInt32(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setInt32(key, val);
+                break;
+            }
+
+            case AMessage::kTypeInt64: {
+                int64_t val;
+                if (!message->findInt64(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setInt64(key, val);
+                break;
+            }
+
+            case AMessage::kTypeFloat: {
+                float val;
+                if (!message->findFloat(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setFloat(key, val);
+                break;
+            }
+
+            case AMessage::kTypeDouble: {
+                double val;
+                if (!message->findDouble(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setDouble(key, val);
+                break;
+            }
+
+            case AMessage::kTypeSize: {
+                size_t val;
+                if (!message->findSize(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setSize(key, val);
+                break;
+            }
+
+            case AMessage::kTypeRect: {
+                int32_t left, top, right, bottom;
+                if (!message->findRect(key, &left, &top, &right, &bottom)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setRect(key, left, top, right, bottom);
+                break;
+            }
+
+            case AMessage::kTypeString: {
+                AString val;
+                if (!message->findString(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setString(key, val);
+                break;
+            }
+
+            case AMessage::kTypeBuffer: {
+                sp<ABuffer> val;
+                if (!message->findBuffer(key, &val)) {
+                    ALOGE("AMediaFormatWrapper::Create: error at item %zu", i);
+                    continue;
+                }
+                aMediaFormat->setBuffer(key, val->data(), val->size());
+                break;
+            }
+
+            default: {
+                break;
+            }
+        }
+    }
+
+    return aMediaFormat;
+}
+
+AMediaFormatWrapper::AMediaFormatWrapper() {
+    mAMediaFormat = AMediaFormat_new();
+}
+
+AMediaFormatWrapper::AMediaFormatWrapper(AMediaFormat *aMediaFormat)
+    : mAMediaFormat(aMediaFormat) {
+}
+
+AMediaFormatWrapper::~AMediaFormatWrapper() {
+    release();
+}
+
+status_t AMediaFormatWrapper::release() {
+    if (mAMediaFormat != NULL) {
+        media_status_t err = AMediaFormat_delete(mAMediaFormat);
+        mAMediaFormat = NULL;
+        return translateErrorCode(err);
+    }
+    return OK;
+}
+
+AMediaFormat *AMediaFormatWrapper::getAMediaFormat() const {
+    return mAMediaFormat;
+}
+
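+// Rebuilds an AMessage from the wrapped format by probing each known key group; keys in
+// the float/int32 group fall back to an int32 value converted to float.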
+sp<AMessage> AMediaFormatWrapper::toAMessage() const {
+    if (mAMediaFormat == NULL) {
+        return NULL;
+    }
+
+    sp<AMessage> msg = new AMessage;
+    for (auto& key : AMediaFormatKeyGroupInt32) {
+        int32_t val;
+        if (getInt32(key, &val)) {
+            msg->setInt32(key, val);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupInt64) {
+        int64_t val;
+        if (getInt64(key, &val)) {
+            msg->setInt64(key, val);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupString) {
+        AString val;
+        if (getString(key, &val)) {
+            msg->setString(key, val);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupBuffer) {
+        void *data;
+        size_t size;
+        if (getBuffer(key, &data, &size)) {
+            sp<ABuffer> buffer = ABuffer::CreateAsCopy(data, size);
+            msg->setBuffer(key, buffer);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupRect) {
+        int32_t left, top, right, bottom;
+        if (getRect(key, &left, &top, &right, &bottom)) {
+            msg->setRect(key, left, top, right, bottom);
+        }
+    }
+    for (auto& key : AMediaFormatKeyGroupFloatInt32) {
+        float valFloat;
+        if (getFloat(key, &valFloat)) {
+            msg->setFloat(key, valFloat);
+        } else {
+            int32_t valInt32;
+            if (getInt32(key, &valInt32)) {
+                msg->setFloat(key, (float)valInt32);
+            }
+        }
+    }
+    return msg;
+}
+
+const char* AMediaFormatWrapper::toString() const {
+    if (mAMediaFormat == NULL) {
+        return NULL;
+    }
+    return AMediaFormat_toString(mAMediaFormat);
+}
+
+bool AMediaFormatWrapper::getInt32(const char *name, int32_t *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getInt32(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getInt64(const char *name, int64_t *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getInt64(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getFloat(const char *name, float *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getFloat(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getDouble(const char *name, double *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getDouble(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getSize(const char *name, size_t *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getSize(mAMediaFormat, name, out);
+}
+
+bool AMediaFormatWrapper::getRect(
+        const char *name, int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getRect(mAMediaFormat, name, left, top, right, bottom);
+}
+
+bool AMediaFormatWrapper::getBuffer(const char *name, void** data, size_t *outSize) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    return AMediaFormat_getBuffer(mAMediaFormat, name, data, outSize);
+}
+
+bool AMediaFormatWrapper::getString(const char *name, AString *out) const {
+    if (mAMediaFormat == NULL) {
+        return false;
+    }
+    const char *outChar = NULL;
+    bool ret = AMediaFormat_getString(mAMediaFormat, name, &outChar);
+    if (ret) {
+        *out = AString(outChar);
+    }
+    return ret;
+}
+
+void AMediaFormatWrapper::setInt32(const char* name, int32_t value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setInt32(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setInt64(const char* name, int64_t value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setInt64(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setFloat(const char* name, float value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setFloat(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setDouble(const char* name, double value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setDouble(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setSize(const char* name, size_t value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setSize(mAMediaFormat, name, value);
+    }
+}
+
+void AMediaFormatWrapper::setRect(
+        const char* name, int32_t left, int32_t top, int32_t right, int32_t bottom) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setRect(mAMediaFormat, name, left, top, right, bottom);
+    }
+}
+
+void AMediaFormatWrapper::setString(const char* name, const AString &value) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setString(mAMediaFormat, name, value.c_str());
+    }
+}
+
+void AMediaFormatWrapper::setBuffer(const char* name, void* data, size_t size) {
+    if (mAMediaFormat != NULL) {
+        AMediaFormat_setBuffer(mAMediaFormat, name, data, size);
+    }
+}
+
+
+//////////// ANativeWindowWrapper
+ANativeWindowWrapper::ANativeWindowWrapper(ANativeWindow *aNativeWindow)
+    : mANativeWindow(aNativeWindow) {
+    if (aNativeWindow != NULL) {
+        ANativeWindow_acquire(aNativeWindow);
+    }
+}
+
+ANativeWindowWrapper::~ANativeWindowWrapper() {
+    release();
+}
+
+status_t ANativeWindowWrapper::release() {
+    if (mANativeWindow != NULL) {
+        ANativeWindow_release(mANativeWindow);
+        mANativeWindow = NULL;
+    }
+    return OK;
+}
+
+ANativeWindow *ANativeWindowWrapper::getANativeWindow() const {
+    return mANativeWindow;
+}
+
+
+//////////// AMediaDrmWrapper
+AMediaDrmWrapper::AMediaDrmWrapper(const uint8_t uuid[16]) {
+    mAMediaDrm = AMediaDrm_createByUUID(uuid);
+}
+
+AMediaDrmWrapper::AMediaDrmWrapper(AMediaDrm *aMediaDrm)
+    : mAMediaDrm(aMediaDrm) {
+}
+
+AMediaDrmWrapper::~AMediaDrmWrapper() {
+    release();
+}
+
+status_t AMediaDrmWrapper::release() {
+    if (mAMediaDrm != NULL) {
+        AMediaDrm_release(mAMediaDrm);
+        mAMediaDrm = NULL;
+    }
+    return OK;
+}
+
+AMediaDrm *AMediaDrmWrapper::getAMediaDrm() const {
+    return mAMediaDrm;
+}
+
+// static
+bool AMediaDrmWrapper::isCryptoSchemeSupported(
+        const uint8_t uuid[16],
+        const char *mimeType) {
+    return AMediaDrm_isCryptoSchemeSupported(uuid, mimeType);
+}
+
+
+//////////// AMediaCryptoWrapper
+AMediaCryptoWrapper::AMediaCryptoWrapper(
+        const uint8_t uuid[16], const void *initData, size_t initDataSize) {
+    mAMediaCrypto = AMediaCrypto_new(uuid, initData, initDataSize);
+}
+
+AMediaCryptoWrapper::AMediaCryptoWrapper(AMediaCrypto *aMediaCrypto)
+    : mAMediaCrypto(aMediaCrypto) {
+}
+
+AMediaCryptoWrapper::~AMediaCryptoWrapper() {
+    release();
+}
+
+status_t AMediaCryptoWrapper::release() {
+    if (mAMediaCrypto != NULL) {
+        AMediaCrypto_delete(mAMediaCrypto);
+        mAMediaCrypto = NULL;
+    }
+    return OK;
+}
+
+AMediaCrypto *AMediaCryptoWrapper::getAMediaCrypto() const {
+    return mAMediaCrypto;
+}
+
+bool AMediaCryptoWrapper::isCryptoSchemeSupported(const uint8_t uuid[16]) {
+    if (mAMediaCrypto == NULL) {
+        return false;
+    }
+    return AMediaCrypto_isCryptoSchemeSupported(uuid);
+}
+
+bool AMediaCryptoWrapper::requiresSecureDecoderComponent(const char *mime) {
+    if (mAMediaCrypto == NULL) {
+        return false;
+    }
+    return AMediaCrypto_requiresSecureDecoderComponent(mime);
+}
+
+
+//////////// AMediaCodecCryptoInfoWrapper
+// static
+sp<AMediaCodecCryptoInfoWrapper> AMediaCodecCryptoInfoWrapper::Create(sp<MetaData> meta) {
+    if (meta == NULL) {
+        ALOGE("Create: Unexpected. No meta data for sample.");
+        return NULL;
+    }
+
+    uint32_t type;
+    const void *crypteddata;
+    size_t cryptedsize;
+
+    if (!meta->findData(kKeyEncryptedSizes, &type, &crypteddata, &cryptedsize)) {
+        return NULL;
+    }
+
+    int numSubSamples = cryptedsize / sizeof(size_t);
+
+    if (numSubSamples <= 0) {
+        ALOGE("Create: INVALID numSubSamples: %d", numSubSamples);
+        return NULL;
+    }
+
+    const void *cleardata = NULL;
+    size_t clearsize = 0;
+    if (meta->findData(kKeyPlainSizes, &type, &cleardata, &clearsize)) {
+        if (clearsize != cryptedsize) {
+            // The two must be of the same length.
+            ALOGE("Create: mismatch cryptedsize: %zu != clearsize: %zu", cryptedsize, clearsize);
+            return NULL;
+        }
+    }
+
+    const void *key = NULL;
+    size_t keysize = 0;
+    if (meta->findData(kKeyCryptoKey, &type, &key, &keysize)) {
+        if (keysize != kAESBlockSize) {
+            // Keys must be 16 bytes in length.
+            ALOGE("Create: Keys must be %zu bytes in length: %zu", kAESBlockSize, keysize);
+            return NULL;
+        }
+    }
+
+    const void *iv = NULL;
+    size_t ivsize = 0;
+    if (meta->findData(kKeyCryptoIV, &type, &iv, &ivsize)) {
+        if (ivsize != kAESBlockSize) {
+            // IVs must be 16 bytes in length.
+            ALOGE("Create: IV must be %zu bytes in length: %zu", kAESBlockSize, ivsize);
+            return NULL;
+        }
+    }
+
+    int32_t mode;
+    if (!meta->findInt32(kKeyCryptoMode, &mode)) {
+        mode = CryptoPlugin::kMode_AES_CTR;
+    }
+
+    return new AMediaCodecCryptoInfoWrapper(
+            numSubSamples,
+            (uint8_t*) key,
+            (uint8_t*) iv,
+            (CryptoPlugin::Mode)mode,
+            (size_t*) cleardata,
+            (size_t*) crypteddata);
+}
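+
+// For illustration only: a sample carrying two subsamples might provide
+// metadata roughly laid out as follows (sizes in bytes, values hypothetical):
+//   kKeyPlainSizes     -> size_t[2] = { 10, 0 }
+//   kKeyEncryptedSizes -> size_t[2] = { 160, 512 }
+//   kKeyCryptoKey      -> 16-byte key
+//   kKeyCryptoIV       -> 16-byte IV
+//   kKeyCryptoMode     -> CryptoPlugin::kMode_AES_CTR (assumed when absent)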
+
+AMediaCodecCryptoInfoWrapper::AMediaCodecCryptoInfoWrapper(
+        int numsubsamples,
+        uint8_t key[16],
+        uint8_t iv[16],
+        CryptoPlugin::Mode mode,
+        size_t *clearbytes,
+        size_t *encryptedbytes) {
+    mAMediaCodecCryptoInfo =
+        AMediaCodecCryptoInfo_new(numsubsamples,
+                                  key,
+                                  iv,
+                                  translateToCryptoInfoMode(mode),
+                                  clearbytes,
+                                  encryptedbytes);
+}
+
+AMediaCodecCryptoInfoWrapper::AMediaCodecCryptoInfoWrapper(
+        AMediaCodecCryptoInfo *aMediaCodecCryptoInfo)
+    : mAMediaCodecCryptoInfo(aMediaCodecCryptoInfo) {
+}
+
+AMediaCodecCryptoInfoWrapper::~AMediaCodecCryptoInfoWrapper() {
+    release();
+}
+
+status_t AMediaCodecCryptoInfoWrapper::release() {
+    if (mAMediaCodecCryptoInfo != NULL) {
+        media_status_t err = AMediaCodecCryptoInfo_delete(mAMediaCodecCryptoInfo);
+        mAMediaCodecCryptoInfo = NULL;
+        return translateErrorCode(err);
+    }
+    return OK;
+}
+
+AMediaCodecCryptoInfo *AMediaCodecCryptoInfoWrapper::getAMediaCodecCryptoInfo() const {
+    return mAMediaCodecCryptoInfo;
+}
+
+void AMediaCodecCryptoInfoWrapper::setPattern(CryptoPlugin::Pattern *pattern) {
+    if (mAMediaCodecCryptoInfo == NULL || pattern == NULL) {
+        return;
+    }
+    cryptoinfo_pattern_t ndkPattern = {(int32_t)pattern->mEncryptBlocks,
+                                       (int32_t)pattern->mSkipBlocks };
+    return AMediaCodecCryptoInfo_setPattern(mAMediaCodecCryptoInfo, &ndkPattern);
+}
+
+size_t AMediaCodecCryptoInfoWrapper::getNumSubSamples() {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return 0;
+    }
+    return AMediaCodecCryptoInfo_getNumSubSamples(mAMediaCodecCryptoInfo);
+}
+
+status_t AMediaCodecCryptoInfoWrapper::getKey(uint8_t *dst) {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return DEAD_OBJECT;
+    }
+    if (dst == NULL) {
+        return BAD_VALUE;
+    }
+    return translateErrorCode(
+        AMediaCodecCryptoInfo_getKey(mAMediaCodecCryptoInfo, dst));
+}
+
+status_t AMediaCodecCryptoInfoWrapper::getIV(uint8_t *dst) {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return DEAD_OBJECT;
+    }
+    if (dst == NULL) {
+        return BAD_VALUE;
+    }
+    return translateErrorCode(
+        AMediaCodecCryptoInfo_getIV(mAMediaCodecCryptoInfo, dst));
+}
+
+CryptoPlugin::Mode AMediaCodecCryptoInfoWrapper::getMode() {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return CryptoPlugin::kMode_Unencrypted;
+    }
+    return translateToCryptoPluginMode(
+        AMediaCodecCryptoInfo_getMode(mAMediaCodecCryptoInfo));
+}
+
+status_t AMediaCodecCryptoInfoWrapper::getClearBytes(size_t *dst) {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return DEAD_OBJECT;
+    }
+    if (dst == NULL) {
+        return BAD_VALUE;
+    }
+    return translateErrorCode(
+        AMediaCodecCryptoInfo_getClearBytes(mAMediaCodecCryptoInfo, dst));
+}
+
+status_t AMediaCodecCryptoInfoWrapper::getEncryptedBytes(size_t *dst) {
+    if (mAMediaCodecCryptoInfo == NULL) {
+        return DEAD_OBJECT;
+    }
+    if (dst == NULL) {
+        return BAD_VALUE;
+    }
+    return translateErrorCode(
+        AMediaCodecCryptoInfo_getEncryptedBytes(mAMediaCodecCryptoInfo, dst));
+}
+
+
+//////////// AMediaCodecWrapper
+// static
+sp<AMediaCodecWrapper> AMediaCodecWrapper::CreateCodecByName(const AString &name) {
+    AMediaCodec *aMediaCodec = AMediaCodec_createCodecByName(name.c_str());
+    return new AMediaCodecWrapper(aMediaCodec);
+}
+
+// static
+sp<AMediaCodecWrapper> AMediaCodecWrapper::CreateDecoderByType(const AString &mimeType) {
+    AMediaCodec *aMediaCodec = AMediaCodec_createDecoderByType(mimeType.c_str());
+    return new AMediaCodecWrapper(aMediaCodec);
+}
+
+// static
+void AMediaCodecWrapper::OnInputAvailableCB(
+        AMediaCodec * /* aMediaCodec */,
+        void *userdata,
+        int32_t index) {
+    ALOGV("OnInputAvailableCB: index(%d)", index);
+    sp<AMessage> msg = sp<AMessage>((AMessage *)userdata)->dup();
+    msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
+    msg->setInt32("index", index);
+    msg->post();
+}
+
+// static
+void AMediaCodecWrapper::OnOutputAvailableCB(
+        AMediaCodec * /* aMediaCodec */,
+        void *userdata,
+        int32_t index,
+        AMediaCodecBufferInfo *bufferInfo) {
+    ALOGV("OnOutputAvailableCB: index(%d), (%d, %d, %lld, 0x%x)",
+          index, bufferInfo->offset, bufferInfo->size,
+          (long long)bufferInfo->presentationTimeUs, bufferInfo->flags);
+    sp<AMessage> msg = sp<AMessage>((AMessage *)userdata)->dup();
+    msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
+    msg->setInt32("index", index);
+    msg->setSize("offset", (size_t)(bufferInfo->offset));
+    msg->setSize("size", (size_t)(bufferInfo->size));
+    msg->setInt64("timeUs", bufferInfo->presentationTimeUs);
+    msg->setInt32("flags", (int32_t)(bufferInfo->flags));
+    msg->post();
+}
+
+// static
+void AMediaCodecWrapper::OnFormatChangedCB(
+        AMediaCodec * /* aMediaCodec */,
+        void *userdata,
+        AMediaFormat *format) {
+    sp<AMediaFormatWrapper> formatWrapper = new AMediaFormatWrapper(format);
+    sp<AMessage> outputFormat = formatWrapper->toAMessage();
+    ALOGV("OnFormatChangedCB: format(%s)", outputFormat->debugString().c_str());
+
+    sp<AMessage> msg = sp<AMessage>((AMessage *)userdata)->dup();
+    msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
+    msg->setMessage("format", outputFormat);
+    msg->post();
+}
+
+// static
+void AMediaCodecWrapper::OnErrorCB(
+        AMediaCodec * /* aMediaCodec */,
+        void *userdata,
+        media_status_t err,
+        int32_t actionCode,
+        const char *detail) {
+    ALOGV("OnErrorCB: err(%d), actionCode(%d), detail(%s)", err, actionCode, detail);
+    sp<AMessage> msg = sp<AMessage>((AMessage *)userdata)->dup();
+    msg->setInt32("callbackID", CB_ERROR);
+    msg->setInt32("err", translateErrorCode(err));
+    msg->setInt32("actionCode", translateActionCode(actionCode));
+    msg->setString("detail", detail);
+    msg->post();
+}
+
+AMediaCodecWrapper::AMediaCodecWrapper(AMediaCodec *aMediaCodec)
+    : mAMediaCodec(aMediaCodec) {
+}
+
+AMediaCodecWrapper::~AMediaCodecWrapper() {
+    release();
+}
+
+status_t AMediaCodecWrapper::release() {
+    if (mAMediaCodec != NULL) {
+        AMediaCodecOnAsyncNotifyCallback aCB = {};
+        AMediaCodec_setAsyncNotifyCallback(mAMediaCodec, aCB, NULL);
+        mCallback = NULL;
+
+        media_status_t err = AMediaCodec_delete(mAMediaCodec);
+        mAMediaCodec = NULL;
+        return translateErrorCode(err);
+    }
+    return OK;
+}
+
+AMediaCodec *AMediaCodecWrapper::getAMediaCodec() const {
+    return mAMediaCodec;
+}
+
+status_t AMediaCodecWrapper::getName(AString *outComponentName) const {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    char *name = NULL;
+    media_status_t err = AMediaCodec_getName(mAMediaCodec, &name);
+    if (err != AMEDIA_OK) {
+        return translateErrorCode(err);
+    }
+
+    *outComponentName = AString(name);
+    AMediaCodec_releaseName(mAMediaCodec, name);
+    return OK;
+}
+
+status_t AMediaCodecWrapper::configure(
+    const sp<AMediaFormatWrapper> &format,
+    const sp<ANativeWindowWrapper> &nww,
+    const sp<AMediaCryptoWrapper> &crypto,
+    uint32_t flags) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+
+    media_status_t err = AMediaCodec_configure(
+            mAMediaCodec,
+            format->getAMediaFormat(),
+            (nww == NULL ? NULL : nww->getANativeWindow()),
+            crypto == NULL ? NULL : crypto->getAMediaCrypto(),
+            flags);
+
+    return translateErrorCode(err);
+}
+
+status_t AMediaCodecWrapper::setCallback(const sp<AMessage> &callback) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+
+    mCallback = callback;
+
+    AMediaCodecOnAsyncNotifyCallback aCB = {
+        OnInputAvailableCB,
+        OnOutputAvailableCB,
+        OnFormatChangedCB,
+        OnErrorCB
+    };
+
+    return translateErrorCode(
+            AMediaCodec_setAsyncNotifyCallback(mAMediaCodec, aCB, callback.get()));
+}
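+
+// The AMessage passed to setCallback() becomes the userdata of the static NDK
+// callbacks above: each callback dup()s it, sets "callbackID" plus its
+// payload, and post()s it. A receiving handler would switch on "callbackID":
+//   CB_INPUT_AVAILABLE        -> "index"
+//   CB_OUTPUT_AVAILABLE       -> "index", "offset", "size", "timeUs", "flags"
+//   CB_OUTPUT_FORMAT_CHANGED  -> "format" (an AMessage)
+//   CB_ERROR                  -> "err", "actionCode", "detail"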
+
+status_t AMediaCodecWrapper::releaseCrypto() {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaCodec_releaseCrypto(mAMediaCodec));
+}
+
+status_t AMediaCodecWrapper::start() {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaCodec_start(mAMediaCodec));
+}
+
+status_t AMediaCodecWrapper::stop() {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaCodec_stop(mAMediaCodec));
+}
+
+status_t AMediaCodecWrapper::flush() {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaCodec_flush(mAMediaCodec));
+}
+
+uint8_t* AMediaCodecWrapper::getInputBuffer(size_t idx, size_t *out_size) {
+    if (mAMediaCodec == NULL) {
+        return NULL;
+    }
+    return AMediaCodec_getInputBuffer(mAMediaCodec, idx, out_size);
+}
+
+uint8_t* AMediaCodecWrapper::getOutputBuffer(size_t idx, size_t *out_size) {
+    if (mAMediaCodec == NULL) {
+        return NULL;
+    }
+    return AMediaCodec_getOutputBuffer(mAMediaCodec, idx, out_size);
+}
+
+status_t AMediaCodecWrapper::queueInputBuffer(
+        size_t idx,
+        size_t offset,
+        size_t size,
+        uint64_t time,
+        uint32_t flags) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_queueInputBuffer(mAMediaCodec, idx, offset, size, time, flags));
+}
+
+status_t AMediaCodecWrapper::queueSecureInputBuffer(
+        size_t idx,
+        size_t offset,
+        sp<AMediaCodecCryptoInfoWrapper> &codecCryptoInfo,
+        uint64_t time,
+        uint32_t flags) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_queueSecureInputBuffer(
+            mAMediaCodec,
+            idx,
+            offset,
+            codecCryptoInfo->getAMediaCodecCryptoInfo(),
+            time,
+            flags));
+}
+
+sp<AMediaFormatWrapper> AMediaCodecWrapper::getOutputFormat() {
+    if (mAMediaCodec == NULL) {
+        return NULL;
+    }
+    return new AMediaFormatWrapper(AMediaCodec_getOutputFormat(mAMediaCodec));
+}
+
+sp<AMediaFormatWrapper> AMediaCodecWrapper::getInputFormat() {
+    if (mAMediaCodec == NULL) {
+        return NULL;
+    }
+    return new AMediaFormatWrapper(AMediaCodec_getInputFormat(mAMediaCodec));
+}
+
+status_t AMediaCodecWrapper::releaseOutputBuffer(size_t idx, bool render) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_releaseOutputBuffer(mAMediaCodec, idx, render));
+}
+
+status_t AMediaCodecWrapper::setOutputSurface(const sp<ANativeWindowWrapper> &nww) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_setOutputSurface(mAMediaCodec,
+                                     (nww == NULL ? NULL : nww->getANativeWindow())));
+}
+
+status_t AMediaCodecWrapper::releaseOutputBufferAtTime(size_t idx, int64_t timestampNs) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_releaseOutputBufferAtTime(mAMediaCodec, idx, timestampNs));
+}
+
+status_t AMediaCodecWrapper::setParameters(const sp<AMediaFormatWrapper> &params) {
+    if (mAMediaCodec == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaCodec_setParameters(mAMediaCodec, params->getAMediaFormat()));
+}
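+
+// A typical decode sequence through this wrapper (rough sketch, error
+// handling omitted) would be: CreateDecoderByType(mime), configure(format,
+// surface, crypto, 0), setCallback(notify), start(); then, driven by the
+// CB_INPUT_AVAILABLE / CB_OUTPUT_AVAILABLE callbacks, feed input with
+// getInputBuffer() + queueInputBuffer() (or queueSecureInputBuffer() for
+// protected samples) and drain output with releaseOutputBuffer().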
+
+//////////// AMediaExtractorWrapper
+
+AMediaExtractorWrapper::AMediaExtractorWrapper(AMediaExtractor *aMediaExtractor)
+    : mAMediaExtractor(aMediaExtractor) {
+}
+
+AMediaExtractorWrapper::~AMediaExtractorWrapper() {
+    release();
+}
+
+status_t AMediaExtractorWrapper::release() {
+    if (mAMediaExtractor != NULL) {
+        media_status_t err = AMediaExtractor_delete(mAMediaExtractor);
+        mAMediaExtractor = NULL;
+        return translateErrorCode(err);
+    }
+    return OK;
+}
+
+AMediaExtractor *AMediaExtractorWrapper::getAMediaExtractor() const {
+    return mAMediaExtractor;
+}
+
+status_t AMediaExtractorWrapper::setDataSource(int fd, off64_t offset, off64_t length) {
+    if (mAMediaExtractor == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaExtractor_setDataSourceFd(
+            mAMediaExtractor, fd, offset, length));
+}
+
+status_t AMediaExtractorWrapper::setDataSource(const char *location) {
+    if (mAMediaExtractor == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaExtractor_setDataSource(mAMediaExtractor, location));
+}
+
+size_t AMediaExtractorWrapper::getTrackCount() {
+    if (mAMediaExtractor == NULL) {
+        return 0;
+    }
+    return AMediaExtractor_getTrackCount(mAMediaExtractor);
+}
+
+sp<AMediaFormatWrapper> AMediaExtractorWrapper::getTrackFormat(size_t idx) {
+    if (mAMediaExtractor == NULL) {
+        return NULL;
+    }
+    return new AMediaFormatWrapper(AMediaExtractor_getTrackFormat(mAMediaExtractor, idx));
+}
+
+status_t AMediaExtractorWrapper::selectTrack(size_t idx) {
+    if (mAMediaExtractor == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaExtractor_selectTrack(mAMediaExtractor, idx));
+}
+
+status_t AMediaExtractorWrapper::unselectTrack(size_t idx) {
+    if (mAMediaExtractor == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(AMediaExtractor_unselectTrack(mAMediaExtractor, idx));
+}
+
+ssize_t AMediaExtractorWrapper::readSampleData(const sp<ABuffer> &buffer) {
+    if (mAMediaExtractor == NULL) {
+        return -1;
+    }
+    return AMediaExtractor_readSampleData(mAMediaExtractor, buffer->data(), buffer->capacity());
+}
+
+uint32_t AMediaExtractorWrapper::getSampleFlags() {
+    if (mAMediaExtractor == NULL) {
+        return 0;
+    }
+    return AMediaExtractor_getSampleFlags(mAMediaExtractor);
+}
+
+int AMediaExtractorWrapper::getSampleTrackIndex() {
+    if (mAMediaExtractor == NULL) {
+        return -1;
+    }
+    return AMediaExtractor_getSampleTrackIndex(mAMediaExtractor);
+}
+
+int64_t AMediaExtractorWrapper::getSampleTime() {
+    if (mAMediaExtractor == NULL) {
+        return -1;
+    }
+    return AMediaExtractor_getSampleTime(mAMediaExtractor);
+}
+
+bool AMediaExtractorWrapper::advance() {
+    if (mAMediaExtractor == NULL) {
+        return false;
+    }
+    return AMediaExtractor_advance(mAMediaExtractor);
+}
+
+status_t AMediaExtractorWrapper::seekTo(int64_t seekPosUs, SeekMode mode) {
+    if (mAMediaExtractor == NULL) {
+        return DEAD_OBJECT;
+    }
+    return translateErrorCode(
+        AMediaExtractor_seekTo(mAMediaExtractor, seekPosUs, mode));
+}
+
+PsshInfo* AMediaExtractorWrapper::getPsshInfo() {
+    if (mAMediaExtractor == NULL) {
+        return NULL;
+    }
+    return AMediaExtractor_getPsshInfo(mAMediaExtractor);
+}
+
+sp<AMediaCodecCryptoInfoWrapper> AMediaExtractorWrapper::getSampleCryptoInfo() {
+    if (mAMediaExtractor == NULL) {
+        return NULL;
+    }
+    return new AMediaCodecCryptoInfoWrapper(AMediaExtractor_getSampleCryptoInfo(mAMediaExtractor));
+}
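+
+// A minimal extraction loop over this wrapper might look roughly like the
+// following (sketch only; |buffer| is an ABuffer sized by the caller):
+//
+//   extractor->selectTrack(index);
+//   while (extractor->getSampleTrackIndex() >= 0) {
+//       ssize_t size = extractor->readSampleData(buffer);
+//       int64_t timeUs = extractor->getSampleTime();
+//       uint32_t flags = extractor->getSampleFlags();
+//       // hand the sample to a decoder here
+//       extractor->advance();
+//   }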
+
+}  // namespace android
diff --git a/media/libmedia/TestPlayerStub.cpp b/media/libmedia/TestPlayerStub.cpp
new file mode 100644
index 0000000..3548793
--- /dev/null
+++ b/media/libmedia/TestPlayerStub.cpp
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TestPlayerStub"
+#include "utils/Log.h"
+
+#include "TestPlayerStub.h"
+
+#include <dlfcn.h>  // for dlopen/dlclose
+#include <stdlib.h>
+#include <string.h>
+#include <cutils/properties.h>
+#include <utils/Errors.h>  // for status_t
+
+#include "media/MediaPlayer2Interface.h"
+
+
+namespace {
+using android::status_t;
+using android::MediaPlayer2Base;
+
+const char *kTestUrlScheme = "test:";
+const char *kUrlParam = "url=";
+
+const char *kBuildTypePropName = "ro.build.type";
+const char *kEngBuild = "eng";
+const char *kTestBuild = "test";
+
+// @return true if the current build is 'eng' or 'test'.
+bool isTestBuild()
+{
+    char prop[PROPERTY_VALUE_MAX] = { '\0', };
+
+    property_get(kBuildTypePropName, prop, "\0");
+    return strcmp(prop, kEngBuild) == 0 || strcmp(prop, kTestBuild) == 0;
+}
+
+// @return true if the url scheme is 'test:'
+bool isTestUrl(const char *url)
+{
+    return url && strncmp(url, kTestUrlScheme, strlen(kTestUrlScheme)) == 0;
+}
+
+}  // anonymous namespace
+
+namespace android {
+
+TestPlayerStub::TestPlayerStub()
+    : mUrl(NULL), mFilename(NULL), mContentUrl(NULL),
+      mHandle(NULL), mNewPlayer(NULL), mDeletePlayer(NULL),
+      mPlayer(NULL) { }
+
+TestPlayerStub::~TestPlayerStub()
+{
+    resetInternal();
+}
+
+status_t TestPlayerStub::initCheck()
+{
+    return isTestBuild() ? OK : INVALID_OPERATION;
+}
+
+// Parse mUrl to get:
+// * The library to be dlopened.
+// * The url to be passed to the real setDataSource impl.
+//
+// mUrl is expected to be in the following format:
+//
+// test:<name of the .so>?url=<url for setDataSource>
+//
+// The value of the url parameter is treated as a string (no
+// unescaping of illegal characters).
+status_t TestPlayerStub::parseUrl()
+{
+    if (strlen(mUrl) < strlen(kTestUrlScheme)) {
+        resetInternal();
+        return BAD_VALUE;
+    }
+
+    char *i = mUrl + strlen(kTestUrlScheme);
+
+    mFilename = i;
+
+    while (*i != '\0' && *i != '?') {
+        ++i;
+    }
+
+    if (*i == '\0' || strncmp(i + 1, kUrlParam, strlen(kUrlParam)) != 0) {
+        resetInternal();
+        return BAD_VALUE;
+    }
+    *i = '\0';  // overwrite the '?' with '\0' to nul-terminate mFilename
+
+    mContentUrl = i + 1 + strlen(kUrlParam);
+    return OK;
+}
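+
+// For example (illustrative), after a successful parse of
+//   mUrl        = "test:invoke_mock_media_player.so?url=http://youtube.com"
+// the members point into the mUrl buffer as
+//   mFilename   = "invoke_mock_media_player.so"
+//   mContentUrl = "http://youtube.com"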
+
+// Load the dynamic library, create the test player, and call setDataSource
+// on it with the url extracted from the test url.
+status_t TestPlayerStub::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
+    if (!isTestUrl(url) || NULL != mHandle) {
+        return INVALID_OPERATION;
+    }
+
+    mUrl = strdup(url);
+
+    status_t status = parseUrl();
+
+    if (OK != status) {
+        resetInternal();
+        return status;
+    }
+
+    ::dlerror();  // Clears any pending error.
+
+    // Load the test player library named in the url. dlopen will fail if
+    // the library is not present; the .so files live under /system/lib.
+    // None of the entry points should be NULL.
+    mHandle = ::dlopen(mFilename, RTLD_NOW | RTLD_GLOBAL);
+    if (!mHandle) {
+        ALOGE("dlopen failed: %s", ::dlerror());
+        resetInternal();
+        return UNKNOWN_ERROR;
+    }
+
+    // Load the 2 entry points to create and delete instances.
+    const char *err;
+    mNewPlayer = reinterpret_cast<NEW_PLAYER>(dlsym(mHandle,
+                                                    "newPlayer"));
+    err = ::dlerror();
+    if (err || mNewPlayer == NULL) {
+        // If err is NULL, the log below prints the string <null>, which
+        // means dlsym succeeded but mNewPlayer itself was NULL.
+        ALOGE("dlsym for newPlayer failed %s", err);
+        resetInternal();
+        return UNKNOWN_ERROR;
+    }
+
+    mDeletePlayer = reinterpret_cast<DELETE_PLAYER>(dlsym(mHandle,
+                                                          "deletePlayer"));
+    err = ::dlerror();
+    if (err || mDeletePlayer == NULL) {
+        ALOGE("dlsym for deletePlayer failed %s", err);
+        resetInternal();
+        return UNKNOWN_ERROR;
+    }
+
+    mPlayer = (*mNewPlayer)();
+    if (mPlayer == NULL) {
+        resetInternal();
+        return UNKNOWN_ERROR;
+    }
+    return mPlayer->setDataSource(httpService, mContentUrl, headers);
+}
+
+// Internal cleanup.
+status_t TestPlayerStub::resetInternal()
+{
+    if (mUrl) {
+        free(mUrl);
+        mUrl = NULL;
+    }
+    mFilename = NULL;
+    mContentUrl = NULL;
+
+    if (mPlayer) {
+        ALOG_ASSERT(mDeletePlayer != NULL, "mDeletePlayer is null");
+        (*mDeletePlayer)(mPlayer);
+        mPlayer = NULL;
+    }
+
+    if (mHandle) {
+        ::dlclose(mHandle);
+        mHandle = NULL;
+    }
+    return OK;
+}
+
+/* static */ bool TestPlayerStub::canBeUsed(const char *url)
+{
+    return isTestBuild() && isTestUrl(url);
+}
+
+}  // namespace android
diff --git a/media/libmedia/TestPlayerStub.h b/media/libmedia/TestPlayerStub.h
new file mode 100644
index 0000000..27c8bf4
--- /dev/null
+++ b/media/libmedia/TestPlayerStub.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIA_TESTPLAYERSTUB_H__
+#define ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIA_TESTPLAYERSTUB_H__
+
+#include <media/MediaPlayer2Interface.h>
+#include <utils/Errors.h>
+
+namespace android {
+class MediaPlayer2Base;  // in media/MediaPlayer2Interface.h
+
+// Wrapper around a test media player that gets dynamically loaded.
+//
+// The URL passed to setDataSource has this format:
+//
+//   test:<name of the .so>?url=<url for the real setDataSource impl.>
+//
+// e.g:
+//   test:invoke_test_media_player.so?url=http://youtube.com/
+//   test:invoke_test_media_player.so?url=speedtest
+//
+// TestPlayerStub::setDataSource loads the library named in the test url.
+// Two entry points with C linkage are expected: one to create the test
+// player and one to destroy it.
+//
+// extern "C" android::MediaPlayer2Base* newPlayer();
+// extern "C" android::status_t deletePlayer(android::MediaPlayer2Base *p);
+//
+// Once the test player has been loaded, its setDataSource
+// implementation is called with the value of the 'url' parameter.
+//
+// Typical usage in a Java test:
+// ============================
+//
+//  MediaPlayer2 p = new MediaPlayer2();
+//  p.setDataSource("test:invoke_mock_media_player.so?url=http://youtube.com");
+//  p.prepare();
+//  ...
+//  p.release();
+
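+// As a rough sketch, a test library could implement the two entry points
+// along these lines (MockPlayer is a hypothetical MediaPlayer2Base subclass):
+//
+//   extern "C" android::MediaPlayer2Base* newPlayer() {
+//       return new MockPlayer();
+//   }
+//
+//   extern "C" android::status_t deletePlayer(android::MediaPlayer2Base *p) {
+//       delete p;
+//       return android::OK;
+//   }
+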
+class TestPlayerStub : public MediaPlayer2Interface {
+  public:
+    typedef MediaPlayer2Base* (*NEW_PLAYER)();
+    typedef status_t (*DELETE_PLAYER)(MediaPlayer2Base *);
+
+    TestPlayerStub();
+    virtual ~TestPlayerStub();
+
+    // Called right after the constructor. Check if the current build
+    // allows test players.
+    virtual status_t initCheck();
+
+    // @param url Should be a test url. See class comment.
+    virtual status_t setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char* url,
+            const KeyedVector<String8, String8> *headers);
+
+    // Test player for a file descriptor source is not supported.
+    virtual status_t setDataSource(int, int64_t, int64_t)  {
+        return INVALID_OPERATION;
+    }
+
+
+    // All the methods below wrap the mPlayer instance.
+    virtual status_t setVideoSurfaceTexture(
+            const android::sp<android::ANativeWindowWrapper>& st)  {
+        return mPlayer->setVideoSurfaceTexture(st);
+    }
+    virtual status_t prepare() {return mPlayer->prepare();}
+    virtual status_t prepareAsync()  {return mPlayer->prepareAsync();}
+    virtual status_t start()  {return mPlayer->start();}
+    virtual status_t stop()  {return mPlayer->stop();}
+    virtual status_t pause()  {return mPlayer->pause();}
+    virtual bool isPlaying() {return mPlayer->isPlaying();}
+    virtual status_t seekTo(
+            int msec,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) {
+        return mPlayer->seekTo(msec, mode);
+    }
+    virtual status_t getCurrentPosition(int *p)  {
+        return mPlayer->getCurrentPosition(p);
+    }
+    virtual status_t getDuration(int *d)  {return mPlayer->getDuration(d);}
+    virtual status_t reset() {return mPlayer->reset();}
+    virtual status_t setLooping(int b)  {return mPlayer->setLooping(b);}
+    virtual player2_type playerType() {return mPlayer->playerType();}
+    virtual status_t invoke(const android::Parcel& in, android::Parcel *out) {
+        return mPlayer->invoke(in, out);
+    }
+    virtual status_t setParameter(int key, const Parcel &request) {
+        return mPlayer->setParameter(key, request);
+    }
+    virtual status_t getParameter(int key, Parcel *reply) {
+        return mPlayer->getParameter(key, reply);
+    }
+
+
+    // @return true if the current build is 'eng' or 'test' and the
+    //              url's scheme is 'test:'
+    static bool canBeUsed(const char *url);
+
+  private:
+    // Release the player, dlclose the library.
+    status_t resetInternal();
+    status_t parseUrl();
+
+    char *mUrl;                // test:foo.so?url=http://bar
+    char *mFilename;           // foo.so
+    char *mContentUrl;         // http://bar
+    void *mHandle;             // returned by dlopen
+    NEW_PLAYER    mNewPlayer;
+    DELETE_PLAYER mDeletePlayer;
+    MediaPlayer2Base *mPlayer; // wrapped player
+};
+
+}  // namespace android
+
+#endif  // ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIA_TESTPLAYERSTUB_H__
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
index e6c8f9c..9b06047 100644
--- a/media/libmedia/TypeConverter.cpp
+++ b/media/libmedia/TypeConverter.cpp
@@ -277,6 +277,16 @@
     TERMINATOR
 };
 
+template<>
+const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[] = {
+    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_UNKNOWN),
+    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SPEECH),
+    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MUSIC),
+    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MOVIE),
+    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SONIFICATION),
+    TERMINATOR
+};
+
 template <>
 const UsageTypeConverter::Table UsageTypeConverter::mTable[] = {
     MAKE_STRING_FROM_ENUM(AUDIO_USAGE_UNKNOWN),
diff --git a/media/libmedia/include/media/BufferingSettings.h b/media/libmedia/include/media/BufferingSettings.h
index e812d2a..d2a3e40 100644
--- a/media/libmedia/include/media/BufferingSettings.h
+++ b/media/libmedia/include/media/BufferingSettings.h
@@ -21,45 +21,14 @@
 
 namespace android {
 
-enum BufferingMode : int {
-    // Do not support buffering.
-    BUFFERING_MODE_NONE             = 0,
-    // Support only time based buffering.
-    BUFFERING_MODE_TIME_ONLY        = 1,
-    // Support only size based buffering.
-    BUFFERING_MODE_SIZE_ONLY        = 2,
-    // Support both time and size based buffering, time based calculation precedes size based.
-    // Size based calculation will be used only when time information is not available for
-    // the stream.
-    BUFFERING_MODE_TIME_THEN_SIZE   = 3,
-    // Number of modes.
-    BUFFERING_MODE_COUNT            = 4,
-};
-
 struct BufferingSettings : public Parcelable {
-    static const int kNoWatermark = -1;
+    static const int kNoMark = -1;
 
-    static bool IsValidBufferingMode(int mode);
-    static bool IsTimeBasedBufferingMode(int mode);
-    static bool IsSizeBasedBufferingMode(int mode);
+    int mInitialMarkMs;
 
-    BufferingMode mInitialBufferingMode;  // for prepare
-    BufferingMode mRebufferingMode;  // for playback
-
-    int mInitialWatermarkMs;  // time based
-    int mInitialWatermarkKB;  // size based
-
-    // When cached data is below this mark, playback will be paused for buffering
-    // till data reach |mRebufferingWatermarkHighMs| or end of stream.
-    int mRebufferingWatermarkLowMs;
-    // When cached data is above this mark, buffering will be paused.
-    int mRebufferingWatermarkHighMs;
-
-    // When cached data is below this mark, playback will be paused for buffering
-    // till data reach |mRebufferingWatermarkHighKB| or end of stream.
-    int mRebufferingWatermarkLowKB;
-    // When cached data is above this mark, buffering will be paused.
-    int mRebufferingWatermarkHighKB;
+    // When cached data is above this mark, playback will be resumed if it has been paused
+    // due to low cached data.
+    int mResumePlaybackMarkMs;
 
     BufferingSettings();
 
diff --git a/media/libmedia/include/media/IHDCP.h b/media/libmedia/include/media/IHDCP.h
deleted file mode 100644
index 352561e..0000000
--- a/media/libmedia/include/media/IHDCP.h
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <binder/IInterface.h>
-#include <media/hardware/HDCPAPI.h>
-#include <media/stagefright/foundation/ABase.h>
-#include <ui/GraphicBuffer.h>
-
-namespace android {
-
-struct IHDCPObserver : public IInterface {
-    DECLARE_META_INTERFACE(HDCPObserver);
-
-    virtual void notify(
-            int msg, int ext1, int ext2, const Parcel *obj) = 0;
-
-private:
-    DISALLOW_EVIL_CONSTRUCTORS(IHDCPObserver);
-};
-
-struct IHDCP : public IInterface {
-    DECLARE_META_INTERFACE(HDCP);
-
-    // Called to specify the observer that receives asynchronous notifications
-    // from the HDCP implementation to signal completion/failure of asynchronous
-    // operations (such as initialization) or out of band events.
-    virtual status_t setObserver(const sp<IHDCPObserver> &observer) = 0;
-
-    // Request to setup an HDCP session with the specified host listening
-    // on the specified port.
-    virtual status_t initAsync(const char *host, unsigned port) = 0;
-
-    // Request to shutdown the active HDCP session.
-    virtual status_t shutdownAsync() = 0;
-
-    // Returns the capability bitmask of this HDCP session.
-    // Possible return values (please refer to HDCAPAPI.h):
-    //   HDCP_CAPS_ENCRYPT: mandatory, meaning the HDCP module can encrypt
-    //   from an input byte-array buffer to an output byte-array buffer
-    //   HDCP_CAPS_ENCRYPT_NATIVE: the HDCP module supports encryption from
-    //   a native buffer to an output byte-array buffer. The format of the
-    //   input native buffer is specific to vendor's encoder implementation.
-    //   It is the same format as that used by the encoder when
-    //   "storeMetaDataInBuffers" extension is enabled on its output port.
-    virtual uint32_t getCaps() = 0;
-
-    // ENCRYPTION only:
-    // Encrypt data according to the HDCP spec. "size" bytes of data are
-    // available at "inData" (virtual address), "size" may not be a multiple
-    // of 128 bits (16 bytes). An equal number of encrypted bytes should be
-    // written to the buffer at "outData" (virtual address).
-    // This operation is to be synchronous, i.e. this call does not return
-    // until outData contains size bytes of encrypted data.
-    // streamCTR will be assigned by the caller (to 0 for the first PES stream,
-    // 1 for the second and so on)
-    // inputCTR _will_be_maintained_by_the_callee_ for each PES stream.
-    virtual status_t encrypt(
-            const void *inData, size_t size, uint32_t streamCTR,
-            uint64_t *outInputCTR, void *outData) = 0;
-
-    // Encrypt data according to the HDCP spec. "size" bytes of data starting
-    // at location "offset" are available in "buffer" (buffer handle). "size"
-    // may not be a multiple of 128 bits (16 bytes). An equal number of
-    // encrypted bytes should be written to the buffer at "outData" (virtual
-    // address). This operation is to be synchronous, i.e. this call does not
-    // return until outData contains size bytes of encrypted data.
-    // streamCTR will be assigned by the caller (to 0 for the first PES stream,
-    // 1 for the second and so on)
-    // inputCTR _will_be_maintained_by_the_callee_ for each PES stream.
-    virtual status_t encryptNative(
-            const sp<GraphicBuffer> &graphicBuffer,
-            size_t offset, size_t size, uint32_t streamCTR,
-            uint64_t *outInputCTR, void *outData) = 0;
-
-    // DECRYPTION only:
-    // Decrypt data according to the HDCP spec.
-    // "size" bytes of encrypted data are available at "inData"
-    // (virtual address), "size" may not be a multiple of 128 bits (16 bytes).
-    // An equal number of decrypted bytes should be written to the buffer
-    // at "outData" (virtual address).
-    // This operation is to be synchronous, i.e. this call does not return
-    // until outData contains size bytes of decrypted data.
-    // Both streamCTR and inputCTR will be provided by the caller.
-    virtual status_t decrypt(
-            const void *inData, size_t size,
-            uint32_t streamCTR, uint64_t inputCTR,
-            void *outData) = 0;
-
-private:
-    DISALLOW_EVIL_CONSTRUCTORS(IHDCP);
-};
-
-struct BnHDCPObserver : public BnInterface<IHDCPObserver> {
-    virtual status_t onTransact(
-            uint32_t code, const Parcel &data, Parcel *reply,
-            uint32_t flags = 0);
-};
-
-struct BnHDCP : public BnInterface<IHDCP> {
-    virtual status_t onTransact(
-            uint32_t code, const Parcel &data, Parcel *reply,
-            uint32_t flags = 0);
-};
-
-}  // namespace android
-
-
diff --git a/media/libmedia/include/media/IMediaCodecService.h b/media/libmedia/include/media/IMediaCodecService.h
deleted file mode 100644
index 59fb1c0..0000000
--- a/media/libmedia/include/media/IMediaCodecService.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IMEDIACODECSERVICE_H
-#define ANDROID_IMEDIACODECSERVICE_H
-
-#include <binder/IInterface.h>
-#include <binder/IMemory.h>
-#include <binder/Parcel.h>
-#include <media/IDataSource.h>
-#include <media/IOMX.h>
-#include <media/IOMXStore.h>
-
-namespace android {
-
-class IMediaCodecService: public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(MediaCodecService);
-
-    virtual sp<IOMX> getOMX() = 0;
-    virtual sp<IOMXStore> getOMXStore() = 0;
-};
-
-class BnMediaCodecService: public BnInterface<IMediaCodecService>
-{
-public:
-    virtual status_t    onTransact(uint32_t code, const Parcel& data, Parcel* reply,
-                                uint32_t flags = 0);
-};
-
-}   // namespace android
-
-#endif  // ANDROID_IMEDIACODECSERVICE_H
diff --git a/media/libmedia/include/media/IMediaExtractor.h b/media/libmedia/include/media/IMediaExtractor.h
index 0ac7673..44f8c1d 100644
--- a/media/libmedia/include/media/IMediaExtractor.h
+++ b/media/libmedia/include/media/IMediaExtractor.h
@@ -18,8 +18,8 @@
 
 #define IMEDIA_EXTRACTOR_BASE_H_
 
+#include <media/DataSource.h>
 #include <media/IMediaSource.h>
-#include <media/stagefright/DataSource.h>
 #include <vector>
 
 namespace android {
diff --git a/media/libmedia/include/media/IMediaHTTPConnection.h b/media/libmedia/include/media/IMediaHTTPConnection.h
index 2a63eb7..0fb6bb1 100644
--- a/media/libmedia/include/media/IMediaHTTPConnection.h
+++ b/media/libmedia/include/media/IMediaHTTPConnection.h
@@ -19,16 +19,15 @@
 #define I_MEDIA_HTTP_CONNECTION_H_
 
 #include <binder/IInterface.h>
+#include <media/MediaHTTPConnection.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <utils/KeyedVector.h>
 
 namespace android {
 
-struct IMediaHTTPConnection;
-
 /** MUST stay in sync with IMediaHTTPConnection.aidl */
 
-struct IMediaHTTPConnection : public IInterface {
+struct IMediaHTTPConnection : public MediaHTTPConnection, public IInterface {
     DECLARE_META_INTERFACE(MediaHTTPConnection);
 
     virtual bool connect(
diff --git a/media/libmedia/include/media/IMediaHTTPService.h b/media/libmedia/include/media/IMediaHTTPService.h
index f66d6c8..e948b78 100644
--- a/media/libmedia/include/media/IMediaHTTPService.h
+++ b/media/libmedia/include/media/IMediaHTTPService.h
@@ -19,18 +19,19 @@
 #define I_MEDIA_HTTP_SERVICE_H_
 
 #include <binder/IInterface.h>
+#include <media/MediaHTTPService.h>
 #include <media/stagefright/foundation/ABase.h>
 
 namespace android {
 
-struct IMediaHTTPConnection;
+struct MediaHTTPConnection;
 
 /** MUST stay in sync with IMediaHTTPService.aidl */
 
-struct IMediaHTTPService : public IInterface {
+struct IMediaHTTPService : public MediaHTTPService, public IInterface {
     DECLARE_META_INTERFACE(MediaHTTPService);
 
-    virtual sp<IMediaHTTPConnection> makeHTTPConnection() = 0;
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() = 0;
 
 private:
     DISALLOW_EVIL_CONSTRUCTORS(IMediaHTTPService);
diff --git a/media/libmedia/include/media/IMediaMetadataRetriever.h b/media/libmedia/include/media/IMediaMetadataRetriever.h
index ea95161..5491535 100644
--- a/media/libmedia/include/media/IMediaMetadataRetriever.h
+++ b/media/libmedia/include/media/IMediaMetadataRetriever.h
@@ -44,6 +44,11 @@
             const sp<IDataSource>& dataSource, const char *mime) = 0;
     virtual sp<IMemory>     getFrameAtTime(
             int64_t timeUs, int option, int colorFormat, bool metaOnly) = 0;
+    virtual sp<IMemory>     getImageAtIndex(
+            int index, int colorFormat, bool metaOnly) = 0;
+    virtual status_t        getFrameAtIndex(
+            std::vector<sp<IMemory> > *frames,
+            int frameIndex, int numFrames, int colorFormat, bool metaOnly) = 0;
     virtual sp<IMemory>     extractAlbumArt() = 0;
     virtual const char*     extractMetadata(int keyCode) = 0;
 };
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index e5a98dd..97a998e 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -23,7 +23,7 @@
 #include <utils/KeyedVector.h>
 #include <system/audio.h>
 
-#include <media/IMediaSource.h>
+#include <media/MediaSource.h>
 #include <media/VolumeShaper.h>
 
 // Fwd decl to make sure everyone agrees that the scope of struct sockaddr_in is
@@ -42,7 +42,7 @@
 struct AVSyncSettings;
 struct BufferingSettings;
 
-typedef IMediaSource::ReadOptions::SeekMode MediaPlayerSeekMode;
+typedef MediaSource::ReadOptions::SeekMode MediaPlayerSeekMode;
 
 class IMediaPlayer: public IInterface
 {
@@ -61,7 +61,7 @@
     virtual status_t        setDataSource(const sp<IDataSource>& source) = 0;
     virtual status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer) = 0;
-    virtual status_t        getDefaultBufferingSettings(
+    virtual status_t        getBufferingSettings(
                                     BufferingSettings* buffering /* nonnull */) = 0;
     virtual status_t        setBufferingSettings(const BufferingSettings& buffering) = 0;
     virtual status_t        prepareAsync() = 0;
@@ -79,6 +79,7 @@
             MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) = 0;
     virtual status_t        getCurrentPosition(int* msec) = 0;
     virtual status_t        getDuration(int* msec) = 0;
+    virtual status_t        notifyAt(int64_t mediaTimeUs) = 0;
     virtual status_t        reset() = 0;
     virtual status_t        setAudioStreamType(audio_stream_type_t type) = 0;
     virtual status_t        setLooping(int loop) = 0;
@@ -91,10 +92,10 @@
     virtual status_t        getRetransmitEndpoint(struct sockaddr_in* endpoint) = 0;
     virtual status_t        setNextPlayer(const sp<IMediaPlayer>& next) = 0;
 
-    virtual VolumeShaper::Status applyVolumeShaper(
-                                    const sp<VolumeShaper::Configuration>& configuration,
-                                    const sp<VolumeShaper::Operation>& operation) = 0;
-    virtual sp<VolumeShaper::State> getVolumeShaperState(int id) = 0;
+    virtual media::VolumeShaper::Status applyVolumeShaper(
+                                    const sp<media::VolumeShaper::Configuration>& configuration,
+                                    const sp<media::VolumeShaper::Operation>& operation) = 0;
+    virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) = 0;
 
     // Modular DRM
     virtual status_t        prepareDrm(const uint8_t uuid[16],
@@ -130,6 +131,11 @@
     virtual status_t        getMetadata(bool update_only,
                                         bool apply_filter,
                                         Parcel *metadata) = 0;
+
+    // AudioRouting
+    virtual status_t        setOutputDevice(audio_port_handle_t deviceId) = 0;
+    virtual status_t        getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
+    virtual status_t        enableAudioDeviceCallback(bool enabled) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libmedia/include/media/IMediaPlayerService.h b/media/libmedia/include/media/IMediaPlayerService.h
index f21bb3a..217de14 100644
--- a/media/libmedia/include/media/IMediaPlayerService.h
+++ b/media/libmedia/include/media/IMediaPlayerService.h
@@ -31,7 +31,6 @@
 
 namespace android {
 
-struct IHDCP;
 class IMediaCodecList;
 struct IMediaHTTPService;
 class IMediaRecorder;
@@ -49,8 +48,6 @@
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
     virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client,
             audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE) = 0;
-    virtual sp<IOMX>            getOMX() = 0;
-    virtual sp<IHDCP>           makeHDCP(bool createEncryptionModule) = 0;
     virtual sp<IMediaCodecList> getCodecList() const = 0;
 
     // Connects to a remote display.
diff --git a/media/libmedia/include/media/IMediaRecorder.h b/media/libmedia/include/media/IMediaRecorder.h
index 9d0341a..3cef329 100644
--- a/media/libmedia/include/media/IMediaRecorder.h
+++ b/media/libmedia/include/media/IMediaRecorder.h
@@ -19,6 +19,7 @@
 #define ANDROID_IMEDIARECORDER_H
 
 #include <binder/IInterface.h>
+#include <system/audio.h>
 
 namespace android {
 
@@ -64,6 +65,10 @@
     virtual status_t release() = 0;
     virtual status_t setInputSurface(const sp<PersistentSurface>& surface) = 0;
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() = 0;
+
+    virtual status_t setInputDevice(audio_port_handle_t deviceId) = 0;
+    virtual status_t getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
+    virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libmedia/include/media/IMediaSource.h b/media/libmedia/include/media/IMediaSource.h
index 2bde782..493742e 100644
--- a/media/libmedia/include/media/IMediaSource.h
+++ b/media/libmedia/include/media/IMediaSource.h
@@ -22,12 +22,12 @@
 
 #include <binder/IInterface.h>
 #include <binder/IMemory.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaErrors.h>
 
 namespace android {
 
-struct MediaSource;
 class MetaData;
 class MediaBufferGroup;
 
@@ -55,51 +55,6 @@
     // Returns the format of the data output by this media source.
     virtual sp<MetaData> getFormat() = 0;
 
-    // Options that modify read() behaviour. The default is to
-    // a) not request a seek
-    // b) not be late, i.e. lateness_us = 0
-    struct ReadOptions {
-        enum SeekMode : int32_t {
-            SEEK_PREVIOUS_SYNC,
-            SEEK_NEXT_SYNC,
-            SEEK_CLOSEST_SYNC,
-            SEEK_CLOSEST,
-        };
-
-        ReadOptions();
-
-        // Reset everything back to defaults.
-        void reset();
-
-        void setSeekTo(int64_t time_us, SeekMode mode = SEEK_CLOSEST_SYNC);
-        void clearSeekTo();
-        bool getSeekTo(int64_t *time_us, SeekMode *mode) const;
-
-        // TODO: remove this if unused.
-        void setLateBy(int64_t lateness_us);
-        int64_t getLateBy() const;
-
-        void setNonBlocking();
-        void clearNonBlocking();
-        bool getNonBlocking() const;
-
-        // Used to clear all non-persistent options for multiple buffer reads.
-        void clearNonPersistent() {
-            clearSeekTo();
-        }
-
-    private:
-        enum Options {
-            kSeekTo_Option      = 1,
-        };
-
-        uint32_t mOptions;
-        int64_t mSeekTimeUs;
-        SeekMode mSeekMode;
-        int64_t mLatenessUs;
-        bool mNonBlocking;
-    } __attribute__((packed)); // sent through Binder
-
     // Returns a new buffer of data. Call blocks until a
     // buffer is available, an error is encountered or the end of the stream
     // is reached.
@@ -110,7 +65,8 @@
     //
     // TODO: consider removing read() in favor of readMultiple().
     virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL) = 0;
+            MediaBuffer **buffer,
+            const MediaSource::ReadOptions *options = NULL) = 0;
 
     // Returns a vector of new buffers of data, where the new buffers are added
     // to the end of the vector.
@@ -126,7 +82,7 @@
     // non-persistent options (e.g. seek) apply only to the first read.
     virtual status_t readMultiple(
             Vector<MediaBuffer *> *buffers, uint32_t maxNumBuffers = 1,
-            const ReadOptions *options = nullptr) = 0;
+            const MediaSource::ReadOptions *options = nullptr) = 0;
 
     // Returns true if |readMultiple| is supported, otherwise false.
     virtual bool supportReadMultiple() = 0;
@@ -139,14 +95,6 @@
     // until a subsequent read-with-seek. Currently only supported by
     // OMXCodec.
     virtual status_t pause()  = 0;
-
-    // The consumer of this media source requests that the given buffers
-    // are to be returned exclusively in response to read calls.
-    // This will be called after a successful start() and before the
-    // first read() call.
-    // Callee assumes ownership of the buffers if no error is returned.
-    virtual status_t setBuffers(const Vector<MediaBuffer *> & /* buffers */) = 0;
-
 };
 
 class BnMediaSource: public BnInterface<IMediaSource>
@@ -161,14 +109,10 @@
         return ERROR_UNSUPPORTED;
     }
 
-    virtual status_t setBuffers(const Vector<MediaBuffer *> & /* buffers */) {
-        return ERROR_UNSUPPORTED;
-    }
-
     // TODO: Implement this for local media sources.
     virtual status_t readMultiple(
             Vector<MediaBuffer *> * /* buffers */, uint32_t /* maxNumBuffers = 1 */,
-            const ReadOptions * /* options = nullptr */) {
+            const MediaSource::ReadOptions * /* options = nullptr */) {
         return ERROR_UNSUPPORTED;
     }
 
diff --git a/media/libmedia/include/media/IOMXStore.h b/media/libmedia/include/media/IOMXStore.h
deleted file mode 100644
index 628db70..0000000
--- a/media/libmedia/include/media/IOMXStore.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IOMXSTORE_H_
-
-#define ANDROID_IOMXSTORE_H_
-
-#include <media/IOMX.h>
-#include <android/hardware/media/omx/1.0/IOmxStore.h>
-
-#include <binder/IInterface.h>
-#include <binder/IBinder.h>
-
-#include <vector>
-#include <string>
-
-namespace android {
-
-using hardware::media::omx::V1_0::IOmxStore;
-
-class IOMXStore : public IInterface {
-public:
-    DECLARE_META_INTERFACE(OMXStore);
-
-    struct Attribute {
-        std::string key;
-        std::string value;
-    };
-
-    struct NodeInfo {
-        std::string name;
-        std::string owner;
-        std::vector<Attribute> attributes;
-    };
-
-    struct RoleInfo {
-        std::string role;
-        std::string type;
-        bool isEncoder;
-        bool preferPlatformNodes;
-        std::vector<NodeInfo> nodes;
-    };
-
-    virtual status_t listServiceAttributes(
-            std::vector<Attribute>* attributes) = 0;
-
-    virtual status_t getNodePrefix(std::string* prefix) = 0;
-
-    virtual status_t listRoles(std::vector<RoleInfo>* roleList) = 0;
-
-    virtual status_t getOmx(const std::string& name, sp<IOMX>* omx) = 0;
-};
-
-
-////////////////////////////////////////////////////////////////////////////////
-
-class BnOMXStore : public BnInterface<IOMXStore> {
-public:
-    virtual status_t onTransact(
-            uint32_t code, const Parcel &data, Parcel *reply,
-            uint32_t flags = 0);
-};
-
-}  // namespace android
-
-#endif  // ANDROID_IOMX_H_
diff --git a/media/libmedia/include/media/IStreamSource.h b/media/libmedia/include/media/IStreamSource.h
index 4a6aafd..c08c3e8 100644
--- a/media/libmedia/include/media/IStreamSource.h
+++ b/media/libmedia/include/media/IStreamSource.h
@@ -54,40 +54,6 @@
 
     virtual void queueBuffer(size_t index, size_t size) = 0;
 
-    // When signalling a discontinuity you can optionally
-    // specify an int64_t PTS timestamp in "msg".
-    // If present, rendering of data following the discontinuity
-    // will be suppressed until media time reaches this timestamp.
-    static const char *const kKeyResumeAtPTS;
-
-    // When signalling a discontinuity you can optionally
-    // specify the type(s) of discontinuity, i.e. if the
-    // audio format has changed, the video format has changed,
-    // time has jumped or any combination thereof.
-    // To do so, include a non-zero int32_t value
-    // under the key "kKeyDiscontinuityMask" when issuing the DISCONTINUITY
-    // command.
-    // If there is a change in audio/video format, The new logical stream
-    // must start with proper codec initialization
-    // information for playback to continue, i.e. SPS and PPS in the case
-    // of AVC video etc.
-    // If this key is not present, only a time discontinuity is assumed.
-    // The value should be a bitmask of values from
-    // ATSParser::DiscontinuityType.
-    static const char *const kKeyDiscontinuityMask;
-
-    // Optionally signalled as part of a discontinuity that includes
-    // DISCONTINUITY_TIME. It indicates the media time (in us) to be associated
-    // with the next PTS occuring in the stream. The value is of type int64_t.
-    static const char *const kKeyMediaTimeUs;
-
-    // Optionally signalled as part of a discontinuity that includes
-    // DISCONTINUITY_TIME. It indicates the media time (in us) of a recent
-    // sample from the same content, and is used as a hint for the parser to
-    // handle PTS wraparound. This is required when a new parser is created
-    // to continue parsing content from the same timeline.
-    static const char *const kKeyRecentMediaTimeUs;
-
     virtual void issueCommand(
             Command cmd, bool synchronous, const sp<AMessage> &msg = NULL) = 0;
 };
diff --git a/media/libmedia/include/media/MediaHTTPConnection.h b/media/libmedia/include/media/MediaHTTPConnection.h
new file mode 100644
index 0000000..17813a2
--- /dev/null
+++ b/media/libmedia/include/media/MediaHTTPConnection.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_HTTP_CONNECTION_H_
+
+#define MEDIA_HTTP_CONNECTION_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
+
+namespace android {
+
+struct MediaHTTPConnection : public virtual RefBase {
+    MediaHTTPConnection() {}
+
+    virtual bool connect(
+            const char *uri, const KeyedVector<String8, String8> *headers) = 0;
+
+    virtual void disconnect() = 0;
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
+    virtual off64_t getSize() = 0;
+    virtual status_t getMIMEType(String8 *mimeType) = 0;
+    virtual status_t getUri(String8 *uri) = 0;
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(MediaHTTPConnection);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_HTTP_CONNECTION_H_
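
For illustration only (hypothetical code, not part of this change): a concrete backend has to supply the six pure virtuals declared above. A do-nothing stub, assuming the usual libmedia/libutils headers, could look like the following sketch; the name StubHTTPConnection is invented here.

#include <media/MediaHTTPConnection.h>
#include <utils/Errors.h>

namespace android {

// Hypothetical stub: reports "no data" for every call; a real backend would
// open its transport in connect() and stream bytes out of readAt().
struct StubHTTPConnection : public MediaHTTPConnection {
    virtual bool connect(
            const char * /* uri */,
            const KeyedVector<String8, String8> * /* headers */) override {
        return false;
    }
    virtual void disconnect() override {}
    virtual ssize_t readAt(off64_t /* offset */, void * /* data */, size_t /* size */) override {
        return 0;  // end of stream
    }
    virtual off64_t getSize() override { return -1; }  // size unknown
    virtual status_t getMIMEType(String8 *mimeType) override {
        *mimeType = String8("application/octet-stream");
        return OK;
    }
    virtual status_t getUri(String8 *uri) override {
        *uri = String8();
        return OK;
    }
};

}  // namespace android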
diff --git a/media/libmedia/include/media/MediaHTTPService.h b/media/libmedia/include/media/MediaHTTPService.h
new file mode 100644
index 0000000..6e9f125
--- /dev/null
+++ b/media/libmedia/include/media/MediaHTTPService.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_HTTP_SERVICE_H_
+
+#define MEDIA_HTTP_SERVICE_H_
+
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct MediaHTTPConnection;
+
+struct MediaHTTPService : public virtual RefBase {
+    MediaHTTPService() {}
+
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() = 0;
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(MediaHTTPService);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_HTTP_SERVICE_H_
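
A matching factory is equally small. The sketch below (hypothetical, not part of this change) pairs with the StubHTTPConnection sketched after MediaHTTPConnection.h above; each call hands out a fresh, independently ref-counted connection.

#include <media/MediaHTTPConnection.h>
#include <media/MediaHTTPService.h>

namespace android {

// Hypothetical factory returning the stub connection sketched earlier.
struct StubHTTPService : public MediaHTTPService {
    virtual sp<MediaHTTPConnection> makeHTTPConnection() override {
        return new StubHTTPConnection();
    }
};

}  // namespace android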
diff --git a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
index 257002d..116b548 100644
--- a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
+++ b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
@@ -22,6 +22,7 @@
 #include <media/mediametadataretriever.h>
 #include <media/mediascanner.h>
 #include <private/media/VideoFrame.h>
+#include <media/stagefright/MediaErrors.h>
 
 namespace android {
 
@@ -41,9 +42,14 @@
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
-    virtual status_t setDataSource(const sp<DataSource>& source, const char *mime) = 0;
+    virtual status_t    setDataSource(const sp<DataSource>& source, const char *mime) = 0;
     virtual VideoFrame* getFrameAtTime(
             int64_t timeUs, int option, int colorFormat, bool metaOnly) = 0;
+    virtual VideoFrame* getImageAtIndex(
+            int index, int colorFormat, bool metaOnly) = 0;
+    virtual status_t getFrameAtIndex(
+            std::vector<VideoFrame*>* frames,
+            int frameIndex, int numFrames, int colorFormat, bool metaOnly) = 0;
     virtual MediaAlbumArt* extractAlbumArt() = 0;
     virtual const char* extractMetadata(int keyCode) = 0;
 };
@@ -58,6 +64,13 @@
     virtual VideoFrame* getFrameAtTime(
             int64_t /*timeUs*/, int /*option*/, int /*colorFormat*/, bool /*metaOnly*/)
     { return NULL; }
+    virtual VideoFrame* getImageAtIndex(
+            int /*index*/, int /*colorFormat*/, bool /*metaOnly*/)
+    { return NULL; }
+    virtual status_t getFrameAtIndex(
+            std::vector<VideoFrame*>* /*frames*/,
+            int /*frameIndex*/, int /*numFrames*/, int /*colorFormat*/, bool /*metaOnly*/)
+    { return ERROR_UNSUPPORTED; }
     virtual MediaAlbumArt* extractAlbumArt() { return NULL; }
     virtual const char* extractMetadata(int /*keyCode*/) { return NULL; }
 };
diff --git a/media/libmedia/include/media/MediaPlayer2Engine.h b/media/libmedia/include/media/MediaPlayer2Engine.h
new file mode 100644
index 0000000..00f5fb1
--- /dev/null
+++ b/media/libmedia/include/media/MediaPlayer2Engine.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIAPLAYER2ENGINE_H
+#define ANDROID_MEDIAPLAYER2ENGINE_H
+
+#include <utils/RefBase.h>
+#include <binder/Parcel.h>
+#include <utils/KeyedVector.h>
+#include <system/audio.h>
+
+#include <media/MediaSource.h>
+#include <media/VolumeShaper.h>
+
+// Fwd decl to make sure everyone agrees that the scope of struct sockaddr_in is
+// global, and not in android::
+struct sockaddr_in;
+
+namespace android {
+
+struct ANativeWindowWrapper;
+struct AVSyncSettings;
+struct AudioPlaybackRate;
+struct BufferingSettings;
+class DataSource;
+struct IStreamSource;
+struct MediaHTTPService;
+class Parcel;
+
+typedef MediaSource::ReadOptions::SeekMode MediaPlayer2SeekMode;
+
+class MediaPlayer2Engine: public RefBase
+{
+public:
+    virtual void            disconnect() = 0;
+
+    virtual status_t        setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8>* headers) = 0;
+
+    virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
+    virtual status_t        setDataSource(const sp<IStreamSource>& source) = 0;
+    virtual status_t        setDataSource(const sp<DataSource>& source) = 0;
+    virtual status_t        setVideoSurfaceTexture(const sp<ANativeWindowWrapper>& nww) = 0;
+    virtual status_t        getBufferingSettings(
+                                    BufferingSettings* buffering /* nonnull */) = 0;
+    virtual status_t        setBufferingSettings(const BufferingSettings& buffering) = 0;
+    virtual status_t        prepareAsync() = 0;
+    virtual status_t        start() = 0;
+    virtual status_t        stop() = 0;
+    virtual status_t        pause() = 0;
+    virtual status_t        isPlaying(bool* state) = 0;
+    virtual status_t        setPlaybackSettings(const AudioPlaybackRate& rate) = 0;
+    virtual status_t        getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) = 0;
+    virtual status_t        setSyncSettings(const AVSyncSettings& sync, float videoFpsHint) = 0;
+    virtual status_t        getSyncSettings(AVSyncSettings* sync /* nonnull */,
+                                            float* videoFps /* nonnull */) = 0;
+    virtual status_t        seekTo(
+            int msec,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) = 0;
+    virtual status_t        getCurrentPosition(int* msec) = 0;
+    virtual status_t        getDuration(int* msec) = 0;
+    virtual status_t        notifyAt(int64_t mediaTimeUs) = 0;
+    virtual status_t        reset() = 0;
+    virtual status_t        setAudioStreamType(audio_stream_type_t type) = 0;
+    virtual status_t        setLooping(int loop) = 0;
+    virtual status_t        setVolume(float leftVolume, float rightVolume) = 0;
+    virtual status_t        setAuxEffectSendLevel(float level) = 0;
+    virtual status_t        attachAuxEffect(int effectId) = 0;
+    virtual status_t        setParameter(int key, const Parcel& request) = 0;
+    virtual status_t        getParameter(int key, Parcel* reply) = 0;
+    virtual status_t        setRetransmitEndpoint(const struct sockaddr_in* endpoint) = 0;
+    virtual status_t        getRetransmitEndpoint(struct sockaddr_in* endpoint) = 0;
+    virtual status_t        setNextPlayer(const sp<MediaPlayer2Engine>& next) = 0;
+
+    virtual media::VolumeShaper::Status applyVolumeShaper(
+                                    const sp<media::VolumeShaper::Configuration>& configuration,
+                                    const sp<media::VolumeShaper::Operation>& operation) = 0;
+    virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) = 0;
+
+    // Modular DRM
+    virtual status_t        prepareDrm(const uint8_t uuid[16],
+                                    const Vector<uint8_t>& drmSessionId) = 0;
+    virtual status_t        releaseDrm() = 0;
+
+    // Invoke a generic method on the player by using opaque parcels
+    // for the request and reply.
+    // @param request Parcel that must start with the media player
+    // interface token.
+    // @param[out] reply Parcel to hold the reply data. Cannot be null.
+    // @return OK if the invocation was made successfully.
+    virtual status_t        invoke(const Parcel& request, Parcel *reply) = 0;
+
+    // Set a new metadata filter.
+    // @param filter A set of allow and drop rules serialized in a Parcel.
+    // @return OK if the invocation was made successfully.
+    virtual status_t        setMetadataFilter(const Parcel& filter) = 0;
+
+    // Retrieve a set of metadata.
+    // @param update_only Include only the metadata that have changed
+    //                    since the last invocation of getMetadata.
+    //                    The set is built using the unfiltered
+    //                    notifications the native player sent to the
+    //                    MediaPlayer2Manager during that period of
+    //                    time. If false, all the metadata are considered.
+    // @param apply_filter If true, once the metadata set has been built based
+    //                     on the value update_only, the current filter is
+    //                     applied.
+    // @param[out] metadata On exit contains a set (possibly empty) of metadata.
+    //                      Valid only if the call returned OK.
+    // @return OK if the invocation was made successfully.
+    virtual status_t        getMetadata(bool update_only,
+                                        bool apply_filter,
+                                        Parcel *metadata) = 0;
+
+    // AudioRouting
+    virtual status_t        setOutputDevice(audio_port_handle_t deviceId) = 0;
+    virtual status_t        getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
+    virtual status_t        enableAudioDeviceCallback(bool enabled) = 0;
+};
+
+}; // namespace android
+
+#endif // ANDROID_MEDIAPLAYER2ENGINE_H
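
The interface above is what MediaPlayer2 clients program against; a caller typically walks an engine through setDataSource, prepareAsync and start. A hedged caller-side sketch (the helper startLocalPlayback is hypothetical and not part of this change):

#include <media/MediaPlayer2Engine.h>
#include <utils/Errors.h>

namespace android {

// Hypothetical helper: drive an engine (normally handed out by
// MediaPlayer2Manager) through the minimal start sequence for a local file.
static status_t startLocalPlayback(
        const sp<MediaPlayer2Engine> &engine, int fd, int64_t length) {
    status_t err = engine->setDataSource(fd, 0 /* offset */, length);
    if (err != OK) {
        return err;
    }
    // Preparation completes asynchronously; the result arrives on the
    // MediaPlayer2EngineClient notify path.
    err = engine->prepareAsync();
    if (err != OK) {
        return err;
    }
    return engine->start();
}

}  // namespace android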
diff --git a/include/media/AudioClient.h b/media/libmedia/include/media/MediaPlayer2EngineClient.h
similarity index 61%
copy from include/media/AudioClient.h
copy to media/libmedia/include/media/MediaPlayer2EngineClient.h
index 9efd76d..22df095 100644
--- a/include/media/AudioClient.h
+++ b/media/libmedia/include/media/MediaPlayer2EngineClient.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2017 The Android Open Source Project
+ * Copyright 2017 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,25 +14,20 @@
  * limitations under the License.
  */
 
+#ifndef ANDROID_MEDIAPLAYER2ENGINECLIENT_H
+#define ANDROID_MEDIAPLAYER2ENGINECLIENT_H
 
-#ifndef ANDROID_AUDIO_CLIENT_H
-#define ANDROID_AUDIO_CLIENT_H
-
-#include <system/audio.h>
-#include <utils/String16.h>
+#include <utils/RefBase.h>
+#include <binder/Parcel.h>
 
 namespace android {
 
-class AudioClient {
- public:
-    AudioClient() :
-        clientUid(-1), clientPid(-1), packageName("") {}
-
-    uid_t clientUid;
-    pid_t clientPid;
-    String16 packageName;
+class MediaPlayer2EngineClient: public RefBase
+{
+public:
+    virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) = 0;
 };
 
 }; // namespace android
 
-#endif  // ANDROID_AUDIO_CLIENT_H
+#endif // ANDROID_MEDIAPLAYER2ENGINECLIENT_H
diff --git a/media/libmedia/include/media/MediaPlayer2Interface.h b/media/libmedia/include/media/MediaPlayer2Interface.h
new file mode 100644
index 0000000..931a110
--- /dev/null
+++ b/media/libmedia/include/media/MediaPlayer2Interface.h
@@ -0,0 +1,340 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIAPLAYER2INTERFACE_H
+#define ANDROID_MEDIAPLAYER2INTERFACE_H
+
+#ifdef __cplusplus
+
+#include <sys/types.h>
+#include <utils/Errors.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <utils/RefBase.h>
+
+#include <media/AudioResamplerPublic.h>
+#include <media/AudioSystem.h>
+#include <media/AudioTimestamp.h>
+#include <media/AVSyncSettings.h>
+#include <media/BufferingSettings.h>
+#include <media/Metadata.h>
+#include <media/mediaplayer2.h>
+#include <media/stagefright/foundation/AHandler.h>
+
+// Fwd decl to make sure everyone agrees that the scope of struct sockaddr_in is
+// global, and not in android::
+struct sockaddr_in;
+
+namespace android {
+
+class DataSource;
+struct MediaHTTPService;
+class Parcel;
+struct ANativeWindowWrapper;
+
+template<typename T> class SortedVector;
+
+enum player2_type {
+    PLAYER2_STAGEFRIGHT_PLAYER = 3,
+    PLAYER2_NU_PLAYER2 = 4,
+    // Test players are available only in the 'test' and 'eng' builds.
+    // The shared library with the test player is passed as an
+    // argument to the 'test:' url in the setDataSource call.
+    PLAYER2_TEST_PLAYER = 5,
+};
+
+
+#define DEFAULT_AUDIOSINK_BUFFERCOUNT 4
+#define DEFAULT_AUDIOSINK_BUFFERSIZE 1200
+#define DEFAULT_AUDIOSINK_SAMPLERATE 44100
+
+// when the channel mask isn't known, use the channel count to derive a mask in AudioSink::open()
+#define CHANNEL_MASK_USE_CHANNEL_ORDER 0
+
+// duration below which we do not allow deep audio buffering
+#define AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US 5000000
+
+// abstract base class - use MediaPlayer2Interface
+class MediaPlayer2Base : public AHandler
+{
+public:
+    // callback mechanism for passing messages to MediaPlayer2 object
+    typedef void (*NotifyCallback)(const wp<MediaPlayer2Engine> &listener,
+            int msg, int ext1, int ext2, const Parcel *obj);
+
+    // AudioSink: abstraction layer for audio output
+    class AudioSink : public RefBase {
+    public:
+        enum cb_event_t {
+            CB_EVENT_FILL_BUFFER,   // Request to write more data to buffer.
+            CB_EVENT_STREAM_END,    // Sent after all the buffers queued in AF and HW are played
+                                    // back (after stop is called)
+            CB_EVENT_TEAR_DOWN      // The AudioTrack was invalidated due to use case change:
+                                    // Need to re-evaluate offloading options
+        };
+
+        // Callback returns the number of bytes actually written to the buffer.
+        typedef size_t (*AudioCallback)(
+                AudioSink *audioSink, void *buffer, size_t size, void *cookie,
+                        cb_event_t event);
+
+        virtual             ~AudioSink() {}
+        virtual bool        ready() const = 0; // audio output is open and ready
+        virtual ssize_t     bufferSize() const = 0;
+        virtual ssize_t     frameCount() const = 0;
+        virtual ssize_t     channelCount() const = 0;
+        virtual ssize_t     frameSize() const = 0;
+        virtual uint32_t    latency() const = 0;
+        virtual float       msecsPerFrame() const = 0;
+        virtual status_t    getPosition(uint32_t *position) const = 0;
+        virtual status_t    getTimestamp(AudioTimestamp &ts) const = 0;
+        virtual int64_t     getPlayedOutDurationUs(int64_t nowUs) const = 0;
+        virtual status_t    getFramesWritten(uint32_t *frameswritten) const = 0;
+        virtual audio_session_t getSessionId() const = 0;
+        virtual audio_stream_type_t getAudioStreamType() const = 0;
+        virtual uint32_t    getSampleRate() const = 0;
+        virtual int64_t     getBufferDurationInUs() const = 0;
+
+        // If no callback is specified, use the "write" API below to submit
+        // audio data.
+        virtual status_t    open(
+                uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
+                audio_format_t format=AUDIO_FORMAT_PCM_16_BIT,
+                int bufferCount=DEFAULT_AUDIOSINK_BUFFERCOUNT,
+                AudioCallback cb = NULL,
+                void *cookie = NULL,
+                audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                const audio_offload_info_t *offloadInfo = NULL,
+                bool doNotReconnect = false,
+                uint32_t suggestedFrameCount = 0) = 0;
+
+        virtual status_t    start() = 0;
+
+        /* Input parameter |size| is in byte units stored in |buffer|.
+         * Data is copied over and actual number of bytes written (>= 0)
+         * is returned, or no data is copied and a negative status code
+         * is returned (even when |blocking| is true).
+         * When |blocking| is false, AudioSink will immediately return after
+         * part of or full |buffer| is copied over.
+         * When |blocking| is true, AudioSink will wait to copy the entire
+         * buffer, unless an error occurs or the copy operation is
+         * prematurely stopped.
+         */
+        virtual ssize_t     write(const void* buffer, size_t size, bool blocking = true) = 0;
+
+        virtual void        stop() = 0;
+        virtual void        flush() = 0;
+        virtual void        pause() = 0;
+        virtual void        close() = 0;
+
+        virtual status_t    setPlaybackRate(const AudioPlaybackRate& rate) = 0;
+        virtual status_t    getPlaybackRate(AudioPlaybackRate* rate /* nonnull */) = 0;
+        virtual bool        needsTrailingPadding() { return true; }
+
+        virtual status_t    setParameters(const String8& /* keyValuePairs */) { return NO_ERROR; }
+        virtual String8     getParameters(const String8& /* keys */) { return String8::empty(); }
+
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                                    const sp<media::VolumeShaper::Configuration>& configuration,
+                                    const sp<media::VolumeShaper::Operation>& operation);
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id);
+
+        // AudioRouting
+        virtual status_t    setOutputDevice(audio_port_handle_t deviceId);
+        virtual status_t    getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t    enableAudioDeviceCallback(bool enabled);
+    };
+
+                        MediaPlayer2Base() : mClient(0), mNotify(0) {}
+    virtual             ~MediaPlayer2Base() {}
+    virtual status_t    initCheck() = 0;
+    virtual bool        hardwareOutput() = 0;
+
+    virtual status_t    setUID(uid_t /* uid */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t    setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers = NULL) = 0;
+
+    virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
+
+    virtual status_t    setDataSource(const sp<IStreamSource>& /* source */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t    setDataSource(const sp<DataSource>& /* source */) {
+        return INVALID_OPERATION;
+    }
+
+    // pass the buffered native window to the media player service
+    virtual status_t    setVideoSurfaceTexture(const sp<ANativeWindowWrapper>& nww) = 0;
+
+    virtual status_t    getBufferingSettings(
+                                BufferingSettings* buffering /* nonnull */) {
+        *buffering = BufferingSettings();
+        return OK;
+    }
+    virtual status_t    setBufferingSettings(const BufferingSettings& /* buffering */) {
+        return OK;
+    }
+
+    virtual status_t    prepare() = 0;
+    virtual status_t    prepareAsync() = 0;
+    virtual status_t    start() = 0;
+    virtual status_t    stop() = 0;
+    virtual status_t    pause() = 0;
+    virtual bool        isPlaying() = 0;
+    virtual status_t    setPlaybackSettings(const AudioPlaybackRate& rate) {
+        // by default, players only support setting rate to the default
+        if (!isAudioPlaybackRateEqual(rate, AUDIO_PLAYBACK_RATE_DEFAULT)) {
+            return BAD_VALUE;
+        }
+        return OK;
+    }
+    virtual status_t    getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) {
+        *rate = AUDIO_PLAYBACK_RATE_DEFAULT;
+        return OK;
+    }
+    virtual status_t    setSyncSettings(const AVSyncSettings& sync, float /* videoFps */) {
+        // By default, players only support setting sync source to default; all other sync
+        // settings are ignored. There is no requirement for getters to return set values.
+        if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
+            return BAD_VALUE;
+        }
+        return OK;
+    }
+    virtual status_t    getSyncSettings(
+                                AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */) {
+        *sync = AVSyncSettings();
+        *videoFps = -1.f;
+        return OK;
+    }
+    virtual status_t    seekTo(
+            int msec, MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) = 0;
+    virtual status_t    getCurrentPosition(int *msec) = 0;
+    virtual status_t    getDuration(int *msec) = 0;
+    virtual status_t    reset() = 0;
+    virtual status_t    notifyAt(int64_t /* mediaTimeUs */) {
+        return INVALID_OPERATION;
+    }
+    virtual status_t    setLooping(int loop) = 0;
+    virtual player2_type playerType() = 0;
+    virtual status_t    setParameter(int key, const Parcel &request) = 0;
+    virtual status_t    getParameter(int key, Parcel *reply) = 0;
+
+    // default no-op implementation of optional extensions
+    virtual status_t setRetransmitEndpoint(const struct sockaddr_in* /* endpoint */) {
+        return INVALID_OPERATION;
+    }
+    virtual status_t getRetransmitEndpoint(struct sockaddr_in* /* endpoint */) {
+        return INVALID_OPERATION;
+    }
+    virtual status_t setNextPlayer(const sp<MediaPlayer2Base>& /* next */) {
+        return OK;
+    }
+
+    // Invoke a generic method on the player by using opaque parcels
+    // for the request and reply.
+    //
+    // @param request Parcel that is positioned at the start of the
+    //                data sent by the java layer.
+    // @param[out] reply Parcel to hold the reply data. Cannot be null.
+    // @return OK if the call was successful.
+    virtual status_t    invoke(const Parcel& request, Parcel *reply) = 0;
+
+    // The Client in the MetadataPlayerService calls this method on
+    // the native player to retrieve all or a subset of metadata.
+    //
+    // @param ids SortedList of metadata ID to be fetch. If empty, all
+    //            the known metadata should be returned.
+    // @param[inout] records Parcel where the player appends its metadata.
+    // @return OK if the call was successful.
+    virtual status_t    getMetadata(const media::Metadata::Filter& /* ids */,
+                                    Parcel* /* records */) {
+        return INVALID_OPERATION;
+    };
+
+    void        setNotifyCallback(
+            const wp<MediaPlayer2Engine> &client, NotifyCallback notifyFunc) {
+        Mutex::Autolock autoLock(mNotifyLock);
+        mClient = client; mNotify = notifyFunc;
+    }
+
+    void        sendEvent(int msg, int ext1=0, int ext2=0,
+                          const Parcel *obj=NULL) {
+        NotifyCallback notifyCB;
+        wp<MediaPlayer2Engine> client;
+        {
+            Mutex::Autolock autoLock(mNotifyLock);
+            notifyCB = mNotify;
+            client = mClient;
+        }
+
+        if (notifyCB) notifyCB(client, msg, ext1, ext2, obj);
+    }
+
+    virtual status_t dump(int /* fd */, const Vector<String16>& /* args */) const {
+        return INVALID_OPERATION;
+    }
+
+    virtual void onMessageReceived(const sp<AMessage> & /* msg */) override { }
+
+    // Modular DRM
+    virtual status_t prepareDrm(const uint8_t /* uuid */[16], const Vector<uint8_t>& /* drmSessionId */) {
+        return INVALID_OPERATION;
+    }
+    virtual status_t releaseDrm() {
+        return INVALID_OPERATION;
+    }
+
+private:
+    friend class MediaPlayer2Manager;
+
+    Mutex                  mNotifyLock;
+    wp<MediaPlayer2Engine> mClient;
+    NotifyCallback         mNotify;
+};
+
+// Implement this class for media players that use the AudioFlinger software mixer
+class MediaPlayer2Interface : public MediaPlayer2Base
+{
+public:
+    virtual             ~MediaPlayer2Interface() { }
+    virtual bool        hardwareOutput() { return false; }
+    virtual void        setAudioSink(const sp<AudioSink>& audioSink) { mAudioSink = audioSink; }
+protected:
+    sp<AudioSink>       mAudioSink;
+};
+
+// Implement this class for media players that output audio directly to hardware
+class MediaPlayer2HWInterface : public MediaPlayer2Base
+{
+public:
+    virtual             ~MediaPlayer2HWInterface() {}
+    virtual bool        hardwareOutput() { return true; }
+    virtual status_t    setVolume(float leftVolume, float rightVolume) = 0;
+    virtual status_t    setAudioStreamType(audio_stream_type_t streamType) = 0;
+};
+
+}; // namespace android
+
+#endif // __cplusplus
+
+
+#endif // ANDROID_MEDIAPLAYER2INTERFACE_H
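
Concrete players derive from MediaPlayer2Interface (or MediaPlayer2HWInterface) and report state changes through sendEvent(), which forwards to the callback installed via setNotifyCallback(). A hypothetical fragment of such a player (reportPrepareResult is an invented name, shown only as a sketch):

#include <media/MediaPlayer2Interface.h>

namespace android {

// Hypothetical fragment: report the outcome of an asynchronous prepare back
// to the framework. The MEDIA2_* codes come from mediaplayer2.h, which
// MediaPlayer2Interface.h already includes.
static void reportPrepareResult(MediaPlayer2Base *player, status_t err) {
    if (err == OK) {
        player->sendEvent(MEDIA2_PREPARED);
    } else {
        player->sendEvent(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
    }
}

}  // namespace android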
diff --git a/media/libmedia/include/media/MediaRecorderBase.h b/media/libmedia/include/media/MediaRecorderBase.h
index 40dd9f9..748153c 100644
--- a/media/libmedia/include/media/MediaRecorderBase.h
+++ b/media/libmedia/include/media/MediaRecorderBase.h
@@ -18,6 +18,7 @@
 
 #define MEDIA_RECORDER_BASE_H_
 
+#include <media/AudioSystem.h>
 #include <media/mediarecorder.h>
 
 #include <system/audio.h>
@@ -62,6 +63,10 @@
     virtual status_t dump(int fd, const Vector<String16>& args) const = 0;
     virtual status_t setInputSurface(const sp<PersistentSurface>& surface) = 0;
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const = 0;
+    virtual status_t setInputDevice(audio_port_handle_t deviceId) = 0;
+    virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId) = 0;
+    virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback) = 0;
+    virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
 
 
 protected:
diff --git a/media/libmedia/include/media/MidiIoWrapper.h b/media/libmedia/include/media/MidiIoWrapper.h
index e6f8cf7..2754b2c 100644
--- a/media/libmedia/include/media/MidiIoWrapper.h
+++ b/media/libmedia/include/media/MidiIoWrapper.h
@@ -19,7 +19,7 @@
 
 #include <libsonivox/eas_types.h>
 
-#include "media/stagefright/DataSource.h"
+#include <media/DataSource.h>
 
 namespace android {
 
diff --git a/media/libmedia/include/media/NdkWrapper.h b/media/libmedia/include/media/NdkWrapper.h
new file mode 100644
index 0000000..00e0fd4
--- /dev/null
+++ b/media/libmedia/include/media/NdkWrapper.h
@@ -0,0 +1,325 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NDK_WRAPPER_H_
+
+#define NDK_WRAPPER_H_
+
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/hardware/CryptoAPI.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+struct AMediaCodec;
+struct AMediaCodecBufferInfo;
+struct AMediaCodecCryptoInfo;
+struct AMediaCrypto;
+struct AMediaDrm;
+struct AMediaFormat;
+struct AMediaExtractor;
+struct ANativeWindow;
+struct PsshInfo;
+
+namespace android {
+
+struct AMessage;
+class MetaData;
+
+struct AMediaFormatWrapper : public RefBase {
+    static sp<AMediaFormatWrapper> Create(const sp<AMessage> &message);
+
+    AMediaFormatWrapper();
+    AMediaFormatWrapper(AMediaFormat *aMediaFormat);
+
+    // the returned AMediaFormat is still owned by this wrapper.
+    AMediaFormat *getAMediaFormat() const;
+
+    sp<AMessage> toAMessage() const;
+    const char* toString() const;
+
+    status_t release();
+
+    bool getInt32(const char *name, int32_t *out) const;
+    bool getInt64(const char *name, int64_t *out) const;
+    bool getFloat(const char *name, float *out) const;
+    bool getDouble(const char *name, double *out) const;
+    bool getSize(const char *name, size_t *out) const;
+    bool getRect(const char *name,
+                 int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const;
+    bool getBuffer(const char *name, void** data, size_t *outsize) const;
+    bool getString(const char *name, AString *out) const;
+
+    void setInt32(const char* name, int32_t value);
+    void setInt64(const char* name, int64_t value);
+    void setFloat(const char* name, float value);
+    void setDouble(const char *name, double value);
+    void setSize(const char* name, size_t value);
+    void setRect(const char* name,
+                 int32_t left, int32_t top, int32_t right, int32_t bottom);
+    void setString(const char* name, const AString &value);
+    void setBuffer(const char* name, void* data, size_t size);
+
+protected:
+    virtual ~AMediaFormatWrapper();
+
+private:
+    AMediaFormat *mAMediaFormat;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaFormatWrapper);
+};
+
+struct ANativeWindowWrapper : public RefBase {
+    ANativeWindowWrapper(ANativeWindow *aNativeWindow);
+
+    // the returned ANativeWindow is still owned by this wrapper.
+    ANativeWindow *getANativeWindow() const;
+
+    status_t release();
+
+protected:
+    virtual ~ANativeWindowWrapper();
+
+private:
+    ANativeWindow *mANativeWindow;
+
+    DISALLOW_EVIL_CONSTRUCTORS(ANativeWindowWrapper);
+};
+
+struct AMediaDrmWrapper : public RefBase {
+    AMediaDrmWrapper(const uint8_t uuid[16]);
+    AMediaDrmWrapper(AMediaDrm *aMediaDrm);
+
+    // the returned AMediaDrm is still owned by this wrapper.
+    AMediaDrm *getAMediaDrm() const;
+
+    status_t release();
+
+    static bool isCryptoSchemeSupported(const uint8_t uuid[16], const char *mimeType);
+
+protected:
+    virtual ~AMediaDrmWrapper();
+
+private:
+    AMediaDrm *mAMediaDrm;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaDrmWrapper);
+};
+
+struct AMediaCryptoWrapper : public RefBase {
+    AMediaCryptoWrapper(const uint8_t uuid[16],
+                        const void *initData,
+                        size_t initDataSize);
+    AMediaCryptoWrapper(AMediaCrypto *aMediaCrypto);
+
+    // the returned AMediaCrypto is still owned by this wrapper.
+    AMediaCrypto *getAMediaCrypto() const;
+
+    status_t release();
+
+    bool isCryptoSchemeSupported(const uint8_t uuid[16]);
+
+    bool requiresSecureDecoderComponent(const char *mime);
+
+protected:
+    virtual ~AMediaCryptoWrapper();
+
+private:
+    AMediaCrypto *mAMediaCrypto;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaCryptoWrapper);
+};
+
+struct AMediaCodecCryptoInfoWrapper : public RefBase {
+    static sp<AMediaCodecCryptoInfoWrapper> Create(sp<MetaData> meta);
+
+    AMediaCodecCryptoInfoWrapper(int numsubsamples,
+                                 uint8_t key[16],
+                                 uint8_t iv[16],
+                                 CryptoPlugin::Mode mode,
+                                 size_t *clearbytes,
+                                 size_t *encryptedbytes);
+    AMediaCodecCryptoInfoWrapper(AMediaCodecCryptoInfo *aMediaCodecCryptoInfo);
+
+    // the returned AMediaCodecCryptoInfo is still owned by this wrapper.
+    AMediaCodecCryptoInfo *getAMediaCodecCryptoInfo() const;
+
+    status_t release();
+
+    void setPattern(CryptoPlugin::Pattern *pattern);
+
+    size_t getNumSubSamples();
+
+    status_t getKey(uint8_t *dst);
+
+    status_t getIV(uint8_t *dst);
+
+    CryptoPlugin::Mode getMode();
+
+    status_t getClearBytes(size_t *dst);
+
+    status_t getEncryptedBytes(size_t *dst);
+
+protected:
+    virtual ~AMediaCodecCryptoInfoWrapper();
+
+private:
+    AMediaCodecCryptoInfo *mAMediaCodecCryptoInfo;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaCodecCryptoInfoWrapper);
+};
+
+struct AMediaCodecWrapper : public RefBase {
+    enum {
+        CB_INPUT_AVAILABLE = 1,
+        CB_OUTPUT_AVAILABLE = 2,
+        CB_ERROR = 3,
+        CB_OUTPUT_FORMAT_CHANGED = 4,
+    };
+
+    static sp<AMediaCodecWrapper> CreateCodecByName(const AString &name);
+    static sp<AMediaCodecWrapper> CreateDecoderByType(const AString &mimeType);
+
+    static void OnInputAvailableCB(AMediaCodec *codec,
+                                   void *userdata,
+                                   int32_t index);
+    static void OnOutputAvailableCB(AMediaCodec *codec,
+                                    void *userdata,
+                                    int32_t index,
+                                    AMediaCodecBufferInfo *bufferInfo);
+    static void OnFormatChangedCB(AMediaCodec *codec,
+                                  void *userdata,
+                                  AMediaFormat *format);
+    static void OnErrorCB(AMediaCodec *codec,
+                          void *userdata,
+                          media_status_t err,
+                          int32_t actionCode,
+                          const char *detail);
+
+    AMediaCodecWrapper(AMediaCodec *aMediaCodec);
+
+    // the returned AMediaCodec is still owned by this wrapper.
+    AMediaCodec *getAMediaCodec() const;
+
+    status_t release();
+
+    status_t getName(AString* outComponentName) const;
+
+    status_t configure(
+            const sp<AMediaFormatWrapper> &format,
+            const sp<ANativeWindowWrapper> &nww,
+            const sp<AMediaCryptoWrapper> &crypto,
+            uint32_t flags);
+
+    status_t setCallback(const sp<AMessage> &callback);
+
+    status_t releaseCrypto();
+
+    status_t start();
+    status_t stop();
+    status_t flush();
+
+    uint8_t* getInputBuffer(size_t idx, size_t *out_size);
+    uint8_t* getOutputBuffer(size_t idx, size_t *out_size);
+
+    status_t queueInputBuffer(
+            size_t idx,
+            size_t offset,
+            size_t size,
+            uint64_t time,
+            uint32_t flags);
+
+    status_t queueSecureInputBuffer(
+            size_t idx,
+            size_t offset,
+            sp<AMediaCodecCryptoInfoWrapper> &codecCryptoInfo,
+            uint64_t time,
+            uint32_t flags);
+
+    sp<AMediaFormatWrapper> getOutputFormat();
+    sp<AMediaFormatWrapper> getInputFormat();
+
+    status_t releaseOutputBuffer(size_t idx, bool render);
+
+    status_t setOutputSurface(const sp<ANativeWindowWrapper> &nww);
+
+    status_t releaseOutputBufferAtTime(size_t idx, int64_t timestampNs);
+
+    status_t setParameters(const sp<AMediaFormatWrapper> &params);
+
+protected:
+    virtual ~AMediaCodecWrapper();
+
+private:
+    AMediaCodec *mAMediaCodec;
+
+    sp<AMessage> mCallback;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaCodecWrapper);
+};
+
+struct AMediaExtractorWrapper : public RefBase {
+
+    AMediaExtractorWrapper(AMediaExtractor *aMediaExtractor);
+
+    // the returned AMediaExtractor is still owned by this wrapper.
+    AMediaExtractor *getAMediaExtractor() const;
+
+    status_t release();
+
+    status_t setDataSource(int fd, off64_t offset, off64_t length);
+
+    status_t setDataSource(const char *location);
+
+    size_t getTrackCount();
+
+    sp<AMediaFormatWrapper> getTrackFormat(size_t idx);
+
+    status_t selectTrack(size_t idx);
+
+    status_t unselectTrack(size_t idx);
+
+    ssize_t readSampleData(const sp<ABuffer> &buffer);
+
+    uint32_t getSampleFlags();
+
+    int getSampleTrackIndex();
+
+    int64_t getSampleTime();
+
+    bool advance();
+
+    status_t seekTo(int64_t seekPosUs, SeekMode mode);
+
+    // the returned PsshInfo is still owned by this wrapper.
+    PsshInfo* getPsshInfo();
+
+    sp<AMediaCodecCryptoInfoWrapper> getSampleCryptoInfo();
+
+protected:
+    virtual ~AMediaExtractorWrapper();
+
+private:
+    AMediaExtractor *mAMediaExtractor;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AMediaExtractorWrapper);
+};
+
+}  // namespace android
+
+#endif  // NDK_WRAPPER_H_
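
These wrappers give the framework RefBase-managed handles over the NDK media objects. A usage sketch (hypothetical helper dumpTracks, not part of this change, assuming the NDK media library is available) that enumerates the tracks behind a file descriptor:

#define LOG_TAG "NdkWrapperSketch"

#include <media/NdkMediaExtractor.h>
#include <media/NdkWrapper.h>
#include <utils/Log.h>

namespace android {

// Hypothetical helper: wrap a freshly created NDK extractor, point it at a
// file descriptor, and log every track format it exposes.
static status_t dumpTracks(int fd, off64_t length) {
    sp<AMediaExtractorWrapper> extractor =
            new AMediaExtractorWrapper(AMediaExtractor_new());
    status_t err = extractor->setDataSource(fd, 0 /* offset */, length);
    if (err != OK) {
        return err;
    }
    for (size_t i = 0; i < extractor->getTrackCount(); ++i) {
        sp<AMediaFormatWrapper> format = extractor->getTrackFormat(i);
        ALOGV("track %zu: %s", i, format->toString());
    }
    return extractor->release();  // give up the underlying NDK handle
}

}  // namespace android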
diff --git a/media/libmedia/include/media/TypeConverter.h b/media/libmedia/include/media/TypeConverter.h
index 84e22b1..86f0d4c 100644
--- a/media/libmedia/include/media/TypeConverter.h
+++ b/media/libmedia/include/media/TypeConverter.h
@@ -80,6 +80,11 @@
     typedef audio_mode_t Type;
     typedef Vector<Type> Collection;
 };
+struct AudioContentTraits
+{
+    typedef audio_content_type_t Type;
+    typedef Vector<Type> Collection;
+};
 struct UsageTraits
 {
     typedef audio_usage_t Type;
@@ -226,6 +231,7 @@
 typedef TypeConverter<GainModeTraits> GainModeConverter;
 typedef TypeConverter<StreamTraits> StreamTypeConverter;
 typedef TypeConverter<AudioModeTraits> AudioModeConverter;
+typedef TypeConverter<AudioContentTraits> AudioContentTypeConverter;
 typedef TypeConverter<UsageTraits> UsageTypeConverter;
 typedef TypeConverter<SourceTraits> SourceTypeConverter;
 
@@ -240,6 +246,7 @@
 template<> const GainModeConverter::Table GainModeConverter::mTable[];
 template<> const StreamTypeConverter::Table StreamTypeConverter::mTable[];
 template<> const AudioModeConverter::Table AudioModeConverter::mTable[];
+template<> const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[];
 template<> const UsageTypeConverter::Table UsageTypeConverter::mTable[];
 template<> const SourceTypeConverter::Table SourceTypeConverter::mTable[];
 
diff --git a/media/libmedia/include/media/mediametadataretriever.h b/media/libmedia/include/media/mediametadataretriever.h
index 65c266b..3511253 100644
--- a/media/libmedia/include/media/mediametadataretriever.h
+++ b/media/libmedia/include/media/mediametadataretriever.h
@@ -59,6 +59,13 @@
     METADATA_KEY_LOCATION        = 23,
     METADATA_KEY_VIDEO_ROTATION  = 24,
     METADATA_KEY_CAPTURE_FRAMERATE = 25,
+    METADATA_KEY_HAS_IMAGE       = 26,
+    METADATA_KEY_IMAGE_COUNT     = 27,
+    METADATA_KEY_IMAGE_PRIMARY   = 28,
+    METADATA_KEY_IMAGE_WIDTH     = 29,
+    METADATA_KEY_IMAGE_HEIGHT    = 30,
+    METADATA_KEY_IMAGE_ROTATION  = 31,
+    METADATA_KEY_VIDEO_FRAME_COUNT  = 32,
 
     // Add more here...
 };
@@ -80,6 +87,11 @@
             const sp<IDataSource>& dataSource, const char *mime = NULL);
     sp<IMemory> getFrameAtTime(int64_t timeUs, int option,
             int colorFormat = HAL_PIXEL_FORMAT_RGB_565, bool metaOnly = false);
+    sp<IMemory> getImageAtIndex(int index,
+            int colorFormat = HAL_PIXEL_FORMAT_RGB_565, bool metaOnly = false);
+    status_t getFrameAtIndex(
+            std::vector<sp<IMemory> > *frames, int frameIndex, int numFrames = 1,
+            int colorFormat = HAL_PIXEL_FORMAT_RGB_565, bool metaOnly = false);
     sp<IMemory> extractAlbumArt();
     const char* extractMetadata(int keyCode);
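
The new getImageAtIndex()/getFrameAtIndex() entry points parallel getFrameAtTime() but address frames by index. A hypothetical caller (fetchFrames is an invented name, shown only as a sketch) fetching a short run of frames with the default color format:

#include <vector>

#include <media/mediametadataretriever.h>

namespace android {

// Hypothetical caller: pull four full (non-metaOnly) frames starting at
// firstIndex, using the default HAL_PIXEL_FORMAT_RGB_565 color format.
static status_t fetchFrames(MediaMetadataRetriever &retriever,
                            int firstIndex,
                            std::vector<sp<IMemory> > *outFrames) {
    return retriever.getFrameAtIndex(outFrames, firstIndex, 4 /* numFrames */);
}

}  // namespace android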
 
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index 623c374..c4dbf42 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_MEDIAPLAYER_H
 #define ANDROID_MEDIAPLAYER_H
 
+#include <media/mediaplayer_common.h>
+
 #include <arpa/inet.h>
 
 #include <binder/IMemory.h>
@@ -50,12 +52,14 @@
     MEDIA_PAUSED            = 7,
     MEDIA_STOPPED           = 8,
     MEDIA_SKIPPED           = 9,
+    MEDIA_NOTIFY_TIME       = 98,
     MEDIA_TIMED_TEXT        = 99,
     MEDIA_ERROR             = 100,
     MEDIA_INFO              = 200,
     MEDIA_SUBTITLE_DATA     = 201,
     MEDIA_META_DATA         = 202,
     MEDIA_DRM_INFO          = 210,
+    MEDIA_AUDIO_ROUTING_CHANGED = 10000,
 };
 
 // Generic error codes for the media player framework.  Errors are fatal, the
@@ -186,16 +190,6 @@
     INVOKE_ID_GET_SELECTED_TRACK = 7
 };
 
-// Keep MEDIA_TRACK_TYPE_* in sync with MediaPlayer.java.
-enum media_track_type {
-    MEDIA_TRACK_TYPE_UNKNOWN = 0,
-    MEDIA_TRACK_TYPE_VIDEO = 1,
-    MEDIA_TRACK_TYPE_AUDIO = 2,
-    MEDIA_TRACK_TYPE_TIMEDTEXT = 3,
-    MEDIA_TRACK_TYPE_SUBTITLE = 4,
-    MEDIA_TRACK_TYPE_METADATA = 5,
-};
-
 // ----------------------------------------------------------------------------
 // ref-counted object for callbacks
 class MediaPlayerListener: virtual public RefBase
@@ -225,7 +219,6 @@
             status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer);
             status_t        setListener(const sp<MediaPlayerListener>& listener);
-            status_t        getDefaultBufferingSettings(BufferingSettings* buffering /* nonnull */);
             status_t        getBufferingSettings(BufferingSettings* buffering /* nonnull */);
             status_t        setBufferingSettings(const BufferingSettings& buffering);
             status_t        prepare();
@@ -245,6 +238,7 @@
             status_t        seekTo(
                     int msec,
                     MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC);
+            status_t        notifyAt(int64_t mediaTimeUs);
             status_t        getCurrentPosition(int *msec);
             status_t        getDuration(int *msec);
             status_t        reset();
@@ -266,13 +260,17 @@
             status_t        setRetransmitEndpoint(const char* addrString, uint16_t port);
             status_t        setNextMediaPlayer(const sp<MediaPlayer>& player);
 
-            VolumeShaper::Status applyVolumeShaper(
-                                    const sp<VolumeShaper::Configuration>& configuration,
-                                    const sp<VolumeShaper::Operation>& operation);
-            sp<VolumeShaper::State> getVolumeShaperState(int id);
+            media::VolumeShaper::Status applyVolumeShaper(
+                                    const sp<media::VolumeShaper::Configuration>& configuration,
+                                    const sp<media::VolumeShaper::Operation>& operation);
+            sp<media::VolumeShaper::State> getVolumeShaperState(int id);
             // Modular DRM
             status_t        prepareDrm(const uint8_t uuid[16], const Vector<uint8_t>& drmSessionId);
             status_t        releaseDrm();
+            // AudioRouting
+            status_t        setOutputDevice(audio_port_handle_t deviceId);
+            audio_port_handle_t getRoutedDeviceId();
+            status_t        enableAudioDeviceCallback(bool enabled);
 
 private:
             void            clear_l();
@@ -309,7 +307,6 @@
     float                       mSendLevel;
     struct sockaddr_in          mRetransmitEndpoint;
     bool                        mRetransmitEndpointValid;
-    BufferingSettings           mCurrentBufferingSettings;
 };
 
 }; // namespace android
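
The MediaPlayer additions let a client pick an output device explicitly and opt in to routing-change notifications, which reach the listener as MEDIA_AUDIO_ROUTING_CHANGED events. A hedged sketch (routeOutputTo is hypothetical, not part of this change):

#include <media/mediaplayer.h>
#include <system/audio.h>

namespace android {

// Hypothetical helper: route a player's output to a specific device and turn
// on device callbacks so the listener sees MEDIA_AUDIO_ROUTING_CHANGED.
static status_t routeOutputTo(const sp<MediaPlayer> &player,
                              audio_port_handle_t deviceId) {
    status_t err = player->setOutputDevice(deviceId);
    if (err != OK) {
        return err;
    }
    return player->enableAudioDeviceCallback(true);
}

}  // namespace android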
diff --git a/media/libmedia/include/media/mediaplayer2.h b/media/libmedia/include/media/mediaplayer2.h
new file mode 100644
index 0000000..c96765f
--- /dev/null
+++ b/media/libmedia/include/media/mediaplayer2.h
@@ -0,0 +1,306 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIAPLAYER2_H
+#define ANDROID_MEDIAPLAYER2_H
+
+#include <media/mediaplayer_common.h>
+
+#include <arpa/inet.h>
+
+#include <media/AudioResamplerPublic.h>
+#include <media/BufferingSettings.h>
+#include <media/MediaPlayer2EngineClient.h>
+#include <media/MediaPlayer2Engine.h>
+
+#include <utils/Condition.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <utils/ThreadDefs.h>
+
+namespace android {
+
+struct AVSyncSettings;
+struct ANativeWindowWrapper;
+class DataSource;
+struct MediaHTTPService;
+
+enum media2_event_type {
+    MEDIA2_NOP               = 0, // interface test message
+    MEDIA2_PREPARED          = 1,
+    MEDIA2_PLAYBACK_COMPLETE = 2,
+    MEDIA2_BUFFERING_UPDATE  = 3,
+    MEDIA2_SEEK_COMPLETE     = 4,
+    MEDIA2_SET_VIDEO_SIZE    = 5,
+    MEDIA2_STARTED           = 6,
+    MEDIA2_PAUSED            = 7,
+    MEDIA2_STOPPED           = 8,
+    MEDIA2_SKIPPED           = 9,
+    MEDIA2_NOTIFY_TIME       = 98,
+    MEDIA2_TIMED_TEXT        = 99,
+    MEDIA2_ERROR             = 100,
+    MEDIA2_INFO              = 200,
+    MEDIA2_SUBTITLE_DATA     = 201,
+    MEDIA2_META_DATA         = 202,
+    MEDIA2_DRM_INFO          = 210,
+    MEDIA2_AUDIO_ROUTING_CHANGED = 10000,
+};
+
+// Generic error codes for the media player framework.  Errors are fatal, the
+// playback must abort.
+//
+// Errors are communicated back to the client using the
+// MediaPlayer2Listener::notify method defined below.
+// In this situation, 'notify' is invoked with the following:
+//   'msg' is set to MEDIA2_ERROR.
+//   'ext1' should be a value from the enum media2_error_type.
+//   'ext2' contains an implementation dependent error code to provide
+//          more details. Should default to 0 when not used.
+//
+// The codes are distributed as follows:
+//   0xx: Reserved
+//   1xx: Android Player errors. Something went wrong inside the MediaPlayer2.
+//   2xx: Media errors (e.g. codec not supported). There is a problem with the
+//        media itself.
+//   3xx: Runtime errors. Some extraordinary condition arose making the playback
+//        impossible.
+//
+enum media2_error_type {
+    // 0xx
+    MEDIA2_ERROR_UNKNOWN = 1,
+    // 1xx
+    MEDIA2_ERROR_SERVER_DIED = 100,
+    // 2xx
+    MEDIA2_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200,
+    // 3xx
+};
+
+
+// Info and warning codes for the media player framework.  These are non fatal,
+// the playback is going on but there might be some user visible issues.
+//
+// Info and warning messages are communicated back to the client using the
+// MediaPlayer2Listener::notify method defined below.  In this situation,
+// 'notify' is invoked with the following:
+//   'msg' is set to MEDIA2_INFO.
+//   'ext1' should be a value from the enum media2_info_type.
+//   'ext2' contains an implementation dependent info code to provide
+//          more details. Should default to 0 when not used.
+//
+// The codes are distributed as follows:
+//   0xx: Reserved
+//   7xx: Android Player info/warning (e.g. player lagging behind.)
+//   8xx: Media info/warning (e.g. media badly interleaved.)
+//
+enum media2_info_type {
+    // 0xx
+    MEDIA2_INFO_UNKNOWN = 1,
+    // The player was started because it was used as the next player for another
+    // player, which just completed playback
+    MEDIA2_INFO_STARTED_AS_NEXT = 2,
+    // The player just pushed the very first video frame for rendering
+    MEDIA2_INFO_RENDERING_START = 3,
+    // 7xx
+    // The video is too complex for the decoder: it can't decode frames fast
+    // enough. Possibly only the audio plays fine at this stage.
+    MEDIA2_INFO_VIDEO_TRACK_LAGGING = 700,
+    // MediaPlayer2 is temporarily pausing playback internally in order to
+    // buffer more data.
+    MEDIA2_INFO_BUFFERING_START = 701,
+    // MediaPlayer2 is resuming playback after filling buffers.
+    MEDIA2_INFO_BUFFERING_END = 702,
+    // Bandwidth in recent past
+    MEDIA2_INFO_NETWORK_BANDWIDTH = 703,
+
+    // 8xx
+    // Bad interleaving means that the media has been improperly interleaved or not
+    // interleaved at all, e.g. has all the video samples first and then all the
+    // audio ones. Video is playing but a lot of disk seeking may be happening.
+    MEDIA2_INFO_BAD_INTERLEAVING = 800,
+    // The media is not seekable (e.g. a live stream).
+    MEDIA2_INFO_NOT_SEEKABLE = 801,
+    // New media metadata is available.
+    MEDIA2_INFO_METADATA_UPDATE = 802,
+    // Audio can not be played.
+    MEDIA2_INFO_PLAY_AUDIO_ERROR = 804,
+    // Video can not be played.
+    MEDIA2_INFO_PLAY_VIDEO_ERROR = 805,
+
+    //9xx
+    MEDIA2_INFO_TIMED_TEXT_ERROR = 900,
+};
+
+
+
+enum media_player2_states {
+    MEDIA_PLAYER2_STATE_ERROR        = 0,
+    MEDIA_PLAYER2_IDLE               = 1 << 0,
+    MEDIA_PLAYER2_INITIALIZED        = 1 << 1,
+    MEDIA_PLAYER2_PREPARING          = 1 << 2,
+    MEDIA_PLAYER2_PREPARED           = 1 << 3,
+    MEDIA_PLAYER2_STARTED            = 1 << 4,
+    MEDIA_PLAYER2_PAUSED             = 1 << 5,
+    MEDIA_PLAYER2_STOPPED            = 1 << 6,
+    MEDIA_PLAYER2_PLAYBACK_COMPLETE  = 1 << 7
+};
+
+// Keep KEY_PARAMETER_* in sync with MediaPlayer2.java.
+// The same enum space is used for both set and get, in case there are future keys that
+// can be both set and get.  But as of now, all parameters are either set only or get only.
+enum media2_parameter_keys {
+    // Streaming/buffering parameters
+    MEDIA2_KEY_PARAMETER_CACHE_STAT_COLLECT_FREQ_MS = 1100,            // set only
+
+    // Return a Parcel containing a single int, which is the channel count of the
+    // audio track, or zero for error (e.g. no audio track) or unknown.
+    MEDIA2_KEY_PARAMETER_AUDIO_CHANNEL_COUNT = 1200,                   // get only
+
+    // Playback rate expressed in permille (1000 is normal speed), saved as int32_t, with negative
+    // values used for rewinding or reverse playback.
+    MEDIA2_KEY_PARAMETER_PLAYBACK_RATE_PERMILLE = 1300,                // set only
+
+    // Set a Parcel containing the value of a parcelled Java AudioAttribute instance
+    MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400                       // set only
+};
+
+// Keep INVOKE_ID_* in sync with MediaPlayer2.java.
+enum media_player2_invoke_ids {
+    MEDIA_PLAYER2_INVOKE_ID_GET_TRACK_INFO = 1,
+    MEDIA_PLAYER2_INVOKE_ID_ADD_EXTERNAL_SOURCE = 2,
+    MEDIA_PLAYER2_INVOKE_ID_ADD_EXTERNAL_SOURCE_FD = 3,
+    MEDIA_PLAYER2_INVOKE_ID_SELECT_TRACK = 4,
+    MEDIA_PLAYER2_INVOKE_ID_UNSELECT_TRACK = 5,
+    MEDIA_PLAYER2_INVOKE_ID_SET_VIDEO_SCALING_MODE = 6,
+    MEDIA_PLAYER2_INVOKE_ID_GET_SELECTED_TRACK = 7
+};
+
+// ----------------------------------------------------------------------------
+// ref-counted object for callbacks
+class MediaPlayer2Listener: virtual public RefBase
+{
+public:
+    virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) = 0;
+};
+
+class MediaPlayer2 : public MediaPlayer2EngineClient
+{
+public:
+    MediaPlayer2();
+    ~MediaPlayer2();
+            void            disconnect();
+
+            status_t        setDataSource(
+                    const sp<MediaHTTPService> &httpService,
+                    const char *url,
+                    const KeyedVector<String8, String8> *headers);
+
+            status_t        setDataSource(int fd, int64_t offset, int64_t length);
+            status_t        setDataSource(const sp<DataSource> &source);
+            status_t        setVideoSurfaceTexture(const sp<ANativeWindowWrapper>& nww);
+            status_t        setListener(const sp<MediaPlayer2Listener>& listener);
+            status_t        getBufferingSettings(BufferingSettings* buffering /* nonnull */);
+            status_t        setBufferingSettings(const BufferingSettings& buffering);
+            status_t        prepare();
+            status_t        prepareAsync();
+            status_t        start();
+            status_t        stop();
+            status_t        pause();
+            bool            isPlaying();
+            status_t        setPlaybackSettings(const AudioPlaybackRate& rate);
+            status_t        getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */);
+            status_t        setSyncSettings(const AVSyncSettings& sync, float videoFpsHint);
+            status_t        getSyncSettings(
+                                    AVSyncSettings* sync /* nonnull */,
+                                    float* videoFps /* nonnull */);
+            status_t        getVideoWidth(int *w);
+            status_t        getVideoHeight(int *h);
+            status_t        seekTo(
+                    int msec,
+                    MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC);
+            status_t        notifyAt(int64_t mediaTimeUs);
+            status_t        getCurrentPosition(int *msec);
+            status_t        getDuration(int *msec);
+            status_t        reset();
+            status_t        setAudioStreamType(audio_stream_type_t type);
+            status_t        getAudioStreamType(audio_stream_type_t *type);
+            status_t        setLooping(int loop);
+            bool            isLooping();
+            status_t        setVolume(float leftVolume, float rightVolume);
+            void            notify(int msg, int ext1, int ext2, const Parcel *obj = NULL);
+            status_t        invoke(const Parcel& request, Parcel *reply);
+            status_t        setMetadataFilter(const Parcel& filter);
+            status_t        getMetadata(bool update_only, bool apply_filter, Parcel *metadata);
+            status_t        setAudioSessionId(audio_session_t sessionId);
+            audio_session_t getAudioSessionId();
+            status_t        setAuxEffectSendLevel(float level);
+            status_t        attachAuxEffect(int effectId);
+            status_t        setParameter(int key, const Parcel& request);
+            status_t        getParameter(int key, Parcel* reply);
+            status_t        setRetransmitEndpoint(const char* addrString, uint16_t port);
+            status_t        setNextMediaPlayer(const sp<MediaPlayer2>& player);
+
+            media::VolumeShaper::Status applyVolumeShaper(
+                                    const sp<media::VolumeShaper::Configuration>& configuration,
+                                    const sp<media::VolumeShaper::Operation>& operation);
+            sp<media::VolumeShaper::State> getVolumeShaperState(int id);
+            // Modular DRM
+            status_t        prepareDrm(const uint8_t uuid[16], const Vector<uint8_t>& drmSessionId);
+            status_t        releaseDrm();
+            // AudioRouting
+            status_t        setOutputDevice(audio_port_handle_t deviceId);
+            audio_port_handle_t getRoutedDeviceId();
+            status_t        enableAudioDeviceCallback(bool enabled);
+
+private:
+            void            clear_l();
+            status_t        seekTo_l(int msec, MediaPlayer2SeekMode mode);
+            status_t        prepareAsync_l();
+            status_t        getDuration_l(int *msec);
+            status_t        attachNewPlayer(const sp<MediaPlayer2Engine>& player);
+            status_t        reset_l();
+            status_t        doSetRetransmitEndpoint(const sp<MediaPlayer2Engine>& player);
+            status_t        checkStateForKeySet_l(int key);
+
+    sp<MediaPlayer2Engine>      mPlayer;
+    thread_id_t                 mLockThreadId;
+    Mutex                       mLock;
+    Mutex                       mNotifyLock;
+    Condition                   mSignal;
+    sp<MediaPlayer2Listener>    mListener;
+    void*                       mCookie;
+    media_player2_states        mCurrentState;
+    int                         mCurrentPosition;
+    MediaPlayer2SeekMode        mCurrentSeekMode;
+    int                         mSeekPosition;
+    MediaPlayer2SeekMode        mSeekMode;
+    bool                        mPrepareSync;
+    status_t                    mPrepareStatus;
+    audio_stream_type_t         mStreamType;
+    Parcel*                     mAudioAttributesParcel;
+    bool                        mLoop;
+    float                       mLeftVolume;
+    float                       mRightVolume;
+    int                         mVideoWidth;
+    int                         mVideoHeight;
+    audio_session_t             mAudioSessionId;
+    float                       mSendLevel;
+    struct sockaddr_in          mRetransmitEndpoint;
+    bool                        mRetransmitEndpointValid;
+};
+
+}; // namespace android
+
+#endif // ANDROID_MEDIAPLAYER2_H
diff --git a/media/libmedia/include/media/mediaplayer_common.h b/media/libmedia/include/media/mediaplayer_common.h
new file mode 100644
index 0000000..d5a0135
--- /dev/null
+++ b/media/libmedia/include/media/mediaplayer_common.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIAPLAYER_COMMON_H
+#define ANDROID_MEDIAPLAYER_COMMON_H
+
+namespace android {
+
+// Keep MEDIA_TRACK_TYPE_* in sync with MediaPlayer.java.
+enum media_track_type {
+    MEDIA_TRACK_TYPE_UNKNOWN = 0,
+    MEDIA_TRACK_TYPE_VIDEO = 1,
+    MEDIA_TRACK_TYPE_AUDIO = 2,
+    MEDIA_TRACK_TYPE_TIMEDTEXT = 3,
+    MEDIA_TRACK_TYPE_SUBTITLE = 4,
+    MEDIA_TRACK_TYPE_METADATA = 5,
+};
+
+}; // namespace android
+
+#endif // ANDROID_MEDIAPLAYER_COMMON_H
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index 071e7a1..5f2a6fe 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -77,6 +77,9 @@
     /* VP8/VORBIS data in a WEBM container */
     OUTPUT_FORMAT_WEBM = 9,
 
+    /* HEIC data in a HEIF container */
+    OUTPUT_FORMAT_HEIF = 10,
+
     OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
 };
 
@@ -138,6 +141,8 @@
     MEDIA_RECORDER_TRACK_EVENT_ERROR              = 100,
     MEDIA_RECORDER_TRACK_EVENT_INFO               = 101,
     MEDIA_RECORDER_TRACK_EVENT_LIST_END           = 1000,
+
+    MEDIA_RECORDER_AUDIO_ROUTING_CHANGED          = 10000,
 };
 
 /*
@@ -250,6 +255,9 @@
     status_t    setInputSurface(const sp<PersistentSurface>& surface);
     sp<IGraphicBufferProducer>     querySurfaceMediaSourceFromMediaServer();
     status_t    getMetrics(Parcel *reply);
+    status_t    setInputDevice(audio_port_handle_t deviceId);
+    status_t    getRoutedDeviceId(audio_port_handle_t *deviceId);
+    status_t    enableAudioDeviceCallback(bool enabled);
 
 private:
     void                    doCleanUp();
diff --git a/media/libmedia/include/media/omx/1.0/Conversion.h b/media/libmedia/include/media/omx/1.0/Conversion.h
index 9816fe1..94f2e8d 100644
--- a/media/libmedia/include/media/omx/1.0/Conversion.h
+++ b/media/libmedia/include/media/omx/1.0/Conversion.h
@@ -24,7 +24,6 @@
 
 #include <hidl/MQDescriptor.h>
 #include <hidl/Status.h>
-#include <hidlmemory/mapping.h>
 
 #include <binder/Binder.h>
 #include <binder/Status.h>
@@ -36,7 +35,6 @@
 #include <media/OMXBuffer.h>
 #include <media/hardware/VideoAPI.h>
 
-#include <android/hidl/memory/1.0/IMemory.h>
 #include <android/hardware/media/omx/1.0/types.h>
 #include <android/hardware/media/omx/1.0/IOmx.h>
 #include <android/hardware/media/omx/1.0/IOmxNode.h>
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 7d27d57..6a4204b 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -154,6 +154,32 @@
     return mRetriever->getFrameAtTime(timeUs, option, colorFormat, metaOnly);
 }
 
+sp<IMemory> MediaMetadataRetriever::getImageAtIndex(
+        int index, int colorFormat, bool metaOnly) {
+    ALOGV("getImageAtIndex: index(%d) colorFormat(%d) metaOnly(%d)",
+            index, colorFormat, metaOnly);
+    Mutex::Autolock _l(mLock);
+    if (mRetriever == 0) {
+        ALOGE("retriever is not initialized");
+        return NULL;
+    }
+    return mRetriever->getImageAtIndex(index, colorFormat, metaOnly);
+}
+
+status_t MediaMetadataRetriever::getFrameAtIndex(
+        std::vector<sp<IMemory> > *frames,
+        int frameIndex, int numFrames, int colorFormat, bool metaOnly) {
+    ALOGV("getFrameAtIndex: frameIndex(%d), numFrames(%d), colorFormat(%d) metaOnly(%d)",
+            frameIndex, numFrames, colorFormat, metaOnly);
+    Mutex::Autolock _l(mLock);
+    if (mRetriever == 0) {
+        ALOGE("retriever is not initialized");
+        return INVALID_OPERATION;
+    }
+    return mRetriever->getFrameAtIndex(
+            frames, frameIndex, numFrames, colorFormat, metaOnly);
+}
+
 const char* MediaMetadataRetriever::extractMetadata(int keyCode)
 {
     ALOGV("extractMetadata(%d)", keyCode);
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index b976721..26908e5 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -48,6 +48,8 @@
 
 namespace android {
 
+using media::VolumeShaper;
+
 MediaPlayer::MediaPlayer()
 {
     ALOGV("constructor");
@@ -137,10 +139,8 @@
         mPlayer = player;
         if (player != 0) {
             mCurrentState = MEDIA_PLAYER_INITIALIZED;
-            player->getDefaultBufferingSettings(&mCurrentBufferingSettings);
             err = NO_ERROR;
         } else {
-            mCurrentBufferingSettings = BufferingSettings();
             ALOGE("Unable to create media player");
         }
     }
@@ -247,17 +247,6 @@
     return mPlayer->setVideoSurfaceTexture(bufferProducer);
 }
 
-status_t MediaPlayer::getDefaultBufferingSettings(BufferingSettings* buffering /* nonnull */)
-{
-    ALOGV("getDefaultBufferingSettings");
-
-    Mutex::Autolock _l(mLock);
-    if (mPlayer == 0) {
-        return NO_INIT;
-    }
-    return mPlayer->getDefaultBufferingSettings(buffering);
-}
-
 status_t MediaPlayer::getBufferingSettings(BufferingSettings* buffering /* nonnull */)
 {
     ALOGV("getBufferingSettings");
@@ -266,8 +255,7 @@
     if (mPlayer == 0) {
         return NO_INIT;
     }
-    *buffering = mCurrentBufferingSettings;
-    return NO_ERROR;
+    return mPlayer->getBufferingSettings(buffering);
 }
 
 status_t MediaPlayer::setBufferingSettings(const BufferingSettings& buffering)
@@ -278,11 +266,7 @@
     if (mPlayer == 0) {
         return NO_INIT;
     }
-    status_t err =  mPlayer->setBufferingSettings(buffering);
-    if (err == NO_ERROR) {
-        mCurrentBufferingSettings = buffering;
-    }
-    return err;
+    return mPlayer->setBufferingSettings(buffering);
 }
 
 // must call with lock held
@@ -608,6 +592,15 @@
     return result;
 }
 
+status_t MediaPlayer::notifyAt(int64_t mediaTimeUs)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        return mPlayer->notifyAt(mediaTimeUs);
+    }
+    return INVALID_OPERATION;
+}
+
 status_t MediaPlayer::reset_l()
 {
     mLoop = false;
@@ -625,7 +618,6 @@
         // setDataSource has to be called again to create a
         // new mediaplayer.
         mPlayer = 0;
-        mCurrentBufferingSettings = BufferingSettings();
         return ret;
     }
     clear_l();
@@ -649,8 +641,12 @@
 status_t MediaPlayer::reset()
 {
     ALOGV("reset");
+    mLockThreadId = getThreadId();
     Mutex::Autolock _l(mLock);
-    return reset_l();
+    status_t result = reset_l();
+    mLockThreadId = 0;
+
+    return result;
 }
 
 status_t MediaPlayer::setAudioStreamType(audio_stream_type_t type)
@@ -860,7 +856,7 @@
     // this will deadlock.
     //
     // The threadId hack below works around this for the case of prepare,
-    // seekTo and start within the same process.
+    // seekTo, start, and reset within the same process.
     // FIXME: Remember, this is a hack, it's not even a hack that is applied
     // consistently for all use-cases, this needs to be revisited.
     if (mLockThreadId != getThreadId()) {
@@ -944,6 +940,9 @@
         mVideoWidth = ext1;
         mVideoHeight = ext2;
         break;
+    case MEDIA_NOTIFY_TIME:
+        ALOGV("Received notify time message");
+        break;
     case MEDIA_TIMED_TEXT:
         ALOGV("Received timed text message");
         break;
@@ -1076,4 +1075,39 @@
     return status;
 }
 
+status_t MediaPlayer::setOutputDevice(audio_port_handle_t deviceId)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        ALOGV("setOutputDevice: player not init");
+        return NO_INIT;
+    }
+    return mPlayer->setOutputDevice(deviceId);
+}
+
+audio_port_handle_t MediaPlayer::getRoutedDeviceId()
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        ALOGV("getRoutedDeviceId: player not init");
+        return AUDIO_PORT_HANDLE_NONE;
+    }
+    audio_port_handle_t deviceId;
+    status_t status = mPlayer->getRoutedDeviceId(&deviceId);
+    if (status != NO_ERROR) {
+        return AUDIO_PORT_HANDLE_NONE;
+    }
+    return deviceId;
+}
+
+status_t MediaPlayer::enableAudioDeviceCallback(bool enabled)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        ALOGV("addAudioDeviceCallback: player not init");
+        return NO_INIT;
+    }
+    return mPlayer->enableAudioDeviceCallback(enabled);
+}
+
 } // namespace android
diff --git a/media/libmedia/mediaplayer2.cpp b/media/libmedia/mediaplayer2.cpp
new file mode 100644
index 0000000..5c34d4a
--- /dev/null
+++ b/media/libmedia/mediaplayer2.cpp
@@ -0,0 +1,1098 @@
+/*
+**
+** Copyright 2017, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaPlayer2Native"
+
+#include <fcntl.h>
+#include <inttypes.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <utils/Log.h>
+
+#include <binder/IServiceManager.h>
+#include <binder/IPCThreadState.h>
+
+#include <media/mediaplayer2.h>
+#include <media/AudioResamplerPublic.h>
+#include <media/AudioSystem.h>
+#include <media/AVSyncSettings.h>
+#include <media/DataSource.h>
+#include <media/MediaAnalyticsItem.h>
+#include <media/NdkWrapper.h>
+
+#include <binder/MemoryBase.h>
+
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+
+#include <system/audio.h>
+#include <system/window.h>
+
+#include "MediaPlayer2Manager.h"
+
+namespace android {
+
+using media::VolumeShaper;
+
+MediaPlayer2::MediaPlayer2()
+{
+    ALOGV("constructor");
+    mListener = NULL;
+    mCookie = NULL;
+    mStreamType = AUDIO_STREAM_MUSIC;
+    mAudioAttributesParcel = NULL;
+    mCurrentPosition = -1;
+    mCurrentSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+    mSeekPosition = -1;
+    mSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+    mCurrentState = MEDIA_PLAYER2_IDLE;
+    mPrepareSync = false;
+    mPrepareStatus = NO_ERROR;
+    mLoop = false;
+    mLeftVolume = mRightVolume = 1.0;
+    mVideoWidth = mVideoHeight = 0;
+    mLockThreadId = 0;
+    mAudioSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    AudioSystem::acquireAudioSessionId(mAudioSessionId, -1);
+    mSendLevel = 0;
+    mRetransmitEndpointValid = false;
+}
+
+MediaPlayer2::~MediaPlayer2()
+{
+    ALOGV("destructor");
+    if (mAudioAttributesParcel != NULL) {
+        delete mAudioAttributesParcel;
+        mAudioAttributesParcel = NULL;
+    }
+    AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+    disconnect();
+    IPCThreadState::self()->flushCommands();
+}
+
+void MediaPlayer2::disconnect()
+{
+    ALOGV("disconnect");
+    sp<MediaPlayer2Engine> p;
+    {
+        Mutex::Autolock _l(mLock);
+        p = mPlayer;
+        mPlayer.clear();
+    }
+
+    if (p != 0) {
+        p->disconnect();
+    }
+}
+
+// always call with lock held
+void MediaPlayer2::clear_l()
+{
+    mCurrentPosition = -1;
+    mCurrentSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+    mSeekPosition = -1;
+    mSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+    mVideoWidth = mVideoHeight = 0;
+    mRetransmitEndpointValid = false;
+}
+
+status_t MediaPlayer2::setListener(const sp<MediaPlayer2Listener>& listener)
+{
+    ALOGV("setListener");
+    Mutex::Autolock _l(mLock);
+    mListener = listener;
+    return NO_ERROR;
+}
+
+
+status_t MediaPlayer2::attachNewPlayer(const sp<MediaPlayer2Engine>& player)
+{
+    status_t err = UNKNOWN_ERROR;
+    sp<MediaPlayer2Engine> p;
+    { // scope for the lock
+        Mutex::Autolock _l(mLock);
+
+        if ( !( (mCurrentState & MEDIA_PLAYER2_IDLE) ||
+                (mCurrentState == MEDIA_PLAYER2_STATE_ERROR ) ) ) {
+            ALOGE("attachNewPlayer called in state %d", mCurrentState);
+            return INVALID_OPERATION;
+        }
+
+        clear_l();
+        p = mPlayer;
+        mPlayer = player;
+        if (player != 0) {
+            mCurrentState = MEDIA_PLAYER2_INITIALIZED;
+            err = NO_ERROR;
+        } else {
+            ALOGE("Unable to create media player");
+        }
+    }
+
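+    // Disconnect the previous player, if any, only after mLock has been released.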
+    if (p != 0) {
+        p->disconnect();
+    }
+
+    return err;
+}
+
+status_t MediaPlayer2::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url, const KeyedVector<String8, String8> *headers)
+{
+    ALOGV("setDataSource(%s)", url);
+    status_t err = BAD_VALUE;
+    if (url != NULL) {
+        sp<MediaPlayer2Engine> player(MediaPlayer2Manager::get().create(this, mAudioSessionId));
+        if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+            (NO_ERROR != player->setDataSource(httpService, url, headers))) {
+            player.clear();
+        }
+        err = attachNewPlayer(player);
+    }
+    return err;
+}
+
+status_t MediaPlayer2::setDataSource(int fd, int64_t offset, int64_t length)
+{
+    ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
+    status_t err = UNKNOWN_ERROR;
+    sp<MediaPlayer2Engine> player(MediaPlayer2Manager::get().create(this, mAudioSessionId));
+    if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+        (NO_ERROR != player->setDataSource(fd, offset, length))) {
+        player.clear();
+    }
+    err = attachNewPlayer(player);
+    return err;
+}
+
+status_t MediaPlayer2::setDataSource(const sp<DataSource> &source)
+{
+    ALOGV("setDataSource(DataSource)");
+    status_t err = UNKNOWN_ERROR;
+    sp<MediaPlayer2Engine> player(MediaPlayer2Manager::get().create(this, mAudioSessionId));
+    if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+        (NO_ERROR != player->setDataSource(source))) {
+        player.clear();
+    }
+    err = attachNewPlayer(player);
+    return err;
+}
+
+status_t MediaPlayer2::invoke(const Parcel& request, Parcel *reply)
+{
+    Mutex::Autolock _l(mLock);
+    const bool hasBeenInitialized =
+            (mCurrentState != MEDIA_PLAYER2_STATE_ERROR) &&
+            ((mCurrentState & MEDIA_PLAYER2_IDLE) != MEDIA_PLAYER2_IDLE);
+    if ((mPlayer != NULL) && hasBeenInitialized) {
+        ALOGV("invoke %zu", request.dataSize());
+        return mPlayer->invoke(request, reply);
+    }
+    ALOGE("invoke failed: wrong state %X, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::setMetadataFilter(const Parcel& filter)
+{
+    ALOGD("setMetadataFilter");
+    Mutex::Autolock lock(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+    return mPlayer->setMetadataFilter(filter);
+}
+
+status_t MediaPlayer2::getMetadata(bool update_only, bool apply_filter, Parcel *metadata)
+{
+    ALOGD("getMetadata");
+    Mutex::Autolock lock(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+    return mPlayer->getMetadata(update_only, apply_filter, metadata);
+}
+
+status_t MediaPlayer2::setVideoSurfaceTexture(const sp<ANativeWindowWrapper>& nww)
+{
+    ALOGV("setVideoSurfaceTexture");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return NO_INIT;
+    return mPlayer->setVideoSurfaceTexture(nww);
+}
+
+status_t MediaPlayer2::getBufferingSettings(BufferingSettings* buffering /* nonnull */)
+{
+    ALOGV("getBufferingSettings");
+
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) {
+        return NO_INIT;
+    }
+    return mPlayer->getBufferingSettings(buffering);
+}
+
+status_t MediaPlayer2::setBufferingSettings(const BufferingSettings& buffering)
+{
+    ALOGV("setBufferingSettings");
+
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) {
+        return NO_INIT;
+    }
+    return mPlayer->setBufferingSettings(buffering);
+}
+
+// must call with lock held
+status_t MediaPlayer2::prepareAsync_l()
+{
+    if ( (mPlayer != 0) && ( mCurrentState & (MEDIA_PLAYER2_INITIALIZED | MEDIA_PLAYER2_STOPPED) ) ) {
+        if (mAudioAttributesParcel != NULL) {
+            mPlayer->setParameter(MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES, *mAudioAttributesParcel);
+        } else {
+            mPlayer->setAudioStreamType(mStreamType);
+        }
+        mCurrentState = MEDIA_PLAYER2_PREPARING;
+        return mPlayer->prepareAsync();
+    }
+    ALOGE("prepareAsync called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
+// TODO: In case of error, prepareAsync provides the caller with 2 error codes,
+// one defined in the Android framework and one provided by the implementation
+// that generated the error. The sync version of prepare returns only 1 error
+// code.
+status_t MediaPlayer2::prepare()
+{
+    ALOGV("prepare");
+    Mutex::Autolock _l(mLock);
+    mLockThreadId = getThreadId();
+    if (mPrepareSync) {
+        mLockThreadId = 0;
+        return -EALREADY;
+    }
+    mPrepareSync = true;
+    status_t ret = prepareAsync_l();
+    if (ret != NO_ERROR) {
+        mLockThreadId = 0;
+        return ret;
+    }
+
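+    // Block until notify() delivers MEDIA2_PREPARED or MEDIA2_ERROR; notify() clears
+    // mPrepareSync, records mPrepareStatus and signals mSignal.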
+    if (mPrepareSync) {
+        mSignal.wait(mLock);  // wait for prepare done
+        mPrepareSync = false;
+    }
+    ALOGV("prepare complete - status=%d", mPrepareStatus);
+    mLockThreadId = 0;
+    return mPrepareStatus;
+}
+
+status_t MediaPlayer2::prepareAsync()
+{
+    ALOGV("prepareAsync");
+    Mutex::Autolock _l(mLock);
+    return prepareAsync_l();
+}
+
+status_t MediaPlayer2::start()
+{
+    ALOGV("start");
+
+    status_t ret = NO_ERROR;
+    Mutex::Autolock _l(mLock);
+
+    mLockThreadId = getThreadId();
+
+    if (mCurrentState & MEDIA_PLAYER2_STARTED) {
+        ret = NO_ERROR;
+    } else if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER2_PREPARED |
+                    MEDIA_PLAYER2_PLAYBACK_COMPLETE | MEDIA_PLAYER2_PAUSED ) ) ) {
+        mPlayer->setLooping(mLoop);
+        mPlayer->setVolume(mLeftVolume, mRightVolume);
+        mPlayer->setAuxEffectSendLevel(mSendLevel);
+        mCurrentState = MEDIA_PLAYER2_STARTED;
+        ret = mPlayer->start();
+        if (ret != NO_ERROR) {
+            mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        } else {
+            if (mCurrentState == MEDIA_PLAYER2_PLAYBACK_COMPLETE) {
+                ALOGV("playback completed immediately following start()");
+            }
+        }
+    } else {
+        ALOGE("start called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+        ret = INVALID_OPERATION;
+    }
+
+    mLockThreadId = 0;
+
+    return ret;
+}
+
+status_t MediaPlayer2::stop()
+{
+    ALOGV("stop");
+    Mutex::Autolock _l(mLock);
+    if (mCurrentState & MEDIA_PLAYER2_STOPPED) return NO_ERROR;
+    if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER2_STARTED | MEDIA_PLAYER2_PREPARED |
+                    MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_PLAYBACK_COMPLETE ) ) ) {
+        status_t ret = mPlayer->stop();
+        if (ret != NO_ERROR) {
+            mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        } else {
+            mCurrentState = MEDIA_PLAYER2_STOPPED;
+        }
+        return ret;
+    }
+    ALOGE("stop called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::pause()
+{
+    ALOGV("pause");
+    Mutex::Autolock _l(mLock);
+    if (mCurrentState & (MEDIA_PLAYER2_PAUSED|MEDIA_PLAYER2_PLAYBACK_COMPLETE))
+        return NO_ERROR;
+    if ((mPlayer != 0) && (mCurrentState & MEDIA_PLAYER2_STARTED)) {
+        status_t ret = mPlayer->pause();
+        if (ret != NO_ERROR) {
+            mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        } else {
+            mCurrentState = MEDIA_PLAYER2_PAUSED;
+        }
+        return ret;
+    }
+    ALOGE("pause called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+    return INVALID_OPERATION;
+}
+
+bool MediaPlayer2::isPlaying()
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        bool temp = false;
+        mPlayer->isPlaying(&temp);
+        ALOGV("isPlaying: %d", temp);
+        if ((mCurrentState & MEDIA_PLAYER2_STARTED) && ! temp) {
+            ALOGE("internal/external state mismatch corrected");
+            mCurrentState = MEDIA_PLAYER2_PAUSED;
+        } else if ((mCurrentState & MEDIA_PLAYER2_PAUSED) && temp) {
+            ALOGE("internal/external state mismatch corrected");
+            mCurrentState = MEDIA_PLAYER2_STARTED;
+        }
+        return temp;
+    }
+    ALOGV("isPlaying: no active player");
+    return false;
+}
+
+status_t MediaPlayer2::setPlaybackSettings(const AudioPlaybackRate& rate)
+{
+    ALOGV("setPlaybackSettings: %f %f %d %d",
+            rate.mSpeed, rate.mPitch, rate.mFallbackMode, rate.mStretchMode);
+    // Negative speed and pitch do not make sense. Further validation will
+    // be done by the respective mediaplayers.
+    if (rate.mSpeed < 0.f || rate.mPitch < 0.f) {
+        return BAD_VALUE;
+    }
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0 || (mCurrentState & MEDIA_PLAYER2_STOPPED)) {
+        return INVALID_OPERATION;
+    }
+
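+    // A nonzero speed on a prepared/paused/playback-complete player effectively starts
+    // playback, so push the cached looping, volume and aux-send settings first.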
+    if (rate.mSpeed != 0.f && !(mCurrentState & MEDIA_PLAYER2_STARTED)
+            && (mCurrentState & (MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_PAUSED
+                    | MEDIA_PLAYER2_PLAYBACK_COMPLETE))) {
+        mPlayer->setLooping(mLoop);
+        mPlayer->setVolume(mLeftVolume, mRightVolume);
+        mPlayer->setAuxEffectSendLevel(mSendLevel);
+    }
+
+    status_t err = mPlayer->setPlaybackSettings(rate);
+    if (err == OK) {
+        if (rate.mSpeed == 0.f && mCurrentState == MEDIA_PLAYER2_STARTED) {
+            mCurrentState = MEDIA_PLAYER2_PAUSED;
+        } else if (rate.mSpeed != 0.f
+                && (mCurrentState & (MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_PAUSED
+                    | MEDIA_PLAYER2_PLAYBACK_COMPLETE))) {
+            mCurrentState = MEDIA_PLAYER2_STARTED;
+        }
+    }
+    return err;
+}
+
+status_t MediaPlayer2::getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    return mPlayer->getPlaybackSettings(rate);
+}
+
+status_t MediaPlayer2::setSyncSettings(const AVSyncSettings& sync, float videoFpsHint)
+{
+    ALOGV("setSyncSettings: %u %u %f %f",
+            sync.mSource, sync.mAudioAdjustMode, sync.mTolerance, videoFpsHint);
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    return mPlayer->setSyncSettings(sync, videoFpsHint);
+}
+
+status_t MediaPlayer2::getSyncSettings(
+        AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    return mPlayer->getSyncSettings(sync, videoFps);
+}
+
+status_t MediaPlayer2::getVideoWidth(int *w)
+{
+    ALOGV("getVideoWidth");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    *w = mVideoWidth;
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2::getVideoHeight(int *h)
+{
+    ALOGV("getVideoHeight");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0) return INVALID_OPERATION;
+    *h = mVideoHeight;
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2::getCurrentPosition(int *msec)
+{
+    ALOGV("getCurrentPosition");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        if (mCurrentPosition >= 0) {
+            ALOGV("Using cached seek position: %d", mCurrentPosition);
+            *msec = mCurrentPosition;
+            return NO_ERROR;
+        }
+        return mPlayer->getCurrentPosition(msec);
+    }
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::getDuration_l(int *msec)
+{
+    ALOGV("getDuration_l");
+    bool isValidState = (mCurrentState & (MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_STARTED |
+            MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_STOPPED | MEDIA_PLAYER2_PLAYBACK_COMPLETE));
+    if (mPlayer != 0 && isValidState) {
+        int durationMs;
+        status_t ret = mPlayer->getDuration(&durationMs);
+
+        if (ret != OK) {
+            // Do not enter error state just because no duration was available.
+            durationMs = -1;
+            ret = OK;
+        }
+
+        if (msec) {
+            *msec = durationMs;
+        }
+        return ret;
+    }
+    ALOGE("Attempt to call getDuration in wrong state: mPlayer=%p, mCurrentState=%u",
+            mPlayer.get(), mCurrentState);
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::getDuration(int *msec)
+{
+    Mutex::Autolock _l(mLock);
+    return getDuration_l(msec);
+}
+
+status_t MediaPlayer2::seekTo_l(int msec, MediaPlayer2SeekMode mode)
+{
+    ALOGV("seekTo (%d, %d)", msec, mode);
+    if ((mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER2_STARTED | MEDIA_PLAYER2_PREPARED |
+            MEDIA_PLAYER2_PAUSED |  MEDIA_PLAYER2_PLAYBACK_COMPLETE) ) ) {
+        if ( msec < 0 ) {
+            ALOGW("Attempt to seek to invalid position: %d", msec);
+            msec = 0;
+        }
+
+        int durationMs;
+        status_t err = mPlayer->getDuration(&durationMs);
+
+        if (err != OK) {
+            ALOGW("Stream has no duration and is therefore not seekable.");
+            return err;
+        }
+
+        if (msec > durationMs) {
+            ALOGW("Attempt to seek to past end of file: request = %d, "
+                  "durationMs = %d",
+                  msec,
+                  durationMs);
+
+            msec = durationMs;
+        }
+
+        // Cache the requested position and mode; if a seek is already in flight, this
+        // request is picked up when MEDIA2_SEEK_COMPLETE arrives (see notify()).
+        mCurrentPosition = msec;
+        mCurrentSeekMode = mode;
+        if (mSeekPosition < 0) {
+            mSeekPosition = msec;
+            mSeekMode = mode;
+            return mPlayer->seekTo(msec, mode);
+        }
+        else {
+            ALOGV("Seek in progress - queue up seekTo[%d, %d]", msec, mode);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Attempt to perform seekTo in wrong state: mPlayer=%p, mCurrentState=%u", mPlayer.get(),
+            mCurrentState);
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::seekTo(int msec, MediaPlayer2SeekMode mode)
+{
+    mLockThreadId = getThreadId();
+    Mutex::Autolock _l(mLock);
+    status_t result = seekTo_l(msec, mode);
+    mLockThreadId = 0;
+
+    return result;
+}
+
+status_t MediaPlayer2::notifyAt(int64_t mediaTimeUs)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        return mPlayer->notifyAt(mediaTimeUs);
+    }
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::reset_l()
+{
+    mLoop = false;
+    if (mCurrentState == MEDIA_PLAYER2_IDLE) return NO_ERROR;
+    mPrepareSync = false;
+    if (mPlayer != 0) {
+        status_t ret = mPlayer->reset();
+        if (ret != NO_ERROR) {
+            ALOGE("reset() failed with return code (%d)", ret);
+            mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        } else {
+            mPlayer->disconnect();
+            mCurrentState = MEDIA_PLAYER2_IDLE;
+        }
+        // setDataSource has to be called again to create a
+        // new mediaplayer.
+        mPlayer = 0;
+        return ret;
+    }
+    clear_l();
+    return NO_ERROR;
+}
+
+status_t MediaPlayer2::doSetRetransmitEndpoint(const sp<MediaPlayer2Engine>& player) {
+    Mutex::Autolock _l(mLock);
+
+    if (player == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    if (mRetransmitEndpointValid) {
+        return player->setRetransmitEndpoint(&mRetransmitEndpoint);
+    }
+
+    return OK;
+}
+
+status_t MediaPlayer2::reset()
+{
+    ALOGV("reset");
+    mLockThreadId = getThreadId();
+    Mutex::Autolock _l(mLock);
+    status_t result = reset_l();
+    mLockThreadId = 0;
+
+    return result;
+}
+
+status_t MediaPlayer2::setAudioStreamType(audio_stream_type_t type)
+{
+    ALOGV("MediaPlayer2::setAudioStreamType");
+    Mutex::Autolock _l(mLock);
+    if (mStreamType == type) return NO_ERROR;
+    if (mCurrentState & ( MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_STARTED |
+                MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_PLAYBACK_COMPLETE ) ) {
+        // Can't change the stream type after prepare
+        ALOGE("setAudioStream called in state %d", mCurrentState);
+        return INVALID_OPERATION;
+    }
+    // cache
+    mStreamType = type;
+    return OK;
+}
+
+status_t MediaPlayer2::getAudioStreamType(audio_stream_type_t *type)
+{
+    ALOGV("getAudioStreamType");
+    Mutex::Autolock _l(mLock);
+    *type = mStreamType;
+    return OK;
+}
+
+status_t MediaPlayer2::setLooping(int loop)
+{
+    ALOGV("MediaPlayer2::setLooping");
+    Mutex::Autolock _l(mLock);
+    mLoop = (loop != 0);
+    if (mPlayer != 0) {
+        return mPlayer->setLooping(loop);
+    }
+    return OK;
+}
+
+bool MediaPlayer2::isLooping() {
+    ALOGV("isLooping");
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        return mLoop;
+    }
+    ALOGV("isLooping: no active player");
+    return false;
+}
+
+status_t MediaPlayer2::setVolume(float leftVolume, float rightVolume)
+{
+    ALOGV("MediaPlayer2::setVolume(%f, %f)", leftVolume, rightVolume);
+    Mutex::Autolock _l(mLock);
+    mLeftVolume = leftVolume;
+    mRightVolume = rightVolume;
+    if (mPlayer != 0) {
+        return mPlayer->setVolume(leftVolume, rightVolume);
+    }
+    return OK;
+}
+
+status_t MediaPlayer2::setAudioSessionId(audio_session_t sessionId)
+{
+    ALOGV("MediaPlayer2::setAudioSessionId(%d)", sessionId);
+    Mutex::Autolock _l(mLock);
+    if (!(mCurrentState & MEDIA_PLAYER2_IDLE)) {
+        ALOGE("setAudioSessionId called in state %d", mCurrentState);
+        return INVALID_OPERATION;
+    }
+    if (sessionId < 0) {
+        return BAD_VALUE;
+    }
+    if (sessionId != mAudioSessionId) {
+        AudioSystem::acquireAudioSessionId(sessionId, -1);
+        AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+        mAudioSessionId = sessionId;
+    }
+    return NO_ERROR;
+}
+
+audio_session_t MediaPlayer2::getAudioSessionId()
+{
+    Mutex::Autolock _l(mLock);
+    return mAudioSessionId;
+}
+
+status_t MediaPlayer2::setAuxEffectSendLevel(float level)
+{
+    ALOGV("MediaPlayer2::setAuxEffectSendLevel(%f)", level);
+    Mutex::Autolock _l(mLock);
+    mSendLevel = level;
+    if (mPlayer != 0) {
+        return mPlayer->setAuxEffectSendLevel(level);
+    }
+    return OK;
+}
+
+status_t MediaPlayer2::attachAuxEffect(int effectId)
+{
+    ALOGV("MediaPlayer2::attachAuxEffect(%d)", effectId);
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == 0 ||
+        (mCurrentState & MEDIA_PLAYER2_IDLE) ||
+        (mCurrentState == MEDIA_PLAYER2_STATE_ERROR )) {
+        ALOGE("attachAuxEffect called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
+        return INVALID_OPERATION;
+    }
+
+    return mPlayer->attachAuxEffect(effectId);
+}
+
+// always call with lock held
+status_t MediaPlayer2::checkStateForKeySet_l(int key)
+{
+    switch(key) {
+    case MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES:
+        if (mCurrentState & ( MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_STARTED |
+                MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_PLAYBACK_COMPLETE) ) {
+            // Can't change the audio attributes after prepare
+            ALOGE("trying to set audio attributes called in state %d", mCurrentState);
+            return INVALID_OPERATION;
+        }
+        break;
+    default:
+        // parameter doesn't require player state check
+        break;
+    }
+    return OK;
+}
+
+status_t MediaPlayer2::setParameter(int key, const Parcel& request)
+{
+    ALOGV("MediaPlayer2::setParameter(%d)", key);
+    status_t status = INVALID_OPERATION;
+    Mutex::Autolock _l(mLock);
+    if (checkStateForKeySet_l(key) != OK) {
+        return status;
+    }
+    switch (key) {
+    case MEDIA2_KEY_PARAMETER_AUDIO_ATTRIBUTES:
+        // save the marshalled audio attributes
+        if (mAudioAttributesParcel != NULL) { delete mAudioAttributesParcel; }
+        mAudioAttributesParcel = new Parcel();
+        mAudioAttributesParcel->appendFrom(&request, 0, request.dataSize());
+        status = OK;
+        break;
+    default:
+        ALOGV_IF(mPlayer == NULL, "setParameter: no active player");
+        break;
+    }
+
+    if (mPlayer != NULL) {
+        status = mPlayer->setParameter(key, request);
+    }
+    return status;
+}
+
+status_t MediaPlayer2::getParameter(int key, Parcel *reply)
+{
+    ALOGV("MediaPlayer2::getParameter(%d)", key);
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != NULL) {
+        status_t status = mPlayer->getParameter(key, reply);
+        if (status != OK) {
+            ALOGD("getParameter returns %d", status);
+        }
+        return status;
+    }
+    ALOGV("getParameter: no active player");
+    return INVALID_OPERATION;
+}
+
+status_t MediaPlayer2::setRetransmitEndpoint(const char* addrString,
+                                            uint16_t port) {
+    ALOGV("MediaPlayer2::setRetransmitEndpoint(%s:%hu)",
+            addrString ? addrString : "(null)", port);
+
+    Mutex::Autolock _l(mLock);
+    if ((mPlayer != NULL) || (mCurrentState != MEDIA_PLAYER2_IDLE))
+        return INVALID_OPERATION;
+
+    if (NULL == addrString) {
+        mRetransmitEndpointValid = false;
+        return OK;
+    }
+
+    struct in_addr saddr;
+    if(!inet_aton(addrString, &saddr)) {
+        return BAD_VALUE;
+    }
+
+    memset(&mRetransmitEndpoint, 0, sizeof(mRetransmitEndpoint));
+    mRetransmitEndpoint.sin_family = AF_INET;
+    mRetransmitEndpoint.sin_addr   = saddr;
+    mRetransmitEndpoint.sin_port   = htons(port);
+    mRetransmitEndpointValid       = true;
+
+    return OK;
+}
+
+void MediaPlayer2::notify(int msg, int ext1, int ext2, const Parcel *obj)
+{
+    ALOGV("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2);
+    bool send = true;
+    bool locked = false;
+
+    // TODO: In the future, we might be on the same thread if the app is
+    // running in the same process as the media server. In that case,
+    // this will deadlock.
+    //
+    // The threadId hack below works around this for the case of prepare,
+    // seekTo, start, and reset within the same process.
+    // FIXME: Remember, this is a hack, it's not even a hack that is applied
+    // consistently for all use-cases, this needs to be revisited.
+    if (mLockThreadId != getThreadId()) {
+        mLock.lock();
+        locked = true;
+    }
+
+    // Allows calls from JNI in idle state to notify errors
+    if (!(msg == MEDIA2_ERROR && mCurrentState == MEDIA_PLAYER2_IDLE) && mPlayer == 0) {
+        ALOGV("notify(%d, %d, %d) callback on disconnected mediaplayer", msg, ext1, ext2);
+        if (locked) mLock.unlock();   // release the lock when done.
+        return;
+    }
+
+    switch (msg) {
+    case MEDIA2_NOP: // interface test message
+        break;
+    case MEDIA2_PREPARED:
+        ALOGV("MediaPlayer2::notify() prepared");
+        mCurrentState = MEDIA_PLAYER2_PREPARED;
+        if (mPrepareSync) {
+            ALOGV("signal application thread");
+            mPrepareSync = false;
+            mPrepareStatus = NO_ERROR;
+            mSignal.signal();
+        }
+        break;
+    case MEDIA2_DRM_INFO:
+        ALOGV("MediaPlayer2::notify() MEDIA2_DRM_INFO(%d, %d, %d, %p)", msg, ext1, ext2, obj);
+        break;
+    case MEDIA2_PLAYBACK_COMPLETE:
+        ALOGV("playback complete");
+        if (mCurrentState == MEDIA_PLAYER2_IDLE) {
+            ALOGE("playback complete in idle state");
+        }
+        if (!mLoop) {
+            mCurrentState = MEDIA_PLAYER2_PLAYBACK_COMPLETE;
+        }
+        break;
+    case MEDIA2_ERROR:
+        // Always log errors.
+        // ext1: Media framework error code.
+        // ext2: Implementation dependent error code.
+        ALOGE("error (%d, %d)", ext1, ext2);
+        mCurrentState = MEDIA_PLAYER2_STATE_ERROR;
+        if (mPrepareSync)
+        {
+            ALOGV("signal application thread");
+            mPrepareSync = false;
+            mPrepareStatus = ext1;
+            mSignal.signal();
+            send = false;
+        }
+        break;
+    case MEDIA2_INFO:
+        // ext1: Media framework error code.
+        // ext2: Implementation dependent error code.
+        if (ext1 != MEDIA2_INFO_VIDEO_TRACK_LAGGING) {
+            ALOGW("info/warning (%d, %d)", ext1, ext2);
+        }
+        break;
+    case MEDIA2_SEEK_COMPLETE:
+        ALOGV("Received seek complete");
+        if (mSeekPosition != mCurrentPosition || (mSeekMode != mCurrentSeekMode)) {
+            ALOGV("Executing queued seekTo(%d, %d)", mCurrentPosition, mCurrentSeekMode);
+            mSeekPosition = -1;
+            mSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+            seekTo_l(mCurrentPosition, mCurrentSeekMode);
+        }
+        else {
+            ALOGV("All seeks complete - return to regularly scheduled program");
+            mCurrentPosition = mSeekPosition = -1;
+            mCurrentSeekMode = mSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
+        }
+        break;
+    case MEDIA2_BUFFERING_UPDATE:
+        ALOGV("buffering %d", ext1);
+        break;
+    case MEDIA2_SET_VIDEO_SIZE:
+        ALOGV("New video size %d x %d", ext1, ext2);
+        mVideoWidth = ext1;
+        mVideoHeight = ext2;
+        break;
+    case MEDIA2_NOTIFY_TIME:
+        ALOGV("Received notify time message");
+        break;
+    case MEDIA2_TIMED_TEXT:
+        ALOGV("Received timed text message");
+        break;
+    case MEDIA2_SUBTITLE_DATA:
+        ALOGV("Received subtitle data message");
+        break;
+    case MEDIA2_META_DATA:
+        ALOGV("Received timed metadata message");
+        break;
+    default:
+        ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
+        break;
+    }
+
+    sp<MediaPlayer2Listener> listener = mListener;
+    if (locked) mLock.unlock();
+
+    // this prevents re-entrant calls into client code
+    if ((listener != 0) && send) {
+        Mutex::Autolock _l(mNotifyLock);
+        ALOGV("callback application");
+        listener->notify(msg, ext1, ext2, obj);
+        ALOGV("back from callback");
+    }
+}
+
+status_t MediaPlayer2::setNextMediaPlayer(const sp<MediaPlayer2>& next) {
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+
+    if (next != NULL && !(next->mCurrentState &
+            (MEDIA_PLAYER2_PREPARED | MEDIA_PLAYER2_PAUSED | MEDIA_PLAYER2_PLAYBACK_COMPLETE))) {
+        ALOGE("next player is not prepared");
+        return INVALID_OPERATION;
+    }
+
+    return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer);
+}
+
+VolumeShaper::Status MediaPlayer2::applyVolumeShaper(
+        const sp<VolumeShaper::Configuration>& configuration,
+        const sp<VolumeShaper::Operation>& operation)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == nullptr) {
+        return VolumeShaper::Status(NO_INIT);
+    }
+    VolumeShaper::Status status = mPlayer->applyVolumeShaper(configuration, operation);
+    return status;
+}
+
+sp<VolumeShaper::State> MediaPlayer2::getVolumeShaperState(int id)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == nullptr) {
+        return nullptr;
+    }
+    return mPlayer->getVolumeShaperState(id);
+}
+
+// Modular DRM
+status_t MediaPlayer2::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t>& drmSessionId)
+{
+    // TODO change to ALOGV
+    ALOGD("prepareDrm: uuid: %p  drmSessionId: %p(%zu)", uuid,
+            drmSessionId.array(), drmSessionId.size());
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+
+    // Only allowed in the player's preparing/prepared state.
+    // We get here only if MEDIA2_DRM_INFO has already arrived (e.g., prepare is half-way through or
+    // completed) so the state change to "prepared" might not have happened yet (e.g., buffering).
+    // Still, we can allow prepareDrm for the use case of being called in OnDrmInfoListener.
+    if (!(mCurrentState & (MEDIA_PLAYER2_PREPARING | MEDIA_PLAYER2_PREPARED))) {
+        ALOGE("prepareDrm is called in the wrong state (%d).", mCurrentState);
+        return INVALID_OPERATION;
+    }
+
+    if (drmSessionId.isEmpty()) {
+        ALOGE("prepareDrm: Unexpected. Can't proceed with crypto. Empty drmSessionId.");
+        return INVALID_OPERATION;
+    }
+
+    // Passing down to mediaserver mainly for creating the crypto
+    status_t status = mPlayer->prepareDrm(uuid, drmSessionId);
+    ALOGE_IF(status != OK, "prepareDrm: Failed at mediaserver with ret: %d", status);
+
+    // TODO change to ALOGV
+    ALOGD("prepareDrm: mediaserver::prepareDrm ret=%d", status);
+
+    return status;
+}
+
+status_t MediaPlayer2::releaseDrm()
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        return NO_INIT;
+    }
+
+    // Not allowing releaseDrm in an active/resumable state
+    if (mCurrentState & (MEDIA_PLAYER2_STARTED |
+                         MEDIA_PLAYER2_PAUSED |
+                         MEDIA_PLAYER2_PLAYBACK_COMPLETE |
+                         MEDIA_PLAYER2_STATE_ERROR)) {
+        ALOGE("releaseDrm Unexpected state %d. Can only be called in stopped/idle.", mCurrentState);
+        return INVALID_OPERATION;
+    }
+
+    status_t status = mPlayer->releaseDrm();
+    // TODO change to ALOGV
+    ALOGD("releaseDrm: mediaserver::releaseDrm ret: %d", status);
+    if (status != OK) {
+        ALOGE("releaseDrm: Failed at mediaserver with ret: %d", status);
+        // Overriding to OK so the client can proceed with its own cleanup.
+        // The client can't do more cleanup; mediaserver releases its crypto at the end of the session anyway.
+        status = OK;
+    }
+
+    return status;
+}
+
+status_t MediaPlayer2::setOutputDevice(audio_port_handle_t deviceId)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        ALOGV("setOutputDevice: player not init");
+        return NO_INIT;
+    }
+    return mPlayer->setOutputDevice(deviceId);
+}
+
+audio_port_handle_t MediaPlayer2::getRoutedDeviceId()
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        ALOGV("getRoutedDeviceId: player not init");
+        return AUDIO_PORT_HANDLE_NONE;
+    }
+    audio_port_handle_t deviceId;
+    status_t status = mPlayer->getRoutedDeviceId(&deviceId);
+    if (status != NO_ERROR) {
+        return AUDIO_PORT_HANDLE_NONE;
+    }
+    return deviceId;
+}
+
+status_t MediaPlayer2::enableAudioDeviceCallback(bool enabled)
+{
+    Mutex::Autolock _l(mLock);
+    if (mPlayer == NULL) {
+        ALOGV("addAudioDeviceCallback: player not init");
+        return NO_INIT;
+    }
+    return mPlayer->enableAudioDeviceCallback(enabled);
+}
+
+} // namespace android
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 4405930..aab845b 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -792,4 +792,41 @@
     notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_ERROR_SERVER_DIED, 0);
 }
 
+status_t MediaRecorder::setInputDevice(audio_port_handle_t deviceId)
+{
+    ALOGV("setInputDevice");
+
+    if (mMediaRecorder == NULL) {
+        ALOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    return mMediaRecorder->setInputDevice(deviceId);
+}
+
+status_t MediaRecorder::getRoutedDeviceId(audio_port_handle_t* deviceId)
+{
+    ALOGV("getRoutedDeviceId");
+
+    if (mMediaRecorder == NULL) {
+        ALOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    status_t status = mMediaRecorder->getRoutedDeviceId(deviceId);
+    if (status != NO_ERROR) {
+        *deviceId = AUDIO_PORT_HANDLE_NONE;
+    }
+    return status;
+}
+
+status_t MediaRecorder::enableAudioDeviceCallback(bool enabled)
+{
+    ALOGV("enableAudioDeviceCallback");
+
+    if (mMediaRecorder == NULL) {
+        ALOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    return mMediaRecorder->enableAudioDeviceCallback(enabled);
+}
+
 } // namespace android
diff --git a/media/libmedia/nuplayer2/Android.bp b/media/libmedia/nuplayer2/Android.bp
new file mode 100644
index 0000000..d609ba0
--- /dev/null
+++ b/media/libmedia/nuplayer2/Android.bp
@@ -0,0 +1,68 @@
+cc_library_static {
+
+    srcs: [
+        "JWakeLock.cpp",
+        "GenericSource.cpp",
+        "HTTPLiveSource.cpp",
+        "NuPlayer2.cpp",
+        "NuPlayer2CCDecoder.cpp",
+        "NuPlayer2Decoder.cpp",
+        "NuPlayer2DecoderBase.cpp",
+        "NuPlayer2DecoderPassThrough.cpp",
+        "NuPlayer2Driver.cpp",
+        "NuPlayer2Drm.cpp",
+        "NuPlayer2Renderer.cpp",
+        "NuPlayer2StreamListener.cpp",
+        "RTSPSource.cpp",
+        "StreamingSource.cpp",
+    ],
+
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libmedia/include",
+        "frameworks/av/media/libstagefright",
+        "frameworks/av/media/libstagefright/httplive",
+        "frameworks/av/media/libstagefright/include",
+        "frameworks/av/media/libstagefright/mpeg2ts",
+        "frameworks/av/media/libstagefright/rtsp",
+        "frameworks/av/media/libstagefright/timedtext",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    product_variables: {
+        debuggable: {
+            cflags: [
+                "-DENABLE_STAGEFRIGHT_EXPERIMENTS",
+            ],
+        }
+    },
+
+    shared_libs: [
+        "libbinder",
+        "libui",
+        "libgui",
+        "libmedia",
+        "libmediadrm",
+        "libmediandk",
+        "libpowermanager",
+    ],
+
+    name: "libstagefright_nuplayer2",
+
+    tags: ["eng"],
+
+    sanitize: {
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libmedia/nuplayer2/GenericSource.cpp b/media/libmedia/nuplayer2/GenericSource.cpp
new file mode 100644
index 0000000..011691a
--- /dev/null
+++ b/media/libmedia/nuplayer2/GenericSource.cpp
@@ -0,0 +1,1676 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GenericSource"
+
+#include "GenericSource.h"
+#include "NuPlayer2Drm.h"
+
+#include "AnotherPacketSource.h"
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <media/DataSource.h>
+#include <media/IMediaExtractorService.h>
+#include <media/MediaHTTPService.h>
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
+#include <media/NdkWrapper.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSourceFactory.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/InterfaceUtils.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include "../../libstagefright/include/NuCachedSource2.h"
+#include "../../libstagefright/include/HTTPBase.h"
+
+namespace android {
+
+static const int kInitialMarkMs        = 5000;  // 5secs
+
+//static const int kPausePlaybackMarkMs  = 2000;  // 2secs
+static const int kResumePlaybackMarkMs = 15000;  // 15secs
+
+NuPlayer2::GenericSource::GenericSource(
+        const sp<AMessage> &notify,
+        bool uidValid,
+        uid_t uid,
+        const sp<MediaClock> &mediaClock)
+    : Source(notify),
+      mAudioTimeUs(0),
+      mAudioLastDequeueTimeUs(0),
+      mVideoTimeUs(0),
+      mVideoLastDequeueTimeUs(0),
+      mPrevBufferPercentage(-1),
+      mPollBufferingGeneration(0),
+      mSentPauseOnBuffering(false),
+      mAudioDataGeneration(0),
+      mVideoDataGeneration(0),
+      mFetchSubtitleDataGeneration(0),
+      mFetchTimedTextDataGeneration(0),
+      mDurationUs(-1ll),
+      mAudioIsVorbis(false),
+      mIsSecure(false),
+      mIsStreaming(false),
+      mUIDValid(uidValid),
+      mUID(uid),
+      mMediaClock(mediaClock),
+      mFd(-1),
+      mBitrate(-1ll),
+      mPendingReadBufferTypes(0) {
+    ALOGV("GenericSource");
+    CHECK(mediaClock != NULL);
+
+    mBufferingSettings.mInitialMarkMs = kInitialMarkMs;
+    mBufferingSettings.mResumePlaybackMarkMs = kResumePlaybackMarkMs;
+    resetDataSource();
+}
+
+void NuPlayer2::GenericSource::resetDataSource() {
+    ALOGV("resetDataSource");
+
+    mHTTPService.clear();
+    mHttpSource.clear();
+    mDisconnected = false;
+    mUri.clear();
+    mUriHeaders.clear();
+    if (mFd >= 0) {
+        close(mFd);
+        mFd = -1;
+    }
+    mOffset = 0;
+    mLength = 0;
+    mStarted = false;
+    mPreparing = false;
+
+    mIsDrmProtected = false;
+    mIsDrmReleased = false;
+    mIsSecure = false;
+    mMimes.clear();
+}
+
+status_t NuPlayer2::GenericSource::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("setDataSource url: %s", url);
+
+    resetDataSource();
+
+    mHTTPService = httpService;
+    mUri = url;
+
+    if (headers) {
+        mUriHeaders = *headers;
+    }
+
+    // delay data source creation to prepareAsync() to avoid blocking
+    // the calling thread in setDataSource for any significant time.
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::setDataSource(
+        int fd, int64_t offset, int64_t length) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("setDataSource %d/%lld/%lld", fd, (long long)offset, (long long)length);
+
+    resetDataSource();
+
+    mFd = dup(fd);
+    mOffset = offset;
+    mLength = length;
+
+    // delay data source creation to prepareAsync() to avoid blocking
+    // the calling thread in setDataSource for any significant time.
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::setDataSource(const sp<DataSource>& source) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("setDataSource (source: %p)", source.get());
+
+    resetDataSource();
+    mDataSource = source;
+    return OK;
+}
+
+sp<MetaData> NuPlayer2::GenericSource::getFileFormatMeta() const {
+    Mutex::Autolock _l(mLock);
+    return mFileMeta;
+}
+
+status_t NuPlayer2::GenericSource::initFromDataSource() {
+    sp<IMediaExtractor> extractor;
+    CHECK(mDataSource != NULL || mFd != -1);
+    sp<DataSource> dataSource = mDataSource;
+    const int fd = mFd;
+    const int64_t offset = mOffset;
+    const int64_t length = mLength;
+
+    mLock.unlock();
+    // This might take a long time if the data source is not reliable.
+    if (dataSource != nullptr) {
+        extractor = MediaExtractorFactory::Create(dataSource, NULL /* mime */);
+    } else {
+        extractor = MediaExtractorFactory::CreateFromFd(
+                fd, offset, length, NULL /* mime */, &dataSource);
+    }
+
+    if (dataSource == nullptr) {
+        ALOGE("initFromDataSource, failed to create data source!");
+        mLock.lock();
+        return UNKNOWN_ERROR;
+    }
+
+    if (extractor == NULL) {
+        ALOGE("initFromDataSource, cannot create extractor!");
+        mLock.lock();
+        return UNKNOWN_ERROR;
+    }
+
+    sp<MetaData> fileMeta = extractor->getMetaData();
+
+    size_t numtracks = extractor->countTracks();
+    if (numtracks == 0) {
+        ALOGE("initFromDataSource, source has no track!");
+        mLock.lock();
+        return UNKNOWN_ERROR;
+    }
+
+    mLock.lock();
+    mFd = -1;
+    mDataSource = dataSource;
+    mFileMeta = fileMeta;
+    if (mFileMeta != NULL) {
+        int64_t duration;
+        if (mFileMeta->findInt64(kKeyDuration, &duration)) {
+            mDurationUs = duration;
+        }
+    }
+
+    int32_t totalBitrate = 0;
+
+    mMimes.clear();
+
+    for (size_t i = 0; i < numtracks; ++i) {
+        sp<IMediaSource> track = extractor->getTrack(i);
+        if (track == NULL) {
+            continue;
+        }
+
+        sp<MetaData> meta = extractor->getTrackMetaData(i);
+        if (meta == NULL) {
+            ALOGE("no metadata for track %zu", i);
+            return UNKNOWN_ERROR;
+        }
+
+        const char *mime;
+        CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+        ALOGV("initFromDataSource track[%zu]: %s", i, mime);
+
+        // Do the string compare immediately with "mime",
+        // we can't assume "mime" would stay valid after another
+        // extractor operation, some extractors might modify meta
+        // during getTrack() and make it invalid.
+        if (!strncasecmp(mime, "audio/", 6)) {
+            if (mAudioTrack.mSource == NULL) {
+                mAudioTrack.mIndex = i;
+                mAudioTrack.mSource = track;
+                mAudioTrack.mPackets =
+                    new AnotherPacketSource(mAudioTrack.mSource->getFormat());
+
+                if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+                    mAudioIsVorbis = true;
+                } else {
+                    mAudioIsVorbis = false;
+                }
+
+                mMimes.add(String8(mime));
+            }
+        } else if (!strncasecmp(mime, "video/", 6)) {
+            if (mVideoTrack.mSource == NULL) {
+                mVideoTrack.mIndex = i;
+                mVideoTrack.mSource = track;
+                mVideoTrack.mPackets =
+                    new AnotherPacketSource(mVideoTrack.mSource->getFormat());
+
+                // video always at the beginning
+                mMimes.insertAt(String8(mime), 0);
+            }
+        }
+
+        mSources.push(track);
+        int64_t durationUs;
+        if (meta->findInt64(kKeyDuration, &durationUs)) {
+            if (durationUs > mDurationUs) {
+                mDurationUs = durationUs;
+            }
+        }
+
+        int32_t bitrate;
+        if (totalBitrate >= 0 && meta->findInt32(kKeyBitRate, &bitrate)) {
+            totalBitrate += bitrate;
+        } else {
+            totalBitrate = -1;
+        }
+    }
+
+    ALOGV("initFromDataSource mSources.size(): %zu  mIsSecure: %d  mime[0]: %s", mSources.size(),
+            mIsSecure, (mMimes.isEmpty() ? "NONE" : mMimes[0].string()));
+
+    if (mSources.size() == 0) {
+        ALOGE("b/23705695");
+        return UNKNOWN_ERROR;
+    }
+
+    // Modular DRM: The return value doesn't affect source initialization.
+    (void)checkDrmInfo();
+
+    mBitrate = totalBitrate;
+
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::getBufferingSettings(
+        BufferingSettings* buffering /* nonnull */) {
+    {
+        Mutex::Autolock _l(mLock);
+        *buffering = mBufferingSettings;
+    }
+
+    ALOGV("getBufferingSettings{%s}", buffering->toString().string());
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::setBufferingSettings(const BufferingSettings& buffering) {
+    ALOGV("setBufferingSettings{%s}", buffering.toString().string());
+
+    Mutex::Autolock _l(mLock);
+    mBufferingSettings = buffering;
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::startSources() {
+    // Start the selected A/V tracks now before we start buffering.
+    // Widevine sources might re-initialize crypto when starting; if we delayed
+    // this to start(), all data buffered during prepare would be wasted.
+    // (We don't actually start reading until start().)
+    //
+    // TODO: this logic may no longer be relevant after the removal of widevine
+    // support
+    if (mAudioTrack.mSource != NULL && mAudioTrack.mSource->start() != OK) {
+        ALOGE("failed to start audio track!");
+        return UNKNOWN_ERROR;
+    }
+
+    if (mVideoTrack.mSource != NULL && mVideoTrack.mSource->start() != OK) {
+        ALOGE("failed to start video track!");
+        return UNKNOWN_ERROR;
+    }
+
+    return OK;
+}
+
+int64_t NuPlayer2::GenericSource::getLastReadPosition() {
+    if (mAudioTrack.mSource != NULL) {
+        return mAudioTimeUs;
+    } else if (mVideoTrack.mSource != NULL) {
+        return mVideoTimeUs;
+    } else {
+        return 0;
+    }
+}
+
+bool NuPlayer2::GenericSource::isStreaming() const {
+    Mutex::Autolock _l(mLock);
+    return mIsStreaming;
+}
+
+NuPlayer2::GenericSource::~GenericSource() {
+    ALOGV("~GenericSource");
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->stop();
+    }
+    if (mDataSource != NULL) {
+        mDataSource->close();
+    }
+    resetDataSource();
+}
+
+void NuPlayer2::GenericSource::prepareAsync() {
+    Mutex::Autolock _l(mLock);
+    ALOGV("prepareAsync: (looper: %d)", (mLooper != NULL));
+
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("generic");
+        mLooper->start(false, /* runOnCallingThread */
+                       true,  /* canCallJava */
+                       PRIORITY_DEFAULT);
+
+        mLooper->registerHandler(this);
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatPrepareAsync, this);
+    msg->post();
+}
+
+void NuPlayer2::GenericSource::onPrepareAsync() {
+    ALOGV("onPrepareAsync: mDataSource: %d", (mDataSource != NULL));
+
+    // delayed data source creation
+    if (mDataSource == NULL) {
+        // Set to false first; if the extractor comes back
+        // as secure, set it to true.
+        mIsSecure = false;
+
+        if (!mUri.empty()) {
+            const char* uri = mUri.c_str();
+            String8 contentType;
+
+            if (!strncasecmp("http://", uri, 7) || !strncasecmp("https://", uri, 8)) {
+                mHttpSource = DataSourceFactory::CreateMediaHTTP(mHTTPService);
+                if (mHttpSource == NULL) {
+                    ALOGE("Failed to create http source!");
+                    notifyPreparedAndCleanup(UNKNOWN_ERROR);
+                    return;
+                }
+            }
+
+            mLock.unlock();
+            // This might take a long time if the connection has issues.
+            sp<DataSource> dataSource = DataSourceFactory::CreateFromURI(
+                   mHTTPService, uri, &mUriHeaders, &contentType,
+                   static_cast<HTTPBase *>(mHttpSource.get()));
+            mLock.lock();
+            if (!mDisconnected) {
+                mDataSource = dataSource;
+            }
+        }
+
+        if (mFd == -1 && mDataSource == NULL) {
+            ALOGE("Failed to create data source!");
+            notifyPreparedAndCleanup(UNKNOWN_ERROR);
+            return;
+        }
+    }
+
+    if (mDataSource != nullptr && mDataSource->flags() & DataSource::kIsCachingDataSource) {
+        mCachedSource = static_cast<NuCachedSource2 *>(mDataSource.get());
+    }
+
+    // For cached streaming cases, we need to wait for enough
+    // buffering before reporting prepared.
+    mIsStreaming = (mCachedSource != NULL);
+
+    // init extractor from data source
+    status_t err = initFromDataSource();
+
+    if (err != OK) {
+        ALOGE("Failed to init from data source!");
+        notifyPreparedAndCleanup(err);
+        return;
+    }
+
+    if (mVideoTrack.mSource != NULL) {
+        sp<MetaData> meta = getFormatMeta_l(false /* audio */);
+        sp<AMessage> msg = new AMessage;
+        err = convertMetaDataToMessage(meta, &msg);
+        if (err != OK) {
+            notifyPreparedAndCleanup(err);
+            return;
+        }
+        notifyVideoSizeChanged(msg);
+    }
+
+    notifyFlagsChanged(
+            // FLAG_SECURE will be known if/when prepareDrm is called by the app
+            // FLAG_PROTECTED will be known if/when prepareDrm is called by the app
+            FLAG_CAN_PAUSE |
+            FLAG_CAN_SEEK_BACKWARD |
+            FLAG_CAN_SEEK_FORWARD |
+            FLAG_CAN_SEEK);
+
+    finishPrepareAsync();
+
+    ALOGV("onPrepareAsync: Done");
+}
+
+void NuPlayer2::GenericSource::finishPrepareAsync() {
+    ALOGV("finishPrepareAsync");
+
+    status_t err = startSources();
+    if (err != OK) {
+        ALOGE("Failed to init start data source!");
+        notifyPreparedAndCleanup(err);
+        return;
+    }
+
+    if (mIsStreaming) {
+        mCachedSource->resumeFetchingIfNecessary();
+        mPreparing = true;
+        schedulePollBuffering();
+    } else {
+        notifyPrepared();
+    }
+
+    if (mAudioTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
+    }
+
+    if (mVideoTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
+    }
+}
+
+void NuPlayer2::GenericSource::notifyPreparedAndCleanup(status_t err) {
+    if (err != OK) {
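+        // On failure, drop all data sources and invalidate buffering polling so a
+        // later prepareAsync() starts from a clean state.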
+        mDataSource.clear();
+        mCachedSource.clear();
+        mHttpSource.clear();
+
+        mBitrate = -1;
+        mPrevBufferPercentage = -1;
+        ++mPollBufferingGeneration;
+    }
+    notifyPrepared(err);
+}
+
+void NuPlayer2::GenericSource::start() {
+    Mutex::Autolock _l(mLock);
+    ALOGI("start");
+
+    if (mAudioTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
+    }
+
+    if (mVideoTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
+    }
+
+    mStarted = true;
+}
+
+void NuPlayer2::GenericSource::stop() {
+    Mutex::Autolock _l(mLock);
+    mStarted = false;
+}
+
+void NuPlayer2::GenericSource::pause() {
+    Mutex::Autolock _l(mLock);
+    mStarted = false;
+}
+
+void NuPlayer2::GenericSource::resume() {
+    Mutex::Autolock _l(mLock);
+    mStarted = true;
+}
+
+void NuPlayer2::GenericSource::disconnect() {
+    sp<DataSource> dataSource, httpSource;
+    {
+        Mutex::Autolock _l(mLock);
+        dataSource = mDataSource;
+        httpSource = mHttpSource;
+        mDisconnected = true;
+    }
+
+    if (dataSource != NULL) {
+        // disconnect data source
+        if (dataSource->flags() & DataSource::kIsCachingDataSource) {
+            static_cast<NuCachedSource2 *>(dataSource.get())->disconnect();
+        }
+    } else if (httpSource != NULL) {
+        static_cast<HTTPBase *>(httpSource.get())->disconnect();
+    }
+}
+
+status_t NuPlayer2::GenericSource::feedMoreTSData() {
+    return OK;
+}
+
+void NuPlayer2::GenericSource::sendCacheStats() {
+    int32_t kbps = 0;
+    status_t err = UNKNOWN_ERROR;
+
+    if (mCachedSource != NULL) {
+        err = mCachedSource->getEstimatedBandwidthKbps(&kbps);
+    }
+
+    if (err == OK) {
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", kWhatCacheStats);
+        notify->setInt32("bandwidth", kbps);
+        notify->post();
+    }
+}
+
+void NuPlayer2::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
+    Mutex::Autolock _l(mLock);
+    switch (msg->what()) {
+      case kWhatPrepareAsync:
+      {
+          onPrepareAsync();
+          break;
+      }
+      case kWhatFetchSubtitleData:
+      {
+          fetchTextData(kWhatSendSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
+                  mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
+          break;
+      }
+
+      case kWhatFetchTimedTextData:
+      {
+          fetchTextData(kWhatSendTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
+                  mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
+          break;
+      }
+
+      case kWhatSendSubtitleData:
+      {
+          sendTextData(kWhatSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
+                  mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
+          break;
+      }
+
+      case kWhatSendGlobalTimedTextData:
+      {
+          sendGlobalTextData(kWhatTimedTextData, mFetchTimedTextDataGeneration, msg);
+          break;
+      }
+      case kWhatSendTimedTextData:
+      {
+          sendTextData(kWhatTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
+                  mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
+          break;
+      }
+
+      case kWhatChangeAVSource:
+      {
+          int32_t trackIndex;
+          CHECK(msg->findInt32("trackIndex", &trackIndex));
+          const sp<IMediaSource> source = mSources.itemAt(trackIndex);
+
+          Track* track;
+          const char *mime;
+          media_track_type trackType, counterpartType;
+          sp<MetaData> meta = source->getFormat();
+          meta->findCString(kKeyMIMEType, &mime);
+          if (!strncasecmp(mime, "audio/", 6)) {
+              track = &mAudioTrack;
+              trackType = MEDIA_TRACK_TYPE_AUDIO;
+              counterpartType = MEDIA_TRACK_TYPE_VIDEO;
+          } else {
+              CHECK(!strncasecmp(mime, "video/", 6));
+              track = &mVideoTrack;
+              trackType = MEDIA_TRACK_TYPE_VIDEO;
+              counterpartType = MEDIA_TRACK_TYPE_AUDIO;
+          }
+
+          if (track->mSource != NULL) {
+              track->mSource->stop();
+          }
+          track->mSource = source;
+          track->mSource->start();
+          track->mIndex = trackIndex;
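+          // Bump both generations so in-flight readBuffer results for the old sources are dropped.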
+          ++mAudioDataGeneration;
+          ++mVideoDataGeneration;
+
+          int64_t timeUs, actualTimeUs;
+          const bool formatChange = true;
+          if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+              timeUs = mAudioLastDequeueTimeUs;
+          } else {
+              timeUs = mVideoLastDequeueTimeUs;
+          }
+          readBuffer(trackType, timeUs, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
+                  &actualTimeUs, formatChange);
+          readBuffer(counterpartType, -1, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
+                  NULL, !formatChange);
+          ALOGV("timeUs %lld actualTimeUs %lld", (long long)timeUs, (long long)actualTimeUs);
+
+          break;
+      }
+
+      case kWhatSeek:
+      {
+          onSeek(msg);
+          break;
+      }
+
+      case kWhatReadBuffer:
+      {
+          onReadBuffer(msg);
+          break;
+      }
+
+      case kWhatPollBuffering:
+      {
+          int32_t generation;
+          CHECK(msg->findInt32("generation", &generation));
+          if (generation == mPollBufferingGeneration) {
+              onPollBuffering();
+          }
+          break;
+      }
+
+      default:
+          Source::onMessageReceived(msg);
+          break;
+    }
+}
+
+void NuPlayer2::GenericSource::fetchTextData(
+        uint32_t sendWhat,
+        media_track_type type,
+        int32_t curGen,
+        const sp<AnotherPacketSource>& packets,
+        const sp<AMessage>& msg) {
+    int32_t msgGeneration;
+    CHECK(msg->findInt32("generation", &msgGeneration));
+    if (msgGeneration != curGen) {
+        // stale
+        return;
+    }
+
+    int32_t avail;
+    if (packets->hasBufferAvailable(&avail)) {
+        return;
+    }
+
+    int64_t timeUs;
+    CHECK(msg->findInt64("timeUs", &timeUs));
+
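+    // Queue the text access unit at |timeUs|; subTimeUs receives its actual timestamp.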
+    int64_t subTimeUs = 0;
+    readBuffer(type, timeUs, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */, &subTimeUs);
+
+    status_t eosResult;
+    if (!packets->hasBufferAvailable(&eosResult)) {
+        return;
+    }
+
+    if (msg->what() == kWhatFetchSubtitleData) {
+        subTimeUs -= 1000000ll;  // send subtitle data one second earlier
+    }
+    sp<AMessage> msg2 = new AMessage(sendWhat, this);
+    msg2->setInt32("generation", msgGeneration);
+    mMediaClock->addTimer(msg2, subTimeUs);
+}
+
+void NuPlayer2::GenericSource::sendTextData(
+        uint32_t what,
+        media_track_type type,
+        int32_t curGen,
+        const sp<AnotherPacketSource>& packets,
+        const sp<AMessage>& msg) {
+    int32_t msgGeneration;
+    CHECK(msg->findInt32("generation", &msgGeneration));
+    if (msgGeneration != curGen) {
+        // stale
+        return;
+    }
+
+    int64_t subTimeUs;
+    if (packets->nextBufferTime(&subTimeUs) != OK) {
+        return;
+    }
+
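+    // Read the next text access unit now; its timestamp schedules the following send below.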
+    int64_t nextSubTimeUs;
+    readBuffer(type, -1, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */, &nextSubTimeUs);
+
+    sp<ABuffer> buffer;
+    status_t dequeueStatus = packets->dequeueAccessUnit(&buffer);
+    if (dequeueStatus == OK) {
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", what);
+        notify->setBuffer("buffer", buffer);
+        notify->post();
+
+        if (msg->what() == kWhatSendSubtitleData) {
+            nextSubTimeUs -= 1000000ll;  // send subtitle data one second earlier
+        }
+        mMediaClock->addTimer(msg, nextSubTimeUs);
+    }
+}
+
+void NuPlayer2::GenericSource::sendGlobalTextData(
+        uint32_t what,
+        int32_t curGen,
+        sp<AMessage> msg) {
+    int32_t msgGeneration;
+    CHECK(msg->findInt32("generation", &msgGeneration));
+    if (msgGeneration != curGen) {
+        // stale
+        return;
+    }
+
+    uint32_t textType;
+    const void *data;
+    size_t size = 0;
+    if (mTimedTextTrack.mSource->getFormat()->findData(
+                    kKeyTextFormatData, &textType, &data, &size)) {
+        mGlobalTimedText = new ABuffer(size);
+        if (mGlobalTimedText->data()) {
+            memcpy(mGlobalTimedText->data(), data, size);
+            sp<AMessage> globalMeta = mGlobalTimedText->meta();
+            globalMeta->setInt64("timeUs", 0);
+            globalMeta->setString("mime", MEDIA_MIMETYPE_TEXT_3GPP);
+            globalMeta->setInt32("global", 1);
+            sp<AMessage> notify = dupNotify();
+            notify->setInt32("what", what);
+            notify->setBuffer("buffer", mGlobalTimedText);
+            notify->post();
+        }
+    }
+}
+
+sp<MetaData> NuPlayer2::GenericSource::getFormatMeta(bool audio) {
+    Mutex::Autolock _l(mLock);
+    return getFormatMeta_l(audio);
+}
+
+sp<MetaData> NuPlayer2::GenericSource::getFormatMeta_l(bool audio) {
+    sp<IMediaSource> source = audio ? mAudioTrack.mSource : mVideoTrack.mSource;
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+status_t NuPlayer2::GenericSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    Mutex::Autolock _l(mLock);
+    // If we have gone through the stop/releaseDrm sequence, we no longer send down any
+    // buffers because the codec's crypto object has gone away (b/37960096).
+    // Note: This will be unnecessary when stop() changes behavior and releases the codec (b/35248283).
+    if (!mStarted && mIsDrmReleased) {
+        return -EWOULDBLOCK;
+    }
+
+    Track *track = audio ? &mAudioTrack : &mVideoTrack;
+
+    if (track->mSource == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!track->mPackets->hasBufferAvailable(&finalResult)) {
+        if (finalResult == OK) {
+            postReadBuffer(
+                    audio ? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+            return -EWOULDBLOCK;
+        }
+        return finalResult;
+    }
+
+    status_t result = track->mPackets->dequeueAccessUnit(accessUnit);
+
+    // start pulling in more buffers if cache is running low
+    // so that decoder has less chance of being starved
+    if (!mIsStreaming) {
+        if (track->mPackets->getAvailableBufferCount(&finalResult) < 2) {
+            postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+        }
+    } else {
+        int64_t durationUs = track->mPackets->getBufferedDurationUs(&finalResult);
+        // TODO: maxRebufferingMarkMs could be larger than
+        // mBufferingSettings.mResumePlaybackMarkMs
+        int64_t restartBufferingMarkUs =
+             mBufferingSettings.mResumePlaybackMarkMs * 1000ll / 2;
+        if (finalResult == OK) {
+            if (durationUs < restartBufferingMarkUs) {
+                postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+            }
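+            // Running low while streaming: resume the cache fetcher and tell the
+            // player to pause for rebuffering until enough data is queued again.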
+            if (track->mPackets->getAvailableBufferCount(&finalResult) < 2
+                && !mSentPauseOnBuffering && !mPreparing) {
+                mCachedSource->resumeFetchingIfNecessary();
+                sendCacheStats();
+                mSentPauseOnBuffering = true;
+                sp<AMessage> notify = dupNotify();
+                notify->setInt32("what", kWhatPauseOnBufferingStart);
+                notify->post();
+            }
+        }
+    }
+
+    if (result != OK) {
+        if (mSubtitleTrack.mSource != NULL) {
+            mSubtitleTrack.mPackets->clear();
+            mFetchSubtitleDataGeneration++;
+        }
+        if (mTimedTextTrack.mSource != NULL) {
+            mTimedTextTrack.mPackets->clear();
+            mFetchTimedTextDataGeneration++;
+        }
+        return result;
+    }
+
+    int64_t timeUs;
+    status_t eosResult; // ignored
+    CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
+    if (audio) {
+        mAudioLastDequeueTimeUs = timeUs;
+    } else {
+        mVideoLastDequeueTimeUs = timeUs;
+    }
+
+    if (mSubtitleTrack.mSource != NULL
+            && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
+        sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this);
+        msg->setInt64("timeUs", timeUs);
+        msg->setInt32("generation", mFetchSubtitleDataGeneration);
+        msg->post();
+    }
+
+    if (mTimedTextTrack.mSource != NULL
+            && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
+        sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this);
+        msg->setInt64("timeUs", timeUs);
+        msg->setInt32("generation", mFetchTimedTextDataGeneration);
+        msg->post();
+    }
+
+    return result;
+}
+
+status_t NuPlayer2::GenericSource::getDuration(int64_t *durationUs) {
+    Mutex::Autolock _l(mLock);
+    *durationUs = mDurationUs;
+    return OK;
+}
+
+size_t NuPlayer2::GenericSource::getTrackCount() const {
+    Mutex::Autolock _l(mLock);
+    return mSources.size();
+}
+
+sp<AMessage> NuPlayer2::GenericSource::getTrackInfo(size_t trackIndex) const {
+    Mutex::Autolock _l(mLock);
+    size_t trackCount = mSources.size();
+    if (trackIndex >= trackCount) {
+        return NULL;
+    }
+
+    sp<AMessage> format = new AMessage();
+    sp<MetaData> meta = mSources.itemAt(trackIndex)->getFormat();
+    if (meta == NULL) {
+        ALOGE("no metadata for track %zu", trackIndex);
+        return NULL;
+    }
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+    format->setString("mime", mime);
+
+    int32_t trackType;
+    if (!strncasecmp(mime, "video/", 6)) {
+        trackType = MEDIA_TRACK_TYPE_VIDEO;
+    } else if (!strncasecmp(mime, "audio/", 6)) {
+        trackType = MEDIA_TRACK_TYPE_AUDIO;
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
+        trackType = MEDIA_TRACK_TYPE_TIMEDTEXT;
+    } else {
+        trackType = MEDIA_TRACK_TYPE_UNKNOWN;
+    }
+    format->setInt32("type", trackType);
+
+    const char *lang;
+    if (!meta->findCString(kKeyMediaLanguage, &lang)) {
+        lang = "und";
+    }
+    format->setString("language", lang);
+
+    if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+        int32_t isAutoselect = 1, isDefault = 0, isForced = 0;
+        meta->findInt32(kKeyTrackIsAutoselect, &isAutoselect);
+        meta->findInt32(kKeyTrackIsDefault, &isDefault);
+        meta->findInt32(kKeyTrackIsForced, &isForced);
+
+        format->setInt32("auto", !!isAutoselect);
+        format->setInt32("default", !!isDefault);
+        format->setInt32("forced", !!isForced);
+    }
+
+    return format;
+}
+
+ssize_t NuPlayer2::GenericSource::getSelectedTrack(media_track_type type) const {
+    Mutex::Autolock _l(mLock);
+    const Track *track = NULL;
+    switch (type) {
+    case MEDIA_TRACK_TYPE_VIDEO:
+        track = &mVideoTrack;
+        break;
+    case MEDIA_TRACK_TYPE_AUDIO:
+        track = &mAudioTrack;
+        break;
+    case MEDIA_TRACK_TYPE_TIMEDTEXT:
+        track = &mTimedTextTrack;
+        break;
+    case MEDIA_TRACK_TYPE_SUBTITLE:
+        track = &mSubtitleTrack;
+        break;
+    default:
+        break;
+    }
+
+    if (track != NULL && track->mSource != NULL) {
+        return track->mIndex;
+    }
+
+    return -1;
+}
+
+status_t NuPlayer2::GenericSource::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex);
+
+    if (trackIndex >= mSources.size()) {
+        return BAD_INDEX;
+    }
+
+    if (!select) {
+        Track* track = NULL;
+        if (mSubtitleTrack.mSource != NULL && trackIndex == mSubtitleTrack.mIndex) {
+            track = &mSubtitleTrack;
+            mFetchSubtitleDataGeneration++;
+        } else if (mTimedTextTrack.mSource != NULL && trackIndex == mTimedTextTrack.mIndex) {
+            track = &mTimedTextTrack;
+            mFetchTimedTextDataGeneration++;
+        }
+        if (track == NULL) {
+            return INVALID_OPERATION;
+        }
+        track->mSource->stop();
+        track->mSource = NULL;
+        track->mPackets->clear();
+        return OK;
+    }
+
+    const sp<IMediaSource> source = mSources.itemAt(trackIndex);
+    sp<MetaData> meta = source->getFormat();
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+    if (!strncasecmp(mime, "text/", 5)) {
+        bool isSubtitle = strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP);
+        Track *track = isSubtitle ? &mSubtitleTrack : &mTimedTextTrack;
+        if (track->mSource != NULL && track->mIndex == trackIndex) {
+            return OK;
+        }
+        track->mIndex = trackIndex;
+        if (track->mSource != NULL) {
+            track->mSource->stop();
+        }
+        track->mSource = mSources.itemAt(trackIndex);
+        track->mSource->start();
+        if (track->mPackets == NULL) {
+            track->mPackets = new AnotherPacketSource(track->mSource->getFormat());
+        } else {
+            track->mPackets->clear();
+            track->mPackets->setFormat(track->mSource->getFormat());
+        }
+
+        if (isSubtitle) {
+            mFetchSubtitleDataGeneration++;
+        } else {
+            mFetchTimedTextDataGeneration++;
+        }
+
+        status_t eosResult; // ignored
+        if (mSubtitleTrack.mSource != NULL
+                && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
+            sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this);
+            msg->setInt64("timeUs", timeUs);
+            msg->setInt32("generation", mFetchSubtitleDataGeneration);
+            msg->post();
+        }
+
+        sp<AMessage> msg2 = new AMessage(kWhatSendGlobalTimedTextData, this);
+        msg2->setInt32("generation", mFetchTimedTextDataGeneration);
+        msg2->post();
+
+        if (mTimedTextTrack.mSource != NULL
+                && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
+            sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this);
+            msg->setInt64("timeUs", timeUs);
+            msg->setInt32("generation", mFetchTimedTextDataGeneration);
+            msg->post();
+        }
+
+        return OK;
+    } else if (!strncasecmp(mime, "audio/", 6) || !strncasecmp(mime, "video/", 6)) {
+        bool audio = !strncasecmp(mime, "audio/", 6);
+        Track *track = audio ? &mAudioTrack : &mVideoTrack;
+        if (track->mSource != NULL && track->mIndex == trackIndex) {
+            return OK;
+        }
+
+        sp<AMessage> msg = new AMessage(kWhatChangeAVSource, this);
+        msg->setInt32("trackIndex", trackIndex);
+        msg->post();
+        return OK;
+    }
+
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayer2::GenericSource::seekTo(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    ALOGV("seekTo: %lld, %d", (long long)seekTimeUs, mode);
+    sp<AMessage> msg = new AMessage(kWhatSeek, this);
+    msg->setInt64("seekTimeUs", seekTimeUs);
+    msg->setInt32("mode", mode);
+
+    // Need to call readBuffer on |mLooper| to ensure the calls to
+    // IMediaSource::read* are serialized. Note that IMediaSource::read*
+    // is called without |mLock| acquired and MediaSource is not thread safe.
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+
+    return err;
+}
+
+void NuPlayer2::GenericSource::onSeek(const sp<AMessage>& msg) {
+    int64_t seekTimeUs;
+    int32_t mode;
+    CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+    CHECK(msg->findInt32("mode", &mode));
+
+    sp<AMessage> response = new AMessage;
+    status_t err = doSeek(seekTimeUs, (MediaPlayer2SeekMode)mode);
+    response->setInt32("err", err);
+
+    sp<AReplyToken> replyID;
+    CHECK(msg->senderAwaitsResponse(&replyID));
+    response->postReply(replyID);
+}
+
+status_t NuPlayer2::GenericSource::doSeek(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    if (mVideoTrack.mSource != NULL) {
+        ++mVideoDataGeneration;
+
+        int64_t actualTimeUs;
+        readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, mode, &actualTimeUs);
+
+        if (mode != MediaPlayer2SeekMode::SEEK_CLOSEST) {
+            seekTimeUs = actualTimeUs;
+        }
+        mVideoLastDequeueTimeUs = actualTimeUs;
+    }
+
+    if (mAudioTrack.mSource != NULL) {
+        ++mAudioDataGeneration;
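+        // Seek audio as close as possible to the (possibly video-adjusted) time so A/V stay aligned.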
+        readBuffer(MEDIA_TRACK_TYPE_AUDIO, seekTimeUs, MediaPlayer2SeekMode::SEEK_CLOSEST);
+        mAudioLastDequeueTimeUs = seekTimeUs;
+    }
+
+    if (mSubtitleTrack.mSource != NULL) {
+        mSubtitleTrack.mPackets->clear();
+        mFetchSubtitleDataGeneration++;
+    }
+
+    if (mTimedTextTrack.mSource != NULL) {
+        mTimedTextTrack.mPackets->clear();
+        mFetchTimedTextDataGeneration++;
+    }
+
+    ++mPollBufferingGeneration;
+    schedulePollBuffering();
+    return OK;
+}
+
+sp<ABuffer> NuPlayer2::GenericSource::mediaBufferToABuffer(
+        MediaBuffer* mb,
+        media_track_type trackType) {
+    bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
+    size_t outLength = mb->range_length();
+
+    if (audio && mAudioIsVorbis) {
+        outLength += sizeof(int32_t);
+    }
+
+    sp<ABuffer> ab;
+
+    if (mIsDrmProtected)   {
+        // Modular DRM
+        // Enabled for both video/audio so 1) media buffer is reused without extra copying
+        // 2) meta data can be retrieved in onInputBufferFetched for calling queueSecureInputBuffer.
+
+        // data is already provided in the buffer
+        ab = new ABuffer(NULL, mb->range_length());
+        mb->add_ref();
+        ab->setMediaBufferBase(mb);
+
+        // Modular DRM: Required b/c of the above add_ref.
+        // If ref>0, there must be an observer, or it'll crash at release().
+        // TODO: MediaBuffer might need to be revised to ease such need.
+        mb->setObserver(this);
+        // setMediaBufferBase() interestingly doesn't increment the ref count on its own.
+        // Extra increment, since we want to keep mb alive and attached to ab beyond this
+        // function call. This counters the effect of mb->release() towards the end.
+        mb->add_ref();
+
+    } else {
+        ab = new ABuffer(outLength);
+        memcpy(ab->data(),
+               (const uint8_t *)mb->data() + mb->range_offset(),
+               mb->range_length());
+    }
+
+    if (audio && mAudioIsVorbis) {
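+        // Append the page's valid-sample count after the payload so downstream Vorbis
+        // decoding can pick it up from the buffer tail.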
+        int32_t numPageSamples;
+        if (!mb->meta_data()->findInt32(kKeyValidSamples, &numPageSamples)) {
+            numPageSamples = -1;
+        }
+
+        uint8_t* abEnd = ab->data() + mb->range_length();
+        memcpy(abEnd, &numPageSamples, sizeof(numPageSamples));
+    }
+
+    sp<AMessage> meta = ab->meta();
+
+    int64_t timeUs;
+    CHECK(mb->meta_data()->findInt64(kKeyTime, &timeUs));
+    meta->setInt64("timeUs", timeUs);
+
+    if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
+        int32_t layerId;
+        if (mb->meta_data()->findInt32(kKeyTemporalLayerId, &layerId)) {
+            meta->setInt32("temporal-layer-id", layerId);
+        }
+    }
+
+    if (trackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+        const char *mime;
+        CHECK(mTimedTextTrack.mSource != NULL
+                && mTimedTextTrack.mSource->getFormat()->findCString(kKeyMIMEType, &mime));
+        meta->setString("mime", mime);
+    }
+
+    int64_t durationUs;
+    if (mb->meta_data()->findInt64(kKeyDuration, &durationUs)) {
+        meta->setInt64("durationUs", durationUs);
+    }
+
+    if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+        meta->setInt32("trackIndex", mSubtitleTrack.mIndex);
+    }
+
+    uint32_t dataType; // unused
+    const void *seiData;
+    size_t seiLength;
+    if (mb->meta_data()->findData(kKeySEI, &dataType, &seiData, &seiLength)) {
+        sp<ABuffer> sei = ABuffer::CreateAsCopy(seiData, seiLength);
+        meta->setBuffer("sei", sei);
+    }
+
+    const void *mpegUserDataPointer;
+    size_t mpegUserDataLength;
+    if (mb->meta_data()->findData(
+            kKeyMpegUserData, &dataType, &mpegUserDataPointer, &mpegUserDataLength)) {
+        sp<ABuffer> mpegUserData = ABuffer::CreateAsCopy(mpegUserDataPointer, mpegUserDataLength);
+        meta->setBuffer("mpegUserData", mpegUserData);
+    }
+
+    mb->release();
+    mb = NULL;
+
+    return ab;
+}
+
+int32_t NuPlayer2::GenericSource::getDataGeneration(media_track_type type) const {
+    int32_t generation = -1;
+    switch (type) {
+    case MEDIA_TRACK_TYPE_VIDEO:
+        generation = mVideoDataGeneration;
+        break;
+    case MEDIA_TRACK_TYPE_AUDIO:
+        generation = mAudioDataGeneration;
+        break;
+    case MEDIA_TRACK_TYPE_TIMEDTEXT:
+        generation = mFetchTimedTextDataGeneration;
+        break;
+    case MEDIA_TRACK_TYPE_SUBTITLE:
+        generation = mFetchSubtitleDataGeneration;
+        break;
+    default:
+        break;
+    }
+
+    return generation;
+}
+
+void NuPlayer2::GenericSource::postReadBuffer(media_track_type trackType) {
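+    // Coalesce read requests: at most one kWhatReadBuffer message per track type
+    // is pending; the bit is cleared in onReadBuffer().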
+    if ((mPendingReadBufferTypes & (1 << trackType)) == 0) {
+        mPendingReadBufferTypes |= (1 << trackType);
+        sp<AMessage> msg = new AMessage(kWhatReadBuffer, this);
+        msg->setInt32("trackType", trackType);
+        msg->post();
+    }
+}
+
+void NuPlayer2::GenericSource::onReadBuffer(const sp<AMessage>& msg) {
+    int32_t tmpType;
+    CHECK(msg->findInt32("trackType", &tmpType));
+    media_track_type trackType = (media_track_type)tmpType;
+    mPendingReadBufferTypes &= ~(1 << trackType);
+    readBuffer(trackType);
+}
+
+void NuPlayer2::GenericSource::readBuffer(
+        media_track_type trackType, int64_t seekTimeUs, MediaPlayer2SeekMode mode,
+        int64_t *actualTimeUs, bool formatChange) {
+    Track *track;
+    size_t maxBuffers = 1;
+    switch (trackType) {
+        case MEDIA_TRACK_TYPE_VIDEO:
+            track = &mVideoTrack;
+            maxBuffers = 8;  // too large a number may influence seeks
+            break;
+        case MEDIA_TRACK_TYPE_AUDIO:
+            track = &mAudioTrack;
+            maxBuffers = 64;
+            break;
+        case MEDIA_TRACK_TYPE_SUBTITLE:
+            track = &mSubtitleTrack;
+            break;
+        case MEDIA_TRACK_TYPE_TIMEDTEXT:
+            track = &mTimedTextTrack;
+            break;
+        default:
+            TRESPASS();
+    }
+
+    if (track->mSource == NULL) {
+        return;
+    }
+
+    if (actualTimeUs) {
+        *actualTimeUs = seekTimeUs;
+    }
+
+    MediaSource::ReadOptions options;
+
+    bool seeking = false;
+    if (seekTimeUs >= 0) {
+        options.setSeekTo(seekTimeUs, mode);
+        seeking = true;
+    }
+
+    const bool couldReadMultiple = (track->mSource->supportReadMultiple());
+
+    if (couldReadMultiple) {
+        options.setNonBlocking();
+    }
+
+    int32_t generation = getDataGeneration(trackType);
+    for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
+        Vector<MediaBuffer *> mediaBuffers;
+        status_t err = NO_ERROR;
+
+        sp<IMediaSource> source = track->mSource;
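+        // IMediaSource::read*() may block; release mLock around the call
+        // (reads stay serialized on mLooper).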
+        mLock.unlock();
+        if (couldReadMultiple) {
+            err = source->readMultiple(
+                    &mediaBuffers, maxBuffers - numBuffers, &options);
+        } else {
+            MediaBuffer *mbuf = NULL;
+            err = source->read(&mbuf, &options);
+            if (err == OK && mbuf != NULL) {
+                mediaBuffers.push_back(mbuf);
+            }
+        }
+        mLock.lock();
+
+        options.clearNonPersistent();
+
+        size_t id = 0;
+        size_t count = mediaBuffers.size();
+
+        // In case the track has been changed while the lock was released, drop the stale buffers.
+        if (generation != getDataGeneration(trackType)) {
+            for (; id < count; ++id) {
+                mediaBuffers[id]->release();
+            }
+            break;
+        }
+
+        for (; id < count; ++id) {
+            int64_t timeUs;
+            MediaBuffer *mbuf = mediaBuffers[id];
+            if (!mbuf->meta_data()->findInt64(kKeyTime, &timeUs)) {
+                mbuf->meta_data()->dumpToLog();
+                track->mPackets->signalEOS(ERROR_MALFORMED);
+                break;
+            }
+            if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+                mAudioTimeUs = timeUs;
+            } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
+                mVideoTimeUs = timeUs;
+            }
+
+            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
+
+            sp<ABuffer> buffer = mediaBufferToABuffer(mbuf, trackType);
+            if (numBuffers == 0 && actualTimeUs != nullptr) {
+                *actualTimeUs = timeUs;
+            }
+            if (seeking && buffer != nullptr) {
+                sp<AMessage> meta = buffer->meta();
+                if (meta != nullptr && mode == MediaPlayer2SeekMode::SEEK_CLOSEST
+                        && seekTimeUs > timeUs) {
+                    sp<AMessage> extra = new AMessage;
+                    extra->setInt64("resume-at-mediaTimeUs", seekTimeUs);
+                    meta->setMessage("extra", extra);
+                }
+            }
+
+            track->mPackets->queueAccessUnit(buffer);
+            formatChange = false;
+            seeking = false;
+            ++numBuffers;
+        }
+        if (id < count) {
+            // Error, some mediaBuffer doesn't have kKeyTime.
+            for (; id < count; ++id) {
+                mediaBuffers[id]->release();
+            }
+            break;
+        }
+
+        if (err == WOULD_BLOCK) {
+            break;
+        } else if (err == INFO_FORMAT_CHANGED) {
+#if 0
+            track->mPackets->queueDiscontinuity(
+                    ATSParser::DISCONTINUITY_FORMATCHANGE,
+                    NULL,
+                    false /* discard */);
+#endif
+        } else if (err != OK) {
+            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
+            track->mPackets->signalEOS(err);
+            break;
+        }
+    }
+
+    if (mIsStreaming
+        && (trackType == MEDIA_TRACK_TYPE_VIDEO || trackType == MEDIA_TRACK_TYPE_AUDIO)) {
+        status_t finalResult;
+        int64_t durationUs = track->mPackets->getBufferedDurationUs(&finalResult);
+
+        // TODO: maxRebufferingMarkMs could be larger than
+        // mBufferingSettings.mResumePlaybackMarkMs
+        int64_t markUs = (mPreparing ? mBufferingSettings.mInitialMarkMs
+            : mBufferingSettings.mResumePlaybackMarkMs) * 1000ll;
+        if (finalResult == ERROR_END_OF_STREAM || durationUs >= markUs) {
+            if (mPreparing || mSentPauseOnBuffering) {
+                Track *counterTrack =
+                    (trackType == MEDIA_TRACK_TYPE_VIDEO ? &mAudioTrack : &mVideoTrack);
+                if (counterTrack->mSource != NULL) {
+                    durationUs = counterTrack->mPackets->getBufferedDurationUs(&finalResult);
+                }
+                if (finalResult == ERROR_END_OF_STREAM || durationUs >= markUs) {
+                    if (mPreparing) {
+                        notifyPrepared();
+                        mPreparing = false;
+                    } else {
+                        sendCacheStats();
+                        mSentPauseOnBuffering = false;
+                        sp<AMessage> notify = dupNotify();
+                        notify->setInt32("what", kWhatResumeOnBufferingEnd);
+                        notify->post();
+                    }
+                }
+            }
+            return;
+        }
+
+        postReadBuffer(trackType);
+    }
+}
+
+void NuPlayer2::GenericSource::queueDiscontinuityIfNeeded(
+        bool seeking, bool formatChange, media_track_type trackType, Track *track) {
+    // formatChange && seeking: track whose source is changed during selection
+    // formatChange && !seeking: track whose source is not changed during selection
+    // !formatChange: normal seek
+    if ((seeking || formatChange)
+            && (trackType == MEDIA_TRACK_TYPE_AUDIO
+            || trackType == MEDIA_TRACK_TYPE_VIDEO)) {
+        ATSParser::DiscontinuityType type = (formatChange && seeking)
+                ? ATSParser::DISCONTINUITY_FORMATCHANGE
+                : ATSParser::DISCONTINUITY_NONE;
+        track->mPackets->queueDiscontinuity(type, NULL /* extra */, true /* discard */);
+    }
+}
+
+void NuPlayer2::GenericSource::notifyBufferingUpdate(int32_t percentage) {
+    // Buffering percent could go backward as it's estimated from remaining
+    // data and last access time. This could cause the buffering position
+    // drawn on media control to jitter slightly. Remember previously reported
+    // percentage and don't allow it to go backward.
+    if (percentage < mPrevBufferPercentage) {
+        percentage = mPrevBufferPercentage;
+    } else if (percentage > 100) {
+        percentage = 100;
+    }
+
+    mPrevBufferPercentage = percentage;
+
+    ALOGV("notifyBufferingUpdate: buffering %d%%", percentage);
+
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatBufferingUpdate);
+    notify->setInt32("percentage", percentage);
+    notify->post();
+}
+
+void NuPlayer2::GenericSource::schedulePollBuffering() {
+    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
+    msg->setInt32("generation", mPollBufferingGeneration);
+    // Poll the buffering status every second.
+    msg->post(1000000ll);
+}
+
+void NuPlayer2::GenericSource::onPollBuffering() {
+    status_t finalStatus = UNKNOWN_ERROR;
+    int64_t cachedDurationUs = -1ll;
+    ssize_t cachedDataRemaining = -1;
+
+    if (mCachedSource != NULL) {
+        cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
+
+        if (finalStatus == OK) {
+            off64_t size;
+            int64_t bitrate = 0ll;
+            if (mDurationUs > 0 && mCachedSource->getSize(&size) == OK) {
+                // |bitrate| is in bits per second, while |size| is in bytes.
+                bitrate = size * 8000000ll / mDurationUs;
+            } else if (mBitrate > 0) {
+                bitrate = mBitrate;
+            }
+            if (bitrate > 0) {
+                cachedDurationUs = cachedDataRemaining * 8000000ll / bitrate;
+            }
+        }
+    }
+
+    if (finalStatus != OK) {
+        ALOGV("onPollBuffering: EOS (finalStatus = %d)", finalStatus);
+
+        if (finalStatus == ERROR_END_OF_STREAM) {
+            notifyBufferingUpdate(100);
+        }
+
+        return;
+    }
+
+    if (cachedDurationUs >= 0ll) {
+        if (mDurationUs > 0ll) {
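+    // The snapshot above was taken under mLock; the potentially blocking
+    // disconnect below runs without the lock held.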
+            int64_t cachedPosUs = getLastReadPosition() + cachedDurationUs;
+            int percentage = 100.0 * cachedPosUs / mDurationUs;
+            if (percentage > 100) {
+                percentage = 100;
+            }
+
+            notifyBufferingUpdate(percentage);
+        }
+
+        ALOGV("onPollBuffering: cachedDurationUs %.1f sec", cachedDurationUs / 1000000.0f);
+    }
+
+    schedulePollBuffering();
+}
+
+// Modular DRM
+status_t NuPlayer2::GenericSource::prepareDrm(
+        const uint8_t uuid[16],
+        const Vector<uint8_t> &drmSessionId,
+        sp<AMediaCryptoWrapper> *outCrypto) {
+    Mutex::Autolock _l(mLock);
+    ALOGV("prepareDrm");
+
+    mIsDrmProtected = false;
+    mIsDrmReleased = false;
+    mIsSecure = false;
+
+    status_t status = OK;
+    sp<AMediaCryptoWrapper> crypto =
+        new AMediaCryptoWrapper(uuid, drmSessionId.array(), drmSessionId.size());
+    if (crypto == NULL) {
+        ALOGE("prepareDrm: failed to create crypto.");
+        return UNKNOWN_ERROR;
+    }
+    ALOGV("prepareDrm: crypto created for uuid: %s",
+            DrmUUID::toHexString(uuid).string());
+
+    *outCrypto = crypto;
+    // as long as there is an active crypto
+    mIsDrmProtected = true;
+
+    if (mMimes.size() == 0) {
+        status = UNKNOWN_ERROR;
+        ALOGE("prepareDrm: Unexpected. Must have at least one track. status: %d", status);
+        return status;
+    }
+
+    // first mime in this list is either the video track, or the first audio track
+    const char *mime = mMimes[0].string();
+    mIsSecure = crypto->requiresSecureDecoderComponent(mime);
+    ALOGV("prepareDrm: requiresSecureDecoderComponent mime: %s  isSecure: %d",
+            mime, mIsSecure);
+
+    // Checking the member flags while in the looper to send out the notification.
+    // The legacy mDecryptHandle!=NULL check (for FLAG_PROTECTED) is equivalent to mIsDrmProtected.
+    notifyFlagsChanged(
+            (mIsSecure ? FLAG_SECURE : 0) |
+            // Setting "protected screen" only for L1: b/38390836
+            (mIsSecure ? FLAG_PROTECTED : 0) |
+            FLAG_CAN_PAUSE |
+            FLAG_CAN_SEEK_BACKWARD |
+            FLAG_CAN_SEEK_FORWARD |
+            FLAG_CAN_SEEK);
+
+    if (status == OK) {
+        ALOGV("prepareDrm: mCrypto: %p", outCrypto->get());
+        ALOGD("prepareDrm ret: %d ", status);
+    } else {
+        ALOGE("prepareDrm err: %d", status);
+    }
+    return status;
+}
+
+status_t NuPlayer2::GenericSource::releaseDrm() {
+    Mutex::Autolock _l(mLock);
+    ALOGV("releaseDrm");
+
+    if (mIsDrmProtected) {
+        mIsDrmProtected = false;
+        // to prevent returning any more buffer after stop/releaseDrm (b/37960096)
+        mIsDrmReleased = true;
+        ALOGV("releaseDrm: mIsDrmProtected is reset.");
+    } else {
+        ALOGE("releaseDrm: mIsDrmProtected is already false.");
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2::GenericSource::checkDrmInfo()
+{
+    // clearing the flag at prepare in case the player is reused after stop/releaseDrm with the
+    // same source without being reset (called by prepareAsync/initFromDataSource)
+    mIsDrmReleased = false;
+
+    if (mFileMeta == NULL) {
+        ALOGI("checkDrmInfo: No metadata");
+        return OK; // let the caller respond accordingly
+    }
+
+    uint32_t type;
+    const void *pssh;
+    size_t psshsize;
+
+    if (!mFileMeta->findData(kKeyPssh, &type, &pssh, &psshsize)) {
+        ALOGV("checkDrmInfo: No PSSH");
+        return OK; // source without DRM info
+    }
+
+    Parcel parcel;
+    NuPlayer2Drm::retrieveDrmInfo(pssh, psshsize, &parcel);
+    ALOGV("checkDrmInfo: MEDIA2_DRM_INFO PSSH size: %d  Parcel size: %d  objects#: %d",
+          (int)psshsize, (int)parcel.dataSize(), (int)parcel.objectsCount());
+
+    if (parcel.dataSize() == 0) {
+        ALOGE("checkDrmInfo: Unexpected parcel size: 0");
+        return UNKNOWN_ERROR;
+    }
+
+    // Can't pass parcel as a message to the player. Converting Parcel->ABuffer to pass it
+    // to the Player's onSourceNotify then back to Parcel for calling driver's notifyListener.
+    sp<ABuffer> drmInfoBuffer = ABuffer::CreateAsCopy(parcel.data(), parcel.dataSize());
+    notifyDrmInfo(drmInfoBuffer);
+
+    return OK;
+}
+
+void NuPlayer2::GenericSource::signalBufferReturned(MediaBuffer *buffer)
+{
+    //ALOGV("signalBufferReturned %p  refCount: %d", buffer, buffer->localRefcount());
+
+    buffer->setObserver(NULL);
+    buffer->release(); // this leads to delete since there is no observer
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/GenericSource.h b/media/libmedia/nuplayer2/GenericSource.h
new file mode 100644
index 0000000..0666d27
--- /dev/null
+++ b/media/libmedia/nuplayer2/GenericSource.h
@@ -0,0 +1,247 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GENERIC_SOURCE_H_
+
+#define GENERIC_SOURCE_H_
+
+#include "NuPlayer2.h"
+#include "NuPlayer2Source.h"
+
+#include "ATSParser.h"
+
+#include <media/mediaplayer2.h>
+#include <media/stagefright/MediaBuffer.h>
+
+namespace android {
+
+class DecryptHandle;
+struct AnotherPacketSource;
+struct ARTSPController;
+class DataSource;
+class IDataSource;
+struct MediaHTTPService;
+struct MediaSource;
+class MediaBuffer;
+struct MediaClock;
+struct NuCachedSource2;
+
+struct NuPlayer2::GenericSource : public NuPlayer2::Source,
+                                 public MediaBufferObserver // Modular DRM
+{
+    GenericSource(const sp<AMessage> &notify, bool uidValid, uid_t uid,
+                  const sp<MediaClock> &mediaClock);
+
+    status_t setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
+
+    status_t setDataSource(int fd, int64_t offset, int64_t length);
+
+    status_t setDataSource(const sp<DataSource>& dataSource);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+
+    virtual void start();
+    virtual void stop();
+    virtual void pause();
+    virtual void resume();
+
+    virtual void disconnect();
+
+    virtual status_t feedMoreTSData();
+
+    virtual sp<MetaData> getFileFormatMeta() const;
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual size_t getTrackCount() const;
+    virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
+    virtual ssize_t getSelectedTrack(media_track_type type) const;
+    virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
+    virtual status_t seekTo(
+        int64_t seekTimeUs,
+        MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) override;
+
+    virtual bool isStreaming() const;
+
+    // Modular DRM
+    virtual void signalBufferReturned(MediaBuffer *buffer);
+
+    virtual status_t prepareDrm(
+            const uint8_t uuid[16],
+            const Vector<uint8_t> &drmSessionId,
+            sp<AMediaCryptoWrapper> *outCrypto);
+
+    virtual status_t releaseDrm();
+
+
+protected:
+    virtual ~GenericSource();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual sp<MetaData> getFormatMeta(bool audio);
+
+private:
+    enum {
+        kWhatPrepareAsync,
+        kWhatFetchSubtitleData,
+        kWhatFetchTimedTextData,
+        kWhatSendSubtitleData,
+        kWhatSendGlobalTimedTextData,
+        kWhatSendTimedTextData,
+        kWhatChangeAVSource,
+        kWhatPollBuffering,
+        kWhatSeek,
+        kWhatReadBuffer,
+        kWhatStart,
+        kWhatResume,
+        kWhatSecureDecodersInstantiated,
+    };
+
+    struct Track {
+        size_t mIndex;
+        sp<IMediaSource> mSource;
+        sp<AnotherPacketSource> mPackets;
+    };
+
+    Vector<sp<IMediaSource> > mSources;
+    Track mAudioTrack;
+    int64_t mAudioTimeUs;
+    int64_t mAudioLastDequeueTimeUs;
+    Track mVideoTrack;
+    int64_t mVideoTimeUs;
+    int64_t mVideoLastDequeueTimeUs;
+    Track mSubtitleTrack;
+    Track mTimedTextTrack;
+
+    BufferingSettings mBufferingSettings;
+    int32_t mPrevBufferPercentage;
+    int32_t mPollBufferingGeneration;
+    bool mSentPauseOnBuffering;
+
+    int32_t mAudioDataGeneration;
+    int32_t mVideoDataGeneration;
+    int32_t mFetchSubtitleDataGeneration;
+    int32_t mFetchTimedTextDataGeneration;
+    int64_t mDurationUs;
+    bool mAudioIsVorbis;
+    // Secure codec is required.
+    bool mIsSecure;
+    bool mIsStreaming;
+    bool mUIDValid;
+    uid_t mUID;
+    const sp<MediaClock> mMediaClock;
+    sp<MediaHTTPService> mHTTPService;
+    AString mUri;
+    KeyedVector<String8, String8> mUriHeaders;
+    int mFd;
+    int64_t mOffset;
+    int64_t mLength;
+
+    bool mDisconnected;
+    sp<DataSource> mDataSource;
+    sp<NuCachedSource2> mCachedSource;
+    sp<DataSource> mHttpSource;
+    sp<MetaData> mFileMeta;
+    bool mStarted;
+    bool mPreparing;
+    int64_t mBitrate;
+    uint32_t mPendingReadBufferTypes;
+    sp<ABuffer> mGlobalTimedText;
+
+    mutable Mutex mLock;
+
+    sp<ALooper> mLooper;
+
+    void resetDataSource();
+
+    status_t initFromDataSource();
+    int64_t getLastReadPosition();
+
+    void notifyPreparedAndCleanup(status_t err);
+    void onSecureDecodersInstantiated(status_t err);
+    void finishPrepareAsync();
+    status_t startSources();
+
+    void onSeek(const sp<AMessage>& msg);
+    status_t doSeek(int64_t seekTimeUs, MediaPlayer2SeekMode mode);
+
+    void onPrepareAsync();
+
+    void fetchTextData(
+            uint32_t what, media_track_type type,
+            int32_t curGen, const sp<AnotherPacketSource>& packets, const sp<AMessage>& msg);
+
+    void sendGlobalTextData(
+            uint32_t what,
+            int32_t curGen, sp<AMessage> msg);
+
+    void sendTextData(
+            uint32_t what, media_track_type type,
+            int32_t curGen, const sp<AnotherPacketSource>& packets, const sp<AMessage>& msg);
+
+    sp<ABuffer> mediaBufferToABuffer(
+            MediaBuffer *mbuf,
+            media_track_type trackType);
+
+    void postReadBuffer(media_track_type trackType);
+    void onReadBuffer(const sp<AMessage>& msg);
+    // When |mode| is MediaPlayer2SeekMode::SEEK_CLOSEST, the buffer read includes
+    // an item indicating that rendering should be skipped for all buffers with
+    // timestamps earlier than |seekTimeUs|.
+    // For other modes, that item is omitted in order to keep seeks fast.
+    void readBuffer(
+            media_track_type trackType,
+            int64_t seekTimeUs = -1ll,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC,
+            int64_t *actualTimeUs = NULL, bool formatChange = false);
+
+    void queueDiscontinuityIfNeeded(
+            bool seeking, bool formatChange, media_track_type trackType, Track *track);
+
+    void schedulePollBuffering();
+    void onPollBuffering();
+    void notifyBufferingUpdate(int32_t percentage);
+
+    void sendCacheStats();
+
+    sp<MetaData> getFormatMeta_l(bool audio);
+    int32_t getDataGeneration(media_track_type type) const;
+
+    // Modular DRM
+    // The source is DRM protected and is prepared for DRM.
+    bool mIsDrmProtected;
+    // releaseDrm has been processed.
+    bool mIsDrmReleased;
+    Vector<String8> mMimes;
+
+    status_t checkDrmInfo();
+
+    DISALLOW_EVIL_CONSTRUCTORS(GenericSource);
+};
+
+}  // namespace android
+
+#endif  // GENERIC_SOURCE_H_
diff --git a/media/libmedia/nuplayer2/HTTPLiveSource.cpp b/media/libmedia/nuplayer2/HTTPLiveSource.cpp
new file mode 100644
index 0000000..e0e3df9
--- /dev/null
+++ b/media/libmedia/nuplayer2/HTTPLiveSource.cpp
@@ -0,0 +1,449 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HTTPLiveSource"
+#include <utils/Log.h>
+
+#include "HTTPLiveSource.h"
+
+#include "AnotherPacketSource.h"
+#include "LiveDataSource.h"
+
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/Utils.h>
+
+// default buffer prepare/ready/underflow marks
+static const int kReadyMarkMs     = 5000;  // 5 seconds
+static const int kPrepareMarkMs   = 1500;  // 1.5 seconds
+
+namespace android {
+
+NuPlayer2::HTTPLiveSource::HTTPLiveSource(
+        const sp<AMessage> &notify,
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers)
+    : Source(notify),
+      mHTTPService(httpService),
+      mURL(url),
+      mFlags(0),
+      mFinalResult(OK),
+      mOffset(0),
+      mFetchSubtitleDataGeneration(0),
+      mFetchMetaDataGeneration(0),
+      mHasMetadata(false),
+      mMetadataSelected(false) {
+    mBufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+    mBufferingSettings.mResumePlaybackMarkMs = kReadyMarkMs;
+    if (headers) {
+        mExtraHeaders = *headers;
+
+        ssize_t index =
+            mExtraHeaders.indexOfKey(String8("x-hide-urls-from-log"));
+
+        if (index >= 0) {
+            mFlags |= kFlagIncognito;
+
+            mExtraHeaders.removeItemsAt(index);
+        }
+    }
+}
+
+NuPlayer2::HTTPLiveSource::~HTTPLiveSource() {
+    if (mLiveSession != NULL) {
+        mLiveSession->disconnect();
+
+        mLiveLooper->unregisterHandler(mLiveSession->id());
+        mLiveLooper->unregisterHandler(id());
+        mLiveLooper->stop();
+
+        mLiveSession.clear();
+        mLiveLooper.clear();
+    }
+}
+
+status_t NuPlayer2::HTTPLiveSource::getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) {
+    *buffering = mBufferingSettings;
+
+    return OK;
+}
+
+status_t NuPlayer2::HTTPLiveSource::setBufferingSettings(const BufferingSettings& buffering) {
+    mBufferingSettings = buffering;
+
+    if (mLiveSession != NULL) {
+        mLiveSession->setBufferingSettings(mBufferingSettings);
+    }
+
+    return OK;
+}
+
+void NuPlayer2::HTTPLiveSource::prepareAsync() {
+    if (mLiveLooper == NULL) {
+        mLiveLooper = new ALooper;
+        mLiveLooper->setName("http live");
+        mLiveLooper->start(false, /* runOnCallingThread */
+                           true /* canCallJava */);
+
+        mLiveLooper->registerHandler(this);
+    }
+
+    sp<AMessage> notify = new AMessage(kWhatSessionNotify, this);
+
+    mLiveSession = new LiveSession(
+            notify,
+            (mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0,
+            mHTTPService);
+
+    mLiveLooper->registerHandler(mLiveSession);
+
+    mLiveSession->setBufferingSettings(mBufferingSettings);
+    mLiveSession->connectAsync(
+            mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
+}
+
+void NuPlayer2::HTTPLiveSource::start() {
+}
+
+sp<MetaData> NuPlayer2::HTTPLiveSource::getFormatMeta(bool audio) {
+    sp<MetaData> meta;
+    if (mLiveSession != NULL) {
+        mLiveSession->getStreamFormatMeta(
+                audio ? LiveSession::STREAMTYPE_AUDIO
+                      : LiveSession::STREAMTYPE_VIDEO,
+                &meta);
+    }
+
+    return meta;
+}
+
+sp<AMessage> NuPlayer2::HTTPLiveSource::getFormat(bool audio) {
+    sp<MetaData> meta;
+    status_t err = -EWOULDBLOCK;
+    if (mLiveSession != NULL) {
+        err = mLiveSession->getStreamFormatMeta(
+                audio ? LiveSession::STREAMTYPE_AUDIO
+                      : LiveSession::STREAMTYPE_VIDEO,
+                &meta);
+    }
+
+    sp<AMessage> format;
+    if (err == -EWOULDBLOCK) {
+        format = new AMessage();
+        format->setInt32("err", err);
+        return format;
+    }
+
+    if (err != OK || convertMetaDataToMessage(meta, &format) != OK) {
+        return NULL;
+    }
+    return format;
+}
+
+status_t NuPlayer2::HTTPLiveSource::feedMoreTSData() {
+    return OK;
+}
+
+status_t NuPlayer2::HTTPLiveSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    return mLiveSession->dequeueAccessUnit(
+            audio ? LiveSession::STREAMTYPE_AUDIO
+                  : LiveSession::STREAMTYPE_VIDEO,
+            accessUnit);
+}
+
+status_t NuPlayer2::HTTPLiveSource::getDuration(int64_t *durationUs) {
+    return mLiveSession->getDuration(durationUs);
+}
+
+size_t NuPlayer2::HTTPLiveSource::getTrackCount() const {
+    return mLiveSession->getTrackCount();
+}
+
+sp<AMessage> NuPlayer2::HTTPLiveSource::getTrackInfo(size_t trackIndex) const {
+    return mLiveSession->getTrackInfo(trackIndex);
+}
+
+ssize_t NuPlayer2::HTTPLiveSource::getSelectedTrack(media_track_type type) const {
+    if (mLiveSession == NULL) {
+        return -1;
+    } else if (type == MEDIA_TRACK_TYPE_METADATA) {
+        // MEDIA_TRACK_TYPE_METADATA is always the last track
+        // mMetadataSelected can only be true when mHasMetadata is true
+        return mMetadataSelected ? (mLiveSession->getTrackCount() - 1) : -1;
+    } else {
+        return mLiveSession->getSelectedTrack(type);
+    }
+}
+
+status_t NuPlayer2::HTTPLiveSource::selectTrack(size_t trackIndex, bool select, int64_t /*timeUs*/) {
+    if (mLiveSession == NULL) {
+        return INVALID_OPERATION;
+    }
+
+    status_t err = INVALID_OPERATION;
+    bool postFetchMsg = false, isSub = false;
+    if (!mHasMetadata || trackIndex != mLiveSession->getTrackCount() - 1) {
+        err = mLiveSession->selectTrack(trackIndex, select);
+        postFetchMsg = select;
+        isSub = true;
+    } else {
+        // metadata track; i.e. (mHasMetadata && trackIndex == mLiveSession->getTrackCount() - 1)
+        if (mMetadataSelected && !select) {
+            err = OK;
+        } else if (!mMetadataSelected && select) {
+            postFetchMsg = true;
+            err = OK;
+        } else {
+            err = BAD_VALUE; // behave like LiveSession::selectTrack
+        }
+
+        mMetadataSelected = select;
+    }
+
+    if (err == OK) {
+        int32_t &generation = isSub ? mFetchSubtitleDataGeneration : mFetchMetaDataGeneration;
+        generation++;
+        if (postFetchMsg) {
+            int32_t what = isSub ? kWhatFetchSubtitleData : kWhatFetchMetaData;
+            sp<AMessage> msg = new AMessage(what, this);
+            msg->setInt32("generation", generation);
+            msg->post();
+        }
+    }
+
+    // LiveSession::selectTrack returns BAD_VALUE when selecting the currently
+    // selected track, or unselecting a non-selected track. In this case it's a
+    // no-op so we return OK.
+    return (err == OK || err == BAD_VALUE) ? (status_t)OK : err;
+}
+
+status_t NuPlayer2::HTTPLiveSource::seekTo(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    if (mLiveSession->isSeekable()) {
+        return mLiveSession->seekTo(seekTimeUs, mode);
+    } else {
+        return INVALID_OPERATION;
+    }
+}
+
+void NuPlayer2::HTTPLiveSource::pollForRawData(
+        const sp<AMessage> &msg, int32_t currentGeneration,
+        LiveSession::StreamType fetchType, int32_t pushWhat) {
+
+    int32_t generation;
+    CHECK(msg->findInt32("generation", &generation));
+
+    if (generation != currentGeneration) {
+        return;
+    }
+
+    sp<ABuffer> buffer;
+    while (mLiveSession->dequeueAccessUnit(fetchType, &buffer) == OK) {
+
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", pushWhat);
+        notify->setBuffer("buffer", buffer);
+
+        int64_t timeUs, baseUs, delayUs;
+        CHECK(buffer->meta()->findInt64("baseUs", &baseUs));
+        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+        delayUs = baseUs + timeUs - ALooper::GetNowUs();
+
+        if (fetchType == LiveSession::STREAMTYPE_SUBTITLES) {
+            notify->post();
+            msg->post(delayUs > 0ll ? delayUs : 0ll);
+            return;
+        } else if (fetchType == LiveSession::STREAMTYPE_METADATA) {
+            if (delayUs < -1000000ll) { // 1 second
+                continue;
+            }
+            notify->post();
+            // push all currently available metadata buffers in each invocation of pollForRawData
+            // continue;
+        } else {
+            TRESPASS();
+        }
+    }
+
+    // try again in 1 second
+    msg->post(1000000ll);
+}
+
+void NuPlayer2::HTTPLiveSource::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatSessionNotify:
+        {
+            onSessionNotify(msg);
+            break;
+        }
+
+        case kWhatFetchSubtitleData:
+        {
+            pollForRawData(
+                    msg, mFetchSubtitleDataGeneration,
+                    /* fetch */ LiveSession::STREAMTYPE_SUBTITLES,
+                    /* push */ kWhatSubtitleData);
+
+            break;
+        }
+
+        case kWhatFetchMetaData:
+        {
+            if (!mMetadataSelected) {
+                break;
+            }
+
+            pollForRawData(
+                    msg, mFetchMetaDataGeneration,
+                    /* fetch */ LiveSession::STREAMTYPE_METADATA,
+                    /* push */ kWhatTimedMetaData);
+
+            break;
+        }
+
+        default:
+            Source::onMessageReceived(msg);
+            break;
+    }
+}
+
+void NuPlayer2::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case LiveSession::kWhatPrepared:
+        {
+            // notify the current size here if we have it, otherwise report an initial size of (0,0)
+            sp<AMessage> format = getFormat(false /* audio */);
+            int32_t width;
+            int32_t height;
+            if (format != NULL &&
+                    format->findInt32("width", &width) && format->findInt32("height", &height)) {
+                notifyVideoSizeChanged(format);
+            } else {
+                notifyVideoSizeChanged();
+            }
+
+            uint32_t flags = 0;
+            if (mLiveSession->isSeekable()) {
+                flags |= FLAG_CAN_PAUSE;
+                flags |= FLAG_CAN_SEEK;
+                flags |= FLAG_CAN_SEEK_BACKWARD;
+                flags |= FLAG_CAN_SEEK_FORWARD;
+            }
+
+            if (mLiveSession->hasDynamicDuration()) {
+                flags |= FLAG_DYNAMIC_DURATION;
+            }
+
+            notifyFlagsChanged(flags);
+
+            notifyPrepared();
+            break;
+        }
+
+        case LiveSession::kWhatPreparationFailed:
+        {
+            status_t err;
+            CHECK(msg->findInt32("err", &err));
+
+            notifyPrepared(err);
+            break;
+        }
+
+        case LiveSession::kWhatStreamsChanged:
+        {
+            uint32_t changedMask;
+            CHECK(msg->findInt32(
+                        "changedMask", (int32_t *)&changedMask));
+
+            bool audio = changedMask & LiveSession::STREAMTYPE_AUDIO;
+            bool video = changedMask & LiveSession::STREAMTYPE_VIDEO;
+
+            sp<AMessage> reply;
+            CHECK(msg->findMessage("reply", &reply));
+
+            sp<AMessage> notify = dupNotify();
+            notify->setInt32("what", kWhatQueueDecoderShutdown);
+            notify->setInt32("audio", audio);
+            notify->setInt32("video", video);
+            notify->setMessage("reply", reply);
+            notify->post();
+            break;
+        }
+
+        case LiveSession::kWhatBufferingStart:
+        {
+            sp<AMessage> notify = dupNotify();
+            notify->setInt32("what", kWhatPauseOnBufferingStart);
+            notify->post();
+            break;
+        }
+
+        case LiveSession::kWhatBufferingEnd:
+        {
+            sp<AMessage> notify = dupNotify();
+            notify->setInt32("what", kWhatResumeOnBufferingEnd);
+            notify->post();
+            break;
+        }
+
+
+        case LiveSession::kWhatBufferingUpdate:
+        {
+            sp<AMessage> notify = dupNotify();
+            int32_t percentage;
+            CHECK(msg->findInt32("percentage", &percentage));
+            notify->setInt32("what", kWhatBufferingUpdate);
+            notify->setInt32("percentage", percentage);
+            notify->post();
+            break;
+        }
+
+        case LiveSession::kWhatMetadataDetected:
+        {
+            if (!mHasMetadata) {
+                mHasMetadata = true;
+
+                sp<AMessage> notify = dupNotify();
+                // notification without buffer triggers MEDIA2_INFO_METADATA_UPDATE
+                notify->setInt32("what", kWhatTimedMetaData);
+                notify->post();
+            }
+            break;
+        }
+
+        case LiveSession::kWhatError:
+        {
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/HTTPLiveSource.h b/media/libmedia/nuplayer2/HTTPLiveSource.h
new file mode 100644
index 0000000..7b6a312
--- /dev/null
+++ b/media/libmedia/nuplayer2/HTTPLiveSource.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HTTP_LIVE_SOURCE_H_
+
+#define HTTP_LIVE_SOURCE_H_
+
+#include "NuPlayer2.h"
+#include "NuPlayer2Source.h"
+
+#include "LiveSession.h"
+
+namespace android {
+
+struct LiveSession;
+
+struct NuPlayer2::HTTPLiveSource : public NuPlayer2::Source {
+    HTTPLiveSource(
+            const sp<AMessage> &notify,
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+    virtual sp<MetaData> getFormatMeta(bool audio);
+    virtual sp<AMessage> getFormat(bool audio);
+
+    virtual status_t feedMoreTSData();
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual size_t getTrackCount() const;
+    virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
+    virtual ssize_t getSelectedTrack(media_track_type /* type */) const;
+    virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
+    virtual status_t seekTo(
+            int64_t seekTimeUs,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) override;
+
+protected:
+    virtual ~HTTPLiveSource();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum Flags {
+        // Don't log any URLs.
+        kFlagIncognito = 1,
+    };
+
+    enum {
+        kWhatSessionNotify,
+        kWhatFetchSubtitleData,
+        kWhatFetchMetaData,
+    };
+
+    sp<MediaHTTPService> mHTTPService;
+    AString mURL;
+    KeyedVector<String8, String8> mExtraHeaders;
+    uint32_t mFlags;
+    status_t mFinalResult;
+    off64_t mOffset;
+    sp<ALooper> mLiveLooper;
+    sp<LiveSession> mLiveSession;
+    int32_t mFetchSubtitleDataGeneration;
+    int32_t mFetchMetaDataGeneration;
+    bool mHasMetadata;
+    bool mMetadataSelected;
+    BufferingSettings mBufferingSettings;
+
+    void onSessionNotify(const sp<AMessage> &msg);
+    void pollForRawData(
+            const sp<AMessage> &msg, int32_t currentGeneration,
+            LiveSession::StreamType fetchType, int32_t pushWhat);
+
+    DISALLOW_EVIL_CONSTRUCTORS(HTTPLiveSource);
+};
+
+}  // namespace android
+
+#endif  // HTTP_LIVE_SOURCE_H_
diff --git a/media/libstagefright/foundation/AWakeLock.cpp b/media/libmedia/nuplayer2/JWakeLock.cpp
similarity index 85%
copy from media/libstagefright/foundation/AWakeLock.cpp
copy to media/libmedia/nuplayer2/JWakeLock.cpp
index d9277ac..c9a1071 100644
--- a/media/libstagefright/foundation/AWakeLock.cpp
+++ b/media/libmedia/nuplayer2/JWakeLock.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015 The Android Open Source Project
+ * Copyright 2017 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,26 +15,27 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "AWakeLock"
+#define LOG_TAG "JWakeLock"
 #include <utils/Log.h>
 
-#include "ADebug.h"
-#include "AWakeLock.h"
+#include "JWakeLock.h"
 
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <powermanager/PowerManager.h>
 
 
 namespace android {
 
-AWakeLock::AWakeLock() :
+// TODO: Use the Java PowerManager instead of binder.
+JWakeLock::JWakeLock() :
     mPowerManager(NULL),
     mWakeLockToken(NULL),
     mWakeLockCount(0),
     mDeathRecipient(new PMDeathRecipient(this)) {}
 
-AWakeLock::~AWakeLock() {
+JWakeLock::~JWakeLock() {
     if (mPowerManager != NULL) {
         sp<IBinder> binder = IInterface::asBinder(mPowerManager);
         binder->unlinkToDeath(mDeathRecipient);
@@ -42,7 +43,7 @@
     clearPowerManager();
 }
 
-bool AWakeLock::acquire() {
+bool JWakeLock::acquire() {
     if (mWakeLockCount == 0) {
         CHECK(mWakeLockToken == NULL);
         if (mPowerManager == NULL) {
@@ -61,7 +62,7 @@
             int64_t token = IPCThreadState::self()->clearCallingIdentity();
             status_t status = mPowerManager->acquireWakeLock(
                     POWERMANAGER_PARTIAL_WAKE_LOCK,
-                    binder, String16("AWakeLock"), String16("media"));
+                    binder, String16("JWakeLock"), String16("media"));
             IPCThreadState::self()->restoreCallingIdentity(token);
             if (status == NO_ERROR) {
                 mWakeLockToken = binder;
@@ -76,7 +77,7 @@
     return false;
 }
 
-void AWakeLock::release(bool force) {
+void JWakeLock::release(bool force) {
     if (mWakeLockCount == 0) {
         return;
     }
@@ -95,12 +96,12 @@
     }
 }
 
-void AWakeLock::clearPowerManager() {
+void JWakeLock::clearPowerManager() {
     release(true);
     mPowerManager.clear();
 }
 
-void AWakeLock::PMDeathRecipient::binderDied(const wp<IBinder>& who __unused) {
+void JWakeLock::PMDeathRecipient::binderDied(const wp<IBinder>& who __unused) {
     if (mWakeLock != NULL) {
         mWakeLock->clearPowerManager();
     }
diff --git a/media/libmedia/nuplayer2/JWakeLock.h b/media/libmedia/nuplayer2/JWakeLock.h
new file mode 100644
index 0000000..eace87e
--- /dev/null
+++ b/media/libmedia/nuplayer2/JWakeLock.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef J_WAKELOCK_H_
+#define J_WAKELOCK_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <powermanager/IPowerManager.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class JWakeLock : public RefBase {
+
+public:
+    JWakeLock();
+
+    // NOTE: acquire and release are not thread safe
+
+    // returns true if wakelock was acquired
+    bool acquire();
+    void release(bool force = false);
+
+    virtual ~JWakeLock();
+
+private:
+    sp<IPowerManager> mPowerManager;
+    sp<IBinder>       mWakeLockToken;
+    uint32_t          mWakeLockCount;
+
+    class PMDeathRecipient : public IBinder::DeathRecipient {
+    public:
+        explicit PMDeathRecipient(JWakeLock *wakeLock) : mWakeLock(wakeLock) {}
+        virtual ~PMDeathRecipient() {}
+
+        // IBinder::DeathRecipient
+        virtual void binderDied(const wp<IBinder> &who);
+
+    private:
+        PMDeathRecipient(const PMDeathRecipient&);
+        PMDeathRecipient& operator= (const PMDeathRecipient&);
+
+        JWakeLock *mWakeLock;
+    };
+
+    const sp<PMDeathRecipient> mDeathRecipient;
+
+    void clearPowerManager();
+
+    DISALLOW_EVIL_CONSTRUCTORS(JWakeLock);
+};
+
+}  // namespace android
+
+#endif  // J_WAKELOCK_H_
diff --git a/media/libstagefright/matroska/MODULE_LICENSE_APACHE2 b/media/libmedia/nuplayer2/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/matroska/MODULE_LICENSE_APACHE2
rename to media/libmedia/nuplayer2/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/matroska/NOTICE b/media/libmedia/nuplayer2/NOTICE
similarity index 100%
rename from media/libstagefright/matroska/NOTICE
rename to media/libmedia/nuplayer2/NOTICE
diff --git a/media/libmedia/nuplayer2/NuPlayer2.cpp b/media/libmedia/nuplayer2/NuPlayer2.cpp
new file mode 100644
index 0000000..2745219
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2.cpp
@@ -0,0 +1,2999 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2"
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+
+#include "NuPlayer2.h"
+
+#include "HTTPLiveSource.h"
+#include "NuPlayer2CCDecoder.h"
+#include "NuPlayer2Decoder.h"
+#include "NuPlayer2DecoderBase.h"
+#include "NuPlayer2DecoderPassThrough.h"
+#include "NuPlayer2Driver.h"
+#include "NuPlayer2Renderer.h"
+#include "NuPlayer2Source.h"
+#include "RTSPSource.h"
+#include "StreamingSource.h"
+#include "GenericSource.h"
+#include "TextDescriptions.h"
+
+#include "ATSParser.h"
+
+#include <cutils/properties.h>
+
+#include <media/AudioParameter.h>
+#include <media/AudioResamplerPublic.h>
+#include <media/AVSyncSettings.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/NdkWrapper.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+
+#include "ESDS.h"
+#include <media/stagefright/Utils.h>
+
+#include <system/window.h>
+
+namespace android {
+
+static status_t sendMetaDataToHal(sp<MediaPlayer2Base::AudioSink>& sink,
+                                  const sp<MetaData>& meta) {
+    int32_t sampleRate = 0;
+    int32_t bitRate = 0;
+    int32_t channelMask = 0;
+    int32_t delaySamples = 0;
+    int32_t paddingSamples = 0;
+
+    AudioParameter param = AudioParameter();
+
+    if (meta->findInt32(kKeySampleRate, &sampleRate)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_SAMPLE_RATE), sampleRate);
+    }
+    if (meta->findInt32(kKeyChannelMask, &channelMask)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_NUM_CHANNEL), channelMask);
+    }
+    if (meta->findInt32(kKeyBitRate, &bitRate)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE), bitRate);
+    }
+    if (meta->findInt32(kKeyEncoderDelay, &delaySamples)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES), delaySamples);
+    }
+    if (meta->findInt32(kKeyEncoderPadding, &paddingSamples)) {
+        param.addInt(String8(AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES), paddingSamples);
+    }
+
+    ALOGV("sendMetaDataToHal: bitRate %d, sampleRate %d, chanMask %d,"
+          "delaySample %d, paddingSample %d", bitRate, sampleRate,
+          channelMask, delaySamples, paddingSamples);
+
+    sink->setParameters(param.toString());
+    return OK;
+}
+
+
+struct NuPlayer2::Action : public RefBase {
+    Action() {}
+
+    virtual void execute(NuPlayer2 *player) = 0;
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(Action);
+};
+
+struct NuPlayer2::SeekAction : public Action {
+    explicit SeekAction(int64_t seekTimeUs, MediaPlayer2SeekMode mode)
+        : mSeekTimeUs(seekTimeUs),
+          mMode(mode) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        player->performSeek(mSeekTimeUs, mMode);
+    }
+
+private:
+    int64_t mSeekTimeUs;
+    MediaPlayer2SeekMode mMode;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
+};
+
+struct NuPlayer2::ResumeDecoderAction : public Action {
+    explicit ResumeDecoderAction(bool needNotify)
+        : mNeedNotify(needNotify) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        player->performResumeDecoders(mNeedNotify);
+    }
+
+private:
+    bool mNeedNotify;
+
+    DISALLOW_EVIL_CONSTRUCTORS(ResumeDecoderAction);
+};
+
+struct NuPlayer2::SetSurfaceAction : public Action {
+    explicit SetSurfaceAction(const sp<ANativeWindowWrapper> &nww)
+        : mNativeWindow(nww) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        player->performSetSurface(mNativeWindow);
+    }
+
+private:
+    sp<ANativeWindowWrapper> mNativeWindow;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction);
+};
+
+struct NuPlayer2::FlushDecoderAction : public Action {
+    FlushDecoderAction(FlushCommand audio, FlushCommand video)
+        : mAudio(audio),
+          mVideo(video) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        player->performDecoderFlush(mAudio, mVideo);
+    }
+
+private:
+    FlushCommand mAudio;
+    FlushCommand mVideo;
+
+    DISALLOW_EVIL_CONSTRUCTORS(FlushDecoderAction);
+};
+
+struct NuPlayer2::PostMessageAction : public Action {
+    explicit PostMessageAction(const sp<AMessage> &msg)
+        : mMessage(msg) {
+    }
+
+    virtual void execute(NuPlayer2 *) {
+        mMessage->post();
+    }
+
+private:
+    sp<AMessage> mMessage;
+
+    DISALLOW_EVIL_CONSTRUCTORS(PostMessageAction);
+};
+
+// Use this if there's no state necessary to save in order to execute
+// the action.
+struct NuPlayer2::SimpleAction : public Action {
+    typedef void (NuPlayer2::*ActionFunc)();
+
+    explicit SimpleAction(ActionFunc func)
+        : mFunc(func) {
+    }
+
+    virtual void execute(NuPlayer2 *player) {
+        (player->*mFunc)();
+    }
+
+private:
+    ActionFunc mFunc;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SimpleAction);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+NuPlayer2::NuPlayer2(pid_t pid, const sp<MediaClock> &mediaClock)
+    : mUIDValid(false),
+      mPID(pid),
+      mMediaClock(mediaClock),
+      mSourceFlags(0),
+      mOffloadAudio(false),
+      mAudioDecoderGeneration(0),
+      mVideoDecoderGeneration(0),
+      mRendererGeneration(0),
+      mLastStartedPlayingTimeNs(0),
+      mPreviousSeekTimeUs(0),
+      mAudioEOS(false),
+      mVideoEOS(false),
+      mScanSourcesPending(false),
+      mScanSourcesGeneration(0),
+      mPollDurationGeneration(0),
+      mTimedTextGeneration(0),
+      mFlushingAudio(NONE),
+      mFlushingVideo(NONE),
+      mResumePending(false),
+      mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
+      mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
+      mVideoFpsHint(-1.f),
+      mStarted(false),
+      mPrepared(false),
+      mResetting(false),
+      mSourceStarted(false),
+      mAudioDecoderError(false),
+      mVideoDecoderError(false),
+      mPaused(false),
+      mPausedByClient(true),
+      mPausedForBuffering(false),
+      mIsDrmProtected(false),
+      mDataSourceType(DATA_SOURCE_TYPE_NONE) {
+    CHECK(mediaClock != NULL);
+    clearFlushComplete();
+}
+
+NuPlayer2::~NuPlayer2() {
+}
+
+void NuPlayer2::setUID(uid_t uid) {
+    mUIDValid = true;
+    mUID = uid;
+}
+
+void NuPlayer2::setDriver(const wp<NuPlayer2Driver> &driver) {
+    mDriver = driver;
+}
+
+void NuPlayer2::setDataSourceAsync(const sp<IStreamSource> &source) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+
+    msg->setObject("source", new StreamingSource(notify, source));
+    msg->post();
+    mDataSourceType = DATA_SOURCE_TYPE_STREAM;
+}
+
+static bool IsHTTPLiveURL(const char *url) {
+    if (!strncasecmp("http://", url, 7)
+            || !strncasecmp("https://", url, 8)
+            || !strncasecmp("file://", url, 7)) {
+        size_t len = strlen(url);
+        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
+            return true;
+        }
+
+        if (strstr(url, "m3u8")) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+void NuPlayer2::setDataSourceAsync(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
+
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+    size_t len = strlen(url);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+
+    sp<Source> source;
+    if (IsHTTPLiveURL(url)) {
+        source = new HTTPLiveSource(notify, httpService, url, headers);
+        ALOGV("setDataSourceAsync HTTPLiveSource %s", url);
+        mDataSourceType = DATA_SOURCE_TYPE_HTTP_LIVE;
+    } else if (!strncasecmp(url, "rtsp://", 7)) {
+        source = new RTSPSource(
+                notify, httpService, url, headers, mUIDValid, mUID);
+        ALOGV("setDataSourceAsync RTSPSource %s", url);
+        mDataSourceType = DATA_SOURCE_TYPE_RTSP;
+    } else if ((!strncasecmp(url, "http://", 7)
+                || !strncasecmp(url, "https://", 8))
+                    && ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
+                    || strstr(url, ".sdp?"))) {
+        source = new RTSPSource(
+                notify, httpService, url, headers, mUIDValid, mUID, true);
+        ALOGV("setDataSourceAsync RTSPSource http/https/.sdp %s", url);
+        mDataSourceType = DATA_SOURCE_TYPE_RTSP;
+    } else {
+        ALOGV("setDataSourceAsync GenericSource %s", url);
+
+        sp<GenericSource> genericSource =
+                new GenericSource(notify, mUIDValid, mUID, mMediaClock);
+
+        status_t err = genericSource->setDataSource(httpService, url, headers);
+
+        if (err == OK) {
+            source = genericSource;
+        } else {
+            ALOGE("Failed to set data source!");
+        }
+
+        // regardless of success/failure
+        mDataSourceType = DATA_SOURCE_TYPE_GENERIC_URL;
+    }
+    msg->setObject("source", source);
+    msg->post();
+}
+
+void NuPlayer2::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+
+    sp<GenericSource> source =
+            new GenericSource(notify, mUIDValid, mUID, mMediaClock);
+
+    ALOGV("setDataSourceAsync fd %d/%lld/%lld source: %p",
+            fd, (long long)offset, (long long)length, source.get());
+
+    status_t err = source->setDataSource(fd, offset, length);
+
+    if (err != OK) {
+        ALOGE("Failed to set data source!");
+        source = NULL;
+    }
+
+    msg->setObject("source", source);
+    msg->post();
+    mDataSourceType = DATA_SOURCE_TYPE_GENERIC_FD;
+}
+
+void NuPlayer2::setDataSourceAsync(const sp<DataSource> &dataSource) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+
+    sp<GenericSource> source = new GenericSource(notify, mUIDValid, mUID, mMediaClock);
+    status_t err = source->setDataSource(dataSource);
+
+    if (err != OK) {
+        ALOGE("Failed to set data source!");
+        source = NULL;
+    }
+
+    msg->setObject("source", source);
+    msg->post();
+    mDataSourceType = DATA_SOURCE_TYPE_MEDIA;
+}
+
+status_t NuPlayer2::getBufferingSettings(
+        BufferingSettings *buffering /* nonnull */) {
+    sp<AMessage> msg = new AMessage(kWhatGetBufferingSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, buffering);
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2::setBufferingSettings(const BufferingSettings& buffering) {
+    sp<AMessage> msg = new AMessage(kWhatSetBufferingSettings, this);
+    writeToAMessage(msg, buffering);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+void NuPlayer2::prepareAsync() {
+    ALOGV("prepareAsync");
+
+    (new AMessage(kWhatPrepare, this))->post();
+}
+
+void NuPlayer2::setVideoSurfaceTextureAsync(const sp<ANativeWindowWrapper> &nww) {
+    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, this);
+
+    if (nww == NULL || nww->getANativeWindow() == NULL) {
+        msg->setObject("surface", NULL);
+    } else {
+        msg->setObject("surface", nww);
+    }
+
+    msg->post();
+}
+
+void NuPlayer2::setAudioSink(const sp<MediaPlayer2Base::AudioSink> &sink) {
+    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, this);
+    msg->setObject("sink", sink);
+    msg->post();
+}
+
+void NuPlayer2::start() {
+    (new AMessage(kWhatStart, this))->post();
+}
+
+status_t NuPlayer2::setPlaybackSettings(const AudioPlaybackRate &rate) {
+    // Do some cursory validation of the settings here. Audio modes are
+    // only validated when set on the audiosink.
+    if ((rate.mSpeed != 0.f && rate.mSpeed < AUDIO_TIMESTRETCH_SPEED_MIN)
+            || rate.mSpeed > AUDIO_TIMESTRETCH_SPEED_MAX
+            || rate.mPitch < AUDIO_TIMESTRETCH_SPEED_MIN
+            || rate.mPitch > AUDIO_TIMESTRETCH_SPEED_MAX) {
+        return BAD_VALUE;
+    }
+    sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
+    writeToAMessage(msg, rate);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
+    sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, rate);
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
+    sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
+    writeToAMessage(msg, sync, videoFpsHint);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::getSyncSettings(
+        AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
+    sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, sync, videoFps);
+        }
+    }
+    return err;
+}
+
+void NuPlayer2::pause() {
+    (new AMessage(kWhatPause, this))->post();
+}
+
+void NuPlayer2::resetAsync() {
+    sp<Source> source;
+    {
+        Mutex::Autolock autoLock(mSourceLock);
+        source = mSource;
+    }
+
+    if (source != NULL) {
+        // During a reset, the data source might already be unresponsive, so we need to
+        // disconnect explicitly so that reads exit promptly.
+        // We can't queue the disconnect request to the looper, as it might be
+        // queued behind a stuck read and never get processed.
+        // Disconnecting outside the looper allows the pending reads to exit
+        // (either successfully or with an error).
+        source->disconnect();
+    }
+
+    (new AMessage(kWhatReset, this))->post();
+}
+
+status_t NuPlayer2::notifyAt(int64_t mediaTimeUs) {
+    sp<AMessage> notify = new AMessage(kWhatNotifyTime, this);
+    notify->setInt64("timerUs", mediaTimeUs);
+    mMediaClock->addTimer(notify, mediaTimeUs);
+    return OK;
+}
+
+void NuPlayer2::seekToAsync(int64_t seekTimeUs, MediaPlayer2SeekMode mode, bool needNotify) {
+    sp<AMessage> msg = new AMessage(kWhatSeek, this);
+    msg->setInt64("seekTimeUs", seekTimeUs);
+    msg->setInt32("mode", mode);
+    msg->setInt32("needNotify", needNotify);
+    msg->post();
+}
+
+
+void NuPlayer2::writeTrackInfo(
+        Parcel* reply, const sp<AMessage>& format) const {
+    if (format == NULL) {
+        ALOGE("NULL format");
+        return;
+    }
+    int32_t trackType;
+    if (!format->findInt32("type", &trackType)) {
+        ALOGE("no track type");
+        return;
+    }
+
+    AString mime;
+    if (!format->findString("mime", &mime)) {
+        // Java MediaPlayer only uses the mimetype for subtitle and timedtext tracks.
+        // If we can't find the mimetype here, it won't be needed on the Java end.
+        // We still write a placeholder mime to keep the (de)serialization logic simple.
+        if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+            mime = "audio/";
+        } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
+            mime = "video/";
+        } else {
+            ALOGE("unknown track type: %d", trackType);
+            return;
+        }
+    }
+
+    AString lang;
+    if (!format->findString("language", &lang)) {
+        ALOGE("no language");
+        return;
+    }
+
+    reply->writeInt32(2); // write something non-zero
+    reply->writeInt32(trackType);
+    reply->writeString16(String16(mime.c_str()));
+    reply->writeString16(String16(lang.c_str()));
+
+    if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+        int32_t isAuto, isDefault, isForced;
+        CHECK(format->findInt32("auto", &isAuto));
+        CHECK(format->findInt32("default", &isDefault));
+        CHECK(format->findInt32("forced", &isForced));
+
+        reply->writeInt32(isAuto);
+        reply->writeInt32(isDefault);
+        reply->writeInt32(isForced);
+    }
+}
+
+void NuPlayer2::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatSetDataSource:
+        {
+            ALOGV("kWhatSetDataSource");
+
+            CHECK(mSource == NULL);
+
+            status_t err = OK;
+            sp<RefBase> obj;
+            CHECK(msg->findObject("source", &obj));
+            if (obj != NULL) {
+                Mutex::Autolock autoLock(mSourceLock);
+                mSource = static_cast<Source *>(obj.get());
+            } else {
+                err = UNKNOWN_ERROR;
+            }
+
+            CHECK(mDriver != NULL);
+            sp<NuPlayer2Driver> driver = mDriver.promote();
+            if (driver != NULL) {
+                driver->notifySetDataSourceCompleted(err);
+            }
+            break;
+        }
+
+        case kWhatGetBufferingSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            ALOGV("kWhatGetBufferingSettings");
+            BufferingSettings buffering;
+            status_t err = OK;
+            if (mSource != NULL) {
+                err = mSource->getBufferingSettings(&buffering);
+            } else {
+                err = INVALID_OPERATION;
+            }
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, buffering);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatSetBufferingSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            ALOGV("kWhatSetBufferingSettings");
+            BufferingSettings buffering;
+            readFromAMessage(msg, &buffering);
+            status_t err = OK;
+            if (mSource != NULL) {
+                err = mSource->setBufferingSettings(buffering);
+            } else {
+                err = INVALID_OPERATION;
+            }
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatPrepare:
+        {
+            ALOGV("onMessageReceived kWhatPrepare");
+
+            mSource->prepareAsync();
+            break;
+        }
+
+        case kWhatGetTrackInfo:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            Parcel* reply;
+            CHECK(msg->findPointer("reply", (void**)&reply));
+
+            size_t inbandTracks = 0;
+            if (mSource != NULL) {
+                inbandTracks = mSource->getTrackCount();
+            }
+
+            size_t ccTracks = 0;
+            if (mCCDecoder != NULL) {
+                ccTracks = mCCDecoder->getTrackCount();
+            }
+
+            // total track count
+            reply->writeInt32(inbandTracks + ccTracks);
+
+            // write inband tracks
+            for (size_t i = 0; i < inbandTracks; ++i) {
+                writeTrackInfo(reply, mSource->getTrackInfo(i));
+            }
+
+            // write CC track
+            for (size_t i = 0; i < ccTracks; ++i) {
+                writeTrackInfo(reply, mCCDecoder->getTrackInfo(i));
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetSelectedTrack:
+        {
+            status_t err = INVALID_OPERATION;
+            if (mSource != NULL) {
+                err = OK;
+
+                int32_t type32;
+                CHECK(msg->findInt32("type", (int32_t*)&type32));
+                media_track_type type = (media_track_type)type32;
+                ssize_t selectedTrack = mSource->getSelectedTrack(type);
+
+                Parcel* reply;
+                CHECK(msg->findPointer("reply", (void**)&reply));
+                reply->writeInt32(selectedTrack);
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatSelectTrack:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            size_t trackIndex;
+            int32_t select;
+            int64_t timeUs;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK(msg->findInt32("select", &select));
+            CHECK(msg->findInt64("timeUs", &timeUs));
+
+            status_t err = INVALID_OPERATION;
+
+            size_t inbandTracks = 0;
+            if (mSource != NULL) {
+                inbandTracks = mSource->getTrackCount();
+            }
+            size_t ccTracks = 0;
+            if (mCCDecoder != NULL) {
+                ccTracks = mCCDecoder->getTrackCount();
+            }
+
+            if (trackIndex < inbandTracks) {
+                err = mSource->selectTrack(trackIndex, select, timeUs);
+
+                if (!select && err == OK) {
+                    int32_t type;
+                    sp<AMessage> info = mSource->getTrackInfo(trackIndex);
+                    if (info != NULL
+                            && info->findInt32("type", &type)
+                            && type == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+                        ++mTimedTextGeneration;
+                    }
+                }
+            } else {
+                trackIndex -= inbandTracks;
+
+                if (trackIndex < ccTracks) {
+                    err = mCCDecoder->selectTrack(trackIndex, select);
+                }
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatPollDuration:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mPollDurationGeneration) {
+                // stale
+                break;
+            }
+
+            int64_t durationUs;
+            if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
+                sp<NuPlayer2Driver> driver = mDriver.promote();
+                if (driver != NULL) {
+                    driver->notifyDuration(durationUs);
+                }
+            }
+
+            msg->post(1000000ll);  // poll again in a second.
+            break;
+        }
+
+        case kWhatSetVideoSurface:
+        {
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("surface", &obj));
+            sp<ANativeWindowWrapper> nww = static_cast<ANativeWindowWrapper *>(obj.get());
+
+            ALOGD("onSetVideoSurface(%p, %s video decoder)",
+                    (nww == NULL ? NULL : nww->getANativeWindow()),
+                    (mSource != NULL && mStarted && mSource->getFormat(false /* audio */) != NULL
+                            && mVideoDecoder != NULL) ? "have" : "no");
+
+            // Need to check mStarted before calling mSource->getFormat because NuPlayer2 might
+            // be in the preparing state and the call could take a long time.
+            // When mStarted is true, mSource must have been set.
+            if (mSource == NULL || !mStarted || mSource->getFormat(false /* audio */) == NULL
+                    // NOTE: mVideoDecoder's mNativeWindow is always non-null
+                    || (mVideoDecoder != NULL && mVideoDecoder->setVideoSurface(nww) == OK)) {
+                performSetSurface(nww);
+                break;
+            }
+
+            mDeferredActions.push_back(
+                    new FlushDecoderAction(
+                            (obj != NULL ? FLUSH_CMD_FLUSH : FLUSH_CMD_NONE) /* audio */,
+                                           FLUSH_CMD_SHUTDOWN /* video */));
+
+            mDeferredActions.push_back(new SetSurfaceAction(nww));
+
+            if (obj != NULL) {
+                if (mStarted) {
+                    // Issue a seek to refresh the video screen only if started; otherwise
+                    // the extractor may not yet be started and will assert.
+                    // If the video decoder is not set (perhaps audio only in this case)
+                    // do not perform a seek as it is not needed.
+                    int64_t currentPositionUs = 0;
+                    if (getCurrentPosition(&currentPositionUs) == OK) {
+                        mDeferredActions.push_back(
+                                new SeekAction(currentPositionUs,
+                                        MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */));
+                    }
+                }
+
+                // If there is a new surface texture, instantiate decoders
+                // again if possible.
+                mDeferredActions.push_back(
+                        new SimpleAction(&NuPlayer2::performScanSources));
+
+                // After a flush without shutdown, decoder is paused.
+                // Don't resume it until source seek is done, otherwise it could
+                // start pulling stale data too soon.
+                mDeferredActions.push_back(
+                        new ResumeDecoderAction(false /* needNotify */));
+            }
+
+            processDeferredActions();
+            break;
+        }
+
+        case kWhatSetAudioSink:
+        {
+            ALOGV("kWhatSetAudioSink");
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("sink", &obj));
+
+            mAudioSink = static_cast<MediaPlayer2Base::AudioSink *>(obj.get());
+            break;
+        }
+
+        case kWhatStart:
+        {
+            ALOGV("kWhatStart");
+            if (mStarted) {
+                // do not resume yet if the source is still buffering
+                if (!mPausedForBuffering) {
+                    onResume();
+                }
+            } else {
+                onStart();
+            }
+            mPausedByClient = false;
+            break;
+        }
+
+        case kWhatConfigPlayback:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AudioPlaybackRate rate /* sanitized */;
+            readFromAMessage(msg, &rate);
+            status_t err = OK;
+            if (mRenderer != NULL) {
+                // AudioSink allows only 1.f and 0.f for offload mode.
+                // For other speed, switch to non-offload mode.
+                if (mOffloadAudio && ((rate.mSpeed != 0.f && rate.mSpeed != 1.f)
+                        || rate.mPitch != 1.f)) {
+                    int64_t currentPositionUs;
+                    if (getCurrentPosition(&currentPositionUs) != OK) {
+                        currentPositionUs = mPreviousSeekTimeUs;
+                    }
+
+                    // Set mPlaybackSettings so that the new audio decoder can
+                    // be created correctly.
+                    mPlaybackSettings = rate;
+                    if (!mPaused) {
+                        mRenderer->pause();
+                    }
+                    restartAudio(
+                            currentPositionUs, true /* forceNonOffload */,
+                            true /* needsToCreateAudioDecoder */);
+                    if (!mPaused) {
+                        mRenderer->resume();
+                    }
+                }
+
+                err = mRenderer->setPlaybackSettings(rate);
+            }
+            if (err == OK) {
+                if (rate.mSpeed == 0.f) {
+                    onPause();
+                    mPausedByClient = true;
+                    // save all other settings (using non-paused speed)
+                    // so we can restore them on start
+                    AudioPlaybackRate newRate = rate;
+                    newRate.mSpeed = mPlaybackSettings.mSpeed;
+                    mPlaybackSettings = newRate;
+                } else { /* rate.mSpeed != 0.f */
+                    mPlaybackSettings = rate;
+                    if (mStarted) {
+                        // do not resume yet if the source is still buffering
+                        if (!mPausedForBuffering) {
+                            onResume();
+                        }
+                    } else if (mPrepared) {
+                        onStart();
+                    }
+
+                    mPausedByClient = false;
+                }
+            }
+
+            if (mVideoDecoder != NULL) {
+                sp<AMessage> params = new AMessage();
+                params->setFloat("playback-speed", mPlaybackSettings.mSpeed);
+                mVideoDecoder->setParameters(params);
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetPlaybackSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AudioPlaybackRate rate = mPlaybackSettings;
+            status_t err = OK;
+            if (mRenderer != NULL) {
+                err = mRenderer->getPlaybackSettings(&rate);
+            }
+            if (err == OK) {
+                // Get the playback settings used by the renderer, as they may be
+                // slightly off because the audiosink does not apply small changes.
+                mPlaybackSettings = rate;
+                if (mPaused) {
+                    rate.mSpeed = 0.f;
+                }
+            }
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, rate);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatConfigSync:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            ALOGV("kWhatConfigSync");
+            AVSyncSettings sync;
+            float videoFpsHint;
+            readFromAMessage(msg, &sync, &videoFpsHint);
+            status_t err = OK;
+            if (mRenderer != NULL) {
+                err = mRenderer->setSyncSettings(sync, videoFpsHint);
+            }
+            if (err == OK) {
+                mSyncSettings = sync;
+                mVideoFpsHint = videoFpsHint;
+            }
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetSyncSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AVSyncSettings sync = mSyncSettings;
+            float videoFps = mVideoFpsHint;
+            status_t err = OK;
+            if (mRenderer != NULL) {
+                err = mRenderer->getSyncSettings(&sync, &videoFps);
+                if (err == OK) {
+                    mSyncSettings = sync;
+                    mVideoFpsHint = videoFps;
+                }
+            }
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, sync, videoFps);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatScanSources:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+            if (generation != mScanSourcesGeneration) {
+                // Drop obsolete msg.
+                break;
+            }
+
+            mScanSourcesPending = false;
+
+            ALOGV("scanning sources haveAudio=%d, haveVideo=%d",
+                 mAudioDecoder != NULL, mVideoDecoder != NULL);
+
+            bool hadAnySourcesBefore =
+                (mAudioDecoder != NULL) || (mVideoDecoder != NULL);
+            bool rescan = false;
+
+            // initialize video before audio because successful initialization of
+            // video may change deep buffer mode of audio.
+            if (mNativeWindow != NULL && mNativeWindow->getANativeWindow() != NULL) {
+                if (instantiateDecoder(false, &mVideoDecoder) == -EWOULDBLOCK) {
+                    rescan = true;
+                }
+            }
+
+            // Don't try to re-open audio sink if there's an existing decoder.
+            if (mAudioSink != NULL && mAudioDecoder == NULL) {
+                if (instantiateDecoder(true, &mAudioDecoder) == -EWOULDBLOCK) {
+                    rescan = true;
+                }
+            }
+
+            if (!hadAnySourcesBefore
+                    && (mAudioDecoder != NULL || mVideoDecoder != NULL)) {
+                // This is the first time we've found anything playable.
+
+                if (mSourceFlags & Source::FLAG_DYNAMIC_DURATION) {
+                    schedulePollDuration();
+                }
+            }
+
+            status_t err;
+            if ((err = mSource->feedMoreTSData()) != OK) {
+                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
+                    // We're not currently decoding anything (no audio or
+                    // video tracks found) and we just ran out of input data.
+
+                    if (err == ERROR_END_OF_STREAM) {
+                        notifyListener(MEDIA2_PLAYBACK_COMPLETE, 0, 0);
+                    } else {
+                        notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+                    }
+                }
+                break;
+            }
+
+            if (rescan) {
+                msg->post(100000ll);
+                mScanSourcesPending = true;
+            }
+            break;
+        }
+
+        case kWhatVideoNotify:
+        case kWhatAudioNotify:
+        {
+            bool audio = msg->what() == kWhatAudioNotify;
+
+            int32_t currentDecoderGeneration =
+                (audio ? mAudioDecoderGeneration : mVideoDecoderGeneration);
+            int32_t requesterGeneration = currentDecoderGeneration - 1;
+            CHECK(msg->findInt32("generation", &requesterGeneration));
+
+            if (requesterGeneration != currentDecoderGeneration) {
+                ALOGV("got message from old %s decoder, generation(%d:%d)",
+                        audio ? "audio" : "video", requesterGeneration,
+                        currentDecoderGeneration);
+                sp<AMessage> reply;
+                if (!(msg->findMessage("reply", &reply))) {
+                    return;
+                }
+
+                reply->setInt32("err", INFO_DISCONTINUITY);
+                reply->post();
+                return;
+            }
+
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == DecoderBase::kWhatInputDiscontinuity) {
+                int32_t formatChange;
+                CHECK(msg->findInt32("formatChange", &formatChange));
+
+                ALOGV("%s discontinuity: formatChange %d",
+                        audio ? "audio" : "video", formatChange);
+
+                if (formatChange) {
+                    mDeferredActions.push_back(
+                            new FlushDecoderAction(
+                                audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+                                audio ? FLUSH_CMD_NONE : FLUSH_CMD_SHUTDOWN));
+                }
+
+                mDeferredActions.push_back(
+                        new SimpleAction(
+                                &NuPlayer2::performScanSources));
+
+                processDeferredActions();
+            } else if (what == DecoderBase::kWhatEOS) {
+                int32_t err;
+                CHECK(msg->findInt32("err", &err));
+
+                if (err == ERROR_END_OF_STREAM) {
+                    ALOGV("got %s decoder EOS", audio ? "audio" : "video");
+                } else {
+                    ALOGV("got %s decoder EOS w/ error %d",
+                         audio ? "audio" : "video",
+                         err);
+                }
+
+                mRenderer->queueEOS(audio, err);
+            } else if (what == DecoderBase::kWhatFlushCompleted) {
+                ALOGV("decoder %s flush completed", audio ? "audio" : "video");
+
+                handleFlushComplete(audio, true /* isDecoder */);
+                finishFlushIfPossible();
+            } else if (what == DecoderBase::kWhatVideoSizeChanged) {
+                sp<AMessage> format;
+                CHECK(msg->findMessage("format", &format));
+
+                sp<AMessage> inputFormat =
+                        mSource->getFormat(false /* audio */);
+
+                setVideoScalingMode(mVideoScalingMode);
+                updateVideoSize(inputFormat, format);
+            } else if (what == DecoderBase::kWhatShutdownCompleted) {
+                ALOGV("%s shutdown completed", audio ? "audio" : "video");
+                if (audio) {
+                    mAudioDecoder.clear();
+                    mAudioDecoderError = false;
+                    ++mAudioDecoderGeneration;
+
+                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
+                    mFlushingAudio = SHUT_DOWN;
+                } else {
+                    mVideoDecoder.clear();
+                    mVideoDecoderError = false;
+                    ++mVideoDecoderGeneration;
+
+                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
+                    mFlushingVideo = SHUT_DOWN;
+                }
+
+                finishFlushIfPossible();
+            } else if (what == DecoderBase::kWhatResumeCompleted) {
+                finishResume();
+            } else if (what == DecoderBase::kWhatError) {
+                status_t err;
+                if (!msg->findInt32("err", &err) || err == OK) {
+                    err = UNKNOWN_ERROR;
+                }
+
+                // Decoder errors can be due to Source (e.g. from streaming),
+                // or from decoding corrupted bitstreams, or from other decoder
+                // MediaCodec operations (e.g. from an ongoing reset or seek).
+                // They may also be due to openAudioSink failure at
+                // decoder start or after a format change.
+                //
+                // We try to gracefully shut down the affected decoder if possible,
+                // rather than trying to force the shutdown with something
+                // similar to performReset(). This method can lead to a hang
+                // if MediaCodec functions block after an error, but they should
+                // typically return INVALID_OPERATION instead of blocking.
+
+                FlushStatus *flushing = audio ? &mFlushingAudio : &mFlushingVideo;
+                ALOGE("received error(%#x) from %s decoder, flushing(%d), now shutting down",
+                        err, audio ? "audio" : "video", *flushing);
+
+                switch (*flushing) {
+                    case NONE:
+                        mDeferredActions.push_back(
+                                new FlushDecoderAction(
+                                    audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+                                    audio ? FLUSH_CMD_NONE : FLUSH_CMD_SHUTDOWN));
+                        processDeferredActions();
+                        break;
+                    case FLUSHING_DECODER:
+                        *flushing = FLUSHING_DECODER_SHUTDOWN; // initiate shutdown after flush.
+                        break; // Wait for flush to complete.
+                    case FLUSHING_DECODER_SHUTDOWN:
+                        break; // Wait for flush to complete.
+                    case SHUTTING_DOWN_DECODER:
+                        break; // Wait for shutdown to complete.
+                    case FLUSHED:
+                        getDecoder(audio)->initiateShutdown(); // In the middle of a seek.
+                        *flushing = SHUTTING_DOWN_DECODER;     // Shut down.
+                        break;
+                    case SHUT_DOWN:
+                        finishFlushIfPossible();  // Should not occur.
+                        break;                    // Finish anyways.
+                }
+                if (mSource != nullptr) {
+                    if (audio) {
+                        if (mVideoDecoderError || mSource->getFormat(false /* audio */) == NULL
+                                || mNativeWindow == NULL || mNativeWindow->getANativeWindow() == NULL
+                                || mVideoDecoder == NULL) {
+                            // When both audio and video have error, or this stream has only audio
+                            // which has error, notify client of error.
+                            notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+                        } else {
+                            // Only audio track has error. Video track could be still good to play.
+                            notifyListener(MEDIA2_INFO, MEDIA2_INFO_PLAY_AUDIO_ERROR, err);
+                        }
+                        mAudioDecoderError = true;
+                    } else {
+                        if (mAudioDecoderError || mSource->getFormat(true /* audio */) == NULL
+                                || mAudioSink == NULL || mAudioDecoder == NULL) {
+                            // When both audio and video have error, or this stream has only video
+                            // which has error, notify client of error.
+                            notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+                        } else {
+                            // Only video track has error. Audio track could be still good to play.
+                            notifyListener(MEDIA2_INFO, MEDIA2_INFO_PLAY_VIDEO_ERROR, err);
+                        }
+                        mVideoDecoderError = true;
+                    }
+                }
+            } else {
+                ALOGV("Unhandled decoder notification %d '%c%c%c%c'.",
+                      what,
+                      what >> 24,
+                      (what >> 16) & 0xff,
+                      (what >> 8) & 0xff,
+                      what & 0xff);
+            }
+
+            break;
+        }
+
+        case kWhatRendererNotify:
+        {
+            int32_t requesterGeneration = mRendererGeneration - 1;
+            CHECK(msg->findInt32("generation", &requesterGeneration));
+            if (requesterGeneration != mRendererGeneration) {
+                ALOGV("got message from old renderer, generation(%d:%d)",
+                        requesterGeneration, mRendererGeneration);
+                return;
+            }
+
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == Renderer::kWhatEOS) {
+                int32_t audio;
+                CHECK(msg->findInt32("audio", &audio));
+
+                int32_t finalResult;
+                CHECK(msg->findInt32("finalResult", &finalResult));
+
+                if (audio) {
+                    mAudioEOS = true;
+                } else {
+                    mVideoEOS = true;
+                }
+
+                if (finalResult == ERROR_END_OF_STREAM) {
+                    ALOGV("reached %s EOS", audio ? "audio" : "video");
+                } else {
+                    ALOGE("%s track encountered an error (%d)",
+                         audio ? "audio" : "video", finalResult);
+
+                    notifyListener(
+                            MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, finalResult);
+                }
+
+                if ((mAudioEOS || mAudioDecoder == NULL)
+                        && (mVideoEOS || mVideoDecoder == NULL)) {
+                    notifyListener(MEDIA2_PLAYBACK_COMPLETE, 0, 0);
+                }
+            } else if (what == Renderer::kWhatFlushComplete) {
+                int32_t audio;
+                CHECK(msg->findInt32("audio", &audio));
+
+                if (audio) {
+                    mAudioEOS = false;
+                } else {
+                    mVideoEOS = false;
+                }
+
+                ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
+                if (audio && (mFlushingAudio == NONE || mFlushingAudio == FLUSHED
+                        || mFlushingAudio == SHUT_DOWN)) {
+                    // Flush has been handled by tear down.
+                    break;
+                }
+                handleFlushComplete(audio, false /* isDecoder */);
+                finishFlushIfPossible();
+            } else if (what == Renderer::kWhatVideoRenderingStart) {
+                notifyListener(MEDIA2_INFO, MEDIA2_INFO_RENDERING_START, 0);
+            } else if (what == Renderer::kWhatMediaRenderingStart) {
+                ALOGV("media rendering started");
+                notifyListener(MEDIA2_STARTED, 0, 0);
+            } else if (what == Renderer::kWhatAudioTearDown) {
+                int32_t reason;
+                CHECK(msg->findInt32("reason", &reason));
+                ALOGV("Tear down audio with reason %d.", reason);
+                if (reason == Renderer::kDueToTimeout && !(mPaused && mOffloadAudio)) {
+                    // TimeoutWhenPaused is only for offload mode.
+                    ALOGW("Received a stale message for teardown.");
+                    break;
+                }
+                int64_t positionUs;
+                if (!msg->findInt64("positionUs", &positionUs)) {
+                    positionUs = mPreviousSeekTimeUs;
+                }
+
+                restartAudio(
+                        positionUs, reason == Renderer::kForceNonOffload /* forceNonOffload */,
+                        reason != Renderer::kDueToTimeout /* needsToCreateAudioDecoder */);
+            }
+            break;
+        }
+
+        case kWhatMoreDataQueued:
+        {
+            break;
+        }
+
+        case kWhatReset:
+        {
+            ALOGV("kWhatReset");
+
+            mResetting = true;
+            stopPlaybackTimer("kWhatReset");
+            stopRebufferingTimer(true);
+
+            mDeferredActions.push_back(
+                    new FlushDecoderAction(
+                        FLUSH_CMD_SHUTDOWN /* audio */,
+                        FLUSH_CMD_SHUTDOWN /* video */));
+
+            mDeferredActions.push_back(
+                    new SimpleAction(&NuPlayer2::performReset));
+
+            processDeferredActions();
+            break;
+        }
+
+        case kWhatNotifyTime:
+        {
+            ALOGV("kWhatNotifyTime");
+            int64_t timerUs;
+            CHECK(msg->findInt64("timerUs", &timerUs));
+
+            notifyListener(MEDIA2_NOTIFY_TIME, timerUs, 0);
+            break;
+        }
+
+        case kWhatSeek:
+        {
+            int64_t seekTimeUs;
+            int32_t mode;
+            int32_t needNotify;
+            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+            CHECK(msg->findInt32("mode", &mode));
+            CHECK(msg->findInt32("needNotify", &needNotify));
+
+            ALOGV("kWhatSeek seekTimeUs=%lld us, mode=%d, needNotify=%d",
+                    (long long)seekTimeUs, mode, needNotify);
+
+            if (!mStarted) {
+                // Seek before the player is started. In order to preview video,
+                // need to start the player and pause it. This branch is called
+                // only once if needed. After the player is started, any seek
+                // operation will go through normal path.
+                // Audio-only cases are handled separately.
+                onStart(seekTimeUs, (MediaPlayer2SeekMode)mode);
+                if (mStarted) {
+                    onPause();
+                    mPausedByClient = true;
+                }
+                if (needNotify) {
+                    notifyDriverSeekComplete();
+                }
+                break;
+            }
+
+            mDeferredActions.push_back(
+                    new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
+                                           FLUSH_CMD_FLUSH /* video */));
+
+            mDeferredActions.push_back(
+                    new SeekAction(seekTimeUs, (MediaPlayer2SeekMode)mode));
+
+            // After a flush without shutdown, decoder is paused.
+            // Don't resume it until source seek is done, otherwise it could
+            // start pulling stale data too soon.
+            mDeferredActions.push_back(
+                    new ResumeDecoderAction(needNotify));
+
+            processDeferredActions();
+            break;
+        }
+
+        case kWhatPause:
+        {
+            onPause();
+            mPausedByClient = true;
+            break;
+        }
+
+        case kWhatSourceNotify:
+        {
+            onSourceNotify(msg);
+            break;
+        }
+
+        case kWhatClosedCaptionNotify:
+        {
+            onClosedCaptionNotify(msg);
+            break;
+        }
+
+        case kWhatPrepareDrm:
+        {
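+            // The caller in prepareDrm() is blocked in postAndAwaitResponse();
+            // reply with the resulting status.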
+            status_t status = onPrepareDrm(msg);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("status", status);
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatReleaseDrm:
+        {
+            status_t status = onReleaseDrm();
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("status", status);
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer2::onResume() {
+    if (!mPaused || mResetting) {
+        ALOGD_IF(mResetting, "resetting, onResume discarded");
+        return;
+    }
+    mPaused = false;
+    if (mSource != NULL) {
+        mSource->resume();
+    } else {
+        ALOGW("resume called when source is gone or not set");
+    }
+    // |mAudioDecoder| may have been released due to the pause timeout, so re-create it if
+    // needed.
+    if (audioDecoderStillNeeded() && mAudioDecoder == NULL) {
+        instantiateDecoder(true /* audio */, &mAudioDecoder);
+    }
+    if (mRenderer != NULL) {
+        mRenderer->resume();
+    } else {
+        ALOGW("resume called when renderer is gone or not set");
+    }
+
+    startPlaybackTimer("onresume");
+}
+
+status_t NuPlayer2::onInstantiateSecureDecoders() {
+    status_t err;
+    if (!(mSourceFlags & Source::FLAG_SECURE)) {
+        return BAD_TYPE;
+    }
+
+    if (mRenderer != NULL) {
+        ALOGE("renderer should not be set when instantiating secure decoders");
+        return UNKNOWN_ERROR;
+    }
+
+    // TRICKY: We rely on mRenderer being null, so that the decoder does not start requesting
+    // data on instantiation.
+    if (mNativeWindow != NULL && mNativeWindow->getANativeWindow() != NULL) {
+        err = instantiateDecoder(false, &mVideoDecoder);
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    if (mAudioSink != NULL) {
+        err = instantiateDecoder(true, &mAudioDecoder);
+        if (err != OK) {
+            return err;
+        }
+    }
+    return OK;
+}
+
+void NuPlayer2::onStart(int64_t startPositionUs, MediaPlayer2SeekMode mode) {
+    ALOGV("onStart: mCrypto: %p", mCrypto.get());
+
+    if (!mSourceStarted) {
+        mSourceStarted = true;
+        mSource->start();
+    }
+    if (startPositionUs > 0) {
+        performSeek(startPositionUs, mode);
+        if (mSource->getFormat(false /* audio */) == NULL) {
+            return;
+        }
+    }
+
+    mOffloadAudio = false;
+    mAudioEOS = false;
+    mVideoEOS = false;
+    mStarted = true;
+    mPaused = false;
+
+    uint32_t flags = 0;
+
+    if (mSource->isRealTime()) {
+        flags |= Renderer::FLAG_REAL_TIME;
+    }
+
+    bool hasAudio = (mSource->getFormat(true /* audio */) != NULL);
+    bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
+    if (!hasAudio && !hasVideo) {
+        ALOGE("no metadata for either audio or video source");
+        mSource->stop();
+        mSourceStarted = false;
+        notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, ERROR_MALFORMED);
+        return;
+    }
+    ALOGV_IF(!hasAudio, "no metadata for audio source");  // video only stream
+
+    sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+
+    audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+    if (mAudioSink != NULL) {
+        streamType = mAudioSink->getAudioStreamType();
+    }
+
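+    // Offload is only considered at normal speed and pitch; canOffloadStream()
+    // checks the audio format, presence of video, streaming state and stream type.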
+    mOffloadAudio =
+        canOffloadStream(audioMeta, hasVideo, mSource->isStreaming(), streamType)
+                && (mPlaybackSettings.mSpeed == 1.f && mPlaybackSettings.mPitch == 1.f);
+
+    // Modular DRM: Disabling audio offload if the source is protected
+    if (mOffloadAudio && mIsDrmProtected) {
+        mOffloadAudio = false;
+        ALOGV("onStart: Disabling mOffloadAudio now that the source is protected.");
+    }
+
+    if (mOffloadAudio) {
+        flags |= Renderer::FLAG_OFFLOAD_AUDIO;
+    }
+
+    sp<AMessage> notify = new AMessage(kWhatRendererNotify, this);
+    ++mRendererGeneration;
+    notify->setInt32("generation", mRendererGeneration);
+    mRenderer = new Renderer(mAudioSink, mMediaClock, notify, flags);
+    mRendererLooper = new ALooper;
+    mRendererLooper->setName("NuPlayerRenderer");
+    mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+    mRendererLooper->registerHandler(mRenderer);
+
+    status_t err = mRenderer->setPlaybackSettings(mPlaybackSettings);
+    if (err != OK) {
+        mSource->stop();
+        mSourceStarted = false;
+        notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+        return;
+    }
+
+    float rate = getFrameRate();
+    if (rate > 0) {
+        mRenderer->setVideoFrameRate(rate);
+    }
+
+    if (mVideoDecoder != NULL) {
+        mVideoDecoder->setRenderer(mRenderer);
+    }
+    if (mAudioDecoder != NULL) {
+        mAudioDecoder->setRenderer(mRenderer);
+    }
+
+    startPlaybackTimer("onstart");
+
+    postScanSources();
+}
+
+void NuPlayer2::startPlaybackTimer(const char *where) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+    if (mLastStartedPlayingTimeNs == 0) {
+        mLastStartedPlayingTimeNs = systemTime();
+        ALOGV("startPlaybackTimer() time %20" PRId64 " (%s)",  mLastStartedPlayingTimeNs, where);
+    }
+}
+
+void NuPlayer2::stopPlaybackTimer(const char *where) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+
+    ALOGV("stopPlaybackTimer()  time %20" PRId64 " (%s)", mLastStartedPlayingTimeNs, where);
+
+    if (mLastStartedPlayingTimeNs != 0) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            int64_t now = systemTime();
+            int64_t played = now - mLastStartedPlayingTimeNs;
+            ALOGV("stopPlaybackTimer()  log  %20" PRId64 "", played);
+
+            if (played > 0) {
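+                // systemTime() is in nanoseconds; convert to microseconds, rounding.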
+                driver->notifyMorePlayingTimeUs((played+500)/1000);
+            }
+        }
+        mLastStartedPlayingTimeNs = 0;
+    }
+}
+
+void NuPlayer2::startRebufferingTimer() {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+    if (mLastStartedRebufferingTimeNs == 0) {
+        mLastStartedRebufferingTimeNs = systemTime();
+        ALOGV("startRebufferingTimer() time %20" PRId64 "",  mLastStartedRebufferingTimeNs);
+    }
+}
+
+void NuPlayer2::stopRebufferingTimer(bool exitingPlayback) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+
+    ALOGV("stopRebufferingTimer()  time %20" PRId64 " (exiting %d)",
+            mLastStartedRebufferingTimeNs, exitingPlayback);
+
+    if (mLastStartedRebufferingTimeNs != 0) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            int64_t now = systemTime();
+            int64_t rebuffered = now - mLastStartedRebufferingTimeNs;
+            ALOGV("stopRebufferingTimer()  log  %20" PRId64 "", rebuffered);
+
+            if (rebuffered > 0) {
+                driver->notifyMoreRebufferingTimeUs((rebuffered+500)/1000);
+                if (exitingPlayback) {
+                    driver->notifyRebufferingWhenExit(true);
+                }
+            }
+        }
+        mLastStartedRebufferingTimeNs = 0;
+    }
+}
+
+void NuPlayer2::onPause() {
+
+    stopPlaybackTimer("onPause");
+
+    if (mPaused) {
+        return;
+    }
+    mPaused = true;
+    if (mSource != NULL) {
+        mSource->pause();
+    } else {
+        ALOGW("pause called when source is gone or not set");
+    }
+    if (mRenderer != NULL) {
+        mRenderer->pause();
+    } else {
+        ALOGW("pause called when renderer is gone or not set");
+    }
+
+}
+
+bool NuPlayer2::audioDecoderStillNeeded() {
+    // Audio decoder is no longer needed if it's in shut/shutting down status.
+    return ((mFlushingAudio != SHUT_DOWN) && (mFlushingAudio != SHUTTING_DOWN_DECODER));
+}
+
+void NuPlayer2::handleFlushComplete(bool audio, bool isDecoder) {
+    // We wait for both the decoder flush and the renderer flush to complete
+    // before entering either the FLUSHED or the SHUTTING_DOWN_DECODER state.
+
+    mFlushComplete[audio][isDecoder] = true;
+    if (!mFlushComplete[audio][!isDecoder]) {
+        return;
+    }
+
+    FlushStatus *state = audio ? &mFlushingAudio : &mFlushingVideo;
+    switch (*state) {
+        case FLUSHING_DECODER:
+        {
+            *state = FLUSHED;
+            break;
+        }
+
+        case FLUSHING_DECODER_SHUTDOWN:
+        {
+            *state = SHUTTING_DOWN_DECODER;
+
+            ALOGV("initiating %s decoder shutdown", audio ? "audio" : "video");
+            getDecoder(audio)->initiateShutdown();
+            break;
+        }
+
+        default:
+            // Decoder flush completions should only occur while in a flushing state.
+            LOG_ALWAYS_FATAL_IF(isDecoder, "decoder flush in invalid state %d", *state);
+            break;
+    }
+}
+
+void NuPlayer2::finishFlushIfPossible() {
+    if (mFlushingAudio != NONE && mFlushingAudio != FLUSHED
+            && mFlushingAudio != SHUT_DOWN) {
+        return;
+    }
+
+    if (mFlushingVideo != NONE && mFlushingVideo != FLUSHED
+            && mFlushingVideo != SHUT_DOWN) {
+        return;
+    }
+
+    ALOGV("both audio and video are flushed now.");
+
+    mFlushingAudio = NONE;
+    mFlushingVideo = NONE;
+
+    clearFlushComplete();
+
+    processDeferredActions();
+}
+
+void NuPlayer2::postScanSources() {
+    if (mScanSourcesPending) {
+        return;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatScanSources, this);
+    msg->setInt32("generation", mScanSourcesGeneration);
+    msg->post();
+
+    mScanSourcesPending = true;
+}
+
+void NuPlayer2::tryOpenAudioSinkForOffload(
+        const sp<AMessage> &format, const sp<MetaData> &audioMeta, bool hasVideo) {
+    // Note: This is called early in NuPlayer2 to determine whether offloading
+    // is possible; otherwise the decoders call the renderer openAudioSink directly.
+
+    status_t err = mRenderer->openAudioSink(
+            format, true /* offloadOnly */, hasVideo,
+            AUDIO_OUTPUT_FLAG_NONE, &mOffloadAudio, mSource->isStreaming());
+    if (err != OK) {
+        // On any failure, turn off mOffloadAudio.
+        mOffloadAudio = false;
+    } else if (mOffloadAudio) {
+        sendMetaDataToHal(mAudioSink, audioMeta);
+    }
+}
+
+void NuPlayer2::closeAudioSink() {
+    mRenderer->closeAudioSink();
+}
+
+void NuPlayer2::restartAudio(
+        int64_t currentPositionUs, bool forceNonOffload, bool needsToCreateAudioDecoder) {
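+    // Tear down the current audio decoder, resolve any pending audio flush state,
+    // reopen the audio sink at the current position and, if requested, re-create
+    // the audio decoder (optionally forcing the non-offload path).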
+    if (mAudioDecoder != NULL) {
+        mAudioDecoder->pause();
+        mAudioDecoder.clear();
+        mAudioDecoderError = false;
+        ++mAudioDecoderGeneration;
+    }
+    if (mFlushingAudio == FLUSHING_DECODER) {
+        mFlushComplete[1 /* audio */][1 /* isDecoder */] = true;
+        mFlushingAudio = FLUSHED;
+        finishFlushIfPossible();
+    } else if (mFlushingAudio == FLUSHING_DECODER_SHUTDOWN
+            || mFlushingAudio == SHUTTING_DOWN_DECODER) {
+        mFlushComplete[1 /* audio */][1 /* isDecoder */] = true;
+        mFlushingAudio = SHUT_DOWN;
+        finishFlushIfPossible();
+        needsToCreateAudioDecoder = false;
+    }
+    if (mRenderer == NULL) {
+        return;
+    }
+    closeAudioSink();
+    mRenderer->flush(true /* audio */, false /* notifyComplete */);
+    if (mVideoDecoder != NULL) {
+        mRenderer->flush(false /* audio */, false /* notifyComplete */);
+    }
+
+    performSeek(currentPositionUs, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */);
+
+    if (forceNonOffload) {
+        mRenderer->signalDisableOffloadAudio();
+        mOffloadAudio = false;
+    }
+    if (needsToCreateAudioDecoder) {
+        instantiateDecoder(true /* audio */, &mAudioDecoder, !forceNonOffload);
+    }
+}
+
+void NuPlayer2::determineAudioModeChange(const sp<AMessage> &audioFormat) {
+    if (mSource == NULL || mAudioSink == NULL) {
+        return;
+    }
+
+    if (mRenderer == NULL) {
+        ALOGW("No renderer can be used to determine audio mode. Use non-offload for safety.");
+        mOffloadAudio = false;
+        return;
+    }
+
+    sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+    sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+    audio_stream_type_t streamType = mAudioSink->getAudioStreamType();
+    const bool hasVideo = (videoFormat != NULL);
+    bool canOffload = canOffloadStream(
+            audioMeta, hasVideo, mSource->isStreaming(), streamType)
+                    && (mPlaybackSettings.mSpeed == 1.f && mPlaybackSettings.mPitch == 1.f);
+
+    // Modular DRM: Disabling audio offload if the source is protected
+    if (canOffload && mIsDrmProtected) {
+        canOffload = false;
+        ALOGV("determineAudioModeChange: Disabling mOffloadAudio b/c the source is protected.");
+    }
+
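+    // Switch the renderer between the offload and non-offload paths as needed.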
+    if (canOffload) {
+        if (!mOffloadAudio) {
+            mRenderer->signalEnableOffloadAudio();
+        }
+        // open audio sink early under offload mode.
+        tryOpenAudioSinkForOffload(audioFormat, audioMeta, hasVideo);
+    } else {
+        if (mOffloadAudio) {
+            mRenderer->signalDisableOffloadAudio();
+            mOffloadAudio = false;
+        }
+    }
+}
+
+status_t NuPlayer2::instantiateDecoder(
+        bool audio, sp<DecoderBase> *decoder, bool checkAudioModeChange) {
+    // The audio decoder could be cleared by tear down. If still in shut down
+    // process, no need to create a new audio decoder.
+    if (*decoder != NULL || (audio && mFlushingAudio == SHUT_DOWN)) {
+        return OK;
+    }
+
+    sp<AMessage> format = mSource->getFormat(audio);
+
+    if (format == NULL) {
+        return UNKNOWN_ERROR;
+    } else {
+        status_t err;
+        if (format->findInt32("err", &err) && err) {
+            return err;
+        }
+    }
+
+    format->setInt32("priority", 0 /* realtime */);
+
+    if (!audio) {
+        AString mime;
+        CHECK(format->findString("mime", &mime));
+
+        sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, this);
+        if (mCCDecoder == NULL) {
+            mCCDecoder = new CCDecoder(ccNotify);
+        }
+
+        if (mSourceFlags & Source::FLAG_SECURE) {
+            format->setInt32("secure", true);
+        }
+
+        if (mSourceFlags & Source::FLAG_PROTECTED) {
+            format->setInt32("protected", true);
+        }
+
+        float rate = getFrameRate();
+        if (rate > 0) {
+            format->setFloat("operating-rate", rate * mPlaybackSettings.mSpeed);
+        }
+    }
+
+    if (audio) {
+        sp<AMessage> notify = new AMessage(kWhatAudioNotify, this);
+        ++mAudioDecoderGeneration;
+        notify->setInt32("generation", mAudioDecoderGeneration);
+
+        if (checkAudioModeChange) {
+            determineAudioModeChange(format);
+        }
+        if (mOffloadAudio) {
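+            // Offloaded audio: pass compressed access units straight through to the
+            // renderer instead of decoding them here.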
+            mSource->setOffloadAudio(true /* offload */);
+
+            const bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
+            format->setInt32("has-video", hasVideo);
+            *decoder = new DecoderPassThrough(notify, mSource, mRenderer);
+            ALOGV("instantiateDecoder audio DecoderPassThrough  hasVideo: %d", hasVideo);
+        } else {
+            mSource->setOffloadAudio(false /* offload */);
+
+            *decoder = new Decoder(notify, mSource, mPID, mUID, mRenderer);
+            ALOGV("instantiateDecoder audio Decoder");
+        }
+        mAudioDecoderError = false;
+    } else {
+        sp<AMessage> notify = new AMessage(kWhatVideoNotify, this);
+        ++mVideoDecoderGeneration;
+        notify->setInt32("generation", mVideoDecoderGeneration);
+
+        *decoder = new Decoder(
+                notify, mSource, mPID, mUID, mRenderer, mNativeWindow, mCCDecoder);
+        mVideoDecoderError = false;
+
+        // enable FRC if high-quality AV sync is requested, even if not
+        // directly queuing to display, as this will even improve textureview
+        // playback.
+        {
+            if (property_get_bool("persist.sys.media.avsync", false)) {
+                format->setInt32("auto-frc", 1);
+            }
+        }
+    }
+    (*decoder)->init();
+
+    // Modular DRM
+    if (mIsDrmProtected) {
+        format->setObject("crypto", mCrypto);
+        ALOGV("instantiateDecoder: mCrypto: %p isSecure: %d", mCrypto.get(),
+                (mSourceFlags & Source::FLAG_SECURE) != 0);
+    }
+
+    (*decoder)->configure(format);
+
+    if (!audio) {
+        sp<AMessage> params = new AMessage();
+        float rate = getFrameRate();
+        if (rate > 0) {
+            params->setFloat("frame-rate-total", rate);
+        }
+
+        sp<MetaData> fileMeta = getFileMeta();
+        if (fileMeta != NULL) {
+            int32_t videoTemporalLayerCount;
+            if (fileMeta->findInt32(kKeyTemporalLayerCount, &videoTemporalLayerCount)
+                    && videoTemporalLayerCount > 0) {
+                params->setInt32("temporal-layer-count", videoTemporalLayerCount);
+            }
+        }
+
+        if (params->countEntries() > 0) {
+            (*decoder)->setParameters(params);
+        }
+    }
+    return OK;
+}
+
+void NuPlayer2::updateVideoSize(
+        const sp<AMessage> &inputFormat,
+        const sp<AMessage> &outputFormat) {
+    if (inputFormat == NULL) {
+        ALOGW("Unknown video size, reporting 0x0!");
+        notifyListener(MEDIA2_SET_VIDEO_SIZE, 0, 0);
+        return;
+    }
+    int32_t err = OK;
+    inputFormat->findInt32("err", &err);
+    if (err == -EWOULDBLOCK) {
+        ALOGW("Video meta is not available yet!");
+        return;
+    }
+    if (err != OK) {
+        ALOGW("Something is wrong with video meta!");
+        return;
+    }
+
+    int32_t displayWidth, displayHeight;
+    if (outputFormat != NULL) {
+        int32_t width, height;
+        CHECK(outputFormat->findInt32("width", &width));
+        CHECK(outputFormat->findInt32("height", &height));
+
+        int32_t cropLeft, cropTop, cropRight, cropBottom;
+        CHECK(outputFormat->findRect(
+                    "crop",
+                    &cropLeft, &cropTop, &cropRight, &cropBottom));
+
+        displayWidth = cropRight - cropLeft + 1;
+        displayHeight = cropBottom - cropTop + 1;
+
+        ALOGV("Video output format changed to %d x %d "
+             "(crop: %d x %d @ (%d, %d))",
+             width, height,
+             displayWidth,
+             displayHeight,
+             cropLeft, cropTop);
+    } else {
+        CHECK(inputFormat->findInt32("width", &displayWidth));
+        CHECK(inputFormat->findInt32("height", &displayHeight));
+
+        ALOGV("Video input format %d x %d", displayWidth, displayHeight);
+    }
+
+    // Take into account sample aspect ratio if necessary:
+    int32_t sarWidth, sarHeight;
+    if (inputFormat->findInt32("sar-width", &sarWidth)
+            && inputFormat->findInt32("sar-height", &sarHeight)
+            && sarWidth > 0 && sarHeight > 0) {
+        ALOGV("Sample aspect ratio %d : %d", sarWidth, sarHeight);
+
+        displayWidth = (displayWidth * sarWidth) / sarHeight;
+
+        ALOGV("display dimensions %d x %d", displayWidth, displayHeight);
+    } else {
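+        // No sample aspect ratio available; if the container carries explicit
+        // display dimensions, constrain the reported size to that aspect ratio
+        // without exceeding the decoded dimensions.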
+        int32_t width, height;
+        if (inputFormat->findInt32("display-width", &width)
+                && inputFormat->findInt32("display-height", &height)
+                && width > 0 && height > 0
+                && displayWidth > 0 && displayHeight > 0) {
+            if (displayHeight * (int64_t)width / height > (int64_t)displayWidth) {
+                displayHeight = (int32_t)(displayWidth * (int64_t)height / width);
+            } else {
+                displayWidth = (int32_t)(displayHeight * (int64_t)width / height);
+            }
+            ALOGV("Video display width and height are overridden to %d x %d",
+                 displayWidth, displayHeight);
+        }
+    }
+
+    int32_t rotationDegrees;
+    if (!inputFormat->findInt32("rotation-degrees", &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
+
+    if (rotationDegrees == 90 || rotationDegrees == 270) {
+        int32_t tmp = displayWidth;
+        displayWidth = displayHeight;
+        displayHeight = tmp;
+    }
+
+    notifyListener(
+            MEDIA2_SET_VIDEO_SIZE,
+            displayWidth,
+            displayHeight);
+}
+
+void NuPlayer2::notifyListener(int msg, int ext1, int ext2, const Parcel *in) {
+    if (mDriver == NULL) {
+        return;
+    }
+
+    sp<NuPlayer2Driver> driver = mDriver.promote();
+
+    if (driver == NULL) {
+        return;
+    }
+
+    driver->notifyListener(msg, ext1, ext2, in);
+}
+
+void NuPlayer2::flushDecoder(bool audio, bool needShutdown) {
+    ALOGV("[%s] flushDecoder needShutdown=%d",
+          audio ? "audio" : "video", needShutdown);
+
+    const sp<DecoderBase> &decoder = getDecoder(audio);
+    if (decoder == NULL) {
+        ALOGI("flushDecoder %s without decoder present",
+             audio ? "audio" : "video");
+        return;
+    }
+
+    // Make sure we don't continue to scan sources until we finish flushing.
+    ++mScanSourcesGeneration;
+    if (mScanSourcesPending) {
+        if (!needShutdown) {
+            mDeferredActions.push_back(
+                    new SimpleAction(&NuPlayer2::performScanSources));
+        }
+        mScanSourcesPending = false;
+    }
+
+    decoder->signalFlush();
+
+    FlushStatus newStatus =
+        needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;
+
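+    // Without a renderer there is no renderer-side flush to wait for, so mark it
+    // complete up front.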
+    mFlushComplete[audio][false /* isDecoder */] = (mRenderer == NULL);
+    mFlushComplete[audio][true /* isDecoder */] = false;
+    if (audio) {
+        ALOGE_IF(mFlushingAudio != NONE,
+                "audio flushDecoder() is called in state %d", mFlushingAudio);
+        mFlushingAudio = newStatus;
+    } else {
+        ALOGE_IF(mFlushingVideo != NONE,
+                "video flushDecoder() is called in state %d", mFlushingVideo);
+        mFlushingVideo = newStatus;
+    }
+}
+
+void NuPlayer2::queueDecoderShutdown(
+        bool audio, bool video, const sp<AMessage> &reply) {
+    ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video);
+
+    mDeferredActions.push_back(
+            new FlushDecoderAction(
+                audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+                video ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE));
+
+    mDeferredActions.push_back(
+            new SimpleAction(&NuPlayer2::performScanSources));
+
+    mDeferredActions.push_back(new PostMessageAction(reply));
+
+    processDeferredActions();
+}
+
+status_t NuPlayer2::setVideoScalingMode(int32_t mode) {
+    mVideoScalingMode = mode;
+    if (mNativeWindow != NULL && mNativeWindow->getANativeWindow() != NULL) {
+        status_t ret = native_window_set_scaling_mode(
+                mNativeWindow->getANativeWindow(), mVideoScalingMode);
+        if (ret != OK) {
+            ALOGE("Failed to set scaling mode (%d): %s",
+                -ret, strerror(-ret));
+            return ret;
+        }
+    }
+    return OK;
+}
+
+status_t NuPlayer2::getTrackInfo(Parcel* reply) const {
+    sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, this);
+    msg->setPointer("reply", reply);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    return err;
+}
+
+status_t NuPlayer2::getSelectedTrack(int32_t type, Parcel* reply) const {
+    sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, this);
+    msg->setPointer("reply", reply);
+    msg->setInt32("type", type);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
+    sp<AMessage> msg = new AMessage(kWhatSelectTrack, this);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setInt32("select", select);
+    msg->setInt64("timeUs", timeUs);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (!response->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+status_t NuPlayer2::getCurrentPosition(int64_t *mediaUs) {
+    sp<Renderer> renderer = mRenderer;
+    if (renderer == NULL) {
+        return NO_INIT;
+    }
+
+    return renderer->getCurrentPosition(mediaUs);
+}
+
+void NuPlayer2::getStats(Vector<sp<AMessage> > *mTrackStats) {
+    CHECK(mTrackStats != NULL);
+
+    mTrackStats->clear();
+    if (mVideoDecoder != NULL) {
+        mTrackStats->push_back(mVideoDecoder->getStats());
+    }
+    if (mAudioDecoder != NULL) {
+        mTrackStats->push_back(mAudioDecoder->getStats());
+    }
+}
+
+sp<MetaData> NuPlayer2::getFileMeta() {
+    return mSource->getFileFormatMeta();
+}
+
+float NuPlayer2::getFrameRate() {
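+    // Prefer the frame rate from the video track metadata, falling back to the
+    // file-level metadata; returns 0 when there is no video format and -1 when
+    // no frame rate can be determined.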
+    sp<MetaData> meta = mSource->getFormatMeta(false /* audio */);
+    if (meta == NULL) {
+        return 0;
+    }
+    int32_t rate;
+    if (!meta->findInt32(kKeyFrameRate, &rate)) {
+        // fall back to try file meta
+        sp<MetaData> fileMeta = getFileMeta();
+        if (fileMeta == NULL) {
+            ALOGW("source has video meta but not file meta");
+            return -1;
+        }
+        int32_t fileMetaRate;
+        if (!fileMeta->findInt32(kKeyFrameRate, &fileMetaRate)) {
+            return -1;
+        }
+        return fileMetaRate;
+    }
+    return rate;
+}
+
+void NuPlayer2::schedulePollDuration() {
+    sp<AMessage> msg = new AMessage(kWhatPollDuration, this);
+    msg->setInt32("generation", mPollDurationGeneration);
+    msg->post();
+}
+
+void NuPlayer2::cancelPollDuration() {
+    ++mPollDurationGeneration;
+}
+
+void NuPlayer2::processDeferredActions() {
+    while (!mDeferredActions.empty()) {
+        // We won't execute any deferred actions until we're no longer in
+        // an intermediate state, i.e. one or more decoders are currently
+        // flushing or shutting down.
+
+        if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
+            // We're currently flushing; postpone any deferred actions until that's
+            // completed.
+
+            ALOGV("postponing action mFlushingAudio=%d, mFlushingVideo=%d",
+                  mFlushingAudio, mFlushingVideo);
+
+            break;
+        }
+
+        sp<Action> action = *mDeferredActions.begin();
+        mDeferredActions.erase(mDeferredActions.begin());
+
+        action->execute(this);
+    }
+}
+
+void NuPlayer2::performSeek(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    ALOGV("performSeek seekTimeUs=%lld us (%.2f secs), mode=%d",
+          (long long)seekTimeUs, seekTimeUs / 1E6, mode);
+
+    if (mSource == NULL) {
+        // This happens when reset occurs right before the loop mode
+        // asynchronously seeks to the start of the stream.
+        LOG_ALWAYS_FATAL_IF(mAudioDecoder != NULL || mVideoDecoder != NULL,
+                "mSource is NULL and decoders not NULL audio(%p) video(%p)",
+                mAudioDecoder.get(), mVideoDecoder.get());
+        return;
+    }
+    mPreviousSeekTimeUs = seekTimeUs;
+    mSource->seekTo(seekTimeUs, mode);
+    ++mTimedTextGeneration;
+
+    // everything's flushed, continue playback.
+}
+
+void NuPlayer2::performDecoderFlush(FlushCommand audio, FlushCommand video) {
+    ALOGV("performDecoderFlush audio=%d, video=%d", audio, video);
+
+    if ((audio == FLUSH_CMD_NONE || mAudioDecoder == NULL)
+            && (video == FLUSH_CMD_NONE || mVideoDecoder == NULL)) {
+        return;
+    }
+
+    if (audio != FLUSH_CMD_NONE && mAudioDecoder != NULL) {
+        flushDecoder(true /* audio */, (audio == FLUSH_CMD_SHUTDOWN));
+    }
+
+    if (video != FLUSH_CMD_NONE && mVideoDecoder != NULL) {
+        flushDecoder(false /* audio */, (video == FLUSH_CMD_SHUTDOWN));
+    }
+}
+
+void NuPlayer2::performReset() {
+    ALOGV("performReset");
+
+    CHECK(mAudioDecoder == NULL);
+    CHECK(mVideoDecoder == NULL);
+
+    stopPlaybackTimer("performReset");
+    stopRebufferingTimer(true);
+
+    cancelPollDuration();
+
+    ++mScanSourcesGeneration;
+    mScanSourcesPending = false;
+
+    if (mRendererLooper != NULL) {
+        if (mRenderer != NULL) {
+            mRendererLooper->unregisterHandler(mRenderer->id());
+        }
+        mRendererLooper->stop();
+        mRendererLooper.clear();
+    }
+    mRenderer.clear();
+    ++mRendererGeneration;
+
+    if (mSource != NULL) {
+        mSource->stop();
+
+        Mutex::Autolock autoLock(mSourceLock);
+        mSource.clear();
+    }
+
+    if (mDriver != NULL) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifyResetComplete();
+        }
+    }
+
+    mStarted = false;
+    mPrepared = false;
+    mResetting = false;
+    mSourceStarted = false;
+
+    // Modular DRM
+    if (mCrypto != NULL) {
+        // decoders will be flushed before this so their mCrypto would go away on their own
+        // TODO change to ALOGV
+        ALOGD("performReset mCrypto: %p", mCrypto.get());
+        mCrypto.clear();
+    }
+    mIsDrmProtected = false;
+}
+
+void NuPlayer2::performScanSources() {
+    ALOGV("performScanSources");
+
+    if (!mStarted) {
+        return;
+    }
+
+    if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
+        postScanSources();
+    }
+}
+
+void NuPlayer2::performSetSurface(const sp<ANativeWindowWrapper> &nww) {
+    ALOGV("performSetSurface");
+
+    mNativeWindow = nww;
+
+    // XXX - ignore error from setVideoScalingMode for now
+    setVideoScalingMode(mVideoScalingMode);
+
+    if (mDriver != NULL) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifySetSurfaceComplete();
+        }
+    }
+}
+
+void NuPlayer2::performResumeDecoders(bool needNotify) {
+    if (needNotify) {
+        mResumePending = true;
+        if (mVideoDecoder == NULL) {
+            // if audio-only, we can notify seek complete now,
+            // as the resume operation will be relatively fast.
+            finishResume();
+        }
+    }
+
+    if (mVideoDecoder != NULL) {
+        // When there is continuous seek, MediaPlayer will cache the seek
+        // position, and send down new seek request when previous seek is
+        // complete. Let's wait for at least one video output frame before
+        // notifying seek complete, so that the video thumbnail gets updated
+        // when seekbar is dragged.
+        mVideoDecoder->signalResume(needNotify);
+    }
+
+    if (mAudioDecoder != NULL) {
+        mAudioDecoder->signalResume(false /* needNotify */);
+    }
+}
+
+void NuPlayer2::finishResume() {
+    if (mResumePending) {
+        mResumePending = false;
+        notifyDriverSeekComplete();
+    }
+}
+
+void NuPlayer2::notifyDriverSeekComplete() {
+    if (mDriver != NULL) {
+        sp<NuPlayer2Driver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifySeekComplete();
+        }
+    }
+}
+
+void NuPlayer2::onSourceNotify(const sp<AMessage> &msg) {
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case Source::kWhatInstantiateSecureDecoders:
+        {
+            if (mSource == NULL) {
+                // This is a stale notification from a source that was
+                // asynchronously preparing when the client called reset().
+                // We handled the reset, the source is gone.
+                break;
+            }
+
+            sp<AMessage> reply;
+            CHECK(msg->findMessage("reply", &reply));
+            status_t err = onInstantiateSecureDecoders();
+            reply->setInt32("err", err);
+            reply->post();
+            break;
+        }
+
+        case Source::kWhatPrepared:
+        {
+            ALOGV("NuPlayer2::onSourceNotify Source::kWhatPrepared source: %p", mSource.get());
+            if (mSource == NULL) {
+                // This is a stale notification from a source that was
+                // asynchronously preparing when the client called reset().
+                // We handled the reset, the source is gone.
+                break;
+            }
+
+            int32_t err;
+            CHECK(msg->findInt32("err", &err));
+
+            if (err != OK) {
+                // shut down potential secure codecs in case client never calls reset
+                mDeferredActions.push_back(
+                        new FlushDecoderAction(FLUSH_CMD_SHUTDOWN /* audio */,
+                                               FLUSH_CMD_SHUTDOWN /* video */));
+                processDeferredActions();
+            } else {
+                mPrepared = true;
+            }
+
+            sp<NuPlayer2Driver> driver = mDriver.promote();
+            if (driver != NULL) {
+                // Notify duration first, so that it's definitely set when
+                // the app receives the "prepare complete" callback.
+                int64_t durationUs;
+                if (mSource->getDuration(&durationUs) == OK) {
+                    driver->notifyDuration(durationUs);
+                }
+                driver->notifyPrepareCompleted(err);
+            }
+
+            break;
+        }
+
+        // Modular DRM
+        case Source::kWhatDrmInfo:
+        {
+            Parcel parcel;
+            sp<ABuffer> drmInfo;
+            CHECK(msg->findBuffer("drmInfo", &drmInfo));
+            parcel.setData(drmInfo->data(), drmInfo->size());
+
+            ALOGV("onSourceNotify() kWhatDrmInfo MEDIA2_DRM_INFO drmInfo: %p  parcel size: %zu",
+                    drmInfo.get(), parcel.dataSize());
+
+            notifyListener(MEDIA2_DRM_INFO, 0 /* ext1 */, 0 /* ext2 */, &parcel);
+
+            break;
+        }
+
+        case Source::kWhatFlagsChanged:
+        {
+            uint32_t flags;
+            CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+            sp<NuPlayer2Driver> driver = mDriver.promote();
+            if (driver != NULL) {
+
+                ALOGV("onSourceNotify() kWhatFlagsChanged  FLAG_CAN_PAUSE: %d  "
+                        "FLAG_CAN_SEEK_BACKWARD: %d \n\t\t\t\t FLAG_CAN_SEEK_FORWARD: %d  "
+                        "FLAG_CAN_SEEK: %d  FLAG_DYNAMIC_DURATION: %d \n"
+                        "\t\t\t\t FLAG_SECURE: %d  FLAG_PROTECTED: %d",
+                        (flags & Source::FLAG_CAN_PAUSE) != 0,
+                        (flags & Source::FLAG_CAN_SEEK_BACKWARD) != 0,
+                        (flags & Source::FLAG_CAN_SEEK_FORWARD) != 0,
+                        (flags & Source::FLAG_CAN_SEEK) != 0,
+                        (flags & Source::FLAG_DYNAMIC_DURATION) != 0,
+                        (flags & Source::FLAG_SECURE) != 0,
+                        (flags & Source::FLAG_PROTECTED) != 0);
+
+                if ((flags & NuPlayer2::Source::FLAG_CAN_SEEK) == 0) {
+                    driver->notifyListener(
+                            MEDIA2_INFO, MEDIA2_INFO_NOT_SEEKABLE, 0);
+                }
+                driver->notifyFlagsChanged(flags);
+            }
+
+            if ((mSourceFlags & Source::FLAG_DYNAMIC_DURATION)
+                    && (!(flags & Source::FLAG_DYNAMIC_DURATION))) {
+                cancelPollDuration();
+            } else if (!(mSourceFlags & Source::FLAG_DYNAMIC_DURATION)
+                    && (flags & Source::FLAG_DYNAMIC_DURATION)
+                    && (mAudioDecoder != NULL || mVideoDecoder != NULL)) {
+                schedulePollDuration();
+            }
+
+            mSourceFlags = flags;
+            break;
+        }
+
+        case Source::kWhatVideoSizeChanged:
+        {
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+
+            updateVideoSize(format);
+            break;
+        }
+
+        case Source::kWhatBufferingUpdate:
+        {
+            int32_t percentage;
+            CHECK(msg->findInt32("percentage", &percentage));
+
+            notifyListener(MEDIA2_BUFFERING_UPDATE, percentage, 0);
+            break;
+        }
+
+        case Source::kWhatPauseOnBufferingStart:
+        {
+            // ignore if not playing
+            if (mStarted) {
+                ALOGI("buffer low, pausing...");
+
+                startRebufferingTimer();
+                mPausedForBuffering = true;
+                onPause();
+            }
+            notifyListener(MEDIA2_INFO, MEDIA2_INFO_BUFFERING_START, 0);
+            break;
+        }
+
+        case Source::kWhatResumeOnBufferingEnd:
+        {
+            // ignore if not playing
+            if (mStarted) {
+                ALOGI("buffer ready, resuming...");
+
+                stopRebufferingTimer(false);
+                mPausedForBuffering = false;
+
+                // do not resume yet if client didn't unpause
+                if (!mPausedByClient) {
+                    onResume();
+                }
+            }
+            notifyListener(MEDIA2_INFO, MEDIA2_INFO_BUFFERING_END, 0);
+            break;
+        }
+
+        case Source::kWhatCacheStats:
+        {
+            int32_t kbps;
+            CHECK(msg->findInt32("bandwidth", &kbps));
+
+            notifyListener(MEDIA2_INFO, MEDIA2_INFO_NETWORK_BANDWIDTH, kbps);
+            break;
+        }
+
+        case Source::kWhatSubtitleData:
+        {
+            sp<ABuffer> buffer;
+            CHECK(msg->findBuffer("buffer", &buffer));
+
+            sendSubtitleData(buffer, 0 /* baseIndex */);
+            break;
+        }
+
+        case Source::kWhatTimedMetaData:
+        {
+            sp<ABuffer> buffer;
+            if (!msg->findBuffer("buffer", &buffer)) {
+                notifyListener(MEDIA2_INFO, MEDIA2_INFO_METADATA_UPDATE, 0);
+            } else {
+                sendTimedMetaData(buffer);
+            }
+            break;
+        }
+
+        case Source::kWhatTimedTextData:
+        {
+            int32_t generation;
+            if (msg->findInt32("generation", &generation)
+                    && generation != mTimedTextGeneration) {
+                break;
+            }
+
+            sp<ABuffer> buffer;
+            CHECK(msg->findBuffer("buffer", &buffer));
+
+            sp<NuPlayer2Driver> driver = mDriver.promote();
+            if (driver == NULL) {
+                break;
+            }
+
+            int posMs;
+            int64_t timeUs, posUs;
+            driver->getCurrentPosition(&posMs);
+            posUs = (int64_t) posMs * 1000ll;
+            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+            if (posUs < timeUs) {
+                if (!msg->findInt32("generation", &generation)) {
+                    msg->setInt32("generation", mTimedTextGeneration);
+                }
+                msg->post(timeUs - posUs);
+            } else {
+                sendTimedTextData(buffer);
+            }
+            break;
+        }
+
+        case Source::kWhatQueueDecoderShutdown:
+        {
+            int32_t audio, video;
+            CHECK(msg->findInt32("audio", &audio));
+            CHECK(msg->findInt32("video", &video));
+
+            sp<AMessage> reply;
+            CHECK(msg->findMessage("reply", &reply));
+
+            queueDecoderShutdown(audio, video, reply);
+            break;
+        }
+
+        case Source::kWhatDrmNoLicense:
+        {
+            notifyListener(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, ERROR_DRM_NO_LICENSE);
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void NuPlayer2::onClosedCaptionNotify(const sp<AMessage> &msg) {
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case NuPlayer2::CCDecoder::kWhatClosedCaptionData:
+        {
+            sp<ABuffer> buffer;
+            CHECK(msg->findBuffer("buffer", &buffer));
+
+            size_t inbandTracks = 0;
+            if (mSource != NULL) {
+                inbandTracks = mSource->getTrackCount();
+            }
+
+            sendSubtitleData(buffer, inbandTracks);
+            break;
+        }
+
+        case NuPlayer2::CCDecoder::kWhatTrackAdded:
+        {
+            notifyListener(MEDIA2_INFO, MEDIA2_INFO_METADATA_UPDATE, 0);
+
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void NuPlayer2::sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex) {
+    int32_t trackIndex;
+    int64_t timeUs, durationUs;
+    CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex));
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+    CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
+
+    Parcel in;
+    in.writeInt32(trackIndex + baseIndex);
+    in.writeInt64(timeUs);
+    in.writeInt64(durationUs);
+    in.writeInt32(buffer->size());
+    in.writeInt32(buffer->size());
+    in.write(buffer->data(), buffer->size());
+
+    notifyListener(MEDIA2_SUBTITLE_DATA, 0, 0, &in);
+}
+
+void NuPlayer2::sendTimedMetaData(const sp<ABuffer> &buffer) {
+    int64_t timeUs;
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+    Parcel in;
+    in.writeInt64(timeUs);
+    in.writeInt32(buffer->size());
+    in.writeInt32(buffer->size());
+    in.write(buffer->data(), buffer->size());
+
+    notifyListener(MEDIA2_META_DATA, 0, 0, &in);
+}
+
+void NuPlayer2::sendTimedTextData(const sp<ABuffer> &buffer) {
+    const void *data;
+    size_t size = 0;
+    int64_t timeUs;
+    int32_t flag = TextDescriptions::IN_BAND_TEXT_3GPP;
+
+    AString mime;
+    CHECK(buffer->meta()->findString("mime", &mime));
+    CHECK(strcasecmp(mime.c_str(), MEDIA_MIMETYPE_TEXT_3GPP) == 0);
+
+    data = buffer->data();
+    size = buffer->size();
+
+    Parcel parcel;
+    if (size > 0) {
+        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+        int32_t global = 0;
+        if (buffer->meta()->findInt32("global", &global) && global) {
+            flag |= TextDescriptions::GLOBAL_DESCRIPTIONS;
+        } else {
+            flag |= TextDescriptions::LOCAL_DESCRIPTIONS;
+        }
+        TextDescriptions::getParcelOfDescriptions(
+                (const uint8_t *)data, size, flag, timeUs / 1000, &parcel);
+    }
+
+    if ((parcel.dataSize() > 0)) {
+        notifyListener(MEDIA2_TIMED_TEXT, 0, 0, &parcel);
+    } else {  // send an empty timed text
+        notifyListener(MEDIA2_TIMED_TEXT, 0, 0);
+    }
+}
+
+const char *NuPlayer2::getDataSourceType() {
+    switch (mDataSourceType) {
+        case DATA_SOURCE_TYPE_HTTP_LIVE:
+            return "HTTPLive";
+
+        case DATA_SOURCE_TYPE_RTSP:
+            return "RTSP";
+
+        case DATA_SOURCE_TYPE_GENERIC_URL:
+            return "GenURL";
+
+        case DATA_SOURCE_TYPE_GENERIC_FD:
+            return "GenFD";
+
+        case DATA_SOURCE_TYPE_MEDIA:
+            return "Media";
+
+        case DATA_SOURCE_TYPE_STREAM:
+            return "Stream";
+
+        case DATA_SOURCE_TYPE_NONE:
+        default:
+            return "None";
+    }
+}
+
+// Modular DRM begin
+status_t NuPlayer2::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId)
+{
+    ALOGV("prepareDrm ");
+
+    // Passing to the looper anyway; called in a pre-config prepared state so no race on mCrypto
+    sp<AMessage> msg = new AMessage(kWhatPrepareDrm, this);
+    // synchronous call so just passing the address but with local copies of "const" args
+    uint8_t UUID[16];
+    memcpy(UUID, uuid, sizeof(UUID));
+    Vector<uint8_t> sessionId = drmSessionId;
+    msg->setPointer("uuid", (void*)UUID);
+    msg->setPointer("drmSessionId", (void*)&sessionId);
+
+    sp<AMessage> response;
+    status_t status = msg->postAndAwaitResponse(&response);
+
+    if (status == OK && response != NULL) {
+        CHECK(response->findInt32("status", &status));
+        ALOGV("prepareDrm ret: %d ", status);
+    } else {
+        ALOGE("prepareDrm err: %d", status);
+    }
+
+    return status;
+}
+
+status_t NuPlayer2::releaseDrm()
+{
+    ALOGV("releaseDrm ");
+
+    sp<AMessage> msg = new AMessage(kWhatReleaseDrm, this);
+
+    sp<AMessage> response;
+    status_t status = msg->postAndAwaitResponse(&response);
+
+    if (status == OK && response != NULL) {
+        CHECK(response->findInt32("status", &status));
+        ALOGV("releaseDrm ret: %d ", status);
+    } else {
+        ALOGE("releaseDrm err: %d", status);
+    }
+
+    return status;
+}
+
+status_t NuPlayer2::onPrepareDrm(const sp<AMessage> &msg)
+{
+    // TODO change to ALOGV
+    ALOGD("onPrepareDrm ");
+
+    status_t status = INVALID_OPERATION;
+    if (mSource == NULL) {
+        ALOGE("onPrepareDrm: No source. onPrepareDrm failed with %d.", status);
+        return status;
+    }
+
+    uint8_t *uuid;
+    Vector<uint8_t> *drmSessionId;
+    CHECK(msg->findPointer("uuid", (void**)&uuid));
+    CHECK(msg->findPointer("drmSessionId", (void**)&drmSessionId));
+
+    status = OK;
+    sp<AMediaCryptoWrapper> crypto = NULL;
+
+    status = mSource->prepareDrm(uuid, *drmSessionId, &crypto);
+    if (crypto == NULL) {
+        ALOGE("onPrepareDrm: mSource->prepareDrm failed. status: %d", status);
+        return status;
+    }
+    ALOGV("onPrepareDrm: mSource->prepareDrm succeeded");
+
+    if (mCrypto != NULL) {
+        ALOGE("onPrepareDrm: Unexpected. Already having mCrypto: %p", mCrypto.get());
+        mCrypto.clear();
+    }
+
+    mCrypto = crypto;
+    mIsDrmProtected = true;
+    // TODO change to ALOGV
+    ALOGD("onPrepareDrm: mCrypto: %p", mCrypto.get());
+
+    return status;
+}
+
+status_t NuPlayer2::onReleaseDrm()
+{
+    // TODO change to ALOGV
+    ALOGD("onReleaseDrm ");
+
+    if (!mIsDrmProtected) {
+        ALOGW("onReleaseDrm: Unexpected. mIsDrmProtected is already false.");
+    }
+
+    mIsDrmProtected = false;
+
+    status_t status;
+    if (mCrypto != NULL) {
+        // notifying the source first before removing crypto from codec
+        if (mSource != NULL) {
+            mSource->releaseDrm();
+        }
+
+        status = OK;
+        // first making sure the codecs have released their crypto reference
+        const sp<DecoderBase> &videoDecoder = getDecoder(false/*audio*/);
+        if (videoDecoder != NULL) {
+            status = videoDecoder->releaseCrypto();
+            ALOGV("onReleaseDrm: video decoder ret: %d", status);
+        }
+
+        const sp<DecoderBase> &audioDecoder = getDecoder(true/*audio*/);
+        if (audioDecoder != NULL) {
+            status_t status_audio = audioDecoder->releaseCrypto();
+            if (status == OK) {   // otherwise, returning the first error
+                status = status_audio;
+            }
+            ALOGV("onReleaseDrm: audio decoder ret: %d", status_audio);
+        }
+
+        // TODO change to ALOGV
+        ALOGD("onReleaseDrm: mCrypto: %p", mCrypto.get());
+        mCrypto.clear();
+    } else {   // mCrypto == NULL
+        ALOGE("onReleaseDrm: Unexpected. There is no crypto.");
+        status = INVALID_OPERATION;
+    }
+
+    return status;
+}
+// Modular DRM end
+////////////////////////////////////////////////////////////////////////////////
+
+sp<AMessage> NuPlayer2::Source::getFormat(bool audio) {
+    sp<MetaData> meta = getFormatMeta(audio);
+
+    if (meta == NULL) {
+        return NULL;
+    }
+
+    sp<AMessage> msg = new AMessage;
+
+    if (convertMetaDataToMessage(meta, &msg) == OK) {
+        return msg;
+    }
+    return NULL;
+}
+
+void NuPlayer2::Source::notifyFlagsChanged(uint32_t flags) {
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatFlagsChanged);
+    notify->setInt32("flags", flags);
+    notify->post();
+}
+
+void NuPlayer2::Source::notifyVideoSizeChanged(const sp<AMessage> &format) {
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatVideoSizeChanged);
+    notify->setMessage("format", format);
+    notify->post();
+}
+
+void NuPlayer2::Source::notifyPrepared(status_t err) {
+    ALOGV("Source::notifyPrepared %d", err);
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatPrepared);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+void NuPlayer2::Source::notifyDrmInfo(const sp<ABuffer> &drmInfoBuffer)
+{
+    ALOGV("Source::notifyDrmInfo");
+
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatDrmInfo);
+    notify->setBuffer("drmInfo", drmInfoBuffer);
+
+    notify->post();
+}
+
+void NuPlayer2::Source::notifyInstantiateSecureDecoders(const sp<AMessage> &reply) {
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatInstantiateSecureDecoders);
+    notify->setMessage("reply", reply);
+    notify->post();
+}
+
+void NuPlayer2::Source::onMessageReceived(const sp<AMessage> & /* msg */) {
+    TRESPASS();
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/NuPlayer2.h b/media/libmedia/nuplayer2/NuPlayer2.h
new file mode 100644
index 0000000..638b259
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2.h
@@ -0,0 +1,346 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NU_PLAYER2_H_
+
+#define NU_PLAYER2_H_
+
+#include <media/AudioResamplerPublic.h>
+#include <media/MediaPlayer2Interface.h>
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMediaCryptoWrapper;
+struct AMessage;
+struct ANativeWindowWrapper;
+struct AudioPlaybackRate;
+struct AVSyncSettings;
+class IDataSource;
+struct MediaClock;
+struct MediaHTTPService;
+class MetaData;
+struct NuPlayer2Driver;
+
+struct NuPlayer2 : public AHandler {
+    explicit NuPlayer2(pid_t pid, const sp<MediaClock> &mediaClock);
+
+    void setUID(uid_t uid);
+
+    void setDriver(const wp<NuPlayer2Driver> &driver);
+
+    void setDataSourceAsync(const sp<IStreamSource> &source);
+
+    void setDataSourceAsync(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
+
+    void setDataSourceAsync(int fd, int64_t offset, int64_t length);
+
+    void setDataSourceAsync(const sp<DataSource> &source);
+
+    status_t getBufferingSettings(BufferingSettings* buffering /* nonnull */);
+    status_t setBufferingSettings(const BufferingSettings& buffering);
+
+    void prepareAsync();
+
+    void setVideoSurfaceTextureAsync(const sp<ANativeWindowWrapper> &nww);
+
+    void setAudioSink(const sp<MediaPlayer2Base::AudioSink> &sink);
+    status_t setPlaybackSettings(const AudioPlaybackRate &rate);
+    status_t getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);
+    status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint);
+    status_t getSyncSettings(AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */);
+
+    void start();
+
+    void pause();
+
+    // Will notify the driver through "notifyResetComplete" once finished.
+    void resetAsync();
+
+    // Request a notification when specified media time is reached.
+    status_t notifyAt(int64_t mediaTimeUs);
+
+    // Will notify the driver through "notifySeekComplete" once finished
+    // and needNotify is true.
+    void seekToAsync(
+            int64_t seekTimeUs,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC,
+            bool needNotify = false);
+
+    status_t setVideoScalingMode(int32_t mode);
+    status_t getTrackInfo(Parcel* reply) const;
+    status_t getSelectedTrack(int32_t type, Parcel* reply) const;
+    status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
+    status_t getCurrentPosition(int64_t *mediaUs);
+    void getStats(Vector<sp<AMessage> > *mTrackStats);
+
+    sp<MetaData> getFileMeta();
+    float getFrameRate();
+
+    // Modular DRM
+    status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId);
+    status_t releaseDrm();
+
+    const char *getDataSourceType();
+
+protected:
+    virtual ~NuPlayer2();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+public:
+    struct StreamListener;
+    struct Source;
+
+private:
+    struct Decoder;
+    struct DecoderBase;
+    struct DecoderPassThrough;
+    struct CCDecoder;
+    struct GenericSource;
+    struct HTTPLiveSource;
+    struct Renderer;
+    struct RTSPSource;
+    struct StreamingSource;
+    struct Action;
+    struct SeekAction;
+    struct SetSurfaceAction;
+    struct ResumeDecoderAction;
+    struct FlushDecoderAction;
+    struct PostMessageAction;
+    struct SimpleAction;
+
+    enum {
+        kWhatSetDataSource              = '=DaS',
+        kWhatPrepare                    = 'prep',
+        kWhatSetVideoSurface            = '=VSu',
+        kWhatSetAudioSink               = '=AuS',
+        kWhatMoreDataQueued             = 'more',
+        kWhatConfigPlayback             = 'cfPB',
+        kWhatConfigSync                 = 'cfSy',
+        kWhatGetPlaybackSettings        = 'gPbS',
+        kWhatGetSyncSettings            = 'gSyS',
+        kWhatStart                      = 'strt',
+        kWhatScanSources                = 'scan',
+        kWhatVideoNotify                = 'vidN',
+        kWhatAudioNotify                = 'audN',
+        kWhatClosedCaptionNotify        = 'capN',
+        kWhatRendererNotify             = 'renN',
+        kWhatReset                      = 'rset',
+        kWhatNotifyTime                 = 'nfyT',
+        kWhatSeek                       = 'seek',
+        kWhatPause                      = 'paus',
+        kWhatResume                     = 'rsme',
+        kWhatPollDuration               = 'polD',
+        kWhatSourceNotify               = 'srcN',
+        kWhatGetTrackInfo               = 'gTrI',
+        kWhatGetSelectedTrack           = 'gSel',
+        kWhatSelectTrack                = 'selT',
+        kWhatGetBufferingSettings       = 'gBus',
+        kWhatSetBufferingSettings       = 'sBuS',
+        kWhatPrepareDrm                 = 'pDrm',
+        kWhatReleaseDrm                 = 'rDrm',
+    };
+
+    wp<NuPlayer2Driver> mDriver;
+    bool mUIDValid;
+    uid_t mUID;
+    pid_t mPID;
+    const sp<MediaClock> mMediaClock;
+    Mutex mSourceLock;  // guard |mSource|.
+    sp<Source> mSource;
+    uint32_t mSourceFlags;
+    sp<ANativeWindowWrapper> mNativeWindow;
+    sp<MediaPlayer2Base::AudioSink> mAudioSink;
+    sp<DecoderBase> mVideoDecoder;
+    bool mOffloadAudio;
+    sp<DecoderBase> mAudioDecoder;
+    sp<CCDecoder> mCCDecoder;
+    sp<Renderer> mRenderer;
+    sp<ALooper> mRendererLooper;
+    int32_t mAudioDecoderGeneration;
+    int32_t mVideoDecoderGeneration;
+    int32_t mRendererGeneration;
+
+    Mutex mPlayingTimeLock;
+    int64_t mLastStartedPlayingTimeNs;
+    void stopPlaybackTimer(const char *where);
+    void startPlaybackTimer(const char *where);
+
+    int64_t mLastStartedRebufferingTimeNs;
+    void startRebufferingTimer();
+    void stopRebufferingTimer(bool exitingPlayback);
+
+    int64_t mPreviousSeekTimeUs;
+
+    List<sp<Action> > mDeferredActions;
+
+    bool mAudioEOS;
+    bool mVideoEOS;
+
+    bool mScanSourcesPending;
+    int32_t mScanSourcesGeneration;
+
+    int32_t mPollDurationGeneration;
+    int32_t mTimedTextGeneration;
+
+    enum FlushStatus {
+        NONE,
+        FLUSHING_DECODER,
+        FLUSHING_DECODER_SHUTDOWN,
+        SHUTTING_DOWN_DECODER,
+        FLUSHED,
+        SHUT_DOWN,
+    };
+
+    enum FlushCommand {
+        FLUSH_CMD_NONE,
+        FLUSH_CMD_FLUSH,
+        FLUSH_CMD_SHUTDOWN,
+    };
+
+    // Status of flush responses from the decoder and renderer.
+    bool mFlushComplete[2][2];
+
+    FlushStatus mFlushingAudio;
+    FlushStatus mFlushingVideo;
+
+    // True while a decoder resume (e.g. after a seek) is pending; the driver is notified
+    // once it completes.
+    bool mResumePending;
+
+    int32_t mVideoScalingMode;
+
+    AudioPlaybackRate mPlaybackSettings;
+    AVSyncSettings mSyncSettings;
+    float mVideoFpsHint;
+    bool mStarted;
+    bool mPrepared;
+    bool mResetting;
+    bool mSourceStarted;
+    bool mAudioDecoderError;
+    bool mVideoDecoderError;
+
+    // Actual pause state, either as requested by client or due to buffering.
+    bool mPaused;
+
+    // Pause state as requested by client. Note that if mPausedByClient is
+    // true, mPaused is always true; if mPausedByClient is false, mPaused could
+    // still become true, when we pause internally due to buffering.
+    bool mPausedByClient;
+
+    // Pause state as requested by source (internally) due to buffering
+    bool mPausedForBuffering;
+
+    // Modular DRM
+    sp<AMediaCryptoWrapper> mCrypto;
+    bool mIsDrmProtected;
+
+    typedef enum {
+        DATA_SOURCE_TYPE_NONE,
+        DATA_SOURCE_TYPE_HTTP_LIVE,
+        DATA_SOURCE_TYPE_RTSP,
+        DATA_SOURCE_TYPE_GENERIC_URL,
+        DATA_SOURCE_TYPE_GENERIC_FD,
+        DATA_SOURCE_TYPE_MEDIA,
+        DATA_SOURCE_TYPE_STREAM,
+    } DATA_SOURCE_TYPE;
+
+    std::atomic<DATA_SOURCE_TYPE> mDataSourceType;
+
+    inline const sp<DecoderBase> &getDecoder(bool audio) {
+        return audio ? mAudioDecoder : mVideoDecoder;
+    }
+
+    inline void clearFlushComplete() {
+        mFlushComplete[0][0] = false;
+        mFlushComplete[0][1] = false;
+        mFlushComplete[1][0] = false;
+        mFlushComplete[1][1] = false;
+    }
+
+    void tryOpenAudioSinkForOffload(
+            const sp<AMessage> &format, const sp<MetaData> &audioMeta, bool hasVideo);
+    void closeAudioSink();
+    void restartAudio(
+            int64_t currentPositionUs, bool forceNonOffload, bool needsToCreateAudioDecoder);
+    void determineAudioModeChange(const sp<AMessage> &audioFormat);
+
+    status_t instantiateDecoder(
+            bool audio, sp<DecoderBase> *decoder, bool checkAudioModeChange = true);
+
+    status_t onInstantiateSecureDecoders();
+
+    void updateVideoSize(
+            const sp<AMessage> &inputFormat,
+            const sp<AMessage> &outputFormat = NULL);
+
+    void notifyListener(int msg, int ext1, int ext2, const Parcel *in = NULL);
+
+    void handleFlushComplete(bool audio, bool isDecoder);
+    void finishFlushIfPossible();
+
+    void onStart(
+            int64_t startPositionUs = -1,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC);
+    void onResume();
+    void onPause();
+
+    bool audioDecoderStillNeeded();
+
+    void flushDecoder(bool audio, bool needShutdown);
+
+    void finishResume();
+    void notifyDriverSeekComplete();
+
+    void postScanSources();
+
+    void schedulePollDuration();
+    void cancelPollDuration();
+
+    void processDeferredActions();
+
+    void performSeek(int64_t seekTimeUs, MediaPlayer2SeekMode mode);
+    void performDecoderFlush(FlushCommand audio, FlushCommand video);
+    void performReset();
+    void performScanSources();
+    void performSetSurface(const sp<ANativeWindowWrapper> &nw);
+    void performResumeDecoders(bool needNotify);
+
+    void onSourceNotify(const sp<AMessage> &msg);
+    void onClosedCaptionNotify(const sp<AMessage> &msg);
+
+    void queueDecoderShutdown(
+            bool audio, bool video, const sp<AMessage> &reply);
+
+    void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex);
+    void sendTimedMetaData(const sp<ABuffer> &buffer);
+    void sendTimedTextData(const sp<ABuffer> &buffer);
+
+    void writeTrackInfo(Parcel* reply, const sp<AMessage>& format) const;
+
+    status_t onPrepareDrm(const sp<AMessage> &msg);
+    status_t onReleaseDrm();
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayer2);
+};
+
+}  // namespace android
+
+#endif  // NU_PLAYER2_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2CCDecoder.cpp b/media/libmedia/nuplayer2/NuPlayer2CCDecoder.cpp
new file mode 100644
index 0000000..e4afd5b
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2CCDecoder.cpp
@@ -0,0 +1,579 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2CCDecoder"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayer2CCDecoder.h"
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/MediaDefs.h>
+
+namespace android {
+
+// In CEA-708B, the maximum bandwidth of CC is set to 9600bps.
+static const size_t kMaxBandwithSizeBytes = 9600 / 8;
+
+struct CCData {
+    CCData(uint8_t type, uint8_t data1, uint8_t data2)
+        : mType(type), mData1(data1), mData2(data2) {
+    }
+    bool getChannel(size_t *channel) const {
+        if (mData1 >= 0x10 && mData1 <= 0x1f) {
+            *channel = (mData1 >= 0x18 ? 1 : 0) + (mType ? 2 : 0);
+            return true;
+        }
+        return false;
+    }
+
+    uint8_t mType;
+    uint8_t mData1;
+    uint8_t mData2;
+};
+
+static bool isNullPad(CCData *cc) {
+    return cc->mData1 < 0x10 && cc->mData2 < 0x10;
+}
+
+static void dumpBytePair(const sp<ABuffer> &ccBuf) __attribute__ ((unused));
+static void dumpBytePair(const sp<ABuffer> &ccBuf) {
+    size_t offset = 0;
+    AString out;
+
+    while (offset < ccBuf->size()) {
+        char tmp[128];
+
+        CCData *cc = (CCData *) (ccBuf->data() + offset);
+
+        if (isNullPad(cc)) {
+            // 1 null pad or XDS metadata, ignore
+            offset += sizeof(CCData);
+            continue;
+        }
+
+        if (cc->mData1 >= 0x20 && cc->mData1 <= 0x7f) {
+            // 2 basic chars
+            snprintf(tmp, sizeof(tmp), "[%d]Basic: %c %c", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+                 && cc->mData2 >= 0x30 && cc->mData2 <= 0x3f) {
+            // 1 special char
+            snprintf(tmp, sizeof(tmp), "[%d]Special: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 == 0x12 || cc->mData1 == 0x1A)
+                 && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+            // 1 Spanish/French char
+            snprintf(tmp, sizeof(tmp), "[%d]Spanish: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 == 0x13 || cc->mData1 == 0x1B)
+                 && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+            // 1 Portuguese/German/Danish char
+            snprintf(tmp, sizeof(tmp), "[%d]German: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+                 && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f){
+            // Mid-Row Codes (Table 69)
+            snprintf(tmp, sizeof(tmp), "[%d]Mid-row: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if (((cc->mData1 == 0x14 || cc->mData1 == 0x1c)
+                  && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f)
+                  ||
+                   ((cc->mData1 == 0x17 || cc->mData1 == 0x1f)
+                  && cc->mData2 >= 0x21 && cc->mData2 <= 0x23)){
+            // Misc Control Codes (Table 70)
+            snprintf(tmp, sizeof(tmp), "[%d]Ctrl: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else if ((cc->mData1 & 0x70) == 0x10
+                && (cc->mData2 & 0x40) == 0x40
+                && ((cc->mData1 & 0x07) || !(cc->mData2 & 0x20)) ) {
+            // Preamble Address Codes (Table 71)
+            snprintf(tmp, sizeof(tmp), "[%d]PAC: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        } else {
+            snprintf(tmp, sizeof(tmp), "[%d]Invalid: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+        }
+
+        if (out.size() > 0) {
+            out.append(", ");
+        }
+
+        out.append(tmp);
+
+        offset += sizeof(CCData);
+    }
+
+    ALOGI("%s", out.c_str());
+}
+
+NuPlayer2::CCDecoder::CCDecoder(const sp<AMessage> &notify)
+    : mNotify(notify),
+      mSelectedTrack(-1),
+      mDTVCCPacket(new ABuffer(kMaxBandwithSizeBytes)) {
+    mDTVCCPacket->setRange(0, 0);
+
+    // In CEA-608, packets with cc_type 0 carry the CC1 and CC2 streams, and packets with
+    // cc_type 1 carry the CC3 and CC4 streams.
+    // The following array tracks the channel currently being transmitted for each cc_type value.
+    mLine21Channels[0] = 0; // CC1
+    mLine21Channels[1] = 2; // CC3
+}
+
+size_t NuPlayer2::CCDecoder::getTrackCount() const {
+    return mTracks.size();
+}
+
+sp<AMessage> NuPlayer2::CCDecoder::getTrackInfo(size_t index) const {
+    if (!isTrackValid(index)) {
+        return NULL;
+    }
+
+    sp<AMessage> format = new AMessage();
+
+    CCTrack track = mTracks[index];
+
+    format->setInt32("type", MEDIA_TRACK_TYPE_SUBTITLE);
+    format->setString("language", "und");
+
+    switch (track.mTrackType) {
+        case kTrackTypeCEA608:
+            format->setString("mime", MEDIA_MIMETYPE_TEXT_CEA_608);
+            break;
+        case kTrackTypeCEA708:
+            format->setString("mime", MEDIA_MIMETYPE_TEXT_CEA_708);
+            break;
+        default:
+            ALOGE("Unknown track type: %d", track.mTrackType);
+            return NULL;
+    }
+
+    // For CEA-608 CC1, field 0 channel 0
+    bool isDefaultAuto = track.mTrackType == kTrackTypeCEA608
+            && track.mTrackChannel == 0;
+    // For CEA-708, Primary Caption Service.
+    bool isDefaultOnly = track.mTrackType == kTrackTypeCEA708
+            && track.mTrackChannel == 1;
+    format->setInt32("auto", isDefaultAuto);
+    format->setInt32("default", isDefaultAuto || isDefaultOnly);
+    format->setInt32("forced", 0);
+
+    return format;
+}
+
+status_t NuPlayer2::CCDecoder::selectTrack(size_t index, bool select) {
+    if (!isTrackValid(index)) {
+        return BAD_VALUE;
+    }
+
+    if (select) {
+        if (mSelectedTrack == (ssize_t)index) {
+            ALOGE("track %zu already selected", index);
+            return BAD_VALUE;
+        }
+        ALOGV("selected track %zu", index);
+        mSelectedTrack = index;
+    } else {
+        if (mSelectedTrack != (ssize_t)index) {
+            ALOGE("track %zu is not selected", index);
+            return BAD_VALUE;
+        }
+        ALOGV("unselected track %zu", index);
+        mSelectedTrack = -1;
+    }
+
+    // Clear the previous track payloads
+    mCCMap.clear();
+
+    return OK;
+}
+
+bool NuPlayer2::CCDecoder::isSelected() const {
+    return mSelectedTrack >= 0 && mSelectedTrack < (int32_t)getTrackCount();
+}
+
+bool NuPlayer2::CCDecoder::isTrackValid(size_t index) const {
+    return index < getTrackCount();
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {
+    sp<ABuffer> sei;
+    if (!accessUnit->meta()->findBuffer("sei", &sei) || sei == NULL) {
+        return false;
+    }
+
+    int64_t timeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    bool trackAdded = false;
+
+    const NALPosition *nal = (NALPosition *)sei->data();
+
+    for (size_t i = 0; i < sei->size() / sizeof(NALPosition); ++i, ++nal) {
+        trackAdded |= parseSEINalUnit(
+                timeUs, accessUnit->data() + nal->nalOffset, nal->nalSize);
+    }
+
+    return trackAdded;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::parseSEINalUnit(int64_t timeUs, const uint8_t *data, size_t size) {
+    unsigned nalType = data[0] & 0x1f;
+
+    // the buffer should only have SEI in it
+    if (nalType != 6) {
+        return false;
+    }
+
+    bool trackAdded = false;
+    NALBitReader br(data + 1, size - 1);
+
+    // sei_message()
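+    // payload_type and payload_size use the H.264 ff-coding: each is the sum of a run of 0xFF
+    // bytes plus one final byte smaller than 0xFF.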
+    while (br.atLeastNumBitsLeft(16)) { // at least 16-bit for sei_message()
+        uint32_t payload_type = 0;
+        size_t payload_size = 0;
+        uint8_t last_byte;
+
+        do {
+            last_byte = br.getBits(8);
+            payload_type += last_byte;
+        } while (last_byte == 0xFF);
+
+        do {
+            last_byte = br.getBits(8);
+            payload_size += last_byte;
+        } while (last_byte == 0xFF);
+
+        if (payload_size > SIZE_MAX / 8
+                || !br.atLeastNumBitsLeft(payload_size * 8)) {
+            ALOGV("Malformed SEI payload");
+            break;
+        }
+
+        // sei_payload()
+        if (payload_type == 4) {
+            bool isCC = false;
+            if (payload_size > 1 + 2 + 4 + 1) {
+                // user_data_registered_itu_t_t35()
+
+                // ATSC A/72: 6.4.2
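+                // Caption data is registered with ITU-T T.35 country code 0xB5 (United States),
+                // provider code 0x0031 (ATSC), ATSC identifier 'GA94' and user_data_type_code
+                // 0x3 (MPEG_cc_data), which is what the check below looks for.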
+                uint8_t itu_t_t35_country_code = br.getBits(8);
+                uint16_t itu_t_t35_provider_code = br.getBits(16);
+                uint32_t user_identifier = br.getBits(32);
+                uint8_t user_data_type_code = br.getBits(8);
+
+                payload_size -= 1 + 2 + 4 + 1;
+
+                isCC = itu_t_t35_country_code == 0xB5
+                        && itu_t_t35_provider_code == 0x0031
+                        && user_identifier == 'GA94'
+                        && user_data_type_code == 0x3;
+            }
+
+            if (isCC && payload_size > 2) {
+                trackAdded |= parseMPEGCCData(timeUs, br.data(), br.numBitsLeft() / 8);
+            } else {
+                ALOGV("Malformed SEI payload type 4");
+            }
+        } else {
+            ALOGV("Unsupported SEI payload type %d", payload_type);
+        }
+
+        // skipping remaining bits of this payload
+        br.skipBits(payload_size * 8);
+    }
+
+    return trackAdded;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::extractFromMPEGUserData(const sp<ABuffer> &accessUnit) {
+    sp<ABuffer> mpegUserData;
+    if (!accessUnit->meta()->findBuffer("mpegUserData", &mpegUserData)
+            || mpegUserData == NULL) {
+        return false;
+    }
+
+    int64_t timeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    bool trackAdded = false;
+
+    const size_t *userData = (size_t *)mpegUserData->data();
+
+    for (size_t i = 0; i < mpegUserData->size() / sizeof(size_t); ++i) {
+        trackAdded |= parseMPEGUserDataUnit(
+                timeUs, accessUnit->data() + userData[i], accessUnit->size() - userData[i]);
+    }
+
+    return trackAdded;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::parseMPEGUserDataUnit(int64_t timeUs, const uint8_t *data, size_t size) {
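+    // Skip the first four bytes of the user data unit, then read the 32-bit user_identifier and
+    // the 8-bit user_data_type_code; only ATSC 'GA94' caption data (type 0x3) is parsed further.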
+    ABitReader br(data + 4, 5);
+
+    uint32_t user_identifier = br.getBits(32);
+    uint8_t user_data_type = br.getBits(8);
+
+    if (user_identifier == 'GA94' && user_data_type == 0x3) {
+        return parseMPEGCCData(timeUs, data + 9, size - 9);
+    }
+
+    return false;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::parseMPEGCCData(int64_t timeUs, const uint8_t *data, size_t size) {
+    bool trackAdded = false;
+
+    // MPEG_cc_data()
+    // ATSC A/53 Part 4: 6.2.3.1
+    ABitReader br(data, size);
+
+    if (br.numBitsLeft() <= 16) {
+        return false;
+    }
+
+    br.skipBits(1);
+    bool process_cc_data_flag = br.getBits(1);
+    br.skipBits(1);
+    size_t cc_count = br.getBits(5);
+    br.skipBits(8);
+
+    if (!process_cc_data_flag || 3 * 8 * cc_count >= br.numBitsLeft()) {
+        return false;
+    }
+
+    sp<ABuffer> line21CCBuf = NULL;
+
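+    // Each of the cc_count constructs is three bytes: a marker/valid/type byte followed by two
+    // data bytes. cc_type 0/1 carries CEA-608 (line 21) byte pairs, cc_type 3 starts a new DTVCC
+    // (CEA-708) packet and cc_type 2 continues the one currently being accumulated.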
+    for (size_t i = 0; i < cc_count; ++i) {
+        br.skipBits(5);
+        bool cc_valid = br.getBits(1);
+        uint8_t cc_type = br.getBits(2);
+
+        if (cc_valid) {
+            if (cc_type == 3) {
+                if (mDTVCCPacket->size() > 0) {
+                    trackAdded |= parseDTVCCPacket(
+                            timeUs, mDTVCCPacket->data(), mDTVCCPacket->size());
+                    mDTVCCPacket->setRange(0, 0);
+                }
+                memcpy(mDTVCCPacket->data() + mDTVCCPacket->size(), br.data(), 2);
+                mDTVCCPacket->setRange(0, mDTVCCPacket->size() + 2);
+                br.skipBits(16);
+            } else if (mDTVCCPacket->size() > 0 && cc_type == 2) {
+                memcpy(mDTVCCPacket->data() + mDTVCCPacket->size(), br.data(), 2);
+                mDTVCCPacket->setRange(0, mDTVCCPacket->size() + 2);
+                br.skipBits(16);
+            } else if (cc_type == 0 || cc_type == 1) {
+                uint8_t cc_data_1 = br.getBits(8) & 0x7f;
+                uint8_t cc_data_2 = br.getBits(8) & 0x7f;
+
+                CCData cc(cc_type, cc_data_1, cc_data_2);
+
+                if (isNullPad(&cc)) {
+                    continue;
+                }
+
+                size_t channel;
+                if (cc.getChannel(&channel)) {
+                    mLine21Channels[cc_type] = channel;
+
+                    // create a new track if it does not exist.
+                    getTrackIndex(kTrackTypeCEA608, channel, &trackAdded);
+                }
+
+                if (isSelected() && mTracks[mSelectedTrack].mTrackType == kTrackTypeCEA608
+                        && mTracks[mSelectedTrack].mTrackChannel == mLine21Channels[cc_type]) {
+                    if (line21CCBuf == NULL) {
+                        line21CCBuf = new ABuffer((cc_count - i) * sizeof(CCData));
+                        line21CCBuf->setRange(0, 0);
+                    }
+                    memcpy(line21CCBuf->data() + line21CCBuf->size(), &cc, sizeof(cc));
+                    line21CCBuf->setRange(0, line21CCBuf->size() + sizeof(CCData));
+                }
+            } else {
+                br.skipBits(16);
+            }
+        } else {
+            if ((cc_type == 3 || cc_type == 2) && mDTVCCPacket->size() > 0) {
+                trackAdded |= parseDTVCCPacket(timeUs, mDTVCCPacket->data(), mDTVCCPacket->size());
+                mDTVCCPacket->setRange(0, 0);
+            }
+            br.skipBits(16);
+        }
+    }
+
+    if (isSelected() && mTracks[mSelectedTrack].mTrackType == kTrackTypeCEA608
+            && line21CCBuf != NULL && line21CCBuf->size() > 0) {
+        mCCMap.add(timeUs, line21CCBuf);
+    }
+
+    return trackAdded;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer2::CCDecoder::parseDTVCCPacket(int64_t timeUs, const uint8_t *data, size_t size) {
+    // CEA-708B 5 DTVCC Packet Layer.
+    ABitReader br(data, size);
+    br.skipBits(2);
+
+    size_t packet_size = br.getBits(6);
+    if (packet_size == 0) packet_size = 64;
+    packet_size *= 2;
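+    // A packet_size_code of 0 stands for 64; the full packet, header byte included, spans
+    // packet_size_code * 2 bytes and must match the buffered size exactly.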
+
+    if (size != packet_size) {
+        return false;
+    }
+
+    bool trackAdded = false;
+
+    while (br.numBitsLeft() >= 16) {
+        // CEA-708B Figure 5 and 6.
+        uint8_t service_number = br.getBits(3);
+        size_t block_size = br.getBits(5);
+
+        // A 3-bit service_number of 7 signals that a 6-bit extended service number follows;
+        // extended service numbers below 7 are invalid (CEA-708B).
+        if (service_number == 7) {
+            br.skipBits(2);
+            service_number = br.getBits(6);
+
+            if (service_number < 7) {
+                return trackAdded;
+            }
+        }
+
+        if (br.numBitsLeft() < block_size * 8) {
+            return trackAdded;
+        }
+
+        if (block_size > 0) {
+            size_t trackIndex = getTrackIndex(kTrackTypeCEA708, service_number, &trackAdded);
+            if (mSelectedTrack == (ssize_t)trackIndex) {
+                sp<ABuffer> ccPacket = new ABuffer(block_size);
+                memcpy(ccPacket->data(), br.data(), block_size);
+                mCCMap.add(timeUs, ccPacket);
+            }
+        }
+        br.skipBits(block_size * 8);
+    }
+
+    return trackAdded;
+}
+
+// return the track index for a given type and channel.
+// if the track does not exist, creates a new one.
+size_t NuPlayer2::CCDecoder::getTrackIndex(
+        int32_t trackType, size_t channel, bool *trackAdded) {
+    CCTrack track(trackType, channel);
+    ssize_t index = mTrackIndices.indexOfKey(track);
+
+    if (index < 0) {
+        // A new track is added.
+        index = mTracks.size();
+        mTrackIndices.add(track, index);
+        mTracks.add(track);
+        *trackAdded = true;
+        return index;
+    }
+
+    return mTrackIndices.valueAt(index);
+}
+
+void NuPlayer2::CCDecoder::decode(const sp<ABuffer> &accessUnit) {
+    if (extractFromMPEGUserData(accessUnit) || extractFromSEI(accessUnit)) {
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("what", kWhatTrackAdded);
+        msg->post();
+    }
+    // TODO: extract CC from other sources
+}
+
+void NuPlayer2::CCDecoder::display(int64_t timeUs) {
+    if (!isSelected()) {
+        return;
+    }
+
+    ssize_t index = mCCMap.indexOfKey(timeUs);
+    if (index < 0) {
+        ALOGV("cc for timestamp %" PRId64 " not found", timeUs);
+        return;
+    }
+
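+    // Concatenate every queued CC buffer up to and including this timestamp into one buffer
+    // before handing it to the notify target.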
+    sp<ABuffer> ccBuf;
+
+    if (index == 0) {
+        ccBuf = mCCMap.valueAt(index);
+    } else {
+        size_t size = 0;
+
+        for (ssize_t i = 0; i <= index; ++i) {
+            size += mCCMap.valueAt(i)->size();
+        }
+
+        ccBuf = new ABuffer(size);
+        ccBuf->setRange(0, 0);
+
+        for (ssize_t i = 0; i <= index; ++i) {
+            sp<ABuffer> buf = mCCMap.valueAt(i);
+            memcpy(ccBuf->data() + ccBuf->size(), buf->data(), buf->size());
+            ccBuf->setRange(0, ccBuf->size() + buf->size());
+        }
+    }
+
+    if (ccBuf->size() > 0) {
+#if 0
+        dumpBytePair(ccBuf);
+#endif
+
+        ccBuf->meta()->setInt32("trackIndex", mSelectedTrack);
+        ccBuf->meta()->setInt64("timeUs", timeUs);
+        ccBuf->meta()->setInt64("durationUs", 0ll);
+
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("what", kWhatClosedCaptionData);
+        msg->setBuffer("buffer", ccBuf);
+        msg->post();
+    }
+
+    // remove all entries before timeUs
+    mCCMap.removeItemsAt(0, index + 1);
+}
+
+void NuPlayer2::CCDecoder::flush() {
+    mCCMap.clear();
+    mDTVCCPacket->setRange(0, 0);
+}
+
+int32_t NuPlayer2::CCDecoder::CCTrack::compare(const NuPlayer2::CCDecoder::CCTrack& rhs) const {
+    int32_t cmp = mTrackType - rhs.mTrackType;
+    if (cmp != 0) return cmp;
+    return mTrackChannel - rhs.mTrackChannel;
+}
+
+bool NuPlayer2::CCDecoder::CCTrack::operator<(const NuPlayer2::CCDecoder::CCTrack& rhs) const {
+    return compare(rhs) < 0;
+}
+
+bool NuPlayer2::CCDecoder::CCTrack::operator==(const NuPlayer2::CCDecoder::CCTrack& rhs) const {
+    return compare(rhs) == 0;
+}
+
+bool NuPlayer2::CCDecoder::CCTrack::operator!=(const NuPlayer2::CCDecoder::CCTrack& rhs) const {
+    return compare(rhs) != 0;
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2CCDecoder.h b/media/libmedia/nuplayer2/NuPlayer2CCDecoder.h
new file mode 100644
index 0000000..57d5ea2
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2CCDecoder.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_CCDECODER_H_
+
+#define NUPLAYER2_CCDECODER_H_
+
+#include "NuPlayer2.h"
+
+namespace android {
+
+struct NuPlayer2::CCDecoder : public RefBase {
+    enum {
+        kWhatClosedCaptionData,
+        kWhatTrackAdded,
+    };
+
+    enum {
+        kTrackTypeCEA608,
+        kTrackTypeCEA708,
+    };
+
+    explicit CCDecoder(const sp<AMessage> &notify);
+
+    size_t getTrackCount() const;
+    sp<AMessage> getTrackInfo(size_t index) const;
+    status_t selectTrack(size_t index, bool select);
+    bool isSelected() const;
+    void decode(const sp<ABuffer> &accessUnit);
+    void display(int64_t timeUs);
+    void flush();
+
+private:
+    // CC track identifier.
+    struct CCTrack {
+        CCTrack() : mTrackType(0), mTrackChannel(0) { }
+
+        CCTrack(const int32_t trackType, const size_t trackChannel)
+            : mTrackType(trackType), mTrackChannel(trackChannel) { }
+
+        int32_t mTrackType;
+        size_t mTrackChannel;
+
+        // CCTracks are ordered so they can serve as keys in the track-to-index map,
+        // which is used to look up the index of the matching track when CC data arrives.
+        int compare(const NuPlayer2::CCDecoder::CCTrack& rhs) const;
+        inline bool operator<(const NuPlayer2::CCDecoder::CCTrack& rhs) const;
+        inline bool operator==(const NuPlayer2::CCDecoder::CCTrack& rhs) const;
+        inline bool operator!=(const NuPlayer2::CCDecoder::CCTrack& rhs) const;
+    };
+
+    sp<AMessage> mNotify;
+    KeyedVector<int64_t, sp<ABuffer> > mCCMap;
+    ssize_t mSelectedTrack;
+    KeyedVector<CCTrack, size_t> mTrackIndices;
+    Vector<CCTrack> mTracks;
+
+    // CEA-608 closed caption
+    size_t mLine21Channels[2]; // The current channels of NTSC_CC_FIELD_{1, 2}
+
+    // CEA-708 closed caption
+    sp<ABuffer> mDTVCCPacket;
+
+    bool isTrackValid(size_t index) const;
+    size_t getTrackIndex(int32_t trackType, size_t channel, bool *trackAdded);
+
+    // Extract from H.264 SEIs
+    bool extractFromSEI(const sp<ABuffer> &accessUnit);
+    bool parseSEINalUnit(int64_t timeUs, const uint8_t *data, size_t size);
+
+    // Extract from MPEG user data
+    bool extractFromMPEGUserData(const sp<ABuffer> &accessUnit);
+    bool parseMPEGUserDataUnit(int64_t timeUs, const uint8_t *data, size_t size);
+
+    // Extract CC tracks from MPEG_cc_data
+    bool parseMPEGCCData(int64_t timeUs, const uint8_t *data, size_t size);
+    bool parseDTVCCPacket(int64_t timeUs, const uint8_t *data, size_t size);
+
+    DISALLOW_EVIL_CONSTRUCTORS(CCDecoder);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_CCDECODER_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Decoder.cpp b/media/libmedia/nuplayer2/NuPlayer2Decoder.cpp
new file mode 100644
index 0000000..25d41f3
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Decoder.cpp
@@ -0,0 +1,1291 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2Decoder"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include <algorithm>
+
+#include "NuPlayer2CCDecoder.h"
+#include "NuPlayer2Decoder.h"
+#include "NuPlayer2Drm.h"
+#include "NuPlayer2Renderer.h"
+#include "NuPlayer2Source.h"
+
+#include <cutils/properties.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkWrapper.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/SurfaceUtils.h>
+
+#include "ATSParser.h"
+
+namespace android {
+
+static float kDisplayRefreshingRate = 60.f; // TODO: get this from the display
+
+// The default total video frame rate of a stream when that info is not available from
+// the source.
+static float kDefaultVideoFrameRateTotal = 30.f;
+
+static inline bool getAudioDeepBufferSetting() {
+    return property_get_bool("media.stagefright.audio.deep", false /* default_value */);
+}
+
+NuPlayer2::Decoder::Decoder(
+        const sp<AMessage> &notify,
+        const sp<Source> &source,
+        pid_t pid,
+        uid_t uid,
+        const sp<Renderer> &renderer,
+        const sp<ANativeWindowWrapper> &nww,
+        const sp<CCDecoder> &ccDecoder)
+    : DecoderBase(notify),
+      mNativeWindow(nww),
+      mSource(source),
+      mRenderer(renderer),
+      mCCDecoder(ccDecoder),
+      mPid(pid),
+      mUid(uid),
+      mSkipRenderingUntilMediaTimeUs(-1ll),
+      mNumFramesTotal(0ll),
+      mNumInputFramesDropped(0ll),
+      mNumOutputFramesDropped(0ll),
+      mVideoWidth(0),
+      mVideoHeight(0),
+      mIsAudio(true),
+      mIsVideoAVC(false),
+      mIsSecure(false),
+      mIsEncrypted(false),
+      mIsEncryptedObservedEarlier(false),
+      mFormatChangePending(false),
+      mTimeChangePending(false),
+      mFrameRateTotal(kDefaultVideoFrameRateTotal),
+      mPlaybackSpeed(1.0f),
+      mNumVideoTemporalLayerTotal(1), // decode all layers
+      mNumVideoTemporalLayerAllowed(1),
+      mCurrentMaxVideoTemporalLayerId(0),
+      mResumePending(false),
+      mComponentName("decoder") {
+    mVideoTemporalLayerAggregateFps[0] = mFrameRateTotal;
+}
+
+NuPlayer2::Decoder::~Decoder() {
+    // Need to stop looper first since mCodec could be accessed on the mDecoderLooper.
+    stopLooper();
+    if (mCodec != NULL) {
+        mCodec->release();
+    }
+    releaseAndResetMediaBuffers();
+}
+
+sp<AMessage> NuPlayer2::Decoder::getStats() const {
+    mStats->setInt64("frames-total", mNumFramesTotal);
+    mStats->setInt64("frames-dropped-input", mNumInputFramesDropped);
+    mStats->setInt64("frames-dropped-output", mNumOutputFramesDropped);
+    return mStats;
+}
+
+status_t NuPlayer2::Decoder::setVideoSurface(const sp<ANativeWindowWrapper> &nww) {
+    if (nww == NULL || nww->getANativeWindow() == NULL
+        || ADebug::isExperimentEnabled("legacy-setsurface")) {
+        return BAD_VALUE;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, this);
+
+    msg->setObject("surface", nww);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+void NuPlayer2::Decoder::onMessageReceived(const sp<AMessage> &msg) {
+    ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
+
+    switch (msg->what()) {
+        case kWhatCodecNotify:
+        {
+            int32_t cbID;
+            CHECK(msg->findInt32("callbackID", &cbID));
+
+            ALOGV("[%s] kWhatCodecNotify: cbID = %d, paused = %d",
+                    mIsAudio ? "audio" : "video", cbID, mPaused);
+
+            if (mPaused) {
+                break;
+            }
+
+            switch (cbID) {
+                case AMediaCodecWrapper::CB_INPUT_AVAILABLE:
+                {
+                    int32_t index;
+                    CHECK(msg->findInt32("index", &index));
+
+                    handleAnInputBuffer(index);
+                    break;
+                }
+
+                case AMediaCodecWrapper::CB_OUTPUT_AVAILABLE:
+                {
+                    int32_t index;
+                    size_t offset;
+                    size_t size;
+                    int64_t timeUs;
+                    int32_t flags;
+
+                    CHECK(msg->findInt32("index", &index));
+                    CHECK(msg->findSize("offset", &offset));
+                    CHECK(msg->findSize("size", &size));
+                    CHECK(msg->findInt64("timeUs", &timeUs));
+                    CHECK(msg->findInt32("flags", &flags));
+
+                    handleAnOutputBuffer(index, offset, size, timeUs, flags);
+                    break;
+                }
+
+                case AMediaCodecWrapper::CB_OUTPUT_FORMAT_CHANGED:
+                {
+                    sp<AMessage> format;
+                    CHECK(msg->findMessage("format", &format));
+
+                    handleOutputFormatChange(format);
+                    break;
+                }
+
+                case AMediaCodecWrapper::CB_ERROR:
+                {
+                    status_t err;
+                    CHECK(msg->findInt32("err", &err));
+                    ALOGE("Decoder (%s) reported error : 0x%x",
+                            mIsAudio ? "audio" : "video", err);
+
+                    handleError(err);
+                    break;
+                }
+
+                default:
+                {
+                    TRESPASS();
+                    break;
+                }
+            }
+
+            break;
+        }
+
+        case kWhatRenderBuffer:
+        {
+            if (!isStaleReply(msg)) {
+                onRenderBuffer(msg);
+            }
+            break;
+        }
+
+        case kWhatAudioOutputFormatChanged:
+        {
+            if (!isStaleReply(msg)) {
+                status_t err;
+                if (msg->findInt32("err", &err) && err != OK) {
+                    ALOGE("Renderer reported 0x%x when changing audio output format", err);
+                    handleError(err);
+                }
+            }
+            break;
+        }
+
+        case kWhatSetVideoSurface:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("surface", &obj));
+            sp<ANativeWindowWrapper> nww =
+                static_cast<ANativeWindowWrapper *>(obj.get()); // non-null
+            if (nww == NULL || nww->getANativeWindow() == NULL) {
+                break;
+            }
+            int32_t err = INVALID_OPERATION;
+            // NOTE: in practice mNativeWindow is always non-null,
+            // but checking here for completeness
+            if (mCodec != NULL
+                && mNativeWindow != NULL && mNativeWindow->getANativeWindow() != NULL) {
+                // TODO: once AwesomePlayer is removed, remove this automatic connecting
+                // to the surface by MediaPlayerService.
+                //
+                // at this point MediaPlayer2Manager::client has already connected to the
+                // surface, which MediaCodec does not expect
+                err = nativeWindowDisconnect(nww->getANativeWindow(), "kWhatSetVideoSurface(nww)");
+                if (err == OK) {
+                    err = mCodec->setOutputSurface(nww);
+                    ALOGI_IF(err, "codec setOutputSurface returned: %d", err);
+                    if (err == OK) {
+                        // reconnect to the old surface as MPS::Client will expect to
+                        // be able to disconnect from it.
+                        (void)nativeWindowConnect(mNativeWindow->getANativeWindow(),
+                                                  "kWhatSetVideoSurface(mNativeWindow)");
+                        mNativeWindow = nww;
+                    }
+                }
+                if (err != OK) {
+                    // reconnect to the new surface on error as MPS::Client will expect to
+                    // be able to disconnect from it.
+                    (void)nativeWindowConnect(nww->getANativeWindow(), "kWhatSetVideoSurface(err)");
+                }
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatDrmReleaseCrypto:
+        {
+            ALOGV("kWhatDrmReleaseCrypto");
+            onReleaseCrypto(msg);
+            break;
+        }
+
+        default:
+            DecoderBase::onMessageReceived(msg);
+            break;
+    }
+}
+
+void NuPlayer2::Decoder::onConfigure(const sp<AMessage> &format) {
+    ALOGV("[%s] onConfigure (format=%s)", mComponentName.c_str(), format->debugString().c_str());
+    CHECK(mCodec == NULL);
+
+    mFormatChangePending = false;
+    mTimeChangePending = false;
+
+    ++mBufferGeneration;
+
+    AString mime;
+    CHECK(format->findString("mime", &mime));
+
+    mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);
+    mIsVideoAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());
+
+    mComponentName = mime;
+    mComponentName.append(" decoder");
+    ALOGV("[%s] onConfigure (nww=%p)", mComponentName.c_str(),
+          (mNativeWindow == NULL ? NULL : mNativeWindow->getANativeWindow()));
+
+    mCodec = AMediaCodecWrapper::CreateDecoderByType(mime);
+    int32_t secure = 0;
+    if (format->findInt32("secure", &secure) && secure != 0) {
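+        // Secure playback needs the codec's ".secure" variant, so look up the default decoder's
+        // name and re-create the codec by that name with the suffix appended.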
+        if (mCodec != NULL) {
+            if (mCodec->getName(&mComponentName) == OK) {
+                mComponentName.append(".secure");
+                mCodec->release();
+                ALOGI("[%s] creating", mComponentName.c_str());
+                mCodec = AMediaCodecWrapper::CreateCodecByName(mComponentName);
+            } else {
+                mCodec = NULL;
+            }
+        }
+    }
+    if (mCodec == NULL) {
+        ALOGE("Failed to create %s%s decoder",
+                (secure ? "secure " : ""), mime.c_str());
+        handleError(NO_INIT);
+        return;
+    }
+    mIsSecure = secure;
+
+    mCodec->getName(&mComponentName);
+
+    status_t err;
+    if (mNativeWindow != NULL && mNativeWindow->getANativeWindow() != NULL) {
+        // disconnect from surface as MediaCodec will reconnect
+        err = nativeWindowDisconnect(mNativeWindow->getANativeWindow(), "onConfigure");
+        // We treat this as a warning, as this is a preparatory step.
+        // Codec will try to connect to the surface, which is where
+        // any error signaling will occur.
+        ALOGW_IF(err != OK, "failed to disconnect from surface: %d", err);
+    }
+
+    // Modular DRM
+    sp<RefBase> objCrypto;
+    format->findObject("crypto", &objCrypto);
+    sp<AMediaCryptoWrapper> crypto = static_cast<AMediaCryptoWrapper *>(objCrypto.get());
+    // non-encrypted source won't have a crypto
+    mIsEncrypted = (crypto != NULL);
+    // configure is called once; still using OR in case the behavior changes.
+    mIsEncryptedObservedEarlier = mIsEncryptedObservedEarlier || mIsEncrypted;
+    ALOGV("onConfigure mCrypto: %p, mIsSecure: %d", crypto.get(), mIsSecure);
+
+    err = mCodec->configure(
+            AMediaFormatWrapper::Create(format),
+            mNativeWindow,
+            crypto,
+            0 /* flags */);
+
+    if (err != OK) {
+        ALOGE("Failed to configure [%s] decoder (err=%d)", mComponentName.c_str(), err);
+        mCodec->release();
+        mCodec.clear();
+        handleError(err);
+        return;
+    }
+    rememberCodecSpecificData(format);
+
+    // the following should work in configured state
+    sp<AMediaFormatWrapper> outputFormat = mCodec->getOutputFormat();
+    if (outputFormat == NULL) {
+        handleError(INVALID_OPERATION);
+        return;
+    }
+    mInputFormat = mCodec->getInputFormat();
+    if (mInputFormat == NULL) {
+        handleError(INVALID_OPERATION);
+        return;
+    }
+
+    mStats->setString("mime", mime.c_str());
+    mStats->setString("component-name", mComponentName.c_str());
+
+    if (!mIsAudio) {
+        int32_t width, height;
+        if (outputFormat->getInt32("width", &width)
+                && outputFormat->getInt32("height", &height)) {
+            mStats->setInt32("width", width);
+            mStats->setInt32("height", height);
+        }
+    }
+
+    sp<AMessage> reply = new AMessage(kWhatCodecNotify, this);
+    mCodec->setCallback(reply);
+
+    err = mCodec->start();
+    if (err != OK) {
+        ALOGE("Failed to start [%s] decoder (err=%d)", mComponentName.c_str(), err);
+        mCodec->release();
+        mCodec.clear();
+        handleError(err);
+        return;
+    }
+
+    releaseAndResetMediaBuffers();
+
+    mPaused = false;
+    mResumePending = false;
+}
+
+void NuPlayer2::Decoder::onSetParameters(const sp<AMessage> &params) {
+    bool needAdjustLayers = false;
+    float frameRateTotal;
+    if (params->findFloat("frame-rate-total", &frameRateTotal)
+            && mFrameRateTotal != frameRateTotal) {
+        needAdjustLayers = true;
+        mFrameRateTotal = frameRateTotal;
+    }
+
+    int32_t numVideoTemporalLayerTotal;
+    if (params->findInt32("temporal-layer-count", &numVideoTemporalLayerTotal)
+            && numVideoTemporalLayerTotal >= 0
+            && numVideoTemporalLayerTotal <= kMaxNumVideoTemporalLayers
+            && mNumVideoTemporalLayerTotal != numVideoTemporalLayerTotal) {
+        needAdjustLayers = true;
+        mNumVideoTemporalLayerTotal = std::max(numVideoTemporalLayerTotal, 1);
+    }
+
+    if (needAdjustLayers && mNumVideoTemporalLayerTotal > 1) {
+        // TODO: For now, layer fps is calculated for some specific architectures.
+        // But it really should be extracted from the stream.
+        mVideoTemporalLayerAggregateFps[0] =
+            mFrameRateTotal / (float)(1ll << (mNumVideoTemporalLayerTotal - 1));
+        for (int32_t i = 1; i < mNumVideoTemporalLayerTotal; ++i) {
+            mVideoTemporalLayerAggregateFps[i] =
+                mFrameRateTotal / (float)(1ll << (mNumVideoTemporalLayerTotal - i))
+                + mVideoTemporalLayerAggregateFps[i - 1];
+        }
+    }
+
+    float playbackSpeed;
+    if (params->findFloat("playback-speed", &playbackSpeed)
+            && mPlaybackSpeed != playbackSpeed) {
+        needAdjustLayers = true;
+        mPlaybackSpeed = playbackSpeed;
+    }
+
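+    // Pick the smallest set of temporal layers whose aggregate frame rate, scaled by the playback
+    // speed, covers roughly 90% of the display refresh rate, then tell the codec the resulting
+    // operating rate.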
+    if (needAdjustLayers) {
+        float decodeFrameRate = mFrameRateTotal;
+        // enable temporal layering optimization only if we know the layering depth
+        if (mNumVideoTemporalLayerTotal > 1) {
+            int32_t layerId;
+            for (layerId = 0; layerId < mNumVideoTemporalLayerTotal - 1; ++layerId) {
+                if (mVideoTemporalLayerAggregateFps[layerId] * mPlaybackSpeed
+                        >= kDisplayRefreshingRate * 0.9) {
+                    break;
+                }
+            }
+            mNumVideoTemporalLayerAllowed = layerId + 1;
+            decodeFrameRate = mVideoTemporalLayerAggregateFps[layerId];
+        }
+        ALOGV("onSetParameters: allowed layers=%d, decodeFps=%g",
+                mNumVideoTemporalLayerAllowed, decodeFrameRate);
+
+        if (mCodec == NULL) {
+            ALOGW("onSetParameters called before codec is created.");
+            return;
+        }
+
+        sp<AMediaFormatWrapper> codecParams = new AMediaFormatWrapper();
+        codecParams->setFloat("operating-rate", decodeFrameRate * mPlaybackSpeed);
+        mCodec->setParameters(codecParams);
+    }
+}
+
+void NuPlayer2::Decoder::onSetRenderer(const sp<Renderer> &renderer) {
+    mRenderer = renderer;
+}
+
+void NuPlayer2::Decoder::onResume(bool notifyComplete) {
+    mPaused = false;
+
+    if (notifyComplete) {
+        mResumePending = true;
+    }
+
+    if (mCodec == NULL) {
+        ALOGE("[%s] onResume without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return;
+    }
+    mCodec->start();
+}
+
+void NuPlayer2::Decoder::doFlush(bool notifyComplete) {
+    if (mCCDecoder != NULL) {
+        mCCDecoder->flush();
+    }
+
+    if (mRenderer != NULL) {
+        mRenderer->flush(mIsAudio, notifyComplete);
+        mRenderer->signalTimeDiscontinuity();
+    }
+
+    status_t err = OK;
+    if (mCodec != NULL) {
+        err = mCodec->flush();
+        mCSDsToSubmit = mCSDsForCurrentFormat; // copy operator
+        ++mBufferGeneration;
+    }
+
+    if (err != OK) {
+        ALOGE("failed to flush [%s] (err=%d)", mComponentName.c_str(), err);
+        handleError(err);
+        // finish with posting kWhatFlushCompleted.
+        // we attempt to release the buffers even if flush fails.
+    }
+    releaseAndResetMediaBuffers();
+    mPaused = true;
+}
+
+
+void NuPlayer2::Decoder::onFlush() {
+    doFlush(true);
+
+    if (isDiscontinuityPending()) {
+        // This could happen if the client starts seeking/shutdown
+        // after we queued an EOS for discontinuities.
+        // We can consider discontinuity handled.
+        finishHandleDiscontinuity(false /* flushOnTimeChange */);
+    }
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFlushCompleted);
+    notify->post();
+}
+
+void NuPlayer2::Decoder::onShutdown(bool notifyComplete) {
+    status_t err = OK;
+
+    // if there is a pending resume request, notify complete now
+    notifyResumeCompleteIfNecessary();
+
+    if (mCodec != NULL) {
+        err = mCodec->release();
+        mCodec = NULL;
+        ++mBufferGeneration;
+
+        if (mNativeWindow != NULL && mNativeWindow->getANativeWindow() != NULL) {
+            // reconnect to surface as MediaCodec disconnected from it
+            status_t error = nativeWindowConnect(mNativeWindow->getANativeWindow(), "onShutdown");
+            ALOGW_IF(error != NO_ERROR,
+                    "[%s] failed to connect to native window, error=%d",
+                    mComponentName.c_str(), error);
+        }
+        mComponentName = "decoder";
+    }
+
+    releaseAndResetMediaBuffers();
+
+    if (err != OK) {
+        ALOGE("failed to release [%s] (err=%d)", mComponentName.c_str(), err);
+        handleError(err);
+        // finish with posting kWhatShutdownCompleted.
+    }
+
+    if (notifyComplete) {
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatShutdownCompleted);
+        notify->post();
+        mPaused = true;
+    }
+}
+
+/*
+ * returns true if we should request more data
+ */
+bool NuPlayer2::Decoder::doRequestBuffers() {
+    if (isDiscontinuityPending()) {
+        return false;
+    }
+    status_t err = OK;
+    while (err == OK && !mDequeuedInputBuffers.empty()) {
+        size_t bufferIx = *mDequeuedInputBuffers.begin();
+        sp<AMessage> msg = new AMessage();
+        msg->setSize("buffer-ix", bufferIx);
+        err = fetchInputData(msg);
+        if (err != OK && err != ERROR_END_OF_STREAM) {
+            // if EOS, need to queue EOS buffer
+            break;
+        }
+        mDequeuedInputBuffers.erase(mDequeuedInputBuffers.begin());
+
+        if (!mPendingInputMessages.empty()
+                || !onInputBufferFetched(msg)) {
+            mPendingInputMessages.push_back(msg);
+        }
+    }
+
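+    // Keep requesting only if the source would block right now but can still feed TS data.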
+    return err == -EWOULDBLOCK
+            && mSource->feedMoreTSData() == OK;
+}
+
+void NuPlayer2::Decoder::handleError(int32_t err)
+{
+    // We cannot immediately release the codec because buffers may still be outstanding
+    // in the renderer. Signal the error to the player so it can shut down and release the
+    // decoder after flushing, and bump the buffer generation to discard stale messages.
+
+    ++mBufferGeneration;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatError);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+status_t NuPlayer2::Decoder::releaseCrypto()
+{
+    ALOGV("releaseCrypto");
+
+    sp<AMessage> msg = new AMessage(kWhatDrmReleaseCrypto, this);
+
+    sp<AMessage> response;
+    status_t status = msg->postAndAwaitResponse(&response);
+    if (status == OK && response != NULL) {
+        CHECK(response->findInt32("status", &status));
+        ALOGV("releaseCrypto ret: %d ", status);
+    } else {
+        ALOGE("releaseCrypto err: %d", status);
+    }
+
+    return status;
+}
+
+void NuPlayer2::Decoder::onReleaseCrypto(const sp<AMessage>& msg)
+{
+    status_t status = INVALID_OPERATION;
+    if (mCodec != NULL) {
+        status = mCodec->releaseCrypto();
+    } else {
+        // return OK if the codec has already been released
+        status = OK;
+        ALOGE("onReleaseCrypto No mCodec. err: %d", status);
+    }
+
+    sp<AMessage> response = new AMessage;
+    response->setInt32("status", status);
+    // Clearing the state as it's tied to crypto. mIsEncryptedObservedEarlier is sticky though
+    // and lasts for the lifetime of this codec. See its use in fetchInputData.
+    mIsEncrypted = false;
+
+    sp<AReplyToken> replyID;
+    CHECK(msg->senderAwaitsResponse(&replyID));
+    response->postReply(replyID);
+}
+
+bool NuPlayer2::Decoder::handleAnInputBuffer(size_t index) {
+    if (isDiscontinuityPending()) {
+        return false;
+    }
+
+    if (mCodec == NULL) {
+        ALOGE("[%s] handleAnInputBuffer without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return false;
+    }
+
+    size_t bufferSize = 0;
+    uint8_t *bufferBase = mCodec->getInputBuffer(index, &bufferSize);
+
+    if (bufferBase == NULL) {
+        ALOGE("[%s] handleAnInputBuffer, failed to get input buffer", mComponentName.c_str());
+        handleError(UNKNOWN_ERROR);
+        return false;
+    }
+
+    sp<MediaCodecBuffer> buffer =
+        new MediaCodecBuffer(NULL /* format */, new ABuffer(bufferBase, bufferSize));
+
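+    // Grow the per-buffer bookkeeping vectors if this index has not been seen before.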
+    if (index >= mInputBuffers.size()) {
+        for (size_t i = mInputBuffers.size(); i <= index; ++i) {
+            mInputBuffers.add();
+            mMediaBuffers.add();
+            mInputBufferIsDequeued.add();
+            mMediaBuffers.editItemAt(i) = NULL;
+            mInputBufferIsDequeued.editItemAt(i) = false;
+        }
+    }
+    mInputBuffers.editItemAt(index) = buffer;
+
+    //CHECK_LT(bufferIx, mInputBuffers.size());
+
+    if (mMediaBuffers[index] != NULL) {
+        mMediaBuffers[index]->release();
+        mMediaBuffers.editItemAt(index) = NULL;
+    }
+    mInputBufferIsDequeued.editItemAt(index) = true;
+
+    if (!mCSDsToSubmit.isEmpty()) {
+        sp<AMessage> msg = new AMessage();
+        msg->setSize("buffer-ix", index);
+
+        sp<ABuffer> buffer = mCSDsToSubmit.itemAt(0);
+        ALOGI("[%s] resubmitting CSD", mComponentName.c_str());
+        msg->setBuffer("buffer", buffer);
+        mCSDsToSubmit.removeAt(0);
+        if (!onInputBufferFetched(msg)) {
+            handleError(UNKNOWN_ERROR);
+            return false;
+        }
+        return true;
+    }
+
+    while (!mPendingInputMessages.empty()) {
+        sp<AMessage> msg = *mPendingInputMessages.begin();
+        if (!onInputBufferFetched(msg)) {
+            break;
+        }
+        mPendingInputMessages.erase(mPendingInputMessages.begin());
+    }
+
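+    // A pending input message above may have already claimed this buffer.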
+    if (!mInputBufferIsDequeued.editItemAt(index)) {
+        return true;
+    }
+
+    mDequeuedInputBuffers.push_back(index);
+
+    onRequestInputBuffers();
+    return true;
+}
+
+bool NuPlayer2::Decoder::handleAnOutputBuffer(
+        size_t index,
+        size_t offset,
+        size_t size,
+        int64_t timeUs,
+        int32_t flags) {
+    if (mCodec == NULL) {
+        ALOGE("[%s] handleAnOutputBuffer without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return false;
+    }
+
+//    CHECK_LT(bufferIx, mOutputBuffers.size());
+
+    size_t bufferSize = 0;
+    uint8_t *bufferBase = mCodec->getOutputBuffer(index, &bufferSize);
+
+    if (bufferBase == NULL) {
+        ALOGE("[%s] handleAnOutputBuffer, failed to get output buffer", mComponentName.c_str());
+        handleError(UNKNOWN_ERROR);
+        return false;
+    }
+
+    sp<MediaCodecBuffer> buffer =
+        new MediaCodecBuffer(NULL /* format */, new ABuffer(bufferBase, bufferSize));
+
+    if (index >= mOutputBuffers.size()) {
+        for (size_t i = mOutputBuffers.size(); i <= index; ++i) {
+            mOutputBuffers.add();
+        }
+    }
+
+    mOutputBuffers.editItemAt(index) = buffer;
+
+    buffer->setRange(offset, size);
+    buffer->meta()->clear();
+    buffer->meta()->setInt64("timeUs", timeUs);
+
+    bool eos = flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
+    // we do not expect CODECCONFIG or SYNCFRAME for decoder
+
+    sp<AMessage> reply = new AMessage(kWhatRenderBuffer, this);
+    reply->setSize("buffer-ix", index);
+    reply->setInt32("generation", mBufferGeneration);
+
+    if (eos) {
+        ALOGI("[%s] saw output EOS", mIsAudio ? "audio" : "video");
+
+        buffer->meta()->setInt32("eos", true);
+        reply->setInt32("eos", true);
+    }
+
+    mNumFramesTotal += !mIsAudio;
+
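+    // Drop output that precedes the resume point requested via "resume-at-mediaTimeUs".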
+    if (mSkipRenderingUntilMediaTimeUs >= 0) {
+        if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+            ALOGV("[%s] dropping buffer at time %lld as requested.",
+                     mComponentName.c_str(), (long long)timeUs);
+
+            reply->post();
+            if (eos) {
+                notifyResumeCompleteIfNecessary();
+                if (mRenderer != NULL && !isDiscontinuityPending()) {
+                    mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
+                }
+            }
+            return true;
+        }
+
+        mSkipRenderingUntilMediaTimeUs = -1;
+    }
+
+    // wait until 1st frame comes out to signal resume complete
+    notifyResumeCompleteIfNecessary();
+
+    if (mRenderer != NULL) {
+        // send the buffer to renderer.
+        mRenderer->queueBuffer(mIsAudio, buffer, reply);
+        if (eos && !isDiscontinuityPending()) {
+            mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
+        }
+    }
+
+    return true;
+}
+
+void NuPlayer2::Decoder::handleOutputFormatChange(const sp<AMessage> &format) {
+    if (!mIsAudio) {
+        int32_t width, height;
+        if (format->findInt32("width", &width)
+                && format->findInt32("height", &height)) {
+            mStats->setInt32("width", width);
+            mStats->setInt32("height", height);
+        }
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatVideoSizeChanged);
+        notify->setMessage("format", format);
+        notify->post();
+    } else if (mRenderer != NULL) {
+        uint32_t flags;
+        int64_t durationUs;
+        bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
+        if (getAudioDeepBufferSetting() // override regardless of source duration
+                || (mSource->getDuration(&durationUs) == OK
+                        && durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US)) {
+            flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+        } else {
+            flags = AUDIO_OUTPUT_FLAG_NONE;
+        }
+
+        sp<AMessage> reply = new AMessage(kWhatAudioOutputFormatChanged, this);
+        reply->setInt32("generation", mBufferGeneration);
+        mRenderer->changeAudioFormat(
+                format, false /* offloadOnly */, hasVideo,
+                flags, mSource->isStreaming(), reply);
+    }
+}
+
+void NuPlayer2::Decoder::releaseAndResetMediaBuffers() {
+    for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+        if (mMediaBuffers[i] != NULL) {
+            mMediaBuffers[i]->release();
+            mMediaBuffers.editItemAt(i) = NULL;
+        }
+    }
+    mMediaBuffers.resize(mInputBuffers.size());
+    for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+        mMediaBuffers.editItemAt(i) = NULL;
+    }
+    mInputBufferIsDequeued.clear();
+    mInputBufferIsDequeued.resize(mInputBuffers.size());
+    for (size_t i = 0; i < mInputBufferIsDequeued.size(); i++) {
+        mInputBufferIsDequeued.editItemAt(i) = false;
+    }
+
+    mPendingInputMessages.clear();
+    mDequeuedInputBuffers.clear();
+    mSkipRenderingUntilMediaTimeUs = -1;
+}
+
+bool NuPlayer2::Decoder::isStaleReply(const sp<AMessage> &msg) {
+    int32_t generation;
+    CHECK(msg->findInt32("generation", &generation));
+    return generation != mBufferGeneration;
+}
+
+status_t NuPlayer2::Decoder::fetchInputData(sp<AMessage> &reply) {
+    sp<ABuffer> accessUnit;
+    bool dropAccessUnit = true;
+    do {
+        status_t err = mSource->dequeueAccessUnit(mIsAudio, &accessUnit);
+
+        if (err == -EWOULDBLOCK) {
+            return err;
+        } else if (err != OK) {
+            if (err == INFO_DISCONTINUITY) {
+                int32_t type;
+                CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+                bool formatChange =
+                    (mIsAudio &&
+                     (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
+                    || (!mIsAudio &&
+                            (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));
+
+                bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;
+
+                ALOGI("%s discontinuity (format=%d, time=%d)",
+                        mIsAudio ? "audio" : "video", formatChange, timeChange);
+
+                bool seamlessFormatChange = false;
+                sp<AMessage> newFormat = mSource->getFormat(mIsAudio);
+                if (formatChange) {
+                    seamlessFormatChange =
+                        supportsSeamlessFormatChange(newFormat);
+                    // treat seamless format change separately
+                    formatChange = !seamlessFormatChange;
+                }
+
+                // For format or time change, return EOS to queue EOS input,
+                // then wait for EOS on output.
+                if (formatChange /* not seamless */) {
+                    mFormatChangePending = true;
+                    err = ERROR_END_OF_STREAM;
+                } else if (timeChange) {
+                    rememberCodecSpecificData(newFormat);
+                    mTimeChangePending = true;
+                    err = ERROR_END_OF_STREAM;
+                } else if (seamlessFormatChange) {
+                    // reuse existing decoder and don't flush
+                    rememberCodecSpecificData(newFormat);
+                    continue;
+                } else {
+                    // This stream is unaffected by the discontinuity
+                    return -EWOULDBLOCK;
+                }
+            }
+
+            // reply should only be returned without a buffer set
+            // when there is an error (including EOS)
+            CHECK(err != OK);
+
+            reply->setInt32("err", err);
+            return ERROR_END_OF_STREAM;
+        }
+
+        dropAccessUnit = false;
+        if (!mIsAudio && !mIsEncrypted) {
+            // Extra safeguard in case higher-level behavior changes; otherwise not required now.
+            // It prevents the buffer from being processed (and sent to the codec) if this is a
+            // later round of playback without prepareDrm, or if a race between stop (which is
+            // non-blocking) and releaseDrm lets buffers be processed after Crypto has been
+            // released (GenericSource currently prevents this race, though).
+            // In particular, the check is done before the IsAVCReferenceFrame call to avoid
+            // parsing encrypted data.
+            if (mIsEncryptedObservedEarlier) {
+                ALOGE("fetchInputData: mismatched mIsEncrypted/mIsEncryptedObservedEarlier (0/1)");
+
+                return INVALID_OPERATION;
+            }
+
+            int32_t layerId = 0;
+            bool haveLayerId = accessUnit->meta()->findInt32("temporal-layer-id", &layerId);
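+            // If video rendering is more than 100 ms late, drop non-reference AVC frames;
+            // otherwise drop frames from temporal layers beyond the allowed depth.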
+            if (mRenderer->getVideoLateByUs() > 100000ll
+                    && mIsVideoAVC
+                    && !IsAVCReferenceFrame(accessUnit)) {
+                dropAccessUnit = true;
+            } else if (haveLayerId && mNumVideoTemporalLayerTotal > 1) {
+                // Add only one layer each time.
+                if (layerId > mCurrentMaxVideoTemporalLayerId + 1
+                        || layerId >= mNumVideoTemporalLayerAllowed) {
+                    dropAccessUnit = true;
+                    ALOGV("dropping layer(%d), speed=%g, allowed layer count=%d, max layerId=%d",
+                            layerId, mPlaybackSpeed, mNumVideoTemporalLayerAllowed,
+                            mCurrentMaxVideoTemporalLayerId);
+                } else if (layerId > mCurrentMaxVideoTemporalLayerId) {
+                    mCurrentMaxVideoTemporalLayerId = layerId;
+                } else if (layerId == 0 && mNumVideoTemporalLayerTotal > 1
+                        && IsIDR(accessUnit->data(), accessUnit->size())) {
+                    mCurrentMaxVideoTemporalLayerId = mNumVideoTemporalLayerTotal - 1;
+                }
+            }
+            if (dropAccessUnit) {
+                if (layerId <= mCurrentMaxVideoTemporalLayerId && layerId > 0) {
+                    mCurrentMaxVideoTemporalLayerId = layerId - 1;
+                }
+                ++mNumInputFramesDropped;
+            }
+        }
+    } while (dropAccessUnit);
+
+    // ALOGV("returned a valid buffer of %s data", mIsAudio ? "mIsAudio" : "video");
+#if 0
+    int64_t mediaTimeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+    ALOGV("[%s] feeding input buffer at media time %.3f",
+         mIsAudio ? "audio" : "video",
+         mediaTimeUs / 1E6);
+#endif
+
+    if (mCCDecoder != NULL) {
+        mCCDecoder->decode(accessUnit);
+    }
+
+    reply->setBuffer("buffer", accessUnit);
+
+    return OK;
+}
+
+bool NuPlayer2::Decoder::onInputBufferFetched(const sp<AMessage> &msg) {
+    if (mCodec == NULL) {
+        ALOGE("[%s] onInputBufferFetched without a valid codec", mComponentName.c_str());
+        handleError(NO_INIT);
+        return false;
+    }
+
+    size_t bufferIx;
+    CHECK(msg->findSize("buffer-ix", &bufferIx));
+    CHECK_LT(bufferIx, mInputBuffers.size());
+    sp<MediaCodecBuffer> codecBuffer = mInputBuffers[bufferIx];
+
+    sp<ABuffer> buffer;
+    bool hasBuffer = msg->findBuffer("buffer", &buffer);
+    bool needsCopy = true;
+
+    if (buffer == NULL /* includes !hasBuffer */) {
+        int32_t streamErr = ERROR_END_OF_STREAM;
+        CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
+
+        CHECK(streamErr != OK);
+
+        // attempt to queue EOS
+        status_t err = mCodec->queueInputBuffer(
+                bufferIx,
+                0,
+                0,
+                0,
+                AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
+        if (err == OK) {
+            mInputBufferIsDequeued.editItemAt(bufferIx) = false;
+        } else if (streamErr == ERROR_END_OF_STREAM) {
+            streamErr = err;
+            // err will not be ERROR_END_OF_STREAM
+        }
+
+        if (streamErr != ERROR_END_OF_STREAM) {
+            ALOGE("Stream error for [%s] (err=%d), EOS %s queued",
+                    mComponentName.c_str(),
+                    streamErr,
+                    err == OK ? "successfully" : "unsuccessfully");
+            handleError(streamErr);
+        }
+    } else {
+        sp<AMessage> extra;
+        if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+            int64_t resumeAtMediaTimeUs;
+            if (extra->findInt64(
+                        "resume-at-mediaTimeUs", &resumeAtMediaTimeUs)) {
+                ALOGI("[%s] suppressing rendering until %lld us",
+                        mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+                mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+            }
+        }
+
+        int64_t timeUs = 0;
+        uint32_t flags = 0;
+        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+        int32_t eos, csd;
+        // we do not expect SYNCFRAME for decoder
+        if (buffer->meta()->findInt32("eos", &eos) && eos) {
+            flags |= AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
+        } else if (buffer->meta()->findInt32("csd", &csd) && csd) {
+            flags |= AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG;
+        }
+
+        // Modular DRM
+        MediaBuffer *mediaBuf = NULL;
+        sp<AMediaCodecCryptoInfoWrapper> cryptInfo;
+
+        // copy into codec buffer
+        if (needsCopy) {
+            if (buffer->size() > codecBuffer->capacity()) {
+                handleError(ERROR_BUFFER_TOO_SMALL);
+                mDequeuedInputBuffers.push_back(bufferIx);
+                return false;
+            }
+
+            if (buffer->data() != NULL) {
+                codecBuffer->setRange(0, buffer->size());
+                memcpy(codecBuffer->data(), buffer->data(), buffer->size());
+            } else { // No buffer->data()
+                //Modular DRM
+                mediaBuf = (MediaBuffer*)buffer->getMediaBufferBase();
+                if (mediaBuf != NULL) {
+                    codecBuffer->setRange(0, mediaBuf->size());
+                    memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->size());
+
+                    sp<MetaData> meta_data = mediaBuf->meta_data();
+                    cryptInfo = AMediaCodecCryptoInfoWrapper::Create(meta_data);
+
+                    // since getMediaBufferBase() has incremented the refCount
+                    mediaBuf->release();
+                } else { // No mediaBuf
+                    ALOGE("onInputBufferFetched: buffer->data()/mediaBuf are NULL for %p",
+                            buffer.get());
+                    handleError(UNKNOWN_ERROR);
+                    return false;
+                }
+            } // buffer->data()
+        } // needsCopy
+
+        status_t err;
+        if (cryptInfo != NULL) {
+            err = mCodec->queueSecureInputBuffer(
+                    bufferIx,
+                    codecBuffer->offset(),
+                    cryptInfo,
+                    timeUs,
+                    flags);
+            // synchronous call so done with cryptInfo here
+        } else {
+            err = mCodec->queueInputBuffer(
+                    bufferIx,
+                    codecBuffer->offset(),
+                    codecBuffer->size(),
+                    timeUs,
+                    flags);
+        } // no cryptInfo
+
+        if (err != OK) {
+            ALOGE("onInputBufferFetched: queue%sInputBuffer failed for [%s] (err=%d)",
+                    (cryptInfo != NULL ? "Secure" : ""),
+                    mComponentName.c_str(), err);
+            handleError(err);
+        } else {
+            mInputBufferIsDequeued.editItemAt(bufferIx) = false;
+        }
+
+    }   // buffer != NULL
+    return true;
+}
+
+void NuPlayer2::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
+    status_t err;
+    int32_t render;
+    size_t bufferIx;
+    int32_t eos;
+    CHECK(msg->findSize("buffer-ix", &bufferIx));
+
+    if (!mIsAudio) {
+        int64_t timeUs;
+        sp<MediaCodecBuffer> buffer = mOutputBuffers[bufferIx];
+        buffer->meta()->findInt64("timeUs", &timeUs);
+
+        if (mCCDecoder != NULL && mCCDecoder->isSelected()) {
+            mCCDecoder->display(timeUs);
+        }
+    }
+
+    if (mCodec == NULL) {
+        err = NO_INIT;
+    } else if (msg->findInt32("render", &render) && render) {
+        int64_t timestampNs;
+        CHECK(msg->findInt64("timestampNs", &timestampNs));
+        err = mCodec->releaseOutputBufferAtTime(bufferIx, timestampNs);
+    } else {
+        mNumOutputFramesDropped += !mIsAudio;
+        err = mCodec->releaseOutputBuffer(bufferIx, false /* render */);
+    }
+    if (err != OK) {
+        ALOGE("failed to release output buffer for [%s] (err=%d)",
+                mComponentName.c_str(), err);
+        handleError(err);
+    }
+    if (msg->findInt32("eos", &eos) && eos
+            && isDiscontinuityPending()) {
+        finishHandleDiscontinuity(true /* flushOnTimeChange */);
+    }
+}
+
+bool NuPlayer2::Decoder::isDiscontinuityPending() const {
+    return mFormatChangePending || mTimeChangePending;
+}
+
+void NuPlayer2::Decoder::finishHandleDiscontinuity(bool flushOnTimeChange) {
+    ALOGV("finishHandleDiscontinuity: format %d, time %d, flush %d",
+            mFormatChangePending, mTimeChangePending, flushOnTimeChange);
+
+    // If we have format change, pause and wait to be killed;
+    // If we have time change only, flush and restart fetching.
+
+    if (mFormatChangePending) {
+        mPaused = true;
+    } else if (mTimeChangePending) {
+        if (flushOnTimeChange) {
+            doFlush(false /* notifyComplete */);
+            signalResume(false /* notifyComplete */);
+        }
+    }
+
+    // Notify NuPlayer2 to either shutdown decoder, or rescan sources
+    sp<AMessage> msg = mNotify->dup();
+    msg->setInt32("what", kWhatInputDiscontinuity);
+    msg->setInt32("formatChange", mFormatChangePending);
+    msg->post();
+
+    mFormatChangePending = false;
+    mTimeChangePending = false;
+}
+
+bool NuPlayer2::Decoder::supportsSeamlessAudioFormatChange(
+        const sp<AMessage> &targetFormat) const {
+    if (targetFormat == NULL) {
+        return true;
+    }
+
+    AString mime;
+    if (!targetFormat->findString("mime", &mime)) {
+        return false;
+    }
+
+    if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+        // field-by-field comparison
+        const char * keys[] = { "channel-count", "sample-rate", "is-adts" };
+        for (unsigned int i = 0; i < sizeof(keys) / sizeof(keys[0]); i++) {
+            int32_t oldVal, newVal;
+            if (!mInputFormat->getInt32(keys[i], &oldVal) ||
+                    !targetFormat->findInt32(keys[i], &newVal) ||
+                    oldVal != newVal) {
+                return false;
+            }
+        }
+
+        sp<ABuffer> newBuf;
+        uint8_t *oldBufData = NULL;
+        size_t oldBufSize = 0;
+        if (mInputFormat->getBuffer("csd-0", (void**)&oldBufData, &oldBufSize) &&
+                targetFormat->findBuffer("csd-0", &newBuf)) {
+            if (oldBufSize != newBuf->size()) {
+                return false;
+            }
+            return !memcmp(oldBufData, newBuf->data(), oldBufSize);
+        }
+    }
+    return false;
+}
+
+bool NuPlayer2::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetFormat) const {
+    if (mInputFormat == NULL) {
+        return false;
+    }
+
+    if (targetFormat == NULL) {
+        return true;
+    }
+
+    AString oldMime, newMime;
+    if (!mInputFormat->getString("mime", &oldMime)
+            || !targetFormat->findString("mime", &newMime)
+            || !(oldMime == newMime)) {
+        return false;
+    }
+
+    bool audio = !strncasecmp(oldMime.c_str(), "audio/", strlen("audio/"));
+    bool seamless;
+    if (audio) {
+        seamless = supportsSeamlessAudioFormatChange(targetFormat);
+    } else {
+        int32_t isAdaptive;
+        seamless = (mCodec != NULL &&
+                mInputFormat->getInt32("adaptive-playback", &isAdaptive) &&
+                isAdaptive);
+    }
+
+    ALOGV("%s seamless support for %s", seamless ? "yes" : "no", oldMime.c_str());
+    return seamless;
+}
+
+void NuPlayer2::Decoder::rememberCodecSpecificData(const sp<AMessage> &format) {
+    if (format == NULL) {
+        return;
+    }
+    mCSDsForCurrentFormat.clear();
+    for (int32_t i = 0; ; ++i) {
+        AString tag = "csd-";
+        tag.append(i);
+        sp<ABuffer> buffer;
+        if (!format->findBuffer(tag.c_str(), &buffer)) {
+            break;
+        }
+        mCSDsForCurrentFormat.push(buffer);
+    }
+}
+
+void NuPlayer2::Decoder::notifyResumeCompleteIfNecessary() {
+    if (mResumePending) {
+        mResumePending = false;
+
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatResumeCompleted);
+        notify->post();
+    }
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2Decoder.h b/media/libmedia/nuplayer2/NuPlayer2Decoder.h
new file mode 100644
index 0000000..fdfb10e
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Decoder.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_DECODER_H_
+#define NUPLAYER2_DECODER_H_
+
+#include "NuPlayer2.h"
+
+#include "NuPlayer2DecoderBase.h"
+
+namespace android {
+
+class MediaCodecBuffer;
+
+struct AMediaCodecWrapper;
+struct AMediaFormatWrapper;
+
+struct NuPlayer2::Decoder : public DecoderBase {
+    Decoder(const sp<AMessage> &notify,
+            const sp<Source> &source,
+            pid_t pid,
+            uid_t uid,
+            const sp<Renderer> &renderer = NULL,
+            const sp<ANativeWindowWrapper> &nww = NULL,
+            const sp<CCDecoder> &ccDecoder = NULL);
+
+    virtual sp<AMessage> getStats() const;
+
+    // sets the output surface of video decoders.
+    virtual status_t setVideoSurface(const sp<ANativeWindowWrapper> &nww);
+
+    virtual status_t releaseCrypto();
+
+protected:
+    virtual ~Decoder();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual void onConfigure(const sp<AMessage> &format);
+    virtual void onSetParameters(const sp<AMessage> &params);
+    virtual void onSetRenderer(const sp<Renderer> &renderer);
+    virtual void onResume(bool notifyComplete);
+    virtual void onFlush();
+    virtual void onShutdown(bool notifyComplete);
+    virtual bool doRequestBuffers();
+
+private:
+    enum {
+        kWhatCodecNotify         = 'cdcN',
+        kWhatRenderBuffer        = 'rndr',
+        kWhatSetVideoSurface     = 'sSur',
+        kWhatAudioOutputFormatChanged = 'aofc',
+        kWhatDrmReleaseCrypto    = 'rDrm',
+    };
+
+    enum {
+        kMaxNumVideoTemporalLayers = 32,
+    };
+
+    sp<ANativeWindowWrapper> mNativeWindow;
+
+    sp<Source> mSource;
+    sp<Renderer> mRenderer;
+    sp<CCDecoder> mCCDecoder;
+
+    sp<AMediaFormatWrapper> mInputFormat;
+    sp<AMediaCodecWrapper> mCodec;
+
+    List<sp<AMessage> > mPendingInputMessages;
+
+    Vector<sp<MediaCodecBuffer> > mInputBuffers;
+    Vector<sp<MediaCodecBuffer> > mOutputBuffers;
+    Vector<sp<ABuffer> > mCSDsForCurrentFormat;
+    Vector<sp<ABuffer> > mCSDsToSubmit;
+    Vector<bool> mInputBufferIsDequeued;
+    Vector<MediaBuffer *> mMediaBuffers;
+    Vector<size_t> mDequeuedInputBuffers;
+
+    const pid_t mPid;
+    const uid_t mUid;
+    int64_t mSkipRenderingUntilMediaTimeUs;
+    int64_t mNumFramesTotal;
+    int64_t mNumInputFramesDropped;
+    int64_t mNumOutputFramesDropped;
+    int32_t mVideoWidth;
+    int32_t mVideoHeight;
+    bool mIsAudio;
+    bool mIsVideoAVC;
+    bool mIsSecure;
+    bool mIsEncrypted;
+    bool mIsEncryptedObservedEarlier;
+    bool mFormatChangePending;
+    bool mTimeChangePending;
+    float mFrameRateTotal;
+    float mPlaybackSpeed;
+    int32_t mNumVideoTemporalLayerTotal;
+    int32_t mNumVideoTemporalLayerAllowed;
+    int32_t mCurrentMaxVideoTemporalLayerId;
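+    // mVideoTemporalLayerAggregateFps[i] holds the combined frame rate of temporal layers 0..i.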
+    float mVideoTemporalLayerAggregateFps[kMaxNumVideoTemporalLayers];
+
+    bool mResumePending;
+    AString mComponentName;
+
+    void handleError(int32_t err);
+    bool handleAnInputBuffer(size_t index);
+    bool handleAnOutputBuffer(
+            size_t index,
+            size_t offset,
+            size_t size,
+            int64_t timeUs,
+            int32_t flags);
+    void handleOutputFormatChange(const sp<AMessage> &format);
+
+    void releaseAndResetMediaBuffers();
+    bool isStaleReply(const sp<AMessage> &msg);
+
+    void doFlush(bool notifyComplete);
+    status_t fetchInputData(sp<AMessage> &reply);
+    bool onInputBufferFetched(const sp<AMessage> &msg);
+    void onRenderBuffer(const sp<AMessage> &msg);
+
+    bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+    bool supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const;
+    void rememberCodecSpecificData(const sp<AMessage> &format);
+    bool isDiscontinuityPending() const;
+    void finishHandleDiscontinuity(bool flushOnTimeChange);
+
+    void notifyResumeCompleteIfNecessary();
+
+    void onReleaseCrypto(const sp<AMessage>& msg);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Decoder);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_DECODER_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2DecoderBase.cpp b/media/libmedia/nuplayer2/NuPlayer2DecoderBase.cpp
new file mode 100644
index 0000000..4d80912
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2DecoderBase.cpp
@@ -0,0 +1,214 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2DecoderBase"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayer2DecoderBase.h"
+
+#include "NuPlayer2Renderer.h"
+
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+NuPlayer2::DecoderBase::DecoderBase(const sp<AMessage> &notify)
+    :  mNotify(notify),
+       mBufferGeneration(0),
+       mPaused(false),
+       mStats(new AMessage),
+       mRequestInputBuffersPending(false) {
+    // Every decoder has its own looper because MediaCodec operations
+    // are blocking, but NuPlayer2 needs asynchronous operations.
+    mDecoderLooper = new ALooper;
+    mDecoderLooper->setName("NPDecoder");
+    mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+}
+
+NuPlayer2::DecoderBase::~DecoderBase() {
+    stopLooper();
+}
+
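+// Post |msg| and block for the handler's reply; surfaces any "err" carried in the reply.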
+static
+status_t PostAndAwaitResponse(
+        const sp<AMessage> &msg, sp<AMessage> *response) {
+    status_t err = msg->postAndAwaitResponse(response);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (!(*response)->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+void NuPlayer2::DecoderBase::configure(const sp<AMessage> &format) {
+    sp<AMessage> msg = new AMessage(kWhatConfigure, this);
+    msg->setMessage("format", format);
+    msg->post();
+}
+
+void NuPlayer2::DecoderBase::init() {
+    mDecoderLooper->registerHandler(this);
+}
+
+void NuPlayer2::DecoderBase::stopLooper() {
+    mDecoderLooper->unregisterHandler(id());
+    mDecoderLooper->stop();
+}
+
+void NuPlayer2::DecoderBase::setParameters(const sp<AMessage> &params) {
+    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
+    msg->setMessage("params", params);
+    msg->post();
+}
+
+void NuPlayer2::DecoderBase::setRenderer(const sp<Renderer> &renderer) {
+    sp<AMessage> msg = new AMessage(kWhatSetRenderer, this);
+    msg->setObject("renderer", renderer);
+    msg->post();
+}
+
+void NuPlayer2::DecoderBase::pause() {
+    sp<AMessage> msg = new AMessage(kWhatPause, this);
+
+    sp<AMessage> response;
+    PostAndAwaitResponse(msg, &response);
+}
+
+void NuPlayer2::DecoderBase::signalFlush() {
+    (new AMessage(kWhatFlush, this))->post();
+}
+
+void NuPlayer2::DecoderBase::signalResume(bool notifyComplete) {
+    sp<AMessage> msg = new AMessage(kWhatResume, this);
+    msg->setInt32("notifyComplete", notifyComplete);
+    msg->post();
+}
+
+void NuPlayer2::DecoderBase::initiateShutdown() {
+    (new AMessage(kWhatShutdown, this))->post();
+}
+
+void NuPlayer2::DecoderBase::onRequestInputBuffers() {
+    if (mRequestInputBuffersPending) {
+        return;
+    }
+
+    // doRequestBuffers() returns true if we should request more data
+    if (doRequestBuffers()) {
+        mRequestInputBuffersPending = true;
+
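+        // Schedule another poll of the source in 10 ms (post() takes a delay in microseconds).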
+        sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, this);
+        msg->post(10 * 1000ll);
+    }
+}
+
+void NuPlayer2::DecoderBase::onMessageReceived(const sp<AMessage> &msg) {
+
+    switch (msg->what()) {
+        case kWhatConfigure:
+        {
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+            onConfigure(format);
+            break;
+        }
+
+        case kWhatSetParameters:
+        {
+            sp<AMessage> params;
+            CHECK(msg->findMessage("params", &params));
+            onSetParameters(params);
+            break;
+        }
+
+        case kWhatSetRenderer:
+        {
+            sp<RefBase> obj;
+            CHECK(msg->findObject("renderer", &obj));
+            onSetRenderer(static_cast<Renderer *>(obj.get()));
+            break;
+        }
+
+        case kWhatPause:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            mPaused = true;
+
+            (new AMessage)->postReply(replyID);
+            break;
+        }
+
+        case kWhatRequestInputBuffers:
+        {
+            mRequestInputBuffersPending = false;
+            onRequestInputBuffers();
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            onFlush();
+            break;
+        }
+
+        case kWhatResume:
+        {
+            int32_t notifyComplete;
+            CHECK(msg->findInt32("notifyComplete", &notifyComplete));
+
+            onResume(notifyComplete);
+            break;
+        }
+
+        case kWhatShutdown:
+        {
+            onShutdown(true);
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer2::DecoderBase::handleError(int32_t err)
+{
+    // We cannot immediately release the codec because buffers may still be outstanding
+    // in the renderer. Signal the error to the player so it can shut down and release the
+    // decoder after flushing, and bump the buffer generation to discard stale messages.
+
+    ++mBufferGeneration;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatError);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2DecoderBase.h b/media/libmedia/nuplayer2/NuPlayer2DecoderBase.h
new file mode 100644
index 0000000..1e57f0d
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2DecoderBase.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_DECODER_BASE_H_
+
+#define NUPLAYER2_DECODER_BASE_H_
+
+#include "NuPlayer2.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct ANativeWindowWrapper;
+struct MediaCodec;
+class MediaBuffer;
+class MediaCodecBuffer;
+
+struct NuPlayer2::DecoderBase : public AHandler {
+    explicit DecoderBase(const sp<AMessage> &notify);
+
+    void configure(const sp<AMessage> &format);
+    void init();
+    void setParameters(const sp<AMessage> &params);
+
+    // Synchronous call to ensure decoder will not request or send out data.
+    void pause();
+
+    void setRenderer(const sp<Renderer> &renderer);
+    virtual status_t setVideoSurface(const sp<ANativeWindowWrapper> &) { return INVALID_OPERATION; }
+
+    void signalFlush();
+    void signalResume(bool notifyComplete);
+    void initiateShutdown();
+
+    virtual sp<AMessage> getStats() const {
+        return mStats;
+    }
+
+    virtual status_t releaseCrypto() {
+        return INVALID_OPERATION;
+    }
+
+    enum {
+        kWhatInputDiscontinuity  = 'inDi',
+        kWhatVideoSizeChanged    = 'viSC',
+        kWhatFlushCompleted      = 'flsC',
+        kWhatShutdownCompleted   = 'shDC',
+        kWhatResumeCompleted     = 'resC',
+        kWhatEOS                 = 'eos ',
+        kWhatError               = 'err ',
+    };
+
+protected:
+
+    virtual ~DecoderBase();
+
+    void stopLooper();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual void onConfigure(const sp<AMessage> &format) = 0;
+    virtual void onSetParameters(const sp<AMessage> &params) = 0;
+    virtual void onSetRenderer(const sp<Renderer> &renderer) = 0;
+    virtual void onResume(bool notifyComplete) = 0;
+    virtual void onFlush() = 0;
+    virtual void onShutdown(bool notifyComplete) = 0;
+
+    void onRequestInputBuffers();
+    virtual bool doRequestBuffers() = 0;
+    virtual void handleError(int32_t err);
+
+    sp<AMessage> mNotify;
+    int32_t mBufferGeneration;
+    bool mPaused;
+    sp<AMessage> mStats;
+
+private:
+    enum {
+        kWhatConfigure           = 'conf',
+        kWhatSetParameters       = 'setP',
+        kWhatSetRenderer         = 'setR',
+        kWhatPause               = 'paus',
+        kWhatRequestInputBuffers = 'reqB',
+        kWhatFlush               = 'flus',
+        kWhatShutdown            = 'shuD',
+    };
+
+    sp<ALooper> mDecoderLooper;
+    bool mRequestInputBuffersPending;
+
+    DISALLOW_EVIL_CONSTRUCTORS(DecoderBase);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_DECODER_BASE_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.cpp b/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.cpp
new file mode 100644
index 0000000..0e0c1d8
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.cpp
@@ -0,0 +1,434 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2DecoderPassThrough"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayer2DecoderPassThrough.h"
+
+#include "NuPlayer2Renderer.h"
+#include "NuPlayer2Source.h"
+
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include "ATSParser.h"
+
+namespace android {
+
+// TODO optimize buffer size for power consumption
+// The offload read buffer size is 32 KB but 24 KB uses less power.
+static const size_t kAggregateBufferSizeBytes = 24 * 1024;
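+// Stop requesting more input once roughly this many bytes are queued to the renderer.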
+static const size_t kMaxCachedBytes = 200000;
+
+NuPlayer2::DecoderPassThrough::DecoderPassThrough(
+        const sp<AMessage> &notify,
+        const sp<Source> &source,
+        const sp<Renderer> &renderer)
+    : DecoderBase(notify),
+      mSource(source),
+      mRenderer(renderer),
+      mSkipRenderingUntilMediaTimeUs(-1ll),
+      mReachedEOS(true),
+      mPendingAudioErr(OK),
+      mPendingBuffersToDrain(0),
+      mCachedBytes(0),
+      mComponentName("pass through decoder") {
+    ALOGW_IF(renderer == NULL, "expect a non-NULL renderer");
+}
+
+NuPlayer2::DecoderPassThrough::~DecoderPassThrough() {
+}
+
+void NuPlayer2::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {
+    ALOGV("[%s] onConfigure", mComponentName.c_str());
+    mCachedBytes = 0;
+    mPendingBuffersToDrain = 0;
+    mReachedEOS = false;
+    ++mBufferGeneration;
+
+    onRequestInputBuffers();
+
+    int32_t hasVideo = 0;
+    format->findInt32("has-video", &hasVideo);
+
+    // The audio sink is already opened before the PassThrough decoder is created.
+    // Opening again might be relevant if decoder is instantiated after shutdown and
+    // format is different.
+    status_t err = mRenderer->openAudioSink(
+            format, true /* offloadOnly */, hasVideo,
+            AUDIO_OUTPUT_FLAG_NONE /* flags */, NULL /* isOffloaded */, mSource->isStreaming());
+    if (err != OK) {
+        handleError(err);
+    }
+}
+
+void NuPlayer2::DecoderPassThrough::onSetParameters(const sp<AMessage> &/*params*/) {
+    ALOGW("onSetParameters() called unexpectedly");
+}
+
+void NuPlayer2::DecoderPassThrough::onSetRenderer(
+        const sp<Renderer> &renderer) {
+    // renderer can't be changed during offloading
+    ALOGW_IF(renderer != mRenderer,
+            "ignoring request to change renderer");
+}
+
+bool NuPlayer2::DecoderPassThrough::isStaleReply(const sp<AMessage> &msg) {
+    int32_t generation;
+    CHECK(msg->findInt32("generation", &generation));
+    return generation != mBufferGeneration;
+}
+
+bool NuPlayer2::DecoderPassThrough::isDoneFetching() const {
+    ALOGV("[%s] mCachedBytes = %zu, mReachedEOS = %d mPaused = %d",
+            mComponentName.c_str(), mCachedBytes, mReachedEOS, mPaused);
+
+    return mCachedBytes >= kMaxCachedBytes || mReachedEOS || mPaused;
+}
+
+/*
+ * returns true if we should request more data
+ */
+bool NuPlayer2::DecoderPassThrough::doRequestBuffers() {
+    status_t err = OK;
+    while (!isDoneFetching()) {
+        sp<AMessage> msg = new AMessage();
+
+        err = fetchInputData(msg);
+        if (err != OK) {
+            break;
+        }
+
+        onInputBufferFetched(msg);
+    }
+
+    return err == -EWOULDBLOCK
+            && mSource->feedMoreTSData() == OK;
+}
+
+status_t NuPlayer2::DecoderPassThrough::dequeueAccessUnit(sp<ABuffer> *accessUnit) {
+    status_t err;
+
+    // Did we save an accessUnit earlier because of a discontinuity?
+    if (mPendingAudioAccessUnit != NULL) {
+        *accessUnit = mPendingAudioAccessUnit;
+        mPendingAudioAccessUnit.clear();
+        err = mPendingAudioErr;
+        ALOGV("feedDecoderInputData() use mPendingAudioAccessUnit");
+    } else {
+        err = mSource->dequeueAccessUnit(true /* audio */, accessUnit);
+    }
+
+    if (err == INFO_DISCONTINUITY || err == ERROR_END_OF_STREAM) {
+        if (mAggregateBuffer != NULL) {
+            // We already have some data so save this for later.
+            mPendingAudioErr = err;
+            mPendingAudioAccessUnit = *accessUnit;
+            (*accessUnit).clear();
+            ALOGD("return aggregated buffer and save err(=%d) for later", err);
+            err = OK;
+        }
+    }
+
+    return err;
+}
+
+sp<ABuffer> NuPlayer2::DecoderPassThrough::aggregateBuffer(
+        const sp<ABuffer> &accessUnit) {
+    sp<ABuffer> aggregate;
+
+    if (accessUnit == NULL) {
+        // accessUnit is saved to mPendingAudioAccessUnit
+        // return current mAggregateBuffer
+        aggregate = mAggregateBuffer;
+        mAggregateBuffer.clear();
+        return aggregate;
+    }
+
+    size_t smallSize = accessUnit->size();
+    if ((mAggregateBuffer == NULL)
+            // Don't bother if only room for a few small buffers.
+            && (smallSize < (kAggregateBufferSizeBytes / 3))) {
+        // Create a larger buffer for combining smaller buffers from the extractor.
+        mAggregateBuffer = new ABuffer(kAggregateBufferSizeBytes);
+        mAggregateBuffer->setRange(0, 0); // start empty
+    }
+
+    if (mAggregateBuffer != NULL) {
+        int64_t timeUs;
+        int64_t dummy;
+        bool smallTimestampValid = accessUnit->meta()->findInt64("timeUs", &timeUs);
+        bool bigTimestampValid = mAggregateBuffer->meta()->findInt64("timeUs", &dummy);
+        // Will the smaller buffer fit?
+        size_t bigSize = mAggregateBuffer->size();
+        size_t roomLeft = mAggregateBuffer->capacity() - bigSize;
+        // Should we save this small buffer for the next big buffer?
+        // If the first small buffer did not have a timestamp then save
+        // any buffer that does have a timestamp until the next big buffer.
+        if ((smallSize > roomLeft)
+            || (!bigTimestampValid && (bigSize > 0) && smallTimestampValid)) {
+            mPendingAudioErr = OK;
+            mPendingAudioAccessUnit = accessUnit;
+            aggregate = mAggregateBuffer;
+            mAggregateBuffer.clear();
+        } else {
+            // Grab time from first small buffer if available.
+            if ((bigSize == 0) && smallTimestampValid) {
+                mAggregateBuffer->meta()->setInt64("timeUs", timeUs);
+            }
+            // Append small buffer to the bigger buffer.
+            memcpy(mAggregateBuffer->base() + bigSize, accessUnit->data(), smallSize);
+            bigSize += smallSize;
+            mAggregateBuffer->setRange(0, bigSize);
+
+            ALOGV("feedDecoderInputData() smallSize = %zu, bigSize = %zu, capacity = %zu",
+                    smallSize, bigSize, mAggregateBuffer->capacity());
+        }
+    } else {
+        // decided not to aggregate
+        aggregate = accessUnit;
+    }
+
+    return aggregate;
+}
+
+status_t NuPlayer2::DecoderPassThrough::fetchInputData(sp<AMessage> &reply) {
+    sp<ABuffer> accessUnit;
+
+    do {
+        status_t err = dequeueAccessUnit(&accessUnit);
+
+        if (err == -EWOULDBLOCK) {
+            // Flush out the aggregate buffer to try to avoid underrun.
+            accessUnit = aggregateBuffer(NULL /* accessUnit */);
+            if (accessUnit != NULL) {
+                break;
+            }
+            return err;
+        } else if (err != OK) {
+            if (err == INFO_DISCONTINUITY) {
+                int32_t type;
+                CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+                bool formatChange =
+                        (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT) != 0;
+
+                bool timeChange =
+                        (type & ATSParser::DISCONTINUITY_TIME) != 0;
+
+                ALOGI("audio discontinuity (formatChange=%d, time=%d)",
+                        formatChange, timeChange);
+
+                if (formatChange || timeChange) {
+                    sp<AMessage> msg = mNotify->dup();
+                    msg->setInt32("what", kWhatInputDiscontinuity);
+                    // will perform seamless format change,
+                    // only notify NuPlayer2 to scan sources
+                    msg->setInt32("formatChange", false);
+                    msg->post();
+                }
+
+                if (timeChange) {
+                    doFlush(false /* notifyComplete */);
+                    err = OK;
+                } else if (formatChange) {
+                    // do seamless format change
+                    err = OK;
+                } else {
+                    // This stream is unaffected by the discontinuity
+                    return -EWOULDBLOCK;
+                }
+            }
+
+            reply->setInt32("err", err);
+            return OK;
+        }
+
+        accessUnit = aggregateBuffer(accessUnit);
+    } while (accessUnit == NULL);
+
+#if 0
+    int64_t mediaTimeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+    ALOGV("feeding audio input buffer at media time %.2f secs",
+         mediaTimeUs / 1E6);
+#endif
+
+    reply->setBuffer("buffer", accessUnit);
+
+    return OK;
+}
+
+void NuPlayer2::DecoderPassThrough::onInputBufferFetched(
+        const sp<AMessage> &msg) {
+    if (mReachedEOS) {
+        return;
+    }
+
+    sp<ABuffer> buffer;
+    bool hasBuffer = msg->findBuffer("buffer", &buffer);
+    if (buffer == NULL) {
+        int32_t streamErr = ERROR_END_OF_STREAM;
+        CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
+        if (streamErr == OK) {
+            return;
+        }
+
+        if (streamErr != ERROR_END_OF_STREAM) {
+            handleError(streamErr);
+        }
+        mReachedEOS = true;
+        if (mRenderer != NULL) {
+            mRenderer->queueEOS(true /* audio */, ERROR_END_OF_STREAM);
+        }
+        return;
+    }
+
+    sp<AMessage> extra;
+    if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+        int64_t resumeAtMediaTimeUs;
+        if (extra->findInt64(
+                    "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
+            ALOGI("[%s] suppressing rendering until %lld us",
+                    mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+            mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+        }
+    }
+
+    int32_t bufferSize = buffer->size();
+    mCachedBytes += bufferSize;
+
+    int64_t timeUs = 0;
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+    if (mSkipRenderingUntilMediaTimeUs >= 0) {
+        if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+            ALOGV("[%s] dropping buffer at time %lld as requested.",
+                     mComponentName.c_str(), (long long)timeUs);
+
+            onBufferConsumed(bufferSize);
+            return;
+        }
+
+        mSkipRenderingUntilMediaTimeUs = -1;
+    }
+
+    if (mRenderer == NULL) {
+        onBufferConsumed(bufferSize);
+        return;
+    }
+
+    sp<AMessage> reply = new AMessage(kWhatBufferConsumed, this);
+    reply->setInt32("generation", mBufferGeneration);
+    reply->setInt32("size", bufferSize);
+
+    sp<MediaCodecBuffer> mcBuffer = new MediaCodecBuffer(nullptr, buffer);
+    mcBuffer->meta()->setInt64("timeUs", timeUs);
+
+    mRenderer->queueBuffer(true /* audio */, mcBuffer, reply);
+
+    ++mPendingBuffersToDrain;
+    ALOGV("onInputBufferFilled: #ToDrain = %zu, cachedBytes = %zu",
+            mPendingBuffersToDrain, mCachedBytes);
+}
+
+void NuPlayer2::DecoderPassThrough::onBufferConsumed(int32_t size) {
+    --mPendingBuffersToDrain;
+    mCachedBytes -= size;
+    ALOGV("onBufferConsumed: #ToDrain = %zu, cachedBytes = %zu",
+            mPendingBuffersToDrain, mCachedBytes);
+    onRequestInputBuffers();
+}
+
+void NuPlayer2::DecoderPassThrough::onResume(bool notifyComplete) {
+    mPaused = false;
+
+    onRequestInputBuffers();
+
+    if (notifyComplete) {
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatResumeCompleted);
+        notify->post();
+    }
+}
+
+void NuPlayer2::DecoderPassThrough::doFlush(bool notifyComplete) {
+    ++mBufferGeneration;
+    mSkipRenderingUntilMediaTimeUs = -1;
+    mPendingAudioAccessUnit.clear();
+    mPendingAudioErr = OK;
+    mAggregateBuffer.clear();
+
+    if (mRenderer != NULL) {
+        mRenderer->flush(true /* audio */, notifyComplete);
+        mRenderer->signalTimeDiscontinuity();
+    }
+
+    mPendingBuffersToDrain = 0;
+    mCachedBytes = 0;
+    mReachedEOS = false;
+}
+
+void NuPlayer2::DecoderPassThrough::onFlush() {
+    doFlush(true /* notifyComplete */);
+
+    mPaused = true;
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFlushCompleted);
+    notify->post();
+
+}
+
+void NuPlayer2::DecoderPassThrough::onShutdown(bool notifyComplete) {
+    ++mBufferGeneration;
+    mSkipRenderingUntilMediaTimeUs = -1;
+
+    if (notifyComplete) {
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatShutdownCompleted);
+        notify->post();
+    }
+
+    mReachedEOS = true;
+}
+
+void NuPlayer2::DecoderPassThrough::onMessageReceived(const sp<AMessage> &msg) {
+    ALOGV("[%s] onMessage: %s", mComponentName.c_str(),
+            msg->debugString().c_str());
+
+    switch (msg->what()) {
+        case kWhatBufferConsumed:
+        {
+            if (!isStaleReply(msg)) {
+                int32_t size;
+                CHECK(msg->findInt32("size", &size));
+                onBufferConsumed(size);
+            }
+            break;
+        }
+
+        default:
+            DecoderBase::onMessageReceived(msg);
+            break;
+    }
+}
+
+}  // namespace android
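
The onInputBufferFetched()/onBufferConsumed() pair above relies on a generation counter: each reply posted to the renderer carries mBufferGeneration, and doFlush() bumps the counter so replies issued before a flush are recognized as stale and ignored. Below is a minimal, self-contained sketch of that pattern in plain C++; it does not use the Android AMessage/AHandler machinery, and all names are illustrative only.

// Generation-counter pattern: async replies stamped with the generation they
// were issued under are dropped if a flush has bumped the generation since.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <deque>

struct Reply {
    int32_t generation;   // generation at the time the buffer was queued
    int32_t size;         // payload size to release from the cache
};

class PassThroughSketch {
public:
    void queueBuffer(int32_t size) {
        mCachedBytes += size;
        ++mPendingToDrain;
        // The "renderer" answers later; the reply is stamped with the current
        // generation, mirroring reply->setInt32("generation", ...).
        mInFlight.push_back({mGeneration, size});
    }

    void flush() {
        ++mGeneration;        // all outstanding replies become stale
        mCachedBytes = 0;
        mPendingToDrain = 0;
    }

    void deliverReplies() {
        while (!mInFlight.empty()) {
            Reply r = mInFlight.front();
            mInFlight.pop_front();
            if (r.generation != mGeneration) {
                continue;     // stale reply: ignore, like isStaleReply()
            }
            --mPendingToDrain;
            mCachedBytes -= r.size;
        }
    }

    void dump() const {
        std::printf("toDrain=%zu cached=%zu\n", mPendingToDrain, mCachedBytes);
    }

private:
    int32_t mGeneration = 0;
    size_t mPendingToDrain = 0;
    size_t mCachedBytes = 0;
    std::deque<Reply> mInFlight;
};

int main() {
    PassThroughSketch d;
    d.queueBuffer(4096);
    d.flush();            // replies issued before this point are now stale
    d.deliverReplies();   // the 4096-byte reply is dropped, counters stay at 0
    d.dump();
    return 0;
}
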
diff --git a/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.h b/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.h
new file mode 100644
index 0000000..838c60a
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2DecoderPassThrough.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_DECODER_PASS_THROUGH_H_
+
+#define NUPLAYER2_DECODER_PASS_THROUGH_H_
+
+#include "NuPlayer2.h"
+
+#include "NuPlayer2DecoderBase.h"
+
+namespace android {
+
+struct NuPlayer2::DecoderPassThrough : public DecoderBase {
+    DecoderPassThrough(const sp<AMessage> &notify,
+                       const sp<Source> &source,
+                       const sp<Renderer> &renderer);
+
+protected:
+
+    virtual ~DecoderPassThrough();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual void onConfigure(const sp<AMessage> &format);
+    virtual void onSetParameters(const sp<AMessage> &params);
+    virtual void onSetRenderer(const sp<Renderer> &renderer);
+    virtual void onResume(bool notifyComplete);
+    virtual void onFlush();
+    virtual void onShutdown(bool notifyComplete);
+    virtual bool doRequestBuffers();
+
+private:
+    enum {
+        kWhatBufferConsumed     = 'bufC',
+    };
+
+    sp<Source> mSource;
+    sp<Renderer> mRenderer;
+    int64_t mSkipRenderingUntilMediaTimeUs;
+
+    bool    mReachedEOS;
+
+    // Used by feedDecoderInputData to aggregate small buffers into
+    // one large buffer.
+    sp<ABuffer> mPendingAudioAccessUnit;
+    status_t    mPendingAudioErr;
+    sp<ABuffer> mAggregateBuffer;
+
+    // mPendingBuffersToDrain is only for debugging; it can be removed
+    // once the power investigation is done.
+    size_t  mPendingBuffersToDrain;
+    size_t  mCachedBytes;
+    AString mComponentName;
+
+    bool isStaleReply(const sp<AMessage> &msg);
+    bool isDoneFetching() const;
+
+    status_t dequeueAccessUnit(sp<ABuffer> *accessUnit);
+    sp<ABuffer> aggregateBuffer(const sp<ABuffer> &accessUnit);
+    status_t fetchInputData(sp<AMessage> &reply);
+    void doFlush(bool notifyComplete);
+
+    void onInputBufferFetched(const sp<AMessage> &msg);
+    void onBufferConsumed(int32_t size);
+
+    DISALLOW_EVIL_CONSTRUCTORS(DecoderPassThrough);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_DECODER_PASS_THROUGH_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Driver.cpp b/media/libmedia/nuplayer2/NuPlayer2Driver.cpp
new file mode 100644
index 0000000..629a7eb
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Driver.cpp
@@ -0,0 +1,1189 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2Driver"
+#include <inttypes.h>
+#include <utils/Log.h>
+#include <cutils/properties.h>
+
+#include "NuPlayer2Driver.h"
+
+#include "NuPlayer2.h"
+#include "NuPlayer2Source.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <media/IMediaAnalyticsService.h>
+
+static const int kDumpLockRetries = 50;
+static const int kDumpLockSleepUs = 20000;
+
+namespace android {
+
+struct ParcelWrapper : public RefBase {
+    static sp<ParcelWrapper> Create(const Parcel *p) {
+        if (p != NULL) {
+            sp<ParcelWrapper> pw = new ParcelWrapper();
+            if (pw->appendFrom(p) == OK) {
+                return pw;
+            }
+        }
+        return NULL;
+    }
+
+    const Parcel *getParcel() {
+        return mParcel;
+    }
+
+protected:
+    virtual ~ParcelWrapper() {
+        if (mParcel != NULL) {
+            delete mParcel;
+        }
+    }
+
+private:
+    ParcelWrapper()
+        : mParcel(NULL) { }
+
+    status_t appendFrom(const Parcel *p) {
+        if (mParcel == NULL) {
+            mParcel = new Parcel;
+        }
+        return mParcel->appendFrom(p, 0 /* start */, p->dataSize());
+    }
+
+    Parcel *mParcel;
+};
+
+// key for media statistics
+static const char *kKeyPlayer = "nuplayer2";
+// attrs for media statistics
+static const char *kPlayerVMime = "android.media.mediaplayer.video.mime";
+static const char *kPlayerVCodec = "android.media.mediaplayer.video.codec";
+static const char *kPlayerWidth = "android.media.mediaplayer.width";
+static const char *kPlayerHeight = "android.media.mediaplayer.height";
+static const char *kPlayerFrames = "android.media.mediaplayer.frames";
+static const char *kPlayerFramesDropped = "android.media.mediaplayer.dropped";
+static const char *kPlayerAMime = "android.media.mediaplayer.audio.mime";
+static const char *kPlayerACodec = "android.media.mediaplayer.audio.codec";
+static const char *kPlayerDuration = "android.media.mediaplayer.durationMs";
+static const char *kPlayerPlaying = "android.media.mediaplayer.playingMs";
+static const char *kPlayerError = "android.media.mediaplayer.err";
+static const char *kPlayerErrorCode = "android.media.mediaplayer.errcode";
+static const char *kPlayerErrorState = "android.media.mediaplayer.errstate";
+static const char *kPlayerDataSourceType = "android.media.mediaplayer.dataSource";
+//
+static const char *kPlayerRebuffering = "android.media.mediaplayer.rebufferingMs";
+static const char *kPlayerRebufferingCount = "android.media.mediaplayer.rebuffers";
+static const char *kPlayerRebufferingAtExit = "android.media.mediaplayer.rebufferExit";
+
+
+NuPlayer2Driver::NuPlayer2Driver(pid_t pid)
+    : mState(STATE_IDLE),
+      mIsAsyncPrepare(false),
+      mAsyncResult(UNKNOWN_ERROR),
+      mSetSurfaceInProgress(false),
+      mDurationUs(-1),
+      mPositionUs(-1),
+      mSeekInProgress(false),
+      mPlayingTimeUs(0),
+      mRebufferingTimeUs(0),
+      mRebufferingEvents(0),
+      mRebufferingAtExit(false),
+      mLooper(new ALooper),
+      mNuPlayer2Looper(new ALooper),
+      mMediaClock(new MediaClock),
+      mPlayer(new NuPlayer2(pid, mMediaClock)),
+      mPlayerFlags(0),
+      mAnalyticsItem(NULL),
+      mClientUid(-1),
+      mAtEOS(false),
+      mLooping(false),
+      mAutoLoop(false) {
+    ALOGD("NuPlayer2Driver(%p) created, clientPid(%d)", this, pid);
+    mLooper->setName("NuPlayer2Driver Looper");
+    mNuPlayer2Looper->setName("NuPlayer2 Looper");
+
+    mMediaClock->init();
+
+    // set up an analytics record
+    mAnalyticsItem = new MediaAnalyticsItem(kKeyPlayer);
+    mAnalyticsItem->generateSessionID();
+
+    mNuPlayer2Looper->start(
+            false, /* runOnCallingThread */
+            true,  /* canCallJava */
+            PRIORITY_AUDIO);
+
+    mNuPlayer2Looper->registerHandler(mPlayer);
+
+    mPlayer->setDriver(this);
+}
+
+NuPlayer2Driver::~NuPlayer2Driver() {
+    ALOGV("~NuPlayer2Driver(%p)", this);
+    mNuPlayer2Looper->stop();
+    mLooper->stop();
+
+    // finalize any pending metrics, usually a no-op.
+    updateMetrics("destructor");
+    logMetrics("destructor");
+
+    if (mAnalyticsItem != NULL) {
+        delete mAnalyticsItem;
+        mAnalyticsItem = NULL;
+    }
+}
+
+status_t NuPlayer2Driver::initCheck() {
+    mLooper->start(
+            false, /* runOnCallingThread */
+            true,  /* canCallJava */
+            PRIORITY_AUDIO);
+
+    mLooper->registerHandler(this);
+    return OK;
+}
+
+status_t NuPlayer2Driver::setUID(uid_t uid) {
+    mPlayer->setUID(uid);
+    mClientUid = uid;
+    if (mAnalyticsItem) {
+        mAnalyticsItem->setUid(mClientUid);
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::setDataSource(
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers) {
+    ALOGV("setDataSource(%p) url(%s)", this, uriDebugString(url, false).c_str());
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(httpService, url, headers);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+status_t NuPlayer2Driver::setDataSource(int fd, int64_t offset, int64_t length) {
+    ALOGV("setDataSource(%p) file(%d)", this, fd);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(fd, offset, length);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+status_t NuPlayer2Driver::setDataSource(const sp<IStreamSource> &source) {
+    ALOGV("setDataSource(%p) stream source", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(source);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+status_t NuPlayer2Driver::setDataSource(const sp<DataSource> &source) {
+    ALOGV("setDataSource(%p) callback source", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(source);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+status_t NuPlayer2Driver::setVideoSurfaceTexture(const sp<ANativeWindowWrapper> &nww) {
+    ALOGV("setVideoSurfaceTexture(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mSetSurfaceInProgress) {
+        return INVALID_OPERATION;
+    }
+
+    switch (mState) {
+        case STATE_SET_DATASOURCE_PENDING:
+        case STATE_RESET_IN_PROGRESS:
+            return INVALID_OPERATION;
+
+        default:
+            break;
+    }
+
+    mSetSurfaceInProgress = true;
+
+    mPlayer->setVideoSurfaceTextureAsync(nww);
+
+    while (mSetSurfaceInProgress) {
+        mCondition.wait(mLock);
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::getBufferingSettings(BufferingSettings* buffering) {
+    ALOGV("getBufferingSettings(%p)", this);
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mState == STATE_IDLE) {
+            return INVALID_OPERATION;
+        }
+    }
+
+    return mPlayer->getBufferingSettings(buffering);
+}
+
+status_t NuPlayer2Driver::setBufferingSettings(const BufferingSettings& buffering) {
+    ALOGV("setBufferingSettings(%p)", this);
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mState == STATE_IDLE) {
+            return INVALID_OPERATION;
+        }
+    }
+
+    return mPlayer->setBufferingSettings(buffering);
+}
+
+status_t NuPlayer2Driver::prepare() {
+    ALOGV("prepare(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+    return prepare_l();
+}
+
+status_t NuPlayer2Driver::prepare_l() {
+    switch (mState) {
+        case STATE_UNPREPARED:
+            mState = STATE_PREPARING;
+
+            // Make sure we're not posting any notifications; success or
+            // failure is communicated only through our result code.
+            mIsAsyncPrepare = false;
+            mPlayer->prepareAsync();
+            while (mState == STATE_PREPARING) {
+                mCondition.wait(mLock);
+            }
+            return (mState == STATE_PREPARED) ? OK : UNKNOWN_ERROR;
+        case STATE_STOPPED:
+            // this is really just paused. handle as seek to start
+            mAtEOS = false;
+            mState = STATE_STOPPED_AND_PREPARING;
+            mIsAsyncPrepare = false;
+            mPlayer->seekToAsync(0, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
+                    true /* needNotify */);
+            while (mState == STATE_STOPPED_AND_PREPARING) {
+                mCondition.wait(mLock);
+            }
+            return (mState == STATE_STOPPED_AND_PREPARED) ? OK : UNKNOWN_ERROR;
+        default:
+            return INVALID_OPERATION;
+    }
+}
+
+status_t NuPlayer2Driver::prepareAsync() {
+    ALOGV("prepareAsync(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    switch (mState) {
+        case STATE_UNPREPARED:
+            mState = STATE_PREPARING;
+            mIsAsyncPrepare = true;
+            mPlayer->prepareAsync();
+            return OK;
+        case STATE_STOPPED:
+            // this is really just paused. handle as seek to start
+            mAtEOS = false;
+            mState = STATE_STOPPED_AND_PREPARING;
+            mIsAsyncPrepare = true;
+            mPlayer->seekToAsync(0, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */,
+                    true /* needNotify */);
+            return OK;
+        default:
+            return INVALID_OPERATION;
+    }
+}
+
+status_t NuPlayer2Driver::start() {
+    ALOGD("start(%p), state is %d, eos is %d", this, mState, mAtEOS);
+    Mutex::Autolock autoLock(mLock);
+    return start_l();
+}
+
+status_t NuPlayer2Driver::start_l() {
+    switch (mState) {
+        case STATE_UNPREPARED:
+        {
+            status_t err = prepare_l();
+
+            if (err != OK) {
+                return err;
+            }
+
+            CHECK_EQ(mState, STATE_PREPARED);
+
+            // fall through
+        }
+
+        case STATE_PAUSED:
+        case STATE_STOPPED_AND_PREPARED:
+        case STATE_PREPARED:
+        {
+            mPlayer->start();
+
+            // fall through
+        }
+
+        case STATE_RUNNING:
+        {
+            if (mAtEOS) {
+                mPlayer->seekToAsync(0);
+                mAtEOS = false;
+                mPositionUs = -1;
+            }
+            break;
+        }
+
+        default:
+            return INVALID_OPERATION;
+    }
+
+    mState = STATE_RUNNING;
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::stop() {
+    ALOGD("stop(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    switch (mState) {
+        case STATE_RUNNING:
+            mPlayer->pause();
+            // fall through
+
+        case STATE_PAUSED:
+            mState = STATE_STOPPED;
+            notifyListener_l(MEDIA2_STOPPED);
+            break;
+
+        case STATE_PREPARED:
+        case STATE_STOPPED:
+        case STATE_STOPPED_AND_PREPARING:
+        case STATE_STOPPED_AND_PREPARED:
+            mState = STATE_STOPPED;
+            break;
+
+        default:
+            return INVALID_OPERATION;
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::pause() {
+    ALOGD("pause(%p)", this);
+    // The NuPlayer2Renderer may get flushed if paused for long enough, e.g. the pause-timeout
+    // teardown for audio offload mode. If that happens, the NuPlayer2Renderer will no longer know
+    // the current position. So, similar to seekTo, update |mPositionUs| to the pause position by
+    // calling getCurrentPosition here.
+    int unused;
+    getCurrentPosition(&unused);
+
+    Mutex::Autolock autoLock(mLock);
+
+    switch (mState) {
+        case STATE_PAUSED:
+        case STATE_PREPARED:
+            return OK;
+
+        case STATE_RUNNING:
+            mState = STATE_PAUSED;
+            notifyListener_l(MEDIA2_PAUSED);
+            mPlayer->pause();
+            break;
+
+        default:
+            return INVALID_OPERATION;
+    }
+
+    return OK;
+}
+
+bool NuPlayer2Driver::isPlaying() {
+    return mState == STATE_RUNNING && !mAtEOS;
+}
+
+status_t NuPlayer2Driver::setPlaybackSettings(const AudioPlaybackRate &rate) {
+    status_t err = mPlayer->setPlaybackSettings(rate);
+    if (err == OK) {
+        // try to update position
+        int unused;
+        getCurrentPosition(&unused);
+        Mutex::Autolock autoLock(mLock);
+        if (rate.mSpeed == 0.f && mState == STATE_RUNNING) {
+            mState = STATE_PAUSED;
+            notifyListener_l(MEDIA2_PAUSED);
+        } else if (rate.mSpeed != 0.f
+                && (mState == STATE_PAUSED
+                    || mState == STATE_STOPPED_AND_PREPARED
+                    || mState == STATE_PREPARED)) {
+            err = start_l();
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2Driver::getPlaybackSettings(AudioPlaybackRate *rate) {
+    return mPlayer->getPlaybackSettings(rate);
+}
+
+status_t NuPlayer2Driver::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
+    return mPlayer->setSyncSettings(sync, videoFpsHint);
+}
+
+status_t NuPlayer2Driver::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
+    return mPlayer->getSyncSettings(sync, videoFps);
+}
+
+status_t NuPlayer2Driver::seekTo(int msec, MediaPlayer2SeekMode mode) {
+    ALOGD("seekTo(%p) (%d ms, %d) at state %d", this, msec, mode, mState);
+    Mutex::Autolock autoLock(mLock);
+
+    int64_t seekTimeUs = msec * 1000ll;
+
+    switch (mState) {
+        case STATE_PREPARED:
+        case STATE_STOPPED_AND_PREPARED:
+        case STATE_PAUSED:
+        case STATE_RUNNING:
+        {
+            mAtEOS = false;
+            mSeekInProgress = true;
+            // seeks can take a while, so we're essentially paused
+            notifyListener_l(MEDIA2_PAUSED);
+            mPlayer->seekToAsync(seekTimeUs, mode, true /* needNotify */);
+            break;
+        }
+
+        default:
+            return INVALID_OPERATION;
+    }
+
+    mPositionUs = seekTimeUs;
+    return OK;
+}
+
+status_t NuPlayer2Driver::getCurrentPosition(int *msec) {
+    int64_t tempUs = 0;
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mSeekInProgress || (mState == STATE_PAUSED && !mAtEOS)) {
+            tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
+            *msec = (int)divRound(tempUs, (int64_t)(1000));
+            return OK;
+        }
+    }
+
+    status_t ret = mPlayer->getCurrentPosition(&tempUs);
+
+    Mutex::Autolock autoLock(mLock);
+    // We need to check mSeekInProgress here because mPlayer->seekToAsync is an async call, which
+    // means getCurrentPosition can be called before the seek completes. In other words, the
+    // renderer may return a position value that's different from the seek-to position.
+    if (ret != OK) {
+        tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
+    } else {
+        mPositionUs = tempUs;
+    }
+    *msec = (int)divRound(tempUs, (int64_t)(1000));
+    return OK;
+}
+
+status_t NuPlayer2Driver::getDuration(int *msec) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mDurationUs < 0) {
+        return UNKNOWN_ERROR;
+    }
+
+    *msec = (mDurationUs + 500ll) / 1000;
+
+    return OK;
+}
+
+void NuPlayer2Driver::updateMetrics(const char *where) {
+    if (where == NULL) {
+        where = "unknown";
+    }
+    ALOGV("updateMetrics(%p) from %s at state %d", this, where, mState);
+
+    // gather the final stats for this record
+    Vector<sp<AMessage>> trackStats;
+    mPlayer->getStats(&trackStats);
+
+    if (trackStats.size() > 0) {
+        for (size_t i = 0; i < trackStats.size(); ++i) {
+            const sp<AMessage> &stats = trackStats.itemAt(i);
+
+            AString mime;
+            stats->findString("mime", &mime);
+
+            AString name;
+            stats->findString("component-name", &name);
+
+            if (mime.startsWith("video/")) {
+                int32_t width, height;
+                mAnalyticsItem->setCString(kPlayerVMime, mime.c_str());
+                if (!name.empty()) {
+                    mAnalyticsItem->setCString(kPlayerVCodec, name.c_str());
+                }
+
+                if (stats->findInt32("width", &width)
+                        && stats->findInt32("height", &height)) {
+                    mAnalyticsItem->setInt32(kPlayerWidth, width);
+                    mAnalyticsItem->setInt32(kPlayerHeight, height);
+                }
+
+                int64_t numFramesTotal = 0;
+                int64_t numFramesDropped = 0;
+                stats->findInt64("frames-total", &numFramesTotal);
+                stats->findInt64("frames-dropped-output", &numFramesDropped);
+
+                mAnalyticsItem->setInt64(kPlayerFrames, numFramesTotal);
+                mAnalyticsItem->setInt64(kPlayerFramesDropped, numFramesDropped);
+
+
+            } else if (mime.startsWith("audio/")) {
+                mAnalyticsItem->setCString(kPlayerAMime, mime.c_str());
+                if (!name.empty()) {
+                    mAnalyticsItem->setCString(kPlayerACodec, name.c_str());
+                }
+            }
+        }
+    }
+
+    // always provide duration and playing time, even if they have 0/unknown values.
+
+    // getDuration() uses mLock for mutex -- careful where we use it.
+    int duration_ms = -1;
+    getDuration(&duration_ms);
+    mAnalyticsItem->setInt64(kPlayerDuration, duration_ms);
+
+    mAnalyticsItem->setInt64(kPlayerPlaying, (mPlayingTimeUs + 500) / 1000);
+
+    if (mRebufferingEvents != 0) {
+        mAnalyticsItem->setInt64(kPlayerRebuffering, (mRebufferingTimeUs + 500) / 1000);
+        mAnalyticsItem->setInt32(kPlayerRebufferingCount, mRebufferingEvents);
+        mAnalyticsItem->setInt32(kPlayerRebufferingAtExit, mRebufferingAtExit);
+    }
+
+    mAnalyticsItem->setCString(kPlayerDataSourceType, mPlayer->getDataSourceType());
+}
+
+
+void NuPlayer2Driver::logMetrics(const char *where) {
+    if (where == NULL) {
+        where = "unknown";
+    }
+    ALOGV("logMetrics(%p) from %s at state %d", this, where, mState);
+
+    if (mAnalyticsItem == NULL || mAnalyticsItem->isEnabled() == false) {
+        return;
+    }
+
+    // log only non-empty records
+    // we always updateMetrics() before we get here
+    // and that always injects 3 fields (duration, playing time, and
+    // datasource) into the record.
+    // So the canonical "empty" record has 3 elements in it.
+    if (mAnalyticsItem->count() > 3) {
+
+        mAnalyticsItem->setFinalized(true);
+        mAnalyticsItem->selfrecord();
+
+        // re-init in case we prepare() and start() again.
+        delete mAnalyticsItem;
+        mAnalyticsItem = new MediaAnalyticsItem(kKeyPlayer);
+        if (mAnalyticsItem) {
+            mAnalyticsItem->generateSessionID();
+            mAnalyticsItem->setUid(mClientUid);
+        }
+    } else {
+        ALOGV("did not have anything to record");
+    }
+}
+
+status_t NuPlayer2Driver::reset() {
+    ALOGD("reset(%p) at state %d", this, mState);
+
+    updateMetrics("reset");
+    logMetrics("reset");
+
+    Mutex::Autolock autoLock(mLock);
+
+    switch (mState) {
+        case STATE_IDLE:
+            return OK;
+
+        case STATE_SET_DATASOURCE_PENDING:
+        case STATE_RESET_IN_PROGRESS:
+            return INVALID_OPERATION;
+
+        case STATE_PREPARING:
+        {
+            CHECK(mIsAsyncPrepare);
+
+            notifyListener_l(MEDIA2_PREPARED);
+            break;
+        }
+
+        default:
+            break;
+    }
+
+    if (mState != STATE_STOPPED) {
+        notifyListener_l(MEDIA2_STOPPED);
+    }
+
+    mState = STATE_RESET_IN_PROGRESS;
+    mPlayer->resetAsync();
+
+    while (mState == STATE_RESET_IN_PROGRESS) {
+        mCondition.wait(mLock);
+    }
+
+    mDurationUs = -1;
+    mPositionUs = -1;
+    mLooping = false;
+    mPlayingTimeUs = 0;
+    mRebufferingTimeUs = 0;
+    mRebufferingEvents = 0;
+    mRebufferingAtExit = false;
+
+    return OK;
+}
+
+status_t NuPlayer2Driver::notifyAt(int64_t mediaTimeUs) {
+    ALOGV("notifyAt(%p), time:%lld", this, (long long)mediaTimeUs);
+    return mPlayer->notifyAt(mediaTimeUs);
+}
+
+status_t NuPlayer2Driver::setLooping(int loop) {
+    mLooping = loop != 0;
+    return OK;
+}
+
+player2_type NuPlayer2Driver::playerType() {
+    return PLAYER2_NU_PLAYER2;
+}
+
+status_t NuPlayer2Driver::invoke(const Parcel &request, Parcel *reply) {
+    if (reply == NULL) {
+        ALOGE("reply is a NULL pointer");
+        return BAD_VALUE;
+    }
+
+    int32_t methodId;
+    status_t ret = request.readInt32(&methodId);
+    if (ret != OK) {
+        ALOGE("Failed to retrieve the requested method to invoke, err(%d)", ret);
+        return ret;
+    }
+
+    switch (methodId) {
+        case MEDIA_PLAYER2_INVOKE_ID_SET_VIDEO_SCALING_MODE:
+        {
+            int mode = request.readInt32();
+            return mPlayer->setVideoScalingMode(mode);
+        }
+
+        case MEDIA_PLAYER2_INVOKE_ID_GET_TRACK_INFO:
+        {
+            return mPlayer->getTrackInfo(reply);
+        }
+
+        case MEDIA_PLAYER2_INVOKE_ID_SELECT_TRACK:
+        {
+            int trackIndex = request.readInt32();
+            int msec = 0;
+            // getCurrentPosition should always return OK
+            getCurrentPosition(&msec);
+            return mPlayer->selectTrack(trackIndex, true /* select */, msec * 1000ll);
+        }
+
+        case MEDIA_PLAYER2_INVOKE_ID_UNSELECT_TRACK:
+        {
+            int trackIndex = request.readInt32();
+            return mPlayer->selectTrack(trackIndex, false /* select */, 0xdeadbeef /* not used */);
+        }
+
+        case MEDIA_PLAYER2_INVOKE_ID_GET_SELECTED_TRACK:
+        {
+            int32_t type = request.readInt32();
+            return mPlayer->getSelectedTrack(type, reply);
+        }
+
+        default:
+        {
+            return INVALID_OPERATION;
+        }
+    }
+}
+
+void NuPlayer2Driver::setAudioSink(const sp<AudioSink> &audioSink) {
+    mPlayer->setAudioSink(audioSink);
+    mAudioSink = audioSink;
+}
+
+status_t NuPlayer2Driver::setParameter(
+        int /* key */, const Parcel & /* request */) {
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayer2Driver::getParameter(int key, Parcel *reply) {
+
+    if (key == FOURCC('m','t','r','X')) {
+        // mtrX -- a play on 'metrics' (not matrix)
+        // gather current info all together, parcel it, and send it back
+        updateMetrics("api");
+        mAnalyticsItem->writeToParcel(reply);
+        return OK;
+    }
+
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayer2Driver::getMetadata(
+        const media::Metadata::Filter& /* ids */, Parcel *records) {
+    Mutex::Autolock autoLock(mLock);
+
+    using media::Metadata;
+
+    Metadata meta(records);
+
+    meta.appendBool(
+            Metadata::kPauseAvailable,
+            mPlayerFlags & NuPlayer2::Source::FLAG_CAN_PAUSE);
+
+    meta.appendBool(
+            Metadata::kSeekBackwardAvailable,
+            mPlayerFlags & NuPlayer2::Source::FLAG_CAN_SEEK_BACKWARD);
+
+    meta.appendBool(
+            Metadata::kSeekForwardAvailable,
+            mPlayerFlags & NuPlayer2::Source::FLAG_CAN_SEEK_FORWARD);
+
+    meta.appendBool(
+            Metadata::kSeekAvailable,
+            mPlayerFlags & NuPlayer2::Source::FLAG_CAN_SEEK);
+
+    return OK;
+}
+
+void NuPlayer2Driver::notifyResetComplete() {
+    ALOGD("notifyResetComplete(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK_EQ(mState, STATE_RESET_IN_PROGRESS);
+    mState = STATE_IDLE;
+    mCondition.broadcast();
+}
+
+void NuPlayer2Driver::notifySetSurfaceComplete() {
+    ALOGV("notifySetSurfaceComplete(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK(mSetSurfaceInProgress);
+    mSetSurfaceInProgress = false;
+
+    mCondition.broadcast();
+}
+
+void NuPlayer2Driver::notifyDuration(int64_t durationUs) {
+    Mutex::Autolock autoLock(mLock);
+    mDurationUs = durationUs;
+}
+
+void NuPlayer2Driver::notifyMorePlayingTimeUs(int64_t playingUs) {
+    Mutex::Autolock autoLock(mLock);
+    mPlayingTimeUs += playingUs;
+}
+
+void NuPlayer2Driver::notifyMoreRebufferingTimeUs(int64_t rebufferingUs) {
+    Mutex::Autolock autoLock(mLock);
+    mRebufferingTimeUs += rebufferingUs;
+    mRebufferingEvents++;
+}
+
+void NuPlayer2Driver::notifyRebufferingWhenExit(bool status) {
+    Mutex::Autolock autoLock(mLock);
+    mRebufferingAtExit = status;
+}
+
+void NuPlayer2Driver::notifySeekComplete() {
+    ALOGV("notifySeekComplete(%p)", this);
+    Mutex::Autolock autoLock(mLock);
+    mSeekInProgress = false;
+    notifySeekComplete_l();
+}
+
+void NuPlayer2Driver::notifySeekComplete_l() {
+    bool wasSeeking = true;
+    if (mState == STATE_STOPPED_AND_PREPARING) {
+        wasSeeking = false;
+        mState = STATE_STOPPED_AND_PREPARED;
+        mCondition.broadcast();
+        if (!mIsAsyncPrepare) {
+            // if we are preparing synchronously, no need to notify listener
+            return;
+        }
+    } else if (mState == STATE_STOPPED) {
+        // no need to notify listener
+        return;
+    }
+    notifyListener_l(wasSeeking ? MEDIA2_SEEK_COMPLETE : MEDIA2_PREPARED);
+}
+
+status_t NuPlayer2Driver::dump(
+        int fd, const Vector<String16> & /* args */) const {
+
+    Vector<sp<AMessage> > trackStats;
+    mPlayer->getStats(&trackStats);
+
+    AString logString(" NuPlayer2\n");
+    char buf[256] = {0};
+
+    bool locked = false;
+    for (int i = 0; i < kDumpLockRetries; ++i) {
+        if (mLock.tryLock() == NO_ERROR) {
+            locked = true;
+            break;
+        }
+        usleep(kDumpLockSleepUs);
+    }
+
+    if (locked) {
+        snprintf(buf, sizeof(buf), "  state(%d), atEOS(%d), looping(%d), autoLoop(%d)\n",
+                mState, mAtEOS, mLooping, mAutoLoop);
+        mLock.unlock();
+    } else {
+        snprintf(buf, sizeof(buf), "  NPD(%p) lock is taken\n", this);
+    }
+    logString.append(buf);
+
+    for (size_t i = 0; i < trackStats.size(); ++i) {
+        const sp<AMessage> &stats = trackStats.itemAt(i);
+
+        AString mime;
+        if (stats->findString("mime", &mime)) {
+            snprintf(buf, sizeof(buf), "  mime(%s)\n", mime.c_str());
+            logString.append(buf);
+        }
+
+        AString name;
+        if (stats->findString("component-name", &name)) {
+            snprintf(buf, sizeof(buf), "    decoder(%s)\n", name.c_str());
+            logString.append(buf);
+        }
+
+        if (mime.startsWith("video/")) {
+            int32_t width, height;
+            if (stats->findInt32("width", &width)
+                    && stats->findInt32("height", &height)) {
+                snprintf(buf, sizeof(buf), "    resolution(%d x %d)\n", width, height);
+                logString.append(buf);
+            }
+
+            int64_t numFramesTotal = 0;
+            int64_t numFramesDropped = 0;
+
+            stats->findInt64("frames-total", &numFramesTotal);
+            stats->findInt64("frames-dropped-output", &numFramesDropped);
+            snprintf(buf, sizeof(buf), "    numFramesTotal(%lld), numFramesDropped(%lld), "
+                     "percentageDropped(%.2f%%)\n",
+                     (long long)numFramesTotal,
+                     (long long)numFramesDropped,
+                     numFramesTotal == 0
+                            ? 0.0 : (double)(numFramesDropped * 100) / numFramesTotal);
+            logString.append(buf);
+        }
+    }
+
+    ALOGI("%s", logString.c_str());
+
+    if (fd >= 0) {
+        FILE *out = fdopen(dup(fd), "w");
+        if (out != NULL) {
+            fprintf(out, "%s", logString.c_str());
+            fclose(out);
+            out = NULL;
+        }
+    }
+
+    return OK;
+}
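
dump() deliberately uses a bounded tryLock loop (kDumpLockRetries attempts, sleeping kDumpLockSleepUs between them) so a stuck player cannot hang the dump thread. Here is a standalone sketch of the same idea using the standard library rather than the Android Mutex API; all names are illustrative.

// Bounded try-lock loop: back off and retry, and give up with a message
// instead of blocking indefinitely on a lock a busy thread may hold.
#include <chrono>
#include <cstdio>
#include <mutex>
#include <thread>

static bool lockWithRetries(std::mutex &m, int retries, std::chrono::microseconds sleepUs) {
    for (int i = 0; i < retries; ++i) {
        if (m.try_lock()) {
            return true;                       // caller must unlock
        }
        std::this_thread::sleep_for(sleepUs);  // brief back-off before retrying
    }
    return false;                              // give up rather than deadlock
}

int main() {
    std::mutex stateLock;
    if (lockWithRetries(stateLock, 50, std::chrono::microseconds(20000))) {
        std::printf("  state snapshot taken under lock\n");
        stateLock.unlock();
    } else {
        std::printf("  lock is taken, skipping state snapshot\n");
    }
    return 0;
}
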
+
+void NuPlayer2Driver::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatNotifyListener: {
+            int32_t msgId;
+            int32_t ext1 = 0;
+            int32_t ext2 = 0;
+            CHECK(msg->findInt32("messageId", &msgId));
+            msg->findInt32("ext1", &ext1);
+            msg->findInt32("ext2", &ext2);
+            sp<ParcelWrapper> in;
+            sp<RefBase> obj;
+            if (msg->findObject("parcel", &obj) && obj != NULL) {
+                in = static_cast<ParcelWrapper *>(obj.get());
+            }
+            sendEvent(msgId, ext1, ext2, (in == NULL ? NULL : in->getParcel()));
+            break;
+        }
+        default:
+            break;
+    }
+}
+
+void NuPlayer2Driver::notifyListener(
+        int msg, int ext1, int ext2, const Parcel *in) {
+    Mutex::Autolock autoLock(mLock);
+    notifyListener_l(msg, ext1, ext2, in);
+}
+
+void NuPlayer2Driver::notifyListener_l(
+        int msg, int ext1, int ext2, const Parcel *in) {
+    ALOGD("notifyListener_l(%p), (%d, %d, %d, %d), loop setting(%d, %d)",
+            this, msg, ext1, ext2, (in == NULL ? -1 : (int)in->dataSize()), mAutoLoop, mLooping);
+    switch (msg) {
+        case MEDIA2_PLAYBACK_COMPLETE:
+        {
+            if (mState != STATE_RESET_IN_PROGRESS) {
+                if (mAutoLoop) {
+                    audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+                    if (mAudioSink != NULL) {
+                        streamType = mAudioSink->getAudioStreamType();
+                    }
+                    if (streamType == AUDIO_STREAM_NOTIFICATION) {
+                        ALOGW("disabling auto-loop for notification");
+                        mAutoLoop = false;
+                    }
+                }
+                if (mLooping || mAutoLoop) {
+                    mPlayer->seekToAsync(0);
+                    if (mAudioSink != NULL) {
+                        // The renderer has stopped the sink at the end in order to play out
+                        // the last little bit of audio. If we're looping, we need to restart it.
+                        mAudioSink->start();
+                    }
+                    // don't send completion event when looping
+                    return;
+                }
+                if (property_get_bool("persist.debug.sf.stats", false)) {
+                    Vector<String16> args;
+                    dump(-1, args);
+                }
+                mPlayer->pause();
+                mState = STATE_PAUSED;
+            }
+            // fall through
+        }
+
+        case MEDIA2_ERROR:
+        {
+            // when we have an error, add it to the analytics for this playback.
+            // ext1 is our primary 'error type' value. Only add ext2 when non-zero.
+            // [test against msg is due to fall through from previous switch value]
+            if (msg == MEDIA2_ERROR) {
+                mAnalyticsItem->setInt32(kPlayerError, ext1);
+                if (ext2 != 0) {
+                    mAnalyticsItem->setInt32(kPlayerErrorCode, ext2);
+                }
+                mAnalyticsItem->setCString(kPlayerErrorState, stateString(mState).c_str());
+            }
+            mAtEOS = true;
+            break;
+        }
+
+        default:
+            break;
+    }
+
+    sp<AMessage> notify = new AMessage(kWhatNotifyListener, this);
+    notify->setInt32("messageId", msg);
+    notify->setInt32("ext1", ext1);
+    notify->setInt32("ext2", ext2);
+    notify->setObject("parcel", ParcelWrapper::Create(in));
+    notify->post();
+}
+
+void NuPlayer2Driver::notifySetDataSourceCompleted(status_t err) {
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK_EQ(mState, STATE_SET_DATASOURCE_PENDING);
+
+    mAsyncResult = err;
+    mState = (err == OK) ? STATE_UNPREPARED : STATE_IDLE;
+    mCondition.broadcast();
+}
+
+void NuPlayer2Driver::notifyPrepareCompleted(status_t err) {
+    ALOGV("notifyPrepareCompleted %d", err);
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_PREPARING) {
+        // We were preparing asynchronously when the client called
+        // reset(); we sent a premature "prepared" notification and
+        // then initiated the reset. This notification is stale.
+        CHECK(mState == STATE_RESET_IN_PROGRESS || mState == STATE_IDLE);
+        return;
+    }
+
+    CHECK_EQ(mState, STATE_PREPARING);
+
+    mAsyncResult = err;
+
+    if (err == OK) {
+        // update state before notifying client, so that if client calls back into NuPlayer2Driver
+        // in response, NuPlayer2Driver has the right state
+        mState = STATE_PREPARED;
+        if (mIsAsyncPrepare) {
+            notifyListener_l(MEDIA2_PREPARED);
+        }
+    } else {
+        mState = STATE_UNPREPARED;
+        if (mIsAsyncPrepare) {
+            notifyListener_l(MEDIA2_ERROR, MEDIA2_ERROR_UNKNOWN, err);
+        }
+    }
+
+    sp<MetaData> meta = mPlayer->getFileMeta();
+    int32_t loop;
+    if (meta != NULL
+            && meta->findInt32(kKeyAutoLoop, &loop) && loop != 0) {
+        mAutoLoop = true;
+    }
+
+    mCondition.broadcast();
+}
+
+void NuPlayer2Driver::notifyFlagsChanged(uint32_t flags) {
+    Mutex::Autolock autoLock(mLock);
+
+    mPlayerFlags = flags;
+}
+
+// Modular DRM
+status_t NuPlayer2Driver::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId)
+{
+    ALOGV("prepareDrm(%p) state: %d", this, mState);
+
+    // leaving the state verification for mediaplayer.cpp
+    status_t ret = mPlayer->prepareDrm(uuid, drmSessionId);
+
+    ALOGV("prepareDrm ret: %d", ret);
+
+    return ret;
+}
+
+status_t NuPlayer2Driver::releaseDrm()
+{
+    ALOGV("releaseDrm(%p) state: %d", this, mState);
+
+    // leaving the state verification for mediaplayer.cpp
+    status_t ret = mPlayer->releaseDrm();
+
+    ALOGV("releaseDrm ret: %d", ret);
+
+    return ret;
+}
+
+std::string NuPlayer2Driver::stateString(State state) {
+    const char *rval = NULL;
+    char rawbuffer[16];  // allows "%d"
+
+    switch (state) {
+        case STATE_IDLE: rval = "IDLE"; break;
+        case STATE_SET_DATASOURCE_PENDING: rval = "SET_DATASOURCE_PENDING"; break;
+        case STATE_UNPREPARED: rval = "UNPREPARED"; break;
+        case STATE_PREPARING: rval = "PREPARING"; break;
+        case STATE_PREPARED: rval = "PREPARED"; break;
+        case STATE_RUNNING: rval = "RUNNING"; break;
+        case STATE_PAUSED: rval = "PAUSED"; break;
+        case STATE_RESET_IN_PROGRESS: rval = "RESET_IN_PROGRESS"; break;
+        case STATE_STOPPED: rval = "STOPPED"; break;
+        case STATE_STOPPED_AND_PREPARING: rval = "STOPPED_AND_PREPARING"; break;
+        case STATE_STOPPED_AND_PREPARED: rval = "STOPPED_AND_PREPARED"; break;
+        default:
+            // unknown state -- fall back to formatting the raw numeric value
+            snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
+            rval = rawbuffer;
+            break;
+    }
+
+    return rval;
+}
+
+}  // namespace android
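
Several of the driver entry points above (setDataSource(), prepare(), reset()) follow the same shape: under mLock, move to a *_PENDING or *_IN_PROGRESS state, fire the asynchronous request at NuPlayer2, then wait on mCondition until the matching notify*Complete() callback stores the result and broadcasts. The sketch below illustrates that blocking-facade pattern with std::mutex and std::condition_variable in place of Android's Mutex/Condition; the class, the single simulated transition, and all names are illustrative only.

// Blocking facade over an asynchronous engine: wait for the completion
// callback to change the state before returning the stored result.
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

class DriverSketch {
public:
    // Synchronous entry point, shaped like NuPlayer2Driver::setDataSource().
    int setDataSource() {
        std::unique_lock<std::mutex> lock(mLock);
        if (mState != State::IDLE) {
            return -1;                                   // "INVALID_OPERATION"
        }
        mState = State::SET_DATASOURCE_PENDING;
        // Simulate the async engine; the real code posts a message instead.
        std::thread([this] { notifySetDataSourceCompleted(0 /* OK */); }).detach();
        mCondition.wait(lock, [this] {
            return mState != State::SET_DATASOURCE_PENDING;
        });
        return mAsyncResult;
    }

    // Completion callback, shaped like notifySetDataSourceCompleted().
    void notifySetDataSourceCompleted(int err) {
        std::lock_guard<std::mutex> lock(mLock);
        mAsyncResult = err;
        mState = (err == 0) ? State::UNPREPARED : State::IDLE;
        mCondition.notify_all();                         // wakes the waiter
    }

private:
    enum class State { IDLE, SET_DATASOURCE_PENDING, UNPREPARED };
    std::mutex mLock;
    std::condition_variable mCondition;
    State mState = State::IDLE;
    int mAsyncResult = -1;
};

int main() {
    DriverSketch driver;
    std::printf("setDataSource returned %d\n", driver.setDataSource());
    return 0;
}
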
diff --git a/media/libmedia/nuplayer2/NuPlayer2Driver.h b/media/libmedia/nuplayer2/NuPlayer2Driver.h
new file mode 100644
index 0000000..d393f9d
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Driver.h
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/MediaPlayer2Interface.h>
+
+#include <media/MediaAnalyticsItem.h>
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct ALooper;
+struct MediaClock;
+struct NuPlayer2;
+
+struct NuPlayer2Driver : public MediaPlayer2Interface {
+    explicit NuPlayer2Driver(pid_t pid);
+
+    virtual status_t initCheck();
+
+    virtual status_t setUID(uid_t uid);
+
+    virtual status_t setDataSource(
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers);
+
+    virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
+
+    virtual status_t setDataSource(const sp<IStreamSource> &source);
+
+    virtual status_t setDataSource(const sp<DataSource>& dataSource);
+
+    virtual status_t setVideoSurfaceTexture(const sp<ANativeWindowWrapper> &nww);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual status_t prepare();
+    virtual status_t prepareAsync();
+    virtual status_t start();
+    virtual status_t stop();
+    virtual status_t pause();
+    virtual bool isPlaying();
+    virtual status_t setPlaybackSettings(const AudioPlaybackRate &rate);
+    virtual status_t getPlaybackSettings(AudioPlaybackRate *rate);
+    virtual status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint);
+    virtual status_t getSyncSettings(AVSyncSettings *sync, float *videoFps);
+    virtual status_t seekTo(
+            int msec, MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC);
+    virtual status_t getCurrentPosition(int *msec);
+    virtual status_t getDuration(int *msec);
+    virtual status_t reset();
+    virtual status_t notifyAt(int64_t mediaTimeUs) override;
+    virtual status_t setLooping(int loop);
+    virtual player2_type playerType();
+    virtual status_t invoke(const Parcel &request, Parcel *reply);
+    virtual void setAudioSink(const sp<AudioSink> &audioSink);
+    virtual status_t setParameter(int key, const Parcel &request);
+    virtual status_t getParameter(int key, Parcel *reply);
+
+    virtual status_t getMetadata(
+            const media::Metadata::Filter& ids, Parcel *records);
+
+    virtual status_t dump(int fd, const Vector<String16> &args) const;
+
+    virtual void onMessageReceived(const sp<AMessage> &msg) override;
+
+    void notifySetDataSourceCompleted(status_t err);
+    void notifyPrepareCompleted(status_t err);
+    void notifyResetComplete();
+    void notifySetSurfaceComplete();
+    void notifyDuration(int64_t durationUs);
+    void notifyMorePlayingTimeUs(int64_t timeUs);
+    void notifyMoreRebufferingTimeUs(int64_t timeUs);
+    void notifyRebufferingWhenExit(bool status);
+    void notifySeekComplete();
+    void notifySeekComplete_l();
+    void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
+    void notifyFlagsChanged(uint32_t flags);
+
+    // Modular DRM
+    virtual status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId);
+    virtual status_t releaseDrm();
+
+protected:
+    virtual ~NuPlayer2Driver();
+
+private:
+    enum State {
+        STATE_IDLE,
+        STATE_SET_DATASOURCE_PENDING,
+        STATE_UNPREPARED,
+        STATE_PREPARING,
+        STATE_PREPARED,
+        STATE_RUNNING,
+        STATE_PAUSED,
+        STATE_RESET_IN_PROGRESS,
+        STATE_STOPPED,                  // equivalent to PAUSED
+        STATE_STOPPED_AND_PREPARING,    // equivalent to PAUSED, but seeking
+        STATE_STOPPED_AND_PREPARED,     // equivalent to PAUSED, but seek complete
+    };
+
+    std::string stateString(State state);
+
+    enum {
+        kWhatNotifyListener,
+    };
+
+    mutable Mutex mLock;
+    Condition mCondition;
+
+    State mState;
+
+    bool mIsAsyncPrepare;
+    status_t mAsyncResult;
+
+    // The following are protected through "mLock"
+    // >>>
+    bool mSetSurfaceInProgress;
+    int64_t mDurationUs;
+    int64_t mPositionUs;
+    bool mSeekInProgress;
+    int64_t mPlayingTimeUs;
+    int64_t mRebufferingTimeUs;
+    int32_t mRebufferingEvents;
+    bool mRebufferingAtExit;
+    // <<<
+
+    sp<ALooper> mLooper;
+    sp<ALooper> mNuPlayer2Looper;
+    const sp<MediaClock> mMediaClock;
+    const sp<NuPlayer2> mPlayer;
+    sp<AudioSink> mAudioSink;
+    uint32_t mPlayerFlags;
+
+    MediaAnalyticsItem *mAnalyticsItem;
+    uid_t mClientUid;
+
+    bool mAtEOS;
+    bool mLooping;
+    bool mAutoLoop;
+
+    void updateMetrics(const char *where);
+    void logMetrics(const char *where);
+
+    status_t prepare_l();
+    status_t start_l();
+    void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayer2Driver);
+};
+
+}  // namespace android
+
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2Drm.cpp b/media/libmedia/nuplayer2/NuPlayer2Drm.cpp
new file mode 100644
index 0000000..4751849
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Drm.cpp
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2Drm"
+
+#include "NuPlayer2Drm.h"
+
+#include <media/NdkWrapper.h>
+#include <utils/Log.h>
+
+
+namespace android {
+
+Vector<DrmUUID> NuPlayer2Drm::parsePSSH(const void *pssh, size_t psshsize)
+{
+    Vector<DrmUUID> drmSchemes, empty;
+    const int DATALEN_SIZE = 4;
+
+    // the format of the buffer is 1 or more of:
+    //    {
+    //        16 byte uuid
+    //        4 byte data length N
+    //        N bytes of data
+    //    }
+    // Determine the number of entries in the source data.
+    // Since we got the data from stagefright, we trust it is valid and properly formatted.
+
+    const uint8_t *data = (const uint8_t*)pssh;
+    size_t len = psshsize;
+    size_t numentries = 0;
+    while (len > 0) {
+        if (len < DrmUUID::UUID_SIZE) {
+            ALOGE("ParsePSSH: invalid PSSH data");
+            return empty;
+        }
+
+        const uint8_t *uuidPtr = data;
+
+        // skip uuid
+        data += DrmUUID::UUID_SIZE;
+        len -= DrmUUID::UUID_SIZE;
+
+        // get data length
+        if (len < DATALEN_SIZE) {
+            ALOGE("ParsePSSH: invalid PSSH data");
+            return empty;
+        }
+
+        uint32_t datalen = *((uint32_t*)data);
+        data += DATALEN_SIZE;
+        len -= DATALEN_SIZE;
+
+        if (len < datalen) {
+            ALOGE("ParsePSSH: invalid PSSH data");
+            return empty;
+        }
+
+        DrmUUID _uuid(uuidPtr);
+        drmSchemes.add(_uuid);
+
+        // log this entry's payload before advancing past it
+        ALOGV("ParsePSSH[%zu]: %s: %s", numentries,
+                _uuid.toHexString().string(),
+                DrmUUID::arrayToHex(data, datalen).string()
+             );
+
+        // skip the data
+        data += datalen;
+        len -= datalen;
+
+        numentries++;
+    }
+
+    return drmSchemes;
+}
+
+Vector<DrmUUID> NuPlayer2Drm::getSupportedDrmSchemes(const void *pssh, size_t psshsize)
+{
+    Vector<DrmUUID> psshDRMs = parsePSSH(pssh, psshsize);
+
+    Vector<DrmUUID> supportedDRMs;
+    for (size_t i = 0; i < psshDRMs.size(); i++) {
+        DrmUUID uuid = psshDRMs[i];
+        if (AMediaDrmWrapper::isCryptoSchemeSupported(uuid.ptr(), NULL)) {
+            supportedDRMs.add(uuid);
+        }
+    }
+
+    ALOGV("getSupportedDrmSchemes: psshDRMs: %zu supportedDRMs: %zu",
+            psshDRMs.size(), supportedDRMs.size());
+
+    return supportedDRMs;
+}
+
+// Parcel has only a private copy constructor, so it is passed in rather than returned
+void NuPlayer2Drm::retrieveDrmInfo(const void *pssh, size_t psshsize, Parcel *parcel)
+{
+    // 1) PSSH bytes
+    parcel->writeUint32(psshsize);
+    parcel->writeByteArray(psshsize, (const uint8_t*)pssh);
+
+    ALOGV("retrieveDrmInfo: MEDIA2_DRM_INFO  PSSH: size: %zu %s", psshsize,
+            DrmUUID::arrayToHex((uint8_t*)pssh, psshsize).string());
+
+    // 2) supportedDRMs
+    Vector<DrmUUID> supportedDRMs = getSupportedDrmSchemes(pssh, psshsize);
+    parcel->writeUint32(supportedDRMs.size());
+    for (size_t i = 0; i < supportedDRMs.size(); i++) {
+        DrmUUID uuid = supportedDRMs[i];
+        parcel->writeByteArray(DrmUUID::UUID_SIZE, uuid.ptr());
+
+        ALOGV("retrieveDrmInfo: MEDIA2_DRM_INFO  supportedScheme[%zu] %s", i,
+                uuid.toHexString().string());
+    }
+}
+
+}   // namespace android
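
parsePSSH() above walks a blob whose layout is one or more of {16-byte UUID, 4-byte length N, N bytes of scheme data}. The standalone sketch below builds a single-entry blob and walks it with the same layout; it is an illustration rather than the framework code, and it assumes (as the parser above does) that the length field is in host byte order.

// Build one {uuid, length, data} entry in memory and walk it the same way
// parsePSSH() does, with bounds checks at each step.
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

static const size_t kUuidSize = 16;

int main() {
    // One entry: a dummy UUID (remaining bytes zero) and 4 bytes of data.
    const uint8_t uuid[kUuidSize] = {0x10, 0x77, 0xef, 0xec};
    const uint8_t payload[] = {0xde, 0xad, 0xbe, 0xef};
    uint32_t datalen = sizeof(payload);

    std::vector<uint8_t> pssh;
    pssh.insert(pssh.end(), uuid, uuid + kUuidSize);
    pssh.insert(pssh.end(), (const uint8_t *)&datalen, (const uint8_t *)&datalen + 4);
    pssh.insert(pssh.end(), payload, payload + sizeof(payload));

    // Walk the blob: uuid, length, data, repeat until exhausted.
    const uint8_t *data = pssh.data();
    size_t len = pssh.size();
    while (len >= kUuidSize + 4) {
        const uint8_t *entryUuid = data;
        data += kUuidSize;
        len -= kUuidSize;

        uint32_t n;
        std::memcpy(&n, data, 4);   // memcpy avoids an unaligned read
        data += 4;
        len -= 4;
        if (len < n) {
            std::printf("malformed entry\n");
            break;
        }

        std::printf("uuid[0]=0x%02x datalen=%u first data byte=0x%02x\n",
                    entryUuid[0], n, n ? data[0] : 0);
        data += n;
        len -= n;
    }
    return 0;
}
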
diff --git a/media/libmedia/nuplayer2/NuPlayer2Drm.h b/media/libmedia/nuplayer2/NuPlayer2Drm.h
new file mode 100644
index 0000000..e762ccc
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Drm.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_DRM_H_
+#define NUPLAYER2_DRM_H_
+
+#include <binder/Parcel.h>
+#include <media/stagefright/MetaData.h> // for CryptInfo
+
+
+namespace android {
+
+    struct DrmUUID {
+        static const int UUID_SIZE = 16;
+
+        DrmUUID() {
+            memset(this->uuid, 0, sizeof(uuid));
+        }
+
+        // to allow defining Vector/KeyedVector of UUID type
+        DrmUUID(const DrmUUID &a) {
+            memcpy(this->uuid, a.uuid, sizeof(uuid));
+        }
+
+        // to allow defining Vector/KeyedVector of UUID type
+        DrmUUID(const uint8_t uuid_in[UUID_SIZE]) {
+            memcpy(this->uuid, uuid_in, sizeof(uuid));
+        }
+
+        const uint8_t *ptr() const {
+            return uuid;
+        }
+
+        String8 toHexString() const {
+            return arrayToHex(uuid, UUID_SIZE);
+        }
+
+        static String8 toHexString(const uint8_t uuid_in[UUID_SIZE]) {
+            return arrayToHex(uuid_in, UUID_SIZE);
+        }
+
+        static String8 arrayToHex(const uint8_t *array, int bytes) {
+            String8 result;
+            for (int i = 0; i < bytes; i++) {
+                result.appendFormat("%02x", array[i]);
+            }
+
+            return result;
+        }
+
+    protected:
+        uint8_t uuid[UUID_SIZE];
+    };
+
+
+    struct NuPlayer2Drm {
+
+        // static helpers - internal
+
+    protected:
+        static Vector<DrmUUID> parsePSSH(const void *pssh, size_t psshsize);
+        static Vector<DrmUUID> getSupportedDrmSchemes(const void *pssh, size_t psshsize);
+
+        // static helpers - public
+
+    public:
+        // Parcel has only a private copy constructor, so it is passed in rather than returned
+        static void retrieveDrmInfo(const void *pssh, size_t psshsize, Parcel *parcel);
+
+    };  // NuPlayer2Drm
+
+}   // android
+
+#endif     //NUPLAYER2_DRM_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Renderer.cpp b/media/libmedia/nuplayer2/NuPlayer2Renderer.cpp
new file mode 100644
index 0000000..71f5dce
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Renderer.cpp
@@ -0,0 +1,2075 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2Renderer"
+#include <utils/Log.h>
+
+#include "JWakeLock.h"
+#include "NuPlayer2Renderer.h"
+#include <algorithm>
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/VideoFrameScheduler.h>
+#include <media/MediaCodecBuffer.h>
+
+#include <inttypes.h>
+
+namespace android {
+
+/*
+ * Example of common configuration settings in shell script form
+
+   #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager
+   adb shell setprop audio.offload.disable 1
+
+   #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager
+   adb shell setprop audio.offload.video 1
+
+   #Use audio callbacks for PCM data
+   adb shell setprop media.stagefright.audio.cbk 1
+
+   #Use deep buffer for PCM data with video (it is generally enabled for audio-only)
+   adb shell setprop media.stagefright.audio.deep 1
+
+   #Set size of buffers for pcm audio sink in msec (example: 1000 msec)
+   adb shell setprop media.stagefright.audio.sink 1000
+
+ * These configurations take effect for the next track played (not the current track).
+ */
+
+static inline bool getUseAudioCallbackSetting() {
+    return property_get_bool("media.stagefright.audio.cbk", false /* default_value */);
+}
+
+static inline int32_t getAudioSinkPcmMsSetting() {
+    return property_get_int32(
+            "media.stagefright.audio.sink", 500 /* default_value */);
+}
+
+// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
+// is closed to allow the audio DSP to power down.
+static const int64_t kOffloadPauseMaxUs = 10000000ll;
+
+// Maximum allowed delay from AudioSink, 1.5 seconds.
+static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000ll;
+
+static const int64_t kMinimumAudioClockUpdatePeriodUs = 20 /* msec */ * 1000;
+
+// static
+const NuPlayer2::Renderer::PcmInfo NuPlayer2::Renderer::AUDIO_PCMINFO_INITIALIZER = {
+        AUDIO_CHANNEL_NONE,
+        AUDIO_OUTPUT_FLAG_NONE,
+        AUDIO_FORMAT_INVALID,
+        0, // mNumChannels
+        0 // mSampleRate
+};
+
+// static
+const int64_t NuPlayer2::Renderer::kMinPositionUpdateDelayUs = 100000ll;
+
+NuPlayer2::Renderer::Renderer(
+        const sp<MediaPlayer2Base::AudioSink> &sink,
+        const sp<MediaClock> &mediaClock,
+        const sp<AMessage> &notify,
+        uint32_t flags)
+    : mAudioSink(sink),
+      mUseVirtualAudioSink(false),
+      mNotify(notify),
+      mFlags(flags),
+      mNumFramesWritten(0),
+      mDrainAudioQueuePending(false),
+      mDrainVideoQueuePending(false),
+      mAudioQueueGeneration(0),
+      mVideoQueueGeneration(0),
+      mAudioDrainGeneration(0),
+      mVideoDrainGeneration(0),
+      mAudioEOSGeneration(0),
+      mMediaClock(mediaClock),
+      mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
+      mAudioFirstAnchorTimeMediaUs(-1),
+      mAnchorTimeMediaUs(-1),
+      mAnchorNumFramesWritten(-1),
+      mVideoLateByUs(0ll),
+      mNextVideoTimeMediaUs(-1),
+      mHasAudio(false),
+      mHasVideo(false),
+      mNotifyCompleteAudio(false),
+      mNotifyCompleteVideo(false),
+      mSyncQueues(false),
+      mPaused(false),
+      mPauseDrainAudioAllowedUs(0),
+      mVideoSampleReceived(false),
+      mVideoRenderingStarted(false),
+      mVideoRenderingStartGeneration(0),
+      mAudioRenderingStartGeneration(0),
+      mRenderingDataDelivered(false),
+      mNextAudioClockUpdateTimeUs(-1),
+      mLastAudioMediaTimeUs(-1),
+      mAudioOffloadPauseTimeoutGeneration(0),
+      mAudioTornDown(false),
+      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
+      mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
+      mTotalBuffersQueued(0),
+      mLastAudioBufferDrained(0),
+      mUseAudioCallback(false),
+      mWakeLock(new JWakeLock()) {
+    CHECK(mediaClock != NULL);
+    mPlaybackRate = mPlaybackSettings.mSpeed;
+    mMediaClock->setPlaybackRate(mPlaybackRate);
+}
+
+NuPlayer2::Renderer::~Renderer() {
+    if (offloadingAudio()) {
+        mAudioSink->stop();
+        mAudioSink->flush();
+        mAudioSink->close();
+    }
+
+    // Try to avoid a race condition in case the callback is still active.
+    Mutex::Autolock autoLock(mLock);
+    if (mUseAudioCallback) {
+        flushQueue(&mAudioQueue);
+        flushQueue(&mVideoQueue);
+    }
+    mWakeLock.clear();
+    mVideoScheduler.clear();
+    mNotify.clear();
+    mAudioSink.clear();
+}
+
+void NuPlayer2::Renderer::queueBuffer(
+        bool audio,
+        const sp<MediaCodecBuffer> &buffer,
+        const sp<AMessage> &notifyConsumed) {
+    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
+    msg->setInt32("queueGeneration", getQueueGeneration(audio));
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->setObject("buffer", buffer);
+    msg->setMessage("notifyConsumed", notifyConsumed);
+    msg->post();
+}
+
+void NuPlayer2::Renderer::queueEOS(bool audio, status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    sp<AMessage> msg = new AMessage(kWhatQueueEOS, this);
+    msg->setInt32("queueGeneration", getQueueGeneration(audio));
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->setInt32("finalResult", finalResult);
+    msg->post();
+}
+
+status_t NuPlayer2::Renderer::setPlaybackSettings(const AudioPlaybackRate &rate) {
+    sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
+    writeToAMessage(msg, rate);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::Renderer::onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */) {
+    if (rate.mSpeed == 0.f) {
+        onPause();
+        // Don't call the audiosink's setPlaybackRate if pausing, as the pitch does not
+        // have to correspond to any non-zero speed (e.g., the old speed). Keep the
+        // settings nonetheless, using the old speed, in case the audiosink changes.
+        AudioPlaybackRate newRate = rate;
+        newRate.mSpeed = mPlaybackSettings.mSpeed;
+        mPlaybackSettings = newRate;
+        return OK;
+    }
+
+    if (mAudioSink != NULL && mAudioSink->ready()) {
+        status_t err = mAudioSink->setPlaybackRate(rate);
+        if (err != OK) {
+            return err;
+        }
+    }
+    mPlaybackSettings = rate;
+    mPlaybackRate = rate.mSpeed;
+    mMediaClock->setPlaybackRate(mPlaybackRate);
+    return OK;
+}
+
+status_t NuPlayer2::Renderer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
+    sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, rate);
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
+    if (mAudioSink != NULL && mAudioSink->ready()) {
+        status_t err = mAudioSink->getPlaybackRate(rate);
+        if (err == OK) {
+            if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) {
+                ALOGW("correcting mismatch in internal/external playback rate");
+            }
+            // Get the playback settings actually used by the audiosink, as they
+            // may be slightly off because the audiosink does not apply small changes.
+            mPlaybackSettings = *rate;
+            if (mPaused) {
+                rate->mSpeed = 0.f;
+            }
+        }
+        return err;
+    }
+    *rate = mPlaybackSettings;
+    return OK;
+}
+
+status_t NuPlayer2::Renderer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
+    sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
+    writeToAMessage(msg, sync, videoFpsHint);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+    return err;
+}
+
+status_t NuPlayer2::Renderer::onConfigSync(const AVSyncSettings &sync, float videoFpsHint __unused) {
+    if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
+        return BAD_VALUE;
+    }
+    // TODO: support sync sources
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayer2::Renderer::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
+    sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+        if (err == OK) {
+            readFromAMessage(response, sync, videoFps);
+        }
+    }
+    return err;
+}
+
+status_t NuPlayer2::Renderer::onGetSyncSettings(
+        AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
+    *sync = mSyncSettings;
+    *videoFps = -1.f;
+    return OK;
+}
+
+void NuPlayer2::Renderer::flush(bool audio, bool notifyComplete) {
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (audio) {
+            mNotifyCompleteAudio |= notifyComplete;
+            clearAudioFirstAnchorTime_l();
+            ++mAudioQueueGeneration;
+            ++mAudioDrainGeneration;
+        } else {
+            mNotifyCompleteVideo |= notifyComplete;
+            ++mVideoQueueGeneration;
+            ++mVideoDrainGeneration;
+        }
+
+        mMediaClock->clearAnchor();
+        mVideoLateByUs = 0;
+        mNextVideoTimeMediaUs = -1;
+        mSyncQueues = false;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatFlush, this);
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->post();
+}
+
+void NuPlayer2::Renderer::signalTimeDiscontinuity() {
+}
+
+void NuPlayer2::Renderer::signalDisableOffloadAudio() {
+    (new AMessage(kWhatDisableOffloadAudio, this))->post();
+}
+
+void NuPlayer2::Renderer::signalEnableOffloadAudio() {
+    (new AMessage(kWhatEnableOffloadAudio, this))->post();
+}
+
+void NuPlayer2::Renderer::pause() {
+    (new AMessage(kWhatPause, this))->post();
+}
+
+void NuPlayer2::Renderer::resume() {
+    (new AMessage(kWhatResume, this))->post();
+}
+
+void NuPlayer2::Renderer::setVideoFrameRate(float fps) {
+    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);
+    msg->setFloat("frame-rate", fps);
+    msg->post();
+}
+
+// Called on any thread without mLock acquired.
+status_t NuPlayer2::Renderer::getCurrentPosition(int64_t *mediaUs) {
+    status_t result = mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
+    if (result == OK) {
+        return result;
+    }
+
+    // MediaClock has not started yet. Try to start it if possible.
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mAudioFirstAnchorTimeMediaUs == -1) {
+            return result;
+        }
+
+        AudioTimestamp ts;
+        status_t res = mAudioSink->getTimestamp(ts);
+        if (res != OK) {
+            return result;
+        }
+
+        // AudioSink has rendered some frames.
+        int64_t nowUs = ALooper::GetNowUs();
+        int64_t nowMediaUs = mAudioSink->getPlayedOutDurationUs(nowUs)
+                + mAudioFirstAnchorTimeMediaUs;
+        mMediaClock->updateAnchor(nowMediaUs, nowUs, -1);
+    }
+
+    return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
+}
+
+void NuPlayer2::Renderer::clearAudioFirstAnchorTime_l() {
+    mAudioFirstAnchorTimeMediaUs = -1;
+    mMediaClock->setStartingTimeMedia(-1);
+}
+
+void NuPlayer2::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
+    if (mAudioFirstAnchorTimeMediaUs == -1) {
+        mAudioFirstAnchorTimeMediaUs = mediaUs;
+        mMediaClock->setStartingTimeMedia(mediaUs);
+    }
+}
+
+// Called on renderer looper.
+void NuPlayer2::Renderer::clearAnchorTime() {
+    mMediaClock->clearAnchor();
+    mAnchorTimeMediaUs = -1;
+    mAnchorNumFramesWritten = -1;
+}
+
+void NuPlayer2::Renderer::setVideoLateByUs(int64_t lateUs) {
+    Mutex::Autolock autoLock(mLock);
+    mVideoLateByUs = lateUs;
+}
+
+int64_t NuPlayer2::Renderer::getVideoLateByUs() {
+    Mutex::Autolock autoLock(mLock);
+    return mVideoLateByUs;
+}
+
+status_t NuPlayer2::Renderer::openAudioSink(
+        const sp<AMessage> &format,
+        bool offloadOnly,
+        bool hasVideo,
+        uint32_t flags,
+        bool *isOffloaded,
+        bool isStreaming) {
+    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);
+    msg->setMessage("format", format);
+    msg->setInt32("offload-only", offloadOnly);
+    msg->setInt32("has-video", hasVideo);
+    msg->setInt32("flags", flags);
+    msg->setInt32("isStreaming", isStreaming);
+
+    sp<AMessage> response;
+    status_t postStatus = msg->postAndAwaitResponse(&response);
+
+    int32_t err;
+    if (postStatus != OK || response.get() == nullptr || !response->findInt32("err", &err)) {
+        err = INVALID_OPERATION;
+    } else if (err == OK && isOffloaded != NULL) {
+        int32_t offload;
+        CHECK(response->findInt32("offload", &offload));
+        *isOffloaded = (offload != 0);
+    }
+    return err;
+}
+
+void NuPlayer2::Renderer::closeAudioSink() {
+    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);
+
+    sp<AMessage> response;
+    msg->postAndAwaitResponse(&response);
+}
+
+void NuPlayer2::Renderer::changeAudioFormat(
+        const sp<AMessage> &format,
+        bool offloadOnly,
+        bool hasVideo,
+        uint32_t flags,
+        bool isStreaming,
+        const sp<AMessage> &notify) {
+    sp<AMessage> meta = new AMessage;
+    meta->setMessage("format", format);
+    meta->setInt32("offload-only", offloadOnly);
+    meta->setInt32("has-video", hasVideo);
+    meta->setInt32("flags", flags);
+    meta->setInt32("isStreaming", isStreaming);
+
+    sp<AMessage> msg = new AMessage(kWhatChangeAudioFormat, this);
+    msg->setInt32("queueGeneration", getQueueGeneration(true /* audio */));
+    msg->setMessage("notify", notify);
+    msg->setMessage("meta", meta);
+    msg->post();
+}
+
+void NuPlayer2::Renderer::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatOpenAudioSink:
+        {
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+
+            int32_t offloadOnly;
+            CHECK(msg->findInt32("offload-only", &offloadOnly));
+
+            int32_t hasVideo;
+            CHECK(msg->findInt32("has-video", &hasVideo));
+
+            uint32_t flags;
+            CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+            uint32_t isStreaming;
+            CHECK(msg->findInt32("isStreaming", (int32_t *)&isStreaming));
+
+            status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->setInt32("offload", offloadingAudio());
+
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+
+            break;
+        }
+
+        case kWhatCloseAudioSink:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            onCloseAudioSink();
+
+            sp<AMessage> response = new AMessage;
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatStopAudioSink:
+        {
+            mAudioSink->stop();
+            break;
+        }
+
+        case kWhatChangeAudioFormat:
+        {
+            int32_t queueGeneration;
+            CHECK(msg->findInt32("queueGeneration", &queueGeneration));
+
+            sp<AMessage> notify;
+            CHECK(msg->findMessage("notify", &notify));
+
+            if (offloadingAudio()) {
+                ALOGW("changeAudioFormat should NOT be called in offload mode");
+                notify->setInt32("err", INVALID_OPERATION);
+                notify->post();
+                break;
+            }
+
+            sp<AMessage> meta;
+            CHECK(msg->findMessage("meta", &meta));
+
+            if (queueGeneration != getQueueGeneration(true /* audio */)
+                    || mAudioQueue.empty()) {
+                onChangeAudioFormat(meta, notify);
+                break;
+            }
+
+            QueueEntry entry;
+            entry.mNotifyConsumed = notify;
+            entry.mMeta = meta;
+
+            Mutex::Autolock autoLock(mLock);
+            mAudioQueue.push_back(entry);
+            postDrainAudioQueue_l();
+
+            break;
+        }
+
+        case kWhatDrainAudioQueue:
+        {
+            mDrainAudioQueuePending = false;
+
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            if (generation != getDrainGeneration(true /* audio */)) {
+                break;
+            }
+
+            if (onDrainAudioQueue()) {
+                uint32_t numFramesPlayed;
+                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
+                         (status_t)OK);
+
+                // Handle the AudioTrack race when start() is called immediately after flush().
+                uint32_t numFramesPendingPlayout =
+                    (mNumFramesWritten > numFramesPlayed ?
+                        mNumFramesWritten - numFramesPlayed : 0);
+
+                // This is how long the audio sink will have data to
+                // play back.
+                int64_t delayUs =
+                    mAudioSink->msecsPerFrame()
+                        * numFramesPendingPlayout * 1000ll;
+                if (mPlaybackRate > 1.0f) {
+                    delayUs /= mPlaybackRate;
+                }
+
+                // Let's give it more data after about half that time
+                // has elapsed.
+                delayUs /= 2;
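+                // Illustrative example (assuming a 44.1 kHz output): msecsPerFrame() is about
+                // 0.023 ms, so 4410 pending frames yield a delayUs of roughly 100,000 us before
+                // the halving above, i.e. the next drain is posted after about 50 ms.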
+                // check the buffer size to estimate maximum delay permitted.
+                const int64_t maxDrainDelayUs = std::max(
+                        mAudioSink->getBufferDurationInUs(), (int64_t)500000 /* half second */);
+                ALOGD_IF(delayUs > maxDrainDelayUs, "postDrainAudioQueue long delay: %lld > %lld",
+                        (long long)delayUs, (long long)maxDrainDelayUs);
+                Mutex::Autolock autoLock(mLock);
+                postDrainAudioQueue_l(delayUs);
+            }
+            break;
+        }
+
+        case kWhatDrainVideoQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            if (generation != getDrainGeneration(false /* audio */)) {
+                break;
+            }
+
+            mDrainVideoQueuePending = false;
+
+            onDrainVideoQueue();
+
+            postDrainVideoQueue();
+            break;
+        }
+
+        case kWhatPostDrainVideoQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            if (generation != getDrainGeneration(false /* audio */)) {
+                break;
+            }
+
+            mDrainVideoQueuePending = false;
+            postDrainVideoQueue();
+            break;
+        }
+
+        case kWhatQueueBuffer:
+        {
+            onQueueBuffer(msg);
+            break;
+        }
+
+        case kWhatQueueEOS:
+        {
+            onQueueEOS(msg);
+            break;
+        }
+
+        case kWhatEOS:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("audioEOSGeneration", &generation));
+            if (generation != mAudioEOSGeneration) {
+                break;
+            }
+            status_t finalResult;
+            CHECK(msg->findInt32("finalResult", &finalResult));
+            notifyEOS(true /* audio */, finalResult);
+            break;
+        }
+
+        case kWhatConfigPlayback:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AudioPlaybackRate rate;
+            readFromAMessage(msg, &rate);
+            status_t err = onConfigPlayback(rate);
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetPlaybackSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
+            status_t err = onGetPlaybackSettings(&rate);
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, rate);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatConfigSync:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            AVSyncSettings sync;
+            float videoFpsHint;
+            readFromAMessage(msg, &sync, &videoFpsHint);
+            status_t err = onConfigSync(sync, videoFpsHint);
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetSyncSettings:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            ALOGV("kWhatGetSyncSettings");
+            AVSyncSettings sync;
+            float videoFps = -1.f;
+            status_t err = onGetSyncSettings(&sync, &videoFps);
+            sp<AMessage> response = new AMessage;
+            if (err == OK) {
+                writeToAMessage(response, sync, videoFps);
+            }
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            onFlush(msg);
+            break;
+        }
+
+        case kWhatDisableOffloadAudio:
+        {
+            onDisableOffloadAudio();
+            break;
+        }
+
+        case kWhatEnableOffloadAudio:
+        {
+            onEnableOffloadAudio();
+            break;
+        }
+
+        case kWhatPause:
+        {
+            onPause();
+            break;
+        }
+
+        case kWhatResume:
+        {
+            onResume();
+            break;
+        }
+
+        case kWhatSetVideoFrameRate:
+        {
+            float fps;
+            CHECK(msg->findFloat("frame-rate", &fps));
+            onSetVideoFrameRate(fps);
+            break;
+        }
+
+        case kWhatAudioTearDown:
+        {
+            int32_t reason;
+            CHECK(msg->findInt32("reason", &reason));
+
+            onAudioTearDown((AudioTearDownReason)reason);
+            break;
+        }
+
+        case kWhatAudioOffloadPauseTimeout:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            if (generation != mAudioOffloadPauseTimeoutGeneration) {
+                break;
+            }
+            ALOGV("Audio Offload tear down due to pause timeout.");
+            onAudioTearDown(kDueToTimeout);
+            mWakeLock->release();
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer2::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
+    if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {
+        return;
+    }
+
+    if (mAudioQueue.empty()) {
+        return;
+    }
+
+    // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
+    if (mPaused) {
+        const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
+        if (diffUs > delayUs) {
+            delayUs = diffUs;
+        }
+    }
+
+    mDrainAudioQueuePending = true;
+    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
+    msg->setInt32("drainGeneration", mAudioDrainGeneration);
+    msg->post(delayUs);
+}
+
+void NuPlayer2::Renderer::prepareForMediaRenderingStart_l() {
+    mAudioRenderingStartGeneration = mAudioDrainGeneration;
+    mVideoRenderingStartGeneration = mVideoDrainGeneration;
+    mRenderingDataDelivered = false;
+}
+
+void NuPlayer2::Renderer::notifyIfMediaRenderingStarted_l() {
+    if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
+        mAudioRenderingStartGeneration == mAudioDrainGeneration) {
+        mRenderingDataDelivered = true;
+        if (mPaused) {
+            return;
+        }
+        mVideoRenderingStartGeneration = -1;
+        mAudioRenderingStartGeneration = -1;
+
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatMediaRenderingStart);
+        notify->post();
+    }
+}
+
+// static
+size_t NuPlayer2::Renderer::AudioSinkCallback(
+        MediaPlayer2Base::AudioSink * /* audioSink */,
+        void *buffer,
+        size_t size,
+        void *cookie,
+        MediaPlayer2Base::AudioSink::cb_event_t event) {
+    NuPlayer2::Renderer *me = (NuPlayer2::Renderer *)cookie;
+
+    switch (event) {
+        case MediaPlayer2Base::AudioSink::CB_EVENT_FILL_BUFFER:
+        {
+            return me->fillAudioBuffer(buffer, size);
+            break;
+        }
+
+        case MediaPlayer2Base::AudioSink::CB_EVENT_STREAM_END:
+        {
+            ALOGV("AudioSink::CB_EVENT_STREAM_END");
+            me->notifyEOSCallback();
+            break;
+        }
+
+        case MediaPlayer2Base::AudioSink::CB_EVENT_TEAR_DOWN:
+        {
+            ALOGV("AudioSink::CB_EVENT_TEAR_DOWN");
+            me->notifyAudioTearDown(kDueToError);
+            break;
+        }
+    }
+
+    return 0;
+}
+
+void NuPlayer2::Renderer::notifyEOSCallback() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (!mUseAudioCallback) {
+        return;
+    }
+
+    notifyEOS_l(true /* audio */, ERROR_END_OF_STREAM);
+}
+
+size_t NuPlayer2::Renderer::fillAudioBuffer(void *buffer, size_t size) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (!mUseAudioCallback) {
+        return 0;
+    }
+
+    bool hasEOS = false;
+
+    size_t sizeCopied = 0;
+    bool firstEntry = true;
+    QueueEntry *entry;  // will be valid after while loop if hasEOS is set.
+    while (sizeCopied < size && !mAudioQueue.empty()) {
+        entry = &*mAudioQueue.begin();
+
+        if (entry->mBuffer == NULL) { // EOS
+            hasEOS = true;
+            mAudioQueue.erase(mAudioQueue.begin());
+            break;
+        }
+
+        if (firstEntry && entry->mOffset == 0) {
+            firstEntry = false;
+            int64_t mediaTimeUs;
+            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+            ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+            setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
+        }
+
+        size_t copy = entry->mBuffer->size() - entry->mOffset;
+        size_t sizeRemaining = size - sizeCopied;
+        if (copy > sizeRemaining) {
+            copy = sizeRemaining;
+        }
+
+        memcpy((char *)buffer + sizeCopied,
+               entry->mBuffer->data() + entry->mOffset,
+               copy);
+
+        entry->mOffset += copy;
+        if (entry->mOffset == entry->mBuffer->size()) {
+            entry->mNotifyConsumed->post();
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+        }
+        sizeCopied += copy;
+
+        notifyIfMediaRenderingStarted_l();
+    }
+
+    if (mAudioFirstAnchorTimeMediaUs >= 0) {
+        int64_t nowUs = ALooper::GetNowUs();
+        int64_t nowMediaUs =
+            mAudioFirstAnchorTimeMediaUs + mAudioSink->getPlayedOutDurationUs(nowUs);
+        // we don't know how much data we are queueing for offloaded tracks.
+        mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
+    }
+
+    // For non-offloaded audio, we need to compute the frames written because
+    // there is no EVENT_STREAM_END notification. The frames-written count gives
+    // an estimate of the pending playout duration.
+    if (!offloadingAudio()) {
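+        // e.g. (illustrative) 4096 bytes copied at a 4-byte frame size (16-bit stereo PCM)
+        // account for 1024 additional frames written below.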
+        mNumFramesWritten += sizeCopied / mAudioSink->frameSize();
+    }
+
+    if (hasEOS) {
+        (new AMessage(kWhatStopAudioSink, this))->post();
+        // As there is currently no EVENT_STREAM_END callback notification for
+        // non-offloaded audio tracks, we need to post the EOS ourselves.
+        if (!offloadingAudio()) {
+            int64_t postEOSDelayUs = 0;
+            if (mAudioSink->needsTrailingPadding()) {
+                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
+            }
+            ALOGV("fillAudioBuffer: notifyEOS_l "
+                    "mNumFramesWritten:%u  finalResult:%d  postEOSDelay:%lld",
+                    mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs);
+            notifyEOS_l(true /* audio */, entry->mFinalResult, postEOSDelayUs);
+        }
+    }
+    return sizeCopied;
+}
+
+void NuPlayer2::Renderer::drainAudioQueueUntilLastEOS() {
+    List<QueueEntry>::iterator it = mAudioQueue.begin(), itEOS = it;
+    bool foundEOS = false;
+    while (it != mAudioQueue.end()) {
+        int32_t eos;
+        QueueEntry *entry = &*it++;
+        if ((entry->mBuffer == nullptr && entry->mNotifyConsumed == nullptr)
+                || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) {
+            itEOS = it;
+            foundEOS = true;
+        }
+    }
+
+    if (foundEOS) {
+        // post all replies before EOS and drop the samples
+        for (it = mAudioQueue.begin(); it != itEOS; it++) {
+            if (it->mBuffer == nullptr) {
+                if (it->mNotifyConsumed == nullptr) {
+                    // delay doesn't matter as we don't even have an AudioTrack
+                    notifyEOS(true /* audio */, it->mFinalResult);
+                } else {
+                    // TAG for re-opening audio sink.
+                    onChangeAudioFormat(it->mMeta, it->mNotifyConsumed);
+                }
+            } else {
+                it->mNotifyConsumed->post();
+            }
+        }
+        mAudioQueue.erase(mAudioQueue.begin(), itEOS);
+    }
+}
+
+bool NuPlayer2::Renderer::onDrainAudioQueue() {
+    // do not drain audio during teardown as queued buffers may be invalid.
+    if (mAudioTornDown) {
+        return false;
+    }
+    // TODO: This call to getPosition checks if AudioTrack has been created
+    // in AudioSink before draining audio. If AudioTrack doesn't exist, then
+    // CHECKs on getPosition will fail.
+    // We still need to figure out why AudioTrack is not created when
+    // this function is called. One possible reason could be leftover
+    // audio. Another place to check is whether the decoder has received
+    // INFO_FORMAT_CHANGED as the first buffer, since AudioSink is opened
+    // there, as well as possible interactions with a flush issued
+    // immediately after start. Investigate the error message
+    // "vorbis_dsp_synthesis returned -135", along with RTSP.
+    uint32_t numFramesPlayed;
+    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
+        // When getPosition fails, the renderer will not reschedule the draining
+        // unless new samples are queued.
+        // If we have a pending EOS (or an "eos" marker for discontinuities), we need
+        // to post it now, as NuPlayer2Decoder might be waiting for it.
+        drainAudioQueueUntilLastEOS();
+
+        ALOGW("onDrainAudioQueue(): audio sink is not ready");
+        return false;
+    }
+
+#if 0
+    ssize_t numFramesAvailableToWrite =
+        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
+
+    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
+        ALOGI("audio sink underrun");
+    } else {
+        ALOGV("audio queue has %d frames left to play",
+             mAudioSink->frameCount() - numFramesAvailableToWrite);
+    }
+#endif
+
+    uint32_t prevFramesWritten = mNumFramesWritten;
+    while (!mAudioQueue.empty()) {
+        QueueEntry *entry = &*mAudioQueue.begin();
+
+        if (entry->mBuffer == NULL) {
+            if (entry->mNotifyConsumed != nullptr) {
+                // TAG for re-opening the audio sink.
+                onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
+                mAudioQueue.erase(mAudioQueue.begin());
+                continue;
+            }
+
+            // EOS
+            if (mPaused) {
+                // Do not notify EOS when paused.
+                // This is needed to avoid switch to next clip while in pause.
+                ALOGV("onDrainAudioQueue(): Do not notify EOS when paused");
+                return false;
+            }
+
+            int64_t postEOSDelayUs = 0;
+            if (mAudioSink->needsTrailingPadding()) {
+                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
+            }
+            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
+            mLastAudioMediaTimeUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
+
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+            if (mAudioSink->needsTrailingPadding()) {
+                // If we're not in gapless playback (i.e. through setNextPlayer), we
+                // need to stop the track here, because that will play out the last
+                // little bit at the end of the file. Otherwise short files won't play.
+                mAudioSink->stop();
+                mNumFramesWritten = 0;
+            }
+            return false;
+        }
+
+        mLastAudioBufferDrained = entry->mBufferOrdinal;
+
+        // ignore 0-sized buffer which could be EOS marker with no data
+        if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
+            int64_t mediaTimeUs;
+            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+            ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs",
+                    mediaTimeUs / 1E6);
+            onNewAudioMediaTime(mediaTimeUs);
+        }
+
+        size_t copy = entry->mBuffer->size() - entry->mOffset;
+
+        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset,
+                                            copy, false /* blocking */);
+        if (written < 0) {
+            // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
+            if (written == WOULD_BLOCK) {
+                ALOGV("AudioSink write would block when writing %zu bytes", copy);
+            } else {
+                ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+                // This can only happen when AudioSink was opened with doNotReconnect flag set to
+                // true, in which case the NuPlayer2 will handle the reconnect.
+                notifyAudioTearDown(kDueToError);
+            }
+            break;
+        }
+
+        entry->mOffset += written;
+        size_t remainder = entry->mBuffer->size() - entry->mOffset;
+        if ((ssize_t)remainder < mAudioSink->frameSize()) {
+            if (remainder > 0) {
+                ALOGW("Corrupted audio buffer has fractional frames, discarding %zu bytes.",
+                        remainder);
+                entry->mOffset += remainder;
+                copy -= remainder;
+            }
+
+            entry->mNotifyConsumed->post();
+            mAudioQueue.erase(mAudioQueue.begin());
+
+            entry = NULL;
+        }
+
+        size_t copiedFrames = written / mAudioSink->frameSize();
+        mNumFramesWritten += copiedFrames;
+
+        {
+            Mutex::Autolock autoLock(mLock);
+            int64_t maxTimeMedia;
+            maxTimeMedia =
+                mAnchorTimeMediaUs +
+                        (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
+                                * 1000LL * mAudioSink->msecsPerFrame());
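+            // e.g. (illustrative, 44.1 kHz output): 44100 frames written past the anchor extend
+            // the max media time by about 1,000,000 us (one second) beyond mAnchorTimeMediaUs.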
+            mMediaClock->updateMaxTimeMedia(maxTimeMedia);
+
+            notifyIfMediaRenderingStarted_l();
+        }
+
+        if (written != (ssize_t)copy) {
+            // A short count was received from AudioSink::write()
+            //
+            // AudioSink write is called in non-blocking mode.
+            // It may return with a short count when:
+            //
+            // 1) Size to be copied is not a multiple of the frame size. Fractional frames are
+            //    discarded.
+            // 2) The data to be copied exceeds the available buffer in AudioSink.
+            // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
+            // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
+
+            // (Case 1)
+            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
+            // needs to fail, as we should not carry over fractional frames between calls.
+            CHECK_EQ(copy % mAudioSink->frameSize(), 0u);
+
+            // (Case 2, 3, 4)
+            // Return early to the caller.
+            // Beware of calling immediately again as this may busy-loop if you are not careful.
+            ALOGV("AudioSink write short frame count %zd < %zu", written, copy);
+            break;
+        }
+    }
+
+    // calculate whether we need to reschedule another write.
+    bool reschedule = !mAudioQueue.empty()
+            && (!mPaused
+                || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers
+    //ALOGD("reschedule:%d  empty:%d  mPaused:%d  prevFramesWritten:%u  mNumFramesWritten:%u",
+    //        reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten);
+    return reschedule;
+}
+
+int64_t NuPlayer2::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
+    int32_t sampleRate = offloadingAudio() ?
+            mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
+    if (sampleRate == 0) {
+        ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
+        return 0;
+    }
+    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
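+    // Example (illustrative): 48,000 frames at a 48 kHz sample rate correspond to
+    // 1,000,000 us (one second) of audio.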
+    return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
+}
+
+// Calculate duration of pending samples if played at normal rate (i.e., 1.0).
+int64_t NuPlayer2::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
+    int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
+    if (mUseVirtualAudioSink) {
+        int64_t nowUs = ALooper::GetNowUs();
+        int64_t mediaUs;
+        if (mMediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
+            return 0ll;
+        } else {
+            return writtenAudioDurationUs - (mediaUs - mAudioFirstAnchorTimeMediaUs);
+        }
+    }
+
+    const int64_t audioSinkPlayedUs = mAudioSink->getPlayedOutDurationUs(nowUs);
+    int64_t pendingUs = writtenAudioDurationUs - audioSinkPlayedUs;
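+    // e.g. (illustrative) 2,000,000 us written and 1,700,000 us already played out leave
+    // roughly 300,000 us still pending in the sink.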
+    if (pendingUs < 0) {
+        // This shouldn't happen unless the timestamp is stale.
+        ALOGW("%s: pendingUs %lld < 0, clamping to zero, potential resume after pause "
+                "writtenAudioDurationUs: %lld, audioSinkPlayedUs: %lld",
+                __func__, (long long)pendingUs,
+                (long long)writtenAudioDurationUs, (long long)audioSinkPlayedUs);
+        pendingUs = 0;
+    }
+    return pendingUs;
+}
+
+int64_t NuPlayer2::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
+    int64_t realUs;
+    if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
+        // If we failed to get the current position, e.g. because the audio clock
+        // is not ready, then just play out video immediately without delay.
+        return nowUs;
+    }
+    return realUs;
+}
+
+void NuPlayer2::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
+    Mutex::Autolock autoLock(mLock);
+    // TRICKY: vorbis decoder generates multiple frames with the same
+    // timestamp, so only update on the first frame with a given timestamp
+    if (mediaTimeUs == mAnchorTimeMediaUs) {
+        return;
+    }
+    setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
+
+    // mNextAudioClockUpdateTimeUs is -1 if we're waiting for audio sink to start
+    if (mNextAudioClockUpdateTimeUs == -1) {
+        AudioTimestamp ts;
+        if (mAudioSink->getTimestamp(ts) == OK && ts.mPosition > 0) {
+            mNextAudioClockUpdateTimeUs = 0; // start our clock updates
+        }
+    }
+    int64_t nowUs = ALooper::GetNowUs();
+    if (mNextAudioClockUpdateTimeUs >= 0) {
+        if (nowUs >= mNextAudioClockUpdateTimeUs) {
+            int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
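+            // e.g. (illustrative) if roughly 200,000 us of written audio is still pending in
+            // the sink, the clock anchor's media time lags this buffer's mediaTimeUs by that amount.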
+            mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
+            mUseVirtualAudioSink = false;
+            mNextAudioClockUpdateTimeUs = nowUs + kMinimumAudioClockUpdatePeriodUs;
+        }
+    } else {
+        int64_t unused;
+        if ((mMediaClock->getMediaTime(nowUs, &unused) != OK)
+                && (getDurationUsIfPlayedAtSampleRate(mNumFramesWritten)
+                        > kMaxAllowedAudioSinkDelayUs)) {
+            // Enough data has been sent to AudioSink, but AudioSink has not rendered
+            // any data yet. Something is wrong with AudioSink, e.g., the device is not
+            // connected to audio out.
+            // Switch to system clock. This essentially creates a virtual AudioSink with
+            // initial latency of getDurationUsIfPlayedAtSampleRate(mNumFramesWritten).
+            // This virtual AudioSink renders audio data starting from the very first sample
+            // and it's paced by system clock.
+            ALOGW("AudioSink stuck. ARE YOU CONNECTED TO AUDIO OUT? Switching to system clock.");
+            mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs, nowUs, mediaTimeUs);
+            mUseVirtualAudioSink = true;
+        }
+    }
+    mAnchorNumFramesWritten = mNumFramesWritten;
+    mAnchorTimeMediaUs = mediaTimeUs;
+}
+
+// Called without mLock acquired.
+void NuPlayer2::Renderer::postDrainVideoQueue() {
+    if (mDrainVideoQueuePending
+            || getSyncQueues()
+            || (mPaused && mVideoSampleReceived)) {
+        return;
+    }
+
+    if (mVideoQueue.empty()) {
+        return;
+    }
+
+    QueueEntry &entry = *mVideoQueue.begin();
+
+    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
+    msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
+
+    if (entry.mBuffer == NULL) {
+        // EOS doesn't carry a timestamp.
+        msg->post();
+        mDrainVideoQueuePending = true;
+        return;
+    }
+
+    int64_t nowUs = ALooper::GetNowUs();
+    if (mFlags & FLAG_REAL_TIME) {
+        int64_t realTimeUs;
+        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &realTimeUs));
+
+        realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
+
+        int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
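+        // e.g. (illustrative) on a 60 Hz display the refresh interval is about 16.7 ms,
+        // so twoVsyncsUs works out to roughly 33,000 us.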
+
+        int64_t delayUs = realTimeUs - nowUs;
+
+        ALOGW_IF(delayUs > 500000, "unusually high delayUs: %lld", (long long)delayUs);
+        // post 2 display refreshes before rendering is due
+        msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
+
+        mDrainVideoQueuePending = true;
+        return;
+    }
+
+    int64_t mediaTimeUs;
+    CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mAnchorTimeMediaUs < 0) {
+            mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
+            mAnchorTimeMediaUs = mediaTimeUs;
+        }
+    }
+    mNextVideoTimeMediaUs = mediaTimeUs + 100000;
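+    // The 100,000 us of headroom covers one inter-frame interval for video at 10 fps or faster
+    // (illustrative: a 10 fps clip has 100 ms between frames).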
+    if (!mHasAudio) {
+        // smooth out videos >= 10fps
+        mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+    }
+
+    if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
+        msg->post();
+    } else {
+        int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
+
+        // post 2 display refreshes before rendering is due
+        mMediaClock->addTimer(msg, mediaTimeUs, -twoVsyncsUs);
+    }
+
+    mDrainVideoQueuePending = true;
+}
+
+void NuPlayer2::Renderer::onDrainVideoQueue() {
+    if (mVideoQueue.empty()) {
+        return;
+    }
+
+    QueueEntry *entry = &*mVideoQueue.begin();
+
+    if (entry->mBuffer == NULL) {
+        // EOS
+
+        notifyEOS(false /* audio */, entry->mFinalResult);
+
+        mVideoQueue.erase(mVideoQueue.begin());
+        entry = NULL;
+
+        setVideoLateByUs(0);
+        return;
+    }
+
+    int64_t nowUs = ALooper::GetNowUs();
+    int64_t realTimeUs;
+    int64_t mediaTimeUs = -1;
+    if (mFlags & FLAG_REAL_TIME) {
+        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
+    } else {
+        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
+    }
+    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
+
+    bool tooLate = false;
+
+    if (!mPaused) {
+        setVideoLateByUs(nowUs - realTimeUs);
+        tooLate = (mVideoLateByUs > 40000);
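+        // 40,000 us of lateness is a bit more than two refresh intervals on a 60 Hz display
+        // (illustrative); such frames are marked not to be rendered below.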
+
+        if (tooLate) {
+            ALOGV("video late by %lld us (%.2f secs)",
+                 (long long)mVideoLateByUs, mVideoLateByUs / 1E6);
+        } else {
+            int64_t mediaUs = 0;
+            mMediaClock->getMediaTime(realTimeUs, &mediaUs);
+            ALOGV("rendering video at media time %.2f secs",
+                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
+                    mediaUs) / 1E6);
+
+            if (!(mFlags & FLAG_REAL_TIME)
+                    && mLastAudioMediaTimeUs != -1
+                    && mediaTimeUs > mLastAudioMediaTimeUs) {
+                // If audio ends before video, video continues to drive media clock.
+                // Also smooth out videos >= 10fps.
+                mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
+            }
+        }
+    } else {
+        setVideoLateByUs(0);
+        if (!mVideoSampleReceived && !mHasAudio) {
+            // This will ensure that the first frame after a flush won't be used as anchor
+            // when renderer is in paused state, because resume can happen any time after seek.
+            clearAnchorTime();
+        }
+    }
+
+    // Always render the first video frame while keeping stats on A/V sync.
+    if (!mVideoSampleReceived) {
+        realTimeUs = nowUs;
+        tooLate = false;
+    }
+
+    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
+    entry->mNotifyConsumed->setInt32("render", !tooLate);
+    entry->mNotifyConsumed->post();
+    mVideoQueue.erase(mVideoQueue.begin());
+    entry = NULL;
+
+    mVideoSampleReceived = true;
+
+    if (!mPaused) {
+        if (!mVideoRenderingStarted) {
+            mVideoRenderingStarted = true;
+            notifyVideoRenderingStart();
+        }
+        Mutex::Autolock autoLock(mLock);
+        notifyIfMediaRenderingStarted_l();
+    }
+}
+
+void NuPlayer2::Renderer::notifyVideoRenderingStart() {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatVideoRenderingStart);
+    notify->post();
+}
+
+void NuPlayer2::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
+    Mutex::Autolock autoLock(mLock);
+    notifyEOS_l(audio, finalResult, delayUs);
+}
+
+void NuPlayer2::Renderer::notifyEOS_l(bool audio, status_t finalResult, int64_t delayUs) {
+    if (audio && delayUs > 0) {
+        sp<AMessage> msg = new AMessage(kWhatEOS, this);
+        msg->setInt32("audioEOSGeneration", mAudioEOSGeneration);
+        msg->setInt32("finalResult", finalResult);
+        msg->post(delayUs);
+        return;
+    }
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatEOS);
+    notify->setInt32("audio", static_cast<int32_t>(audio));
+    notify->setInt32("finalResult", finalResult);
+    notify->post(delayUs);
+
+    if (audio) {
+        // Video might outlive audio. Clear anchor to enable video only case.
+        mAnchorTimeMediaUs = -1;
+        mHasAudio = false;
+        if (mNextVideoTimeMediaUs >= 0) {
+            int64_t mediaUs = 0;
+            mMediaClock->getMediaTime(ALooper::GetNowUs(), &mediaUs);
+            if (mNextVideoTimeMediaUs > mediaUs) {
+                mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+            }
+        }
+    }
+}
+
+void NuPlayer2::Renderer::notifyAudioTearDown(AudioTearDownReason reason) {
+    sp<AMessage> msg = new AMessage(kWhatAudioTearDown, this);
+    msg->setInt32("reason", reason);
+    msg->post();
+}
+
+void NuPlayer2::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    if (dropBufferIfStale(audio, msg)) {
+        return;
+    }
+
+    if (audio) {
+        mHasAudio = true;
+    } else {
+        mHasVideo = true;
+    }
+
+    if (mHasVideo) {
+        if (mVideoScheduler == NULL) {
+            mVideoScheduler = new VideoFrameScheduler();
+            mVideoScheduler->init();
+        }
+    }
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("buffer", &obj));
+    sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+
+    sp<AMessage> notifyConsumed;
+    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
+
+    QueueEntry entry;
+    entry.mBuffer = buffer;
+    entry.mNotifyConsumed = notifyConsumed;
+    entry.mOffset = 0;
+    entry.mFinalResult = OK;
+    entry.mBufferOrdinal = ++mTotalBuffersQueued;
+
+    if (audio) {
+        Mutex::Autolock autoLock(mLock);
+        mAudioQueue.push_back(entry);
+        postDrainAudioQueue_l();
+    } else {
+        mVideoQueue.push_back(entry);
+        postDrainVideoQueue();
+    }
+
+    Mutex::Autolock autoLock(mLock);
+    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
+        return;
+    }
+
+    sp<MediaCodecBuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
+    sp<MediaCodecBuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
+
+    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
+        // EOS signalled on either queue.
+        syncQueuesDone_l();
+        return;
+    }
+
+    int64_t firstAudioTimeUs;
+    int64_t firstVideoTimeUs;
+    CHECK(firstAudioBuffer->meta()
+            ->findInt64("timeUs", &firstAudioTimeUs));
+    CHECK(firstVideoBuffer->meta()
+            ->findInt64("timeUs", &firstVideoTimeUs));
+
+    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
+
+    ALOGV("queueDiff = %.2f secs", diff / 1E6);
+
+    if (diff > 100000ll) {
+        // Audio data starts more than 0.1 secs before video.
+        // Drop some audio.
+
+        (*mAudioQueue.begin()).mNotifyConsumed->post();
+        mAudioQueue.erase(mAudioQueue.begin());
+        return;
+    }
+
+    syncQueuesDone_l();
+}
+
+void NuPlayer2::Renderer::syncQueuesDone_l() {
+    if (!mSyncQueues) {
+        return;
+    }
+
+    mSyncQueues = false;
+
+    if (!mAudioQueue.empty()) {
+        postDrainAudioQueue_l();
+    }
+
+    if (!mVideoQueue.empty()) {
+        mLock.unlock();
+        postDrainVideoQueue();
+        mLock.lock();
+    }
+}
+
+void NuPlayer2::Renderer::onQueueEOS(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    if (dropBufferIfStale(audio, msg)) {
+        return;
+    }
+
+    int32_t finalResult;
+    CHECK(msg->findInt32("finalResult", &finalResult));
+
+    QueueEntry entry;
+    entry.mOffset = 0;
+    entry.mFinalResult = finalResult;
+
+    if (audio) {
+        Mutex::Autolock autoLock(mLock);
+        if (mAudioQueue.empty() && mSyncQueues) {
+            syncQueuesDone_l();
+        }
+        mAudioQueue.push_back(entry);
+        postDrainAudioQueue_l();
+    } else {
+        if (mVideoQueue.empty() && getSyncQueues()) {
+            Mutex::Autolock autoLock(mLock);
+            syncQueuesDone_l();
+        }
+        mVideoQueue.push_back(entry);
+        postDrainVideoQueue();
+    }
+}
+
+void NuPlayer2::Renderer::onFlush(const sp<AMessage> &msg) {
+    int32_t audio, notifyComplete;
+    CHECK(msg->findInt32("audio", &audio));
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (audio) {
+            notifyComplete = mNotifyCompleteAudio;
+            mNotifyCompleteAudio = false;
+            mLastAudioMediaTimeUs = -1;
+        } else {
+            notifyComplete = mNotifyCompleteVideo;
+            mNotifyCompleteVideo = false;
+        }
+
+        // If we're currently syncing the queues, i.e. dropping audio while
+        // aligning the first audio/video buffer times and only one of the
+        // two queues has data, we may starve that queue by not requesting
+        // more buffers from the decoder. If the other source then encounters
+        // a discontinuity that leads to flushing, we'll never find the
+        // corresponding discontinuity on the other queue.
+        // Therefore we'll stop syncing the queues if at least one of them
+        // is flushed.
+        syncQueuesDone_l();
+    }
+    clearAnchorTime();
+
+    ALOGV("flushing %s", audio ? "audio" : "video");
+    if (audio) {
+        {
+            Mutex::Autolock autoLock(mLock);
+            flushQueue(&mAudioQueue);
+
+            ++mAudioDrainGeneration;
+            ++mAudioEOSGeneration;
+            prepareForMediaRenderingStart_l();
+
+            // the frame count will be reset after flush.
+            clearAudioFirstAnchorTime_l();
+        }
+
+        mDrainAudioQueuePending = false;
+
+        if (offloadingAudio()) {
+            mAudioSink->pause();
+            mAudioSink->flush();
+            if (!mPaused) {
+                mAudioSink->start();
+            }
+        } else {
+            mAudioSink->pause();
+            mAudioSink->flush();
+            // Call stop() to signal to the AudioSink to completely fill the
+            // internal buffer before resuming playback.
+            // FIXME: this is ignored after flush().
+            mAudioSink->stop();
+            if (mPaused) {
+                // Race condition: if renderer is paused and audio sink is stopped,
+                // we need to make sure that the audio track buffer fully drains
+                // before delivering data.
+                // FIXME: remove this if we can detect if stop() is complete.
+                const int delayUs = 2 * 50 * 1000; // (2 full mixer thread cycles at 50ms)
+                mPauseDrainAudioAllowedUs = ALooper::GetNowUs() + delayUs;
+            } else {
+                mAudioSink->start();
+            }
+            mNumFramesWritten = 0;
+        }
+        mNextAudioClockUpdateTimeUs = -1;
+    } else {
+        flushQueue(&mVideoQueue);
+
+        mDrainVideoQueuePending = false;
+
+        if (mVideoScheduler != NULL) {
+            mVideoScheduler->restart();
+        }
+
+        Mutex::Autolock autoLock(mLock);
+        ++mVideoDrainGeneration;
+        prepareForMediaRenderingStart_l();
+    }
+
+    mVideoSampleReceived = false;
+
+    if (notifyComplete) {
+        notifyFlushComplete(audio);
+    }
+}
+
+void NuPlayer2::Renderer::flushQueue(List<QueueEntry> *queue) {
+    while (!queue->empty()) {
+        QueueEntry *entry = &*queue->begin();
+
+        if (entry->mBuffer != NULL) {
+            entry->mNotifyConsumed->post();
+        } else if (entry->mNotifyConsumed != nullptr) {
+            // Does the audio sink need to be opened now?
+            onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
+        }
+
+        queue->erase(queue->begin());
+        entry = NULL;
+    }
+}
+
+void NuPlayer2::Renderer::notifyFlushComplete(bool audio) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFlushComplete);
+    notify->setInt32("audio", static_cast<int32_t>(audio));
+    notify->post();
+}
+
+bool NuPlayer2::Renderer::dropBufferIfStale(
+        bool audio, const sp<AMessage> &msg) {
+    int32_t queueGeneration;
+    CHECK(msg->findInt32("queueGeneration", &queueGeneration));
+
+    if (queueGeneration == getQueueGeneration(audio)) {
+        return false;
+    }
+
+    sp<AMessage> notifyConsumed;
+    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
+        notifyConsumed->post();
+    }
+
+    return true;
+}
+
+void NuPlayer2::Renderer::onAudioSinkChanged() {
+    if (offloadingAudio()) {
+        return;
+    }
+    CHECK(!mDrainAudioQueuePending);
+    mNumFramesWritten = 0;
+    mAnchorNumFramesWritten = -1;
+    uint32_t written;
+    if (mAudioSink->getFramesWritten(&written) == OK) {
+        mNumFramesWritten = written;
+    }
+}
+
+void NuPlayer2::Renderer::onDisableOffloadAudio() {
+    Mutex::Autolock autoLock(mLock);
+    mFlags &= ~FLAG_OFFLOAD_AUDIO;
+    ++mAudioDrainGeneration;
+    if (mAudioRenderingStartGeneration != -1) {
+        prepareForMediaRenderingStart_l();
+    }
+}
+
+void NuPlayer2::Renderer::onEnableOffloadAudio() {
+    Mutex::Autolock autoLock(mLock);
+    mFlags |= FLAG_OFFLOAD_AUDIO;
+    ++mAudioDrainGeneration;
+    if (mAudioRenderingStartGeneration != -1) {
+        prepareForMediaRenderingStart_l();
+    }
+}
+
+void NuPlayer2::Renderer::onPause() {
+    if (mPaused) {
+        return;
+    }
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        // we do not increment audio drain generation so that we fill audio buffer during pause.
+        ++mVideoDrainGeneration;
+        prepareForMediaRenderingStart_l();
+        mPaused = true;
+        mMediaClock->setPlaybackRate(0.0);
+    }
+
+    mDrainAudioQueuePending = false;
+    mDrainVideoQueuePending = false;
+
+    // Note: audio data may not have been decoded, and the AudioSink may not be opened.
+    mAudioSink->pause();
+    startAudioOffloadPauseTimeout();
+
+    ALOGV("now paused audio queue has %zu entries, video has %zu entries",
+          mAudioQueue.size(), mVideoQueue.size());
+}
+
+void NuPlayer2::Renderer::onResume() {
+    if (!mPaused) {
+        return;
+    }
+
+    // Note: audio data may not have been decoded, and the AudioSink may not be opened.
+    cancelAudioOffloadPauseTimeout();
+    if (mAudioSink->ready()) {
+        status_t err = mAudioSink->start();
+        if (err != OK) {
+            ALOGE("cannot start AudioSink err %d", err);
+            notifyAudioTearDown(kDueToError);
+        }
+    }
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        mPaused = false;
+        // rendering started message may have been delayed if we were paused.
+        if (mRenderingDataDelivered) {
+            notifyIfMediaRenderingStarted_l();
+        }
+        // configure audiosink as we did not do it when pausing
+        if (mAudioSink != NULL && mAudioSink->ready()) {
+            mAudioSink->setPlaybackRate(mPlaybackSettings);
+        }
+
+        mMediaClock->setPlaybackRate(mPlaybackRate);
+
+        if (!mAudioQueue.empty()) {
+            postDrainAudioQueue_l();
+        }
+    }
+
+    if (!mVideoQueue.empty()) {
+        postDrainVideoQueue();
+    }
+}
+
+void NuPlayer2::Renderer::onSetVideoFrameRate(float fps) {
+    if (mVideoScheduler == NULL) {
+        mVideoScheduler = new VideoFrameScheduler();
+    }
+    mVideoScheduler->init(fps);
+}
+
+int32_t NuPlayer2::Renderer::getQueueGeneration(bool audio) {
+    Mutex::Autolock autoLock(mLock);
+    return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
+}
+
+int32_t NuPlayer2::Renderer::getDrainGeneration(bool audio) {
+    Mutex::Autolock autoLock(mLock);
+    return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
+}
+
+bool NuPlayer2::Renderer::getSyncQueues() {
+    Mutex::Autolock autoLock(mLock);
+    return mSyncQueues;
+}
+
+void NuPlayer2::Renderer::onAudioTearDown(AudioTearDownReason reason) {
+    if (mAudioTornDown) {
+        return;
+    }
+    mAudioTornDown = true;
+
+    int64_t currentPositionUs;
+    sp<AMessage> notify = mNotify->dup();
+    if (getCurrentPosition(&currentPositionUs) == OK) {
+        notify->setInt64("positionUs", currentPositionUs);
+    }
+
+    mAudioSink->stop();
+    mAudioSink->flush();
+
+    notify->setInt32("what", kWhatAudioTearDown);
+    notify->setInt32("reason", reason);
+    notify->post();
+}
+
+void NuPlayer2::Renderer::startAudioOffloadPauseTimeout() {
+    if (offloadingAudio()) {
+        mWakeLock->acquire();
+        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
+        msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
+        msg->post(kOffloadPauseMaxUs);
+    }
+}
+
+void NuPlayer2::Renderer::cancelAudioOffloadPauseTimeout() {
+    // We may have called startAudioOffloadPauseTimeout() without
+    // the AudioSink open and with offloadingAudio enabled.
+    //
+    // When we cancel, offloadingAudio may have since been disabled, so regardless
+    // we always release the wake lock and increment the pause timeout generation.
+    //
+    // Note: The acquired wakelock prevents the device from suspending
+    // immediately after offload pause (in case a resume happens shortly thereafter).
+    mWakeLock->release(true);
+    ++mAudioOffloadPauseTimeoutGeneration;
+}
+
+status_t NuPlayer2::Renderer::onOpenAudioSink(
+        const sp<AMessage> &format,
+        bool offloadOnly,
+        bool hasVideo,
+        uint32_t flags,
+        bool isStreaming) {
+    ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
+            offloadOnly, offloadingAudio());
+    bool audioSinkChanged = false;
+
+    int32_t numChannels;
+    CHECK(format->findInt32("channel-count", &numChannels));
+
+    int32_t channelMask;
+    if (!format->findInt32("channel-mask", &channelMask)) {
+        // Signal to the AudioSink to derive the mask from the channel count.
+        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
+    }
+
+    int32_t sampleRate;
+    CHECK(format->findInt32("sample-rate", &sampleRate));
+
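+    // In offload mode, map the MIME type to a compressed audio_format and try to open
+    // the sink with AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD; on any failure we fall back to
+    // the non-offload PCM path below.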
+    if (offloadingAudio()) {
+        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
+        AString mime;
+        CHECK(format->findString("mime", &mime));
+        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+
+        if (err != OK) {
+            ALOGE("Couldn't map mime \"%s\" to a valid "
+                    "audio_format", mime.c_str());
+            onDisableOffloadAudio();
+        } else {
+            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
+                    mime.c_str(), audioFormat);
+
+            int avgBitRate = -1;
+            format->findInt32("bitrate", &avgBitRate);
+
+            int32_t aacProfile = -1;
+            if (audioFormat == AUDIO_FORMAT_AAC
+                    && format->findInt32("aac-profile", &aacProfile)) {
+                // Redefine the AAC format as per the AAC profile.
+                mapAACProfileToAudioFormat(
+                        audioFormat,
+                        aacProfile);
+            }
+
+            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
+            offloadInfo.duration_us = -1;
+            format->findInt64(
+                    "durationUs", &offloadInfo.duration_us);
+            offloadInfo.sample_rate = sampleRate;
+            offloadInfo.channel_mask = channelMask;
+            offloadInfo.format = audioFormat;
+            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
+            offloadInfo.bit_rate = avgBitRate;
+            offloadInfo.has_video = hasVideo;
+            offloadInfo.is_streaming = isStreaming;
+
+            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
+                ALOGV("openAudioSink: no change in offload mode");
+                // No change from the previous configuration; everything is fine.
+                return OK;
+            }
+            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+
+            ALOGV("openAudioSink: try to open AudioSink in offload mode");
+            uint32_t offloadFlags = flags;
+            offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+            offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+            audioSinkChanged = true;
+            mAudioSink->close();
+
+            err = mAudioSink->open(
+                    sampleRate,
+                    numChannels,
+                    (audio_channel_mask_t)channelMask,
+                    audioFormat,
+                    0 /* bufferCount - unused */,
+                    &NuPlayer2::Renderer::AudioSinkCallback,
+                    this,
+                    (audio_output_flags_t)offloadFlags,
+                    &offloadInfo);
+
+            if (err == OK) {
+                err = mAudioSink->setPlaybackRate(mPlaybackSettings);
+            }
+
+            if (err == OK) {
+                // If playback is offloaded to hardware, we pass the HAL some metadata.
+                // We don't do this for PCM because it goes through the AudioFlinger
+                // mixer before reaching the hardware.
+                // TODO
+                mCurrentOffloadInfo = offloadInfo;
+                if (!mPaused) { // for preview mode, don't start if paused
+                    err = mAudioSink->start();
+                }
+                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
+            }
+            if (err != OK) {
+                // Clean up and fall back to non-offload mode.
+                mAudioSink->close();
+                onDisableOffloadAudio();
+                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+                ALOGV("openAudioSink: offload failed");
+                if (offloadOnly) {
+                    notifyAudioTearDown(kForceNonOffload);
+                }
+            } else {
+                mUseAudioCallback = true;  // offload mode transfers data through callback
+                ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
+            }
+        }
+    }
+    if (!offloadOnly && !offloadingAudio()) {
+        ALOGV("openAudioSink: open AudioSink in NON-offload mode");
+        uint32_t pcmFlags = flags;
+        pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+
+        const PcmInfo info = {
+                (audio_channel_mask_t)channelMask,
+                (audio_output_flags_t)pcmFlags,
+                AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
+                numChannels,
+                sampleRate
+        };
+        if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
+            ALOGV("openAudioSink: no change in pcm mode");
+            // No change from the previous configuration; everything is fine.
+            return OK;
+        }
+
+        audioSinkChanged = true;
+        mAudioSink->close();
+        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+        // Note: It is possible to set up the callback, but not use it to send audio data.
+        // This requires a fix in AudioSink to explicitly specify the transfer mode.
+        mUseAudioCallback = getUseAudioCallbackSetting();
+        if (mUseAudioCallback) {
+            ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
+        }
+
+        // Compute the desired buffer size.
+        // For callback mode, the amount of time before wakeup is about half the buffer duration.
+        const uint32_t frameCount =
+                (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
+
+        // doNotReconnect means the AudioSink will signal back and let NuPlayer2 re-construct
+        // the AudioSink. We don't want this when there's video because it would cause a video
+        // seek to the previous I-frame. But we do want it when there's only audio, because it
+        // gives NuPlayer2 a chance to switch from non-offload mode to offload mode.
+        // So we only set doNotReconnect when there's no video.
+        const bool doNotReconnect = !hasVideo;
+
+        // We should always be able to set our playback settings if the sink is closed.
+        LOG_ALWAYS_FATAL_IF(mAudioSink->setPlaybackRate(mPlaybackSettings) != OK,
+                "onOpenAudioSink: can't set playback rate on closed sink");
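+        // Open the sink in PCM 16-bit mode; callback pointers are supplied only when
+        // callback transfer (mUseAudioCallback) is enabled.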
+        status_t err = mAudioSink->open(
+                    sampleRate,
+                    numChannels,
+                    (audio_channel_mask_t)channelMask,
+                    AUDIO_FORMAT_PCM_16_BIT,
+                    0 /* bufferCount - unused */,
+                    mUseAudioCallback ? &NuPlayer2::Renderer::AudioSinkCallback : NULL,
+                    mUseAudioCallback ? this : NULL,
+                    (audio_output_flags_t)pcmFlags,
+                    NULL,
+                    doNotReconnect,
+                    frameCount);
+        if (err != OK) {
+            ALOGW("openAudioSink: non offloaded open failed status: %d", err);
+            mAudioSink->close();
+            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+            return err;
+        }
+        mCurrentPcmInfo = info;
+        if (!mPaused) { // for preview mode, don't start if paused
+            mAudioSink->start();
+        }
+    }
+    if (audioSinkChanged) {
+        onAudioSinkChanged();
+    }
+    mAudioTornDown = false;
+    return OK;
+}
+
+void NuPlayer2::Renderer::onCloseAudioSink() {
+    mAudioSink->close();
+    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+    mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+}
+
+void NuPlayer2::Renderer::onChangeAudioFormat(
+        const sp<AMessage> &meta, const sp<AMessage> &notify) {
+    sp<AMessage> format;
+    CHECK(meta->findMessage("format", &format));
+
+    int32_t offloadOnly;
+    CHECK(meta->findInt32("offload-only", &offloadOnly));
+
+    int32_t hasVideo;
+    CHECK(meta->findInt32("has-video", &hasVideo));
+
+    uint32_t flags;
+    CHECK(meta->findInt32("flags", (int32_t *)&flags));
+
+    uint32_t isStreaming;
+    CHECK(meta->findInt32("isStreaming", (int32_t *)&isStreaming));
+
+    status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
+
+    if (err != OK) {
+        notify->setInt32("err", err);
+    }
+    notify->post();
+}
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2Renderer.h b/media/libmedia/nuplayer2/NuPlayer2Renderer.h
new file mode 100644
index 0000000..3007654
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Renderer.h
@@ -0,0 +1,303 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_RENDERER_H_
+
+#define NUPLAYER2_RENDERER_H_
+
+#include <media/AudioResamplerPublic.h>
+#include <media/AVSyncSettings.h>
+
+#include "NuPlayer2.h"
+
+namespace android {
+
+class  JWakeLock;
+struct MediaClock;
+class MediaCodecBuffer;
+struct VideoFrameScheduler;
+
+struct NuPlayer2::Renderer : public AHandler {
+    enum Flags {
+        FLAG_REAL_TIME = 1,
+        FLAG_OFFLOAD_AUDIO = 2,
+    };
+    Renderer(const sp<MediaPlayer2Base::AudioSink> &sink,
+             const sp<MediaClock> &mediaClock,
+             const sp<AMessage> &notify,
+             uint32_t flags = 0);
+
+    static size_t AudioSinkCallback(
+            MediaPlayer2Base::AudioSink *audioSink,
+            void *data, size_t size, void *me,
+            MediaPlayer2Base::AudioSink::cb_event_t event);
+
+    void queueBuffer(
+            bool audio,
+            const sp<MediaCodecBuffer> &buffer,
+            const sp<AMessage> &notifyConsumed);
+
+    void queueEOS(bool audio, status_t finalResult);
+
+    status_t setPlaybackSettings(const AudioPlaybackRate &rate /* sanitized */);
+    status_t getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);
+    status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint);
+    status_t getSyncSettings(AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */);
+
+    void flush(bool audio, bool notifyComplete);
+
+    void signalTimeDiscontinuity();
+
+    void signalDisableOffloadAudio();
+    void signalEnableOffloadAudio();
+
+    void pause();
+    void resume();
+
+    void setVideoFrameRate(float fps);
+
+    status_t getCurrentPosition(int64_t *mediaUs);
+    int64_t getVideoLateByUs();
+
+    status_t openAudioSink(
+            const sp<AMessage> &format,
+            bool offloadOnly,
+            bool hasVideo,
+            uint32_t flags,
+            bool *isOffloaded,
+            bool isStreaming);
+    void closeAudioSink();
+
+    // Re-open the audio sink after all pending audio buffers have played.
+    void changeAudioFormat(
+            const sp<AMessage> &format,
+            bool offloadOnly,
+            bool hasVideo,
+            uint32_t flags,
+            bool isStreaming,
+            const sp<AMessage> &notify);
+
+    enum {
+        kWhatEOS                      = 'eos ',
+        kWhatFlushComplete            = 'fluC',
+        kWhatPosition                 = 'posi',
+        kWhatVideoRenderingStart      = 'vdrd',
+        kWhatMediaRenderingStart      = 'mdrd',
+        kWhatAudioTearDown            = 'adTD',
+        kWhatAudioOffloadPauseTimeout = 'aOPT',
+    };
+
+    enum AudioTearDownReason {
+        kDueToError = 0,   // Could restart with either offload or non-offload.
+        kDueToTimeout,
+        kForceNonOffload,  // Restart only with non-offload.
+    };
+
+protected:
+    virtual ~Renderer();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatDrainAudioQueue     = 'draA',
+        kWhatDrainVideoQueue     = 'draV',
+        kWhatPostDrainVideoQueue = 'pDVQ',
+        kWhatQueueBuffer         = 'queB',
+        kWhatQueueEOS            = 'qEOS',
+        kWhatConfigPlayback      = 'cfPB',
+        kWhatConfigSync          = 'cfSy',
+        kWhatGetPlaybackSettings = 'gPbS',
+        kWhatGetSyncSettings     = 'gSyS',
+        kWhatFlush               = 'flus',
+        kWhatPause               = 'paus',
+        kWhatResume              = 'resm',
+        kWhatOpenAudioSink       = 'opnA',
+        kWhatCloseAudioSink      = 'clsA',
+        kWhatChangeAudioFormat   = 'chgA',
+        kWhatStopAudioSink       = 'stpA',
+        kWhatDisableOffloadAudio = 'noOA',
+        kWhatEnableOffloadAudio  = 'enOA',
+        kWhatSetVideoFrameRate   = 'sVFR',
+    };
+
+    // If mBuffer != nullptr, it's a buffer containing real data.
+    // Else if mNotifyConsumed == nullptr, it's EOS.
+    // Else it's a tag for re-opening the audio sink in a different format.
+    struct QueueEntry {
+        sp<MediaCodecBuffer> mBuffer;
+        sp<AMessage> mMeta;
+        sp<AMessage> mNotifyConsumed;
+        size_t mOffset;
+        status_t mFinalResult;
+        int32_t mBufferOrdinal;
+    };
+
+    static const int64_t kMinPositionUpdateDelayUs;
+
+    sp<MediaPlayer2Base::AudioSink> mAudioSink;
+    bool mUseVirtualAudioSink;
+    sp<AMessage> mNotify;
+    Mutex mLock;
+    uint32_t mFlags;
+    List<QueueEntry> mAudioQueue;
+    List<QueueEntry> mVideoQueue;
+    uint32_t mNumFramesWritten;
+    sp<VideoFrameScheduler> mVideoScheduler;
+
+    bool mDrainAudioQueuePending;
+    bool mDrainVideoQueuePending;
+    int32_t mAudioQueueGeneration;
+    int32_t mVideoQueueGeneration;
+    int32_t mAudioDrainGeneration;
+    int32_t mVideoDrainGeneration;
+    int32_t mAudioEOSGeneration;
+
+    const sp<MediaClock> mMediaClock;
+    float mPlaybackRate; // audio track rate
+
+    AudioPlaybackRate mPlaybackSettings;
+    AVSyncSettings mSyncSettings;
+    float mVideoFpsHint;
+
+    int64_t mAudioFirstAnchorTimeMediaUs;
+    int64_t mAnchorTimeMediaUs;
+    int64_t mAnchorNumFramesWritten;
+    int64_t mVideoLateByUs;
+    int64_t mNextVideoTimeMediaUs;
+    bool mHasAudio;
+    bool mHasVideo;
+
+    bool mNotifyCompleteAudio;
+    bool mNotifyCompleteVideo;
+
+    bool mSyncQueues;
+
+    // Modified only on the renderer's thread.
+    bool mPaused;
+    int64_t mPauseDrainAudioAllowedUs; // time when we can drain/deliver audio in pause mode.
+
+    bool mVideoSampleReceived;
+    bool mVideoRenderingStarted;
+    int32_t mVideoRenderingStartGeneration;
+    int32_t mAudioRenderingStartGeneration;
+    bool mRenderingDataDelivered;
+
+    int64_t mNextAudioClockUpdateTimeUs;
+    // The media timestamp of the last audio sample right before EOS.
+    int64_t mLastAudioMediaTimeUs;
+
+    int32_t mAudioOffloadPauseTimeoutGeneration;
+    bool mAudioTornDown;
+    audio_offload_info_t mCurrentOffloadInfo;
+
+    struct PcmInfo {
+        audio_channel_mask_t mChannelMask;
+        audio_output_flags_t mFlags;
+        audio_format_t mFormat;
+        int32_t mNumChannels;
+        int32_t mSampleRate;
+    };
+    PcmInfo mCurrentPcmInfo;
+    static const PcmInfo AUDIO_PCMINFO_INITIALIZER;
+
+    int32_t mTotalBuffersQueued;
+    int32_t mLastAudioBufferDrained;
+    bool mUseAudioCallback;
+
+    sp<JWakeLock> mWakeLock;
+
+    status_t getCurrentPositionOnLooper(int64_t *mediaUs);
+    status_t getCurrentPositionOnLooper(
+            int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
+    bool getCurrentPositionIfPaused_l(int64_t *mediaUs);
+    status_t getCurrentPositionFromAnchor(
+            int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
+
+    void notifyEOSCallback();
+    size_t fillAudioBuffer(void *buffer, size_t size);
+
+    bool onDrainAudioQueue();
+    void drainAudioQueueUntilLastEOS();
+    int64_t getPendingAudioPlayoutDurationUs(int64_t nowUs);
+    void postDrainAudioQueue_l(int64_t delayUs = 0);
+
+    void clearAnchorTime();
+    void clearAudioFirstAnchorTime_l();
+    void setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs);
+    void setVideoLateByUs(int64_t lateUs);
+
+    void onNewAudioMediaTime(int64_t mediaTimeUs);
+    int64_t getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs);
+
+    void onDrainVideoQueue();
+    void postDrainVideoQueue();
+
+    void prepareForMediaRenderingStart_l();
+    void notifyIfMediaRenderingStarted_l();
+
+    void onQueueBuffer(const sp<AMessage> &msg);
+    void onQueueEOS(const sp<AMessage> &msg);
+    void onFlush(const sp<AMessage> &msg);
+    void onAudioSinkChanged();
+    void onDisableOffloadAudio();
+    void onEnableOffloadAudio();
+    status_t onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */);
+    status_t onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);
+    status_t onConfigSync(const AVSyncSettings &sync, float videoFpsHint);
+    status_t onGetSyncSettings(AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */);
+
+    void onPause();
+    void onResume();
+    void onSetVideoFrameRate(float fps);
+    int32_t getQueueGeneration(bool audio);
+    int32_t getDrainGeneration(bool audio);
+    bool getSyncQueues();
+    void onAudioTearDown(AudioTearDownReason reason);
+    status_t onOpenAudioSink(
+            const sp<AMessage> &format,
+            bool offloadOnly,
+            bool hasVideo,
+            uint32_t flags,
+            bool isStreaming);
+    void onCloseAudioSink();
+    void onChangeAudioFormat(const sp<AMessage> &meta, const sp<AMessage> &notify);
+
+    void notifyEOS(bool audio, status_t finalResult, int64_t delayUs = 0);
+    void notifyEOS_l(bool audio, status_t finalResult, int64_t delayUs = 0);
+    void notifyFlushComplete(bool audio);
+    void notifyPosition();
+    void notifyVideoLateBy(int64_t lateByUs);
+    void notifyVideoRenderingStart();
+    void notifyAudioTearDown(AudioTearDownReason reason);
+
+    void flushQueue(List<QueueEntry> *queue);
+    bool dropBufferIfStale(bool audio, const sp<AMessage> &msg);
+    void syncQueuesDone_l();
+
+    bool offloadingAudio() const { return (mFlags & FLAG_OFFLOAD_AUDIO) != 0; }
+
+    void startAudioOffloadPauseTimeout();
+    void cancelAudioOffloadPauseTimeout();
+
+    int64_t getDurationUsIfPlayedAtSampleRate(uint32_t numFrames);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Renderer);
+};
+
+} // namespace android
+
+#endif  // NUPLAYER2_RENDERER_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Source.h b/media/libmedia/nuplayer2/NuPlayer2Source.h
new file mode 100644
index 0000000..41ded9d
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2Source.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_SOURCE_H_
+
+#define NUPLAYER2_SOURCE_H_
+
+#include "NuPlayer2.h"
+
+#include <media/mediaplayer2.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMediaCryptoWrapper;
+class MediaBuffer;
+
+struct NuPlayer2::Source : public AHandler {
+    enum Flags {
+        FLAG_CAN_PAUSE          = 1,
+        FLAG_CAN_SEEK_BACKWARD  = 2,  // the "10 sec back button"
+        FLAG_CAN_SEEK_FORWARD   = 4,  // the "10 sec forward button"
+        FLAG_CAN_SEEK           = 8,  // the "seek bar"
+        FLAG_DYNAMIC_DURATION   = 16,
+        FLAG_SECURE             = 32, // Secure codec is required.
+        FLAG_PROTECTED          = 64, // The screen needs to be protected (screenshot is disabled).
+    };
+
+    enum {
+        kWhatPrepared,
+        kWhatFlagsChanged,
+        kWhatVideoSizeChanged,
+        kWhatBufferingUpdate,
+        kWhatPauseOnBufferingStart,
+        kWhatResumeOnBufferingEnd,
+        kWhatCacheStats,
+        kWhatSubtitleData,
+        kWhatTimedTextData,
+        kWhatTimedMetaData,
+        kWhatQueueDecoderShutdown,
+        kWhatDrmNoLicense,
+        kWhatInstantiateSecureDecoders,
+        // Modular DRM
+        kWhatDrmInfo,
+    };
+
+    // The provided message is used to notify the player about various
+    // events.
+    explicit Source(const sp<AMessage> &notify)
+        : mNotify(notify) {
+    }
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) = 0;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) = 0;
+
+    virtual void prepareAsync() = 0;
+
+    virtual void start() = 0;
+    virtual void stop() {}
+    virtual void pause() {}
+    virtual void resume() {}
+
+    // Explicitly disconnect the underlying data source.
+    virtual void disconnect() {}
+
+    // Returns OK iff more data was available,
+    // an error or ERROR_END_OF_STREAM if not.
+    virtual status_t feedMoreTSData() = 0;
+
+    // Returns a non-NULL format when the specified track exists.
+    // When the format has "err" set to -EWOULDBLOCK, the source needs more time to get valid metadata.
+    // Returns NULL if the specified track doesn't exist or is invalid.
+    virtual sp<AMessage> getFormat(bool audio);
+
+    virtual sp<MetaData> getFormatMeta(bool /* audio */) { return NULL; }
+    virtual sp<MetaData> getFileFormatMeta() const { return NULL; }
+
+    virtual status_t dequeueAccessUnit(
+            bool audio, sp<ABuffer> *accessUnit) = 0;
+
+    virtual status_t getDuration(int64_t * /* durationUs */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual size_t getTrackCount() const {
+        return 0;
+    }
+
+    virtual sp<AMessage> getTrackInfo(size_t /* trackIndex */) const {
+        return NULL;
+    }
+
+    virtual ssize_t getSelectedTrack(media_track_type /* type */) const {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t selectTrack(size_t /* trackIndex */, bool /* select */, int64_t /* timeUs */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t seekTo(
+            int64_t /* seekTimeUs */,
+            MediaPlayer2SeekMode /* mode */ = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) {
+        return INVALID_OPERATION;
+    }
+
+    virtual bool isRealTime() const {
+        return false;
+    }
+
+    virtual bool isStreaming() const {
+        return true;
+    }
+
+    virtual void setOffloadAudio(bool /* offload */) {}
+
+    // Modular DRM
+    virtual status_t prepareDrm(
+            const uint8_t /* uuid */[16], const Vector<uint8_t> & /* drmSessionId */,
+            sp<AMediaCryptoWrapper> * /* crypto */) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t releaseDrm() {
+        return INVALID_OPERATION;
+    }
+
+protected:
+    virtual ~Source() {}
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    sp<AMessage> dupNotify() const { return mNotify->dup(); }
+
+    void notifyFlagsChanged(uint32_t flags);
+    void notifyVideoSizeChanged(const sp<AMessage> &format = NULL);
+    void notifyInstantiateSecureDecoders(const sp<AMessage> &reply);
+    void notifyPrepared(status_t err = OK);
+    // Modular DRM
+    void notifyDrmInfo(const sp<ABuffer> &buffer);
+
+private:
+    sp<AMessage> mNotify;
+
+    DISALLOW_EVIL_CONSTRUCTORS(Source);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER2_SOURCE_H_
+
diff --git a/media/libmedia/nuplayer2/NuPlayer2StreamListener.cpp b/media/libmedia/nuplayer2/NuPlayer2StreamListener.cpp
new file mode 100644
index 0000000..0769711
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2StreamListener.cpp
@@ -0,0 +1,173 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer2StreamListener"
+#include <utils/Log.h>
+
+#include "NuPlayer2StreamListener.h"
+
+#include <binder/MemoryDealer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+namespace android {
+
+NuPlayer2::StreamListener::StreamListener(
+        const sp<IStreamSource> &source,
+        const sp<AHandler> &targetHandler)
+    : mSource(source),
+      mTargetHandler(targetHandler),
+      mEOS(false),
+      mSendDataNotification(true) {
+    mSource->setListener(this);
+
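+    // Pre-allocate kNumBuffers shared-memory buffers of kBufferSize bytes each and
+    // hand them to the stream source.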
+    mMemoryDealer = new MemoryDealer(kNumBuffers * kBufferSize);
+    for (size_t i = 0; i < kNumBuffers; ++i) {
+        sp<IMemory> mem = mMemoryDealer->allocate(kBufferSize);
+        CHECK(mem != NULL);
+
+        mBuffers.push(mem);
+    }
+    mSource->setBuffers(mBuffers);
+}
+
+void NuPlayer2::StreamListener::start() {
+    for (size_t i = 0; i < kNumBuffers; ++i) {
+        mSource->onBufferAvailable(i);
+    }
+}
+
+void NuPlayer2::StreamListener::queueBuffer(size_t index, size_t size) {
+    QueueEntry entry;
+    entry.mIsCommand = false;
+    entry.mIndex = index;
+    entry.mSize = size;
+    entry.mOffset = 0;
+
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(entry);
+
+    if (mSendDataNotification) {
+        mSendDataNotification = false;
+
+        if (mTargetHandler != NULL) {
+            (new AMessage(kWhatMoreDataQueued, mTargetHandler))->post();
+        }
+    }
+}
+
+void NuPlayer2::StreamListener::issueCommand(
+        Command cmd, bool synchronous, const sp<AMessage> &extra) {
+    CHECK(!synchronous);
+
+    QueueEntry entry;
+    entry.mIsCommand = true;
+    entry.mCommand = cmd;
+    entry.mExtra = extra;
+
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(entry);
+
+    if (mSendDataNotification) {
+        mSendDataNotification = false;
+
+        if (mTargetHandler != NULL) {
+            (new AMessage(kWhatMoreDataQueued, mTargetHandler))->post();
+        }
+    }
+}
+
+ssize_t NuPlayer2::StreamListener::read(
+        void *data, size_t size, sp<AMessage> *extra) {
+    CHECK_GT(size, 0u);
+
+    extra->clear();
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mEOS) {
+        return 0;
+    }
+
+    if (mQueue.empty()) {
+        mSendDataNotification = true;
+
+        return -EWOULDBLOCK;
+    }
+
+    QueueEntry *entry = &*mQueue.begin();
+
+    if (entry->mIsCommand) {
+        switch (entry->mCommand) {
+            case EOS:
+            {
+                mQueue.erase(mQueue.begin());
+                entry = NULL;
+
+                mEOS = true;
+                return 0;
+            }
+
+            case DISCONTINUITY:
+            {
+                *extra = entry->mExtra;
+
+                mQueue.erase(mQueue.begin());
+                entry = NULL;
+
+                return INFO_DISCONTINUITY;
+            }
+
+            default:
+                TRESPASS();
+                break;
+        }
+    }
+
+    size_t copy = entry->mSize;
+    if (copy > size) {
+        copy = size;
+    }
+
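+    // Validate the buffer index and the requested range against the shared-memory
+    // region before copying, so a malformed entry cannot read out of bounds.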
+    if (entry->mIndex >= mBuffers.size()) {
+        return ERROR_MALFORMED;
+    }
+
+    sp<IMemory> mem = mBuffers.editItemAt(entry->mIndex);
+    if (mem == NULL || mem->size() < copy || mem->size() - copy < entry->mOffset) {
+        return ERROR_MALFORMED;
+    }
+
+    memcpy(data,
+           (const uint8_t *)mem->pointer()
+            + entry->mOffset,
+           copy);
+
+    entry->mOffset += copy;
+    entry->mSize -= copy;
+
+    if (entry->mSize == 0) {
+        mSource->onBufferAvailable(entry->mIndex);
+        mQueue.erase(mQueue.begin());
+        entry = NULL;
+    }
+
+    return copy;
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/NuPlayer2StreamListener.h b/media/libmedia/nuplayer2/NuPlayer2StreamListener.h
new file mode 100644
index 0000000..4327b24
--- /dev/null
+++ b/media/libmedia/nuplayer2/NuPlayer2StreamListener.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER2_STREAM_LISTENER_H_
+
+#define NUPLAYER2_STREAM_LISTENER_H_
+
+#include "NuPlayer2.h"
+
+#include <media/IStreamSource.h>
+
+namespace android {
+
+class MemoryDealer;
+
+struct NuPlayer2::StreamListener : public BnStreamListener {
+    StreamListener(
+            const sp<IStreamSource> &source,
+            const sp<AHandler> &targetHandler);
+
+    virtual void queueBuffer(size_t index, size_t size);
+
+    virtual void issueCommand(
+            Command cmd, bool synchronous, const sp<AMessage> &extra);
+
+    void start();
+    ssize_t read(void *data, size_t size, sp<AMessage> *extra);
+
+private:
+    enum {
+        kNumBuffers = 8,
+        kBufferSize = 188 * 10
+    };
+
+    struct QueueEntry {
+        bool mIsCommand;
+
+        size_t mIndex;
+        size_t mSize;
+        size_t mOffset;
+
+        Command mCommand;
+        sp<AMessage> mExtra;
+    };
+
+    Mutex mLock;
+
+    sp<IStreamSource> mSource;
+    sp<AHandler> mTargetHandler;
+    sp<MemoryDealer> mMemoryDealer;
+    Vector<sp<IMemory> > mBuffers;
+    List<QueueEntry> mQueue;
+    bool mEOS;
+    bool mSendDataNotification;
+
+    DISALLOW_EVIL_CONSTRUCTORS(StreamListener);
+};
+
+}  // namespace android
+
+#endif // NUPLAYER2_STREAM_LISTENER_H_
diff --git a/media/libmedia/nuplayer2/RTSPSource.cpp b/media/libmedia/nuplayer2/RTSPSource.cpp
new file mode 100644
index 0000000..3f6966d
--- /dev/null
+++ b/media/libmedia/nuplayer2/RTSPSource.cpp
@@ -0,0 +1,904 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTSPSource"
+#include <utils/Log.h>
+
+#include "RTSPSource.h"
+
+#include "AnotherPacketSource.h"
+#include "MyHandler.h"
+#include "SDPLoader.h"
+
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
+
+// Default Buffer Underflow/Prepare/StartServer/Overflow Marks
+static const int kUnderflowMarkMs   =  1000;  // 1 second
+static const int kPrepareMarkMs     =  3000;  // 3 seconds
+//static const int kStartServerMarkMs =  5000;
+static const int kOverflowMarkMs    = 10000;  // 10 seconds
+
+NuPlayer2::RTSPSource::RTSPSource(
+        const sp<AMessage> &notify,
+        const sp<MediaHTTPService> &httpService,
+        const char *url,
+        const KeyedVector<String8, String8> *headers,
+        bool uidValid,
+        uid_t uid,
+        bool isSDP)
+    : Source(notify),
+      mHTTPService(httpService),
+      mURL(url),
+      mUIDValid(uidValid),
+      mUID(uid),
+      mFlags(0),
+      mIsSDP(isSDP),
+      mState(DISCONNECTED),
+      mFinalResult(OK),
+      mDisconnectReplyID(0),
+      mBuffering(false),
+      mInPreparationPhase(true),
+      mEOSPending(false),
+      mSeekGeneration(0),
+      mEOSTimeoutAudio(0),
+      mEOSTimeoutVideo(0) {
+    mBufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+    mBufferingSettings.mResumePlaybackMarkMs = kOverflowMarkMs;
+    if (headers) {
+        mExtraHeaders = *headers;
+
+        ssize_t index =
+            mExtraHeaders.indexOfKey(String8("x-hide-urls-from-log"));
+
+        if (index >= 0) {
+            mFlags |= kFlagIncognito;
+
+            mExtraHeaders.removeItemsAt(index);
+        }
+    }
+}
+
+NuPlayer2::RTSPSource::~RTSPSource() {
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->stop();
+    }
+}
+
+status_t NuPlayer2::RTSPSource::getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    *buffering = mBufferingSettings;
+    return OK;
+}
+
+status_t NuPlayer2::RTSPSource::setBufferingSettings(const BufferingSettings& buffering) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    mBufferingSettings = buffering;
+    return OK;
+}
+
+void NuPlayer2::RTSPSource::prepareAsync() {
+    if (mIsSDP && mHTTPService == NULL) {
+        notifyPrepared(BAD_VALUE);
+        return;
+    }
+
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("rtsp");
+        mLooper->start();
+
+        mLooper->registerHandler(this);
+    }
+
+    CHECK(mHandler == NULL);
+    CHECK(mSDPLoader == NULL);
+
+    sp<AMessage> notify = new AMessage(kWhatNotify, this);
+
+    CHECK_EQ(mState, (int)DISCONNECTED);
+    mState = CONNECTING;
+
+    if (mIsSDP) {
+        mSDPLoader = new SDPLoader(notify,
+                (mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0,
+                mHTTPService);
+
+        mSDPLoader->load(
+                mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
+    } else {
+        mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID);
+        mLooper->registerHandler(mHandler);
+
+        mHandler->connect();
+    }
+
+    startBufferingIfNecessary();
+}
+
+void NuPlayer2::RTSPSource::start() {
+}
+
+void NuPlayer2::RTSPSource::stop() {
+    if (mLooper == NULL) {
+        return;
+    }
+    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
+
+    sp<AMessage> dummy;
+    msg->postAndAwaitResponse(&dummy);
+}
+
+status_t NuPlayer2::RTSPSource::feedMoreTSData() {
+    Mutex::Autolock _l(mBufferingLock);
+    return mFinalResult;
+}
+
+sp<MetaData> NuPlayer2::RTSPSource::getFormatMeta(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+bool NuPlayer2::RTSPSource::haveSufficientDataOnAllTracks() {
+    // We're going to buffer at least 2 secs worth of data on all tracks before
+    // starting playback (both at startup and after a seek).
+
+    static const int64_t kMinDurationUs = 2000000ll;
+
+    int64_t mediaDurationUs = 0;
+    getDuration(&mediaDurationUs);
+    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
+            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
+        return true;
+    }
+
+    status_t err;
+    int64_t durationUs;
+    if (mAudioTrack != NULL
+            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    if (mVideoTrack != NULL
+            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    return true;
+}
+
+status_t NuPlayer2::RTSPSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    if (!stopBufferingIfNecessary()) {
+        return -EWOULDBLOCK;
+    }
+
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        if (finalResult == OK) {
+
+            // If the other source has already signaled EOS, this source should also return EOS.
+            if (sourceReachedEOS(!audio)) {
+                return ERROR_END_OF_STREAM;
+            }
+
+            // If this source has detected that it is near the end, give it some time to
+            // retrieve more data before returning EOS.
+            int64_t mediaDurationUs = 0;
+            getDuration(&mediaDurationUs);
+            if (source->isFinished(mediaDurationUs)) {
+                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
+                if (eosTimeout == 0) {
+                    setEOSTimeout(audio, ALooper::GetNowUs());
+                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
+                    setEOSTimeout(audio, 0);
+                    return ERROR_END_OF_STREAM;
+                }
+                return -EWOULDBLOCK;
+            }
+
+            if (!sourceNearEOS(!audio)) {
+                // We should not enter buffering mode
+                // if either source has already detected EOS.
+                startBufferingIfNecessary();
+            }
+
+            return -EWOULDBLOCK;
+        }
+        return finalResult;
+    }
+
+    setEOSTimeout(audio, 0);
+
+    return source->dequeueAccessUnit(accessUnit);
+}
+
+sp<AnotherPacketSource> NuPlayer2::RTSPSource::getSource(bool audio) {
+    if (mTSParser != NULL) {
+        sp<MediaSource> source = mTSParser->getSource(
+                audio ? ATSParser::AUDIO : ATSParser::VIDEO);
+
+        return static_cast<AnotherPacketSource *>(source.get());
+    }
+
+    return audio ? mAudioTrack : mVideoTrack;
+}
+
+void NuPlayer2::RTSPSource::setEOSTimeout(bool audio, int64_t timeout) {
+    if (audio) {
+        mEOSTimeoutAudio = timeout;
+    } else {
+        mEOSTimeoutVideo = timeout;
+    }
+}
+
+status_t NuPlayer2::RTSPSource::getDuration(int64_t *durationUs) {
+    *durationUs = -1ll;
+
+    int64_t audioDurationUs;
+    if (mAudioTrack != NULL
+            && mAudioTrack->getFormat()->findInt64(
+                kKeyDuration, &audioDurationUs)
+            && audioDurationUs > *durationUs) {
+        *durationUs = audioDurationUs;
+    }
+
+    int64_t videoDurationUs;
+    if (mVideoTrack != NULL
+            && mVideoTrack->getFormat()->findInt64(
+                kKeyDuration, &videoDurationUs)
+            && videoDurationUs > *durationUs) {
+        *durationUs = videoDurationUs;
+    }
+
+    return OK;
+}
+
+status_t NuPlayer2::RTSPSource::seekTo(int64_t seekTimeUs, MediaPlayer2SeekMode mode) {
+    sp<AMessage> msg = new AMessage(kWhatPerformSeek, this);
+    msg->setInt32("generation", ++mSeekGeneration);
+    msg->setInt64("timeUs", seekTimeUs);
+    msg->setInt32("mode", mode);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    }
+
+    return err;
+}
+
+void NuPlayer2::RTSPSource::performSeek(int64_t seekTimeUs) {
+    if (mState != CONNECTED) {
+        finishSeek(INVALID_OPERATION);
+        return;
+    }
+
+    mState = SEEKING;
+    mHandler->seek(seekTimeUs);
+    mEOSPending = false;
+}
+
+void NuPlayer2::RTSPSource::schedulePollBuffering() {
+    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
+    msg->post(1000000ll); // 1 second intervals
+}
+
+void NuPlayer2::RTSPSource::checkBuffering(
+        bool *prepared, bool *underflow, bool *overflow, bool *startServer, bool *finished) {
+    size_t numTracks = mTracks.size();
+    size_t preparedCount, underflowCount, overflowCount, startCount, finishedCount;
+    preparedCount = underflowCount = overflowCount = startCount = finishedCount = 0;
+
+    size_t count = numTracks;
+    for (size_t i = 0; i < count; ++i) {
+        status_t finalResult;
+        TrackInfo *info = &mTracks.editItemAt(i);
+        sp<AnotherPacketSource> src = info->mSource;
+        if (src == NULL) {
+            --numTracks;
+            continue;
+        }
+        int64_t bufferedDurationUs = src->getBufferedDurationUs(&finalResult);
+
+        int64_t initialMarkUs;
+        int64_t maxRebufferingMarkUs;
+        {
+            Mutex::Autolock _l(mBufferingSettingsLock);
+            initialMarkUs = mBufferingSettings.mInitialMarkMs * 1000ll;
+            // TODO: maxRebufferingMarkUs could be larger than
+            // mBufferingSettings.mResumePlaybackMarkMs * 1000ll.
+            maxRebufferingMarkUs = mBufferingSettings.mResumePlaybackMarkMs * 1000ll;
+        }
+        // isFinished() with a duration of 0 checks for the EOS result only.
+        if (bufferedDurationUs > initialMarkUs
+                || src->isFinished(/* duration */ 0)) {
+            ++preparedCount;
+        }
+
+        if (src->isFinished(/* duration */ 0)) {
+            ++overflowCount;
+            ++finishedCount;
+        } else {
+            // TODO: redefine kUnderflowMarkMs to a fair value.
+            if (bufferedDurationUs < kUnderflowMarkMs * 1000) {
+                ++underflowCount;
+            }
+            if (bufferedDurationUs > maxRebufferingMarkUs) {
+                ++overflowCount;
+            }
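+            // The start-server mark sits halfway between the underflow mark and the
+            // maximum rebuffering mark.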
+            int64_t startServerMarkUs =
+                    (kUnderflowMarkMs * 1000ll + maxRebufferingMarkUs) / 2;
+            if (bufferedDurationUs < startServerMarkUs) {
+                ++startCount;
+            }
+        }
+    }
+
+    *prepared    = (preparedCount == numTracks);
+    *underflow   = (underflowCount > 0);
+    *overflow    = (overflowCount == numTracks);
+    *startServer = (startCount > 0);
+    *finished    = (finishedCount > 0);
+}
+
+void NuPlayer2::RTSPSource::onPollBuffering() {
+    bool prepared, underflow, overflow, startServer, finished;
+    checkBuffering(&prepared, &underflow, &overflow, &startServer, &finished);
+
+    if (prepared && mInPreparationPhase) {
+        mInPreparationPhase = false;
+        notifyPrepared();
+    }
+
+    if (!mInPreparationPhase && underflow) {
+        startBufferingIfNecessary();
+    }
+
+    if (haveSufficientDataOnAllTracks()) {
+        stopBufferingIfNecessary();
+    }
+
+    if (overflow && mHandler != NULL) {
+        mHandler->pause();
+    }
+
+    if (startServer && mHandler != NULL) {
+        mHandler->resume();
+    }
+
+    if (finished && mHandler != NULL) {
+        mHandler->cancelAccessUnitTimeoutCheck();
+    }
+
+    schedulePollBuffering();
+}
+
+void NuPlayer2::RTSPSource::signalSourceEOS(status_t result) {
+    const bool audio = true;
+    const bool video = false;
+
+    sp<AnotherPacketSource> source = getSource(audio);
+    if (source != NULL) {
+        source->signalEOS(result);
+    }
+
+    source = getSource(video);
+    if (source != NULL) {
+        source->signalEOS(result);
+    }
+}
+
+bool NuPlayer2::RTSPSource::sourceReachedEOS(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+    status_t finalResult;
+    return (source != NULL &&
+            !source->hasBufferAvailable(&finalResult) &&
+            finalResult == ERROR_END_OF_STREAM);
+}
+
+bool NuPlayer2::RTSPSource::sourceNearEOS(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+    int64_t mediaDurationUs = 0;
+    getDuration(&mediaDurationUs);
+    return (source != NULL && source->isFinished(mediaDurationUs));
+}
+
+void NuPlayer2::RTSPSource::onSignalEOS(const sp<AMessage> &msg) {
+    int32_t generation;
+    CHECK(msg->findInt32("generation", &generation));
+
+    if (generation != mSeekGeneration) {
+        return;
+    }
+
+    if (mEOSPending) {
+        signalSourceEOS(ERROR_END_OF_STREAM);
+        mEOSPending = false;
+    }
+}
+
+void NuPlayer2::RTSPSource::postSourceEOSIfNecessary() {
+    const bool audio = true;
+    const bool video = false;
+    // If a source has detected that it is near the end, give it some time to retrieve
+    // more data before signaling EOS.
+    if (sourceNearEOS(audio) || sourceNearEOS(video)) {
+        if (!mEOSPending) {
+            sp<AMessage> msg = new AMessage(kWhatSignalEOS, this);
+            msg->setInt32("generation", mSeekGeneration);
+            msg->post(kNearEOSTimeoutUs);
+            mEOSPending = true;
+        }
+    }
+}
+
+void NuPlayer2::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
+    if (msg->what() == kWhatDisconnect) {
+        sp<AReplyToken> replyID;
+        CHECK(msg->senderAwaitsResponse(&replyID));
+
+        mDisconnectReplyID = replyID;
+        finishDisconnectIfPossible();
+        return;
+    } else if (msg->what() == kWhatPerformSeek) {
+        int32_t generation;
+        CHECK(msg->findInt32("generation", &generation));
+        CHECK(msg->senderAwaitsResponse(&mSeekReplyID));
+
+        if (generation != mSeekGeneration) {
+            // obsolete.
+            finishSeek(OK);
+            return;
+        }
+
+        int64_t seekTimeUs;
+        int32_t mode;
+        CHECK(msg->findInt64("timeUs", &seekTimeUs));
+        CHECK(msg->findInt32("mode", &mode));
+
+        // TODO: add "mode" to performSeek.
+        performSeek(seekTimeUs/*, (MediaPlayer2SeekMode)mode */);
+        return;
+    } else if (msg->what() == kWhatPollBuffering) {
+        onPollBuffering();
+        return;
+    } else if (msg->what() == kWhatSignalEOS) {
+        onSignalEOS(msg);
+        return;
+    }
+
+    CHECK_EQ(msg->what(), kWhatNotify);
+
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case MyHandler::kWhatConnected:
+        {
+            onConnected();
+
+            notifyVideoSizeChanged();
+
+            uint32_t flags = 0;
+
+            if (mHandler->isSeekable()) {
+                flags = FLAG_CAN_PAUSE
+                        | FLAG_CAN_SEEK
+                        | FLAG_CAN_SEEK_BACKWARD
+                        | FLAG_CAN_SEEK_FORWARD;
+            }
+
+            notifyFlagsChanged(flags);
+            schedulePollBuffering();
+            break;
+        }
+
+        case MyHandler::kWhatDisconnected:
+        {
+            onDisconnected(msg);
+            break;
+        }
+
+        case MyHandler::kWhatSeekDone:
+        {
+            mState = CONNECTED;
+            // Unblock seekTo here in case we attempted to seek in a live stream
+            finishSeek(OK);
+            break;
+        }
+
+        case MyHandler::kWhatSeekPaused:
+        {
+            sp<AnotherPacketSource> source = getSource(true /* audio */);
+            if (source != NULL) {
+                source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE,
+                        /* extra */ NULL,
+                        /* discard */ true);
+            }
+            source = getSource(false /* video */);
+            if (source != NULL) {
+                source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE,
+                        /* extra */ NULL,
+                        /* discard */ true);
+            }
+
+            status_t err = OK;
+            msg->findInt32("err", &err);
+
+            if (err == OK) {
+                int64_t timeUs;
+                CHECK(msg->findInt64("time", &timeUs));
+                mHandler->continueSeekAfterPause(timeUs);
+            } else {
+                finishSeek(err);
+            }
+            break;
+        }
+
+        case MyHandler::kWhatAccessUnit:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+
+            if (mTSParser == NULL) {
+                CHECK_LT(trackIndex, mTracks.size());
+            } else {
+                CHECK_EQ(trackIndex, 0u);
+            }
+
+            sp<ABuffer> accessUnit;
+            CHECK(msg->findBuffer("accessUnit", &accessUnit));
+
+            int32_t damaged;
+            if (accessUnit->meta()->findInt32("damaged", &damaged)
+                    && damaged) {
+                ALOGI("dropping damaged access unit.");
+                break;
+            }
+
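+            // For MPEG2-TS sessions, feed the access unit to the TS parser in
+            // 188-byte transport packets; any leftover bytes mark the unit as malformed.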
+            if (mTSParser != NULL) {
+                size_t offset = 0;
+                status_t err = OK;
+                while (offset + 188 <= accessUnit->size()) {
+                    err = mTSParser->feedTSPacket(
+                            accessUnit->data() + offset, 188);
+                    if (err != OK) {
+                        break;
+                    }
+
+                    offset += 188;
+                }
+
+                if (offset < accessUnit->size()) {
+                    err = ERROR_MALFORMED;
+                }
+
+                if (err != OK) {
+                    signalSourceEOS(err);
+                }
+
+                postSourceEOSIfNecessary();
+                break;
+            }
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                uint32_t rtpTime;
+                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+                if (!info->mNPTMappingValid) {
+                    // This is a live stream; we didn't receive any normal
+                    // play time mapping, so we won't map to NPT time.
+                    source->queueAccessUnit(accessUnit);
+                    break;
+                }
+
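+                // Map the RTP timestamp to normal play time (NPT) in microseconds
+                // using the track's time scale and the reported NPT mapping.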
+                int64_t nptUs =
+                    ((double)rtpTime - (double)info->mRTPTime)
+                        / info->mTimeScale
+                        * 1000000ll
+                        + info->mNormalPlaytimeUs;
+
+                accessUnit->meta()->setInt64("timeUs", nptUs);
+
+                source->queueAccessUnit(accessUnit);
+            }
+            postSourceEOSIfNecessary();
+            break;
+        }
+
+        case MyHandler::kWhatEOS:
+        {
+            int32_t finalResult;
+            CHECK(msg->findInt32("finalResult", &finalResult));
+            CHECK_NE(finalResult, (status_t)OK);
+
+            if (mTSParser != NULL) {
+                signalSourceEOS(finalResult);
+            }
+
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                source->signalEOS(finalResult);
+            }
+
+            break;
+        }
+
+        case MyHandler::kWhatSeekDiscontinuity:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                source->queueDiscontinuity(
+                        ATSParser::DISCONTINUITY_TIME,
+                        NULL,
+                        true /* discard */);
+            }
+
+            break;
+        }
+
+        case MyHandler::kWhatNormalPlayTimeMapping:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            uint32_t rtpTime;
+            CHECK(msg->findInt32("rtpTime", (int32_t *)&rtpTime));
+
+            int64_t nptUs;
+            CHECK(msg->findInt64("nptUs", &nptUs));
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            info->mRTPTime = rtpTime;
+            info->mNormalPlaytimeUs = nptUs;
+            info->mNPTMappingValid = true;
+            break;
+        }
+
+        case SDPLoader::kWhatSDPLoaded:
+        {
+            onSDPLoaded(msg);
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void NuPlayer2::RTSPSource::onConnected() {
+    CHECK(mAudioTrack == NULL);
+    CHECK(mVideoTrack == NULL);
+
+    size_t numTracks = mHandler->countTracks();
+    for (size_t i = 0; i < numTracks; ++i) {
+        int32_t timeScale;
+        sp<MetaData> format = mHandler->getTrackFormat(i, &timeScale);
+
+        const char *mime;
+        CHECK(format->findCString(kKeyMIMEType, &mime));
+
+        if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
+            // Very special case for MPEG2 Transport Streams.
+            CHECK_EQ(numTracks, 1u);
+
+            mTSParser = new ATSParser;
+            return;
+        }
+
+        bool isAudio = !strncasecmp(mime, "audio/", 6);
+        bool isVideo = !strncasecmp(mime, "video/", 6);
+
+        TrackInfo info;
+        info.mTimeScale = timeScale;
+        info.mRTPTime = 0;
+        info.mNormalPlaytimeUs = 0ll;
+        info.mNPTMappingValid = false;
+
+        if ((isAudio && mAudioTrack == NULL)
+                || (isVideo && mVideoTrack == NULL)) {
+            sp<AnotherPacketSource> source = new AnotherPacketSource(format);
+
+            if (isAudio) {
+                mAudioTrack = source;
+            } else {
+                mVideoTrack = source;
+            }
+
+            info.mSource = source;
+        }
+
+        mTracks.push(info);
+    }
+
+    mState = CONNECTED;
+}
+
+void NuPlayer2::RTSPSource::onSDPLoaded(const sp<AMessage> &msg) {
+    status_t err;
+    CHECK(msg->findInt32("result", &err));
+
+    mSDPLoader.clear();
+
+    if (mDisconnectReplyID != 0) {
+        err = UNKNOWN_ERROR;
+    }
+
+    if (err == OK) {
+        sp<ASessionDescription> desc;
+        sp<RefBase> obj;
+        CHECK(msg->findObject("description", &obj));
+        desc = static_cast<ASessionDescription *>(obj.get());
+
+        AString rtspUri;
+        if (!desc->findAttribute(0, "a=control", &rtspUri)) {
+            ALOGE("Unable to find url in SDP");
+            err = UNKNOWN_ERROR;
+        } else {
+            sp<AMessage> notify = new AMessage(kWhatNotify, this);
+
+            mHandler = new MyHandler(rtspUri.c_str(), notify, mUIDValid, mUID);
+            mLooper->registerHandler(mHandler);
+
+            mHandler->loadSDP(desc);
+        }
+    }
+
+    if (err != OK) {
+        if (mState == CONNECTING) {
+            // We're still in the preparation phase, signal that it
+            // failed.
+            notifyPrepared(err);
+        }
+
+        mState = DISCONNECTED;
+        setError(err);
+
+        if (mDisconnectReplyID != 0) {
+            finishDisconnectIfPossible();
+        }
+    }
+}
+
+void NuPlayer2::RTSPSource::onDisconnected(const sp<AMessage> &msg) {
+    if (mState == DISCONNECTED) {
+        return;
+    }
+
+    status_t err;
+    CHECK(msg->findInt32("result", &err));
+    CHECK_NE(err, (status_t)OK);
+
+    mLooper->unregisterHandler(mHandler->id());
+    mHandler.clear();
+
+    if (mState == CONNECTING) {
+        // We're still in the preparation phase, signal that it
+        // failed.
+        notifyPrepared(err);
+    }
+
+    mState = DISCONNECTED;
+    setError(err);
+
+    if (mDisconnectReplyID != 0) {
+        finishDisconnectIfPossible();
+    }
+}
+
+void NuPlayer2::RTSPSource::finishDisconnectIfPossible() {
+    if (mState != DISCONNECTED) {
+        if (mHandler != NULL) {
+            mHandler->disconnect();
+        } else if (mSDPLoader != NULL) {
+            mSDPLoader->cancel();
+        }
+        return;
+    }
+
+    (new AMessage)->postReply(mDisconnectReplyID);
+    mDisconnectReplyID = 0;
+}
+
+void NuPlayer2::RTSPSource::setError(status_t err) {
+    Mutex::Autolock _l(mBufferingLock);
+    mFinalResult = err;
+}
+
+void NuPlayer2::RTSPSource::startBufferingIfNecessary() {
+    Mutex::Autolock _l(mBufferingLock);
+
+    if (!mBuffering) {
+        mBuffering = true;
+
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", kWhatPauseOnBufferingStart);
+        notify->post();
+    }
+}
+
+bool NuPlayer2::RTSPSource::stopBufferingIfNecessary() {
+    Mutex::Autolock _l(mBufferingLock);
+
+    if (mBuffering) {
+        if (!haveSufficientDataOnAllTracks()) {
+            return false;
+        }
+
+        mBuffering = false;
+
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", kWhatResumeOnBufferingEnd);
+        notify->post();
+    }
+
+    return true;
+}
+
+void NuPlayer2::RTSPSource::finishSeek(status_t err) {
+    if (mSeekReplyID == NULL) {
+        return;
+    }
+    sp<AMessage> seekReply = new AMessage;
+    seekReply->setInt32("err", err);
+    seekReply->postReply(mSeekReplyID);
+    mSeekReplyID = NULL;
+}
+
+}  // namespace android
diff --git a/media/libmedia/nuplayer2/RTSPSource.h b/media/libmedia/nuplayer2/RTSPSource.h
new file mode 100644
index 0000000..9bce473
--- /dev/null
+++ b/media/libmedia/nuplayer2/RTSPSource.h
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTSP_SOURCE_H_
+
+#define RTSP_SOURCE_H_
+
+#include "NuPlayer2Source.h"
+
+#include "ATSParser.h"
+
+namespace android {
+
+struct ALooper;
+struct AReplyToken;
+struct AnotherPacketSource;
+struct MyHandler;
+struct SDPLoader;
+
+struct NuPlayer2::RTSPSource : public NuPlayer2::Source {
+    RTSPSource(
+            const sp<AMessage> &notify,
+            const sp<MediaHTTPService> &httpService,
+            const char *url,
+            const KeyedVector<String8, String8> *headers,
+            bool uidValid = false,
+            uid_t uid = 0,
+            bool isSDP = false);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+    virtual void stop();
+
+    virtual status_t feedMoreTSData();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t seekTo(
+            int64_t seekTimeUs,
+            MediaPlayer2SeekMode mode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC) override;
+
+    void onMessageReceived(const sp<AMessage> &msg);
+
+protected:
+    virtual ~RTSPSource();
+
+    virtual sp<MetaData> getFormatMeta(bool audio);
+
+private:
+    enum {
+        kWhatNotify          = 'noti',
+        kWhatDisconnect      = 'disc',
+        kWhatPerformSeek     = 'seek',
+        kWhatPollBuffering   = 'poll',
+        kWhatSignalEOS       = 'eos ',
+    };
+
+    enum State {
+        DISCONNECTED,
+        CONNECTING,
+        CONNECTED,
+        SEEKING,
+    };
+
+    enum Flags {
+        // Don't log any URLs.
+        kFlagIncognito = 1,
+    };
+
+    struct TrackInfo {
+        sp<AnotherPacketSource> mSource;
+
+        int32_t mTimeScale;
+        uint32_t mRTPTime;
+        int64_t mNormalPlaytimeUs;
+        bool mNPTMappingValid;
+    };
+
+    sp<MediaHTTPService> mHTTPService;
+    AString mURL;
+    KeyedVector<String8, String8> mExtraHeaders;
+    bool mUIDValid;
+    uid_t mUID;
+    uint32_t mFlags;
+    bool mIsSDP;
+    State mState;
+    status_t mFinalResult;
+    sp<AReplyToken> mDisconnectReplyID;
+    Mutex mBufferingLock;
+    bool mBuffering;
+    bool mInPreparationPhase;
+    bool mEOSPending;
+
+    Mutex mBufferingSettingsLock;
+    BufferingSettings mBufferingSettings;
+
+    sp<ALooper> mLooper;
+    sp<MyHandler> mHandler;
+    sp<SDPLoader> mSDPLoader;
+
+    Vector<TrackInfo> mTracks;
+    sp<AnotherPacketSource> mAudioTrack;
+    sp<AnotherPacketSource> mVideoTrack;
+
+    sp<ATSParser> mTSParser;
+
+    int32_t mSeekGeneration;
+
+    int64_t mEOSTimeoutAudio;
+    int64_t mEOSTimeoutVideo;
+
+    sp<AReplyToken> mSeekReplyID;
+
+    sp<AnotherPacketSource> getSource(bool audio);
+
+    void onConnected();
+    void onSDPLoaded(const sp<AMessage> &msg);
+    void onDisconnected(const sp<AMessage> &msg);
+    void finishDisconnectIfPossible();
+
+    void performSeek(int64_t seekTimeUs);
+    void schedulePollBuffering();
+    void checkBuffering(
+            bool *prepared,
+            bool *underflow,
+            bool *overflow,
+            bool *startServer,
+            bool *finished);
+    void onPollBuffering();
+
+    bool haveSufficientDataOnAllTracks();
+
+    void setEOSTimeout(bool audio, int64_t timeout);
+    void setError(status_t err);
+    void startBufferingIfNecessary();
+    bool stopBufferingIfNecessary();
+    void finishSeek(status_t err);
+
+    void postSourceEOSIfNecessary();
+    void signalSourceEOS(status_t result);
+    void onSignalEOS(const sp<AMessage> &msg);
+
+    bool sourceNearEOS(bool audio);
+    bool sourceReachedEOS(bool audio);
+
+    DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
+};
+
+}  // namespace android
+
+#endif  // RTSP_SOURCE_H_
diff --git a/media/libmedia/nuplayer2/StreamingSource.cpp b/media/libmedia/nuplayer2/StreamingSource.cpp
new file mode 100644
index 0000000..1c370b6
--- /dev/null
+++ b/media/libmedia/nuplayer2/StreamingSource.cpp
@@ -0,0 +1,310 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "StreamingSource"
+#include <utils/Log.h>
+
+#include "StreamingSource.h"
+
+#include "ATSParser.h"
+#include "AnotherPacketSource.h"
+#include "NuPlayer2StreamListener.h"
+
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaKeys.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+const int32_t kNumListenerQueuePackets = 80;
+
+NuPlayer2::StreamingSource::StreamingSource(
+        const sp<AMessage> &notify,
+        const sp<IStreamSource> &source)
+    : Source(notify),
+      mSource(source),
+      mFinalResult(OK),
+      mBuffering(false) {
+}
+
+NuPlayer2::StreamingSource::~StreamingSource() {
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->stop();
+    }
+}
+
+status_t NuPlayer2::StreamingSource::getBufferingSettings(
+        BufferingSettings *buffering /* nonnull */) {
+    *buffering = BufferingSettings();
+    return OK;
+}
+
+status_t NuPlayer2::StreamingSource::setBufferingSettings(
+        const BufferingSettings & /* buffering */) {
+    return OK;
+}
+
+void NuPlayer2::StreamingSource::prepareAsync() {
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("streaming");
+        mLooper->start();
+
+        mLooper->registerHandler(this);
+    }
+
+    notifyVideoSizeChanged();
+    notifyFlagsChanged(0);
+    notifyPrepared();
+}
+
+void NuPlayer2::StreamingSource::start() {
+    mStreamListener = new StreamListener(mSource, NULL);
+
+    uint32_t sourceFlags = mSource->flags();
+
+    uint32_t parserFlags = ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE;
+    if (sourceFlags & IStreamSource::kFlagAlignedVideoData) {
+        parserFlags |= ATSParser::ALIGNED_VIDEO_DATA;
+    }
+
+    mTSParser = new ATSParser(parserFlags);
+
+    mStreamListener->start();
+
+    postReadBuffer();
+}
+
+status_t NuPlayer2::StreamingSource::feedMoreTSData() {
+    return postReadBuffer();
+}
+
+void NuPlayer2::StreamingSource::onReadBuffer() {
+    for (int32_t i = 0; i < kNumListenerQueuePackets; ++i) {
+        char buffer[188];
+        sp<AMessage> extra;
+        ssize_t n = mStreamListener->read(buffer, sizeof(buffer), &extra);
+
+        if (n == 0) {
+            ALOGI("input data EOS reached.");
+            mTSParser->signalEOS(ERROR_END_OF_STREAM);
+            setError(ERROR_END_OF_STREAM);
+            break;
+        } else if (n == INFO_DISCONTINUITY) {
+            int32_t type = ATSParser::DISCONTINUITY_TIME;
+
+            int32_t mask;
+            if (extra != NULL
+                    && extra->findInt32(
+                        kIStreamListenerKeyDiscontinuityMask, &mask)) {
+                if (mask == 0) {
+                    ALOGE("Client specified an illegal discontinuity type.");
+                    setError(ERROR_UNSUPPORTED);
+                    break;
+                }
+
+                type = mask;
+            }
+
+            mTSParser->signalDiscontinuity(
+                    (ATSParser::DiscontinuityType)type, extra);
+        } else if (n < 0) {
+            break;
+        } else {
+            if (buffer[0] == 0x00) {
+                // XXX legacy
+
+                if (extra == NULL) {
+                    extra = new AMessage;
+                }
+
+                uint8_t type = buffer[1];
+
+                if (type & 2) {
+                    int64_t mediaTimeUs;
+                    memcpy(&mediaTimeUs, &buffer[2], sizeof(mediaTimeUs));
+
+                    extra->setInt64(kATSParserKeyMediaTimeUs, mediaTimeUs);
+                }
+
+                mTSParser->signalDiscontinuity(
+                        ((type & 1) == 0)
+                            ? ATSParser::DISCONTINUITY_TIME
+                            : ATSParser::DISCONTINUITY_FORMATCHANGE,
+                        extra);
+            } else {
+                status_t err = mTSParser->feedTSPacket(buffer, sizeof(buffer));
+
+                if (err != OK) {
+                    ALOGE("TS Parser returned error %d", err);
+
+                    mTSParser->signalEOS(err);
+                    setError(err);
+                    break;
+                }
+            }
+        }
+    }
+}
+
+status_t NuPlayer2::StreamingSource::postReadBuffer() {
+    {
+        Mutex::Autolock _l(mBufferingLock);
+        if (mFinalResult != OK) {
+            return mFinalResult;
+        }
+        if (mBuffering) {
+            return OK;
+        }
+        mBuffering = true;
+    }
+
+    (new AMessage(kWhatReadBuffer, this))->post();
+    return OK;
+}
+
+bool NuPlayer2::StreamingSource::haveSufficientDataOnAllTracks() {
+    // We're going to buffer at least 2 secs worth of data on all tracks before
+    // starting playback (both at startup and after a seek).
+
+    static const int64_t kMinDurationUs = 2000000ll;
+
+    sp<AnotherPacketSource> audioTrack = getSource(true /*audio*/);
+    sp<AnotherPacketSource> videoTrack = getSource(false /*audio*/);
+
+    status_t err;
+    int64_t durationUs;
+    if (audioTrack != NULL
+            && (durationUs = audioTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    if (videoTrack != NULL
+            && (durationUs = videoTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    return true;
+}
+
+void NuPlayer2::StreamingSource::setError(status_t err) {
+    Mutex::Autolock _l(mBufferingLock);
+    mFinalResult = err;
+}
+
+sp<AnotherPacketSource> NuPlayer2::StreamingSource::getSource(bool audio) {
+    if (mTSParser == NULL) {
+        return NULL;
+    }
+
+    sp<MediaSource> source = mTSParser->getSource(
+            audio ? ATSParser::AUDIO : ATSParser::VIDEO);
+
+    return static_cast<AnotherPacketSource *>(source.get());
+}
+
+sp<AMessage> NuPlayer2::StreamingSource::getFormat(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    sp<AMessage> format = new AMessage;
+    if (source == NULL) {
+        format->setInt32("err", -EWOULDBLOCK);
+        return format;
+    }
+
+    sp<MetaData> meta = source->getFormat();
+    if (meta == NULL) {
+        format->setInt32("err", -EWOULDBLOCK);
+        return format;
+    }
+    status_t err = convertMetaDataToMessage(meta, &format);
+    if (err != OK) { // format may have been cleared on error
+        return NULL;
+    }
+    return format;
+}
+
+status_t NuPlayer2::StreamingSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    if (!haveSufficientDataOnAllTracks()) {
+        postReadBuffer();
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        return finalResult == OK ? -EWOULDBLOCK : finalResult;
+    }
+
+    status_t err = source->dequeueAccessUnit(accessUnit);
+
+#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0
+    if (err == OK) {
+        int64_t timeUs;
+        CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
+        ALOGV("dequeueAccessUnit timeUs=%lld us", timeUs);
+    }
+#endif
+
+    return err;
+}
+
+bool NuPlayer2::StreamingSource::isRealTime() const {
+    return mSource->flags() & IStreamSource::kFlagIsRealTimeData;
+}
+
+void NuPlayer2::StreamingSource::onMessageReceived(
+        const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatReadBuffer:
+        {
+            onReadBuffer();
+
+            {
+                Mutex::Autolock _l(mBufferingLock);
+                mBuffering = false;
+            }
+            break;
+        }
+        default:
+        {
+            TRESPASS();
+        }
+    }
+}
+
+
+}  // namespace android
+
diff --git a/media/libmedia/nuplayer2/StreamingSource.h b/media/libmedia/nuplayer2/StreamingSource.h
new file mode 100644
index 0000000..4b89c38
--- /dev/null
+++ b/media/libmedia/nuplayer2/StreamingSource.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STREAMING_SOURCE_H_
+
+#define STREAMING_SOURCE_H_
+
+#include "NuPlayer2.h"
+#include "NuPlayer2Source.h"
+
+namespace android {
+
+struct ABuffer;
+struct ATSParser;
+struct AnotherPacketSource;
+
+struct NuPlayer2::StreamingSource : public NuPlayer2::Source {
+    StreamingSource(
+            const sp<AMessage> &notify,
+            const sp<IStreamSource> &source);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+
+    virtual status_t feedMoreTSData();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual bool isRealTime() const;
+
+protected:
+    virtual ~StreamingSource();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual sp<AMessage> getFormat(bool audio);
+
+private:
+    enum {
+        kWhatReadBuffer,
+    };
+    sp<IStreamSource> mSource;
+    status_t mFinalResult;
+    sp<StreamListener> mStreamListener;
+    sp<ATSParser> mTSParser;
+
+    bool mBuffering;
+    Mutex mBufferingLock;
+    sp<ALooper> mLooper;
+
+    void setError(status_t err);
+    sp<AnotherPacketSource> getSource(bool audio);
+    bool haveSufficientDataOnAllTracks();
+    status_t postReadBuffer();
+    void onReadBuffer();
+
+    DISALLOW_EVIL_CONSTRUCTORS(StreamingSource);
+};
+
+}  // namespace android
+
+#endif  // STREAMING_SOURCE_H_
diff --git a/media/libmedia/omx/1.0/WOmxNode.cpp b/media/libmedia/omx/1.0/WOmxNode.cpp
index 0b40e8d..2cd8b76 100644
--- a/media/libmedia/omx/1.0/WOmxNode.cpp
+++ b/media/libmedia/omx/1.0/WOmxNode.cpp
@@ -151,7 +151,8 @@
                     hidl_handle const& outNativeHandle) {
                 fnStatus = toStatusT(status);
                 *buffer = outBuffer;
-                *native_handle = NativeHandle::create(
+                *native_handle = outNativeHandle.getNativeHandle() == nullptr ?
+                        nullptr : NativeHandle::create(
                         native_handle_clone(outNativeHandle), true);
             }));
     return transStatus == NO_ERROR ? fnStatus : transStatus;
diff --git a/media/libmediaextractor/Android.bp b/media/libmediaextractor/Android.bp
new file mode 100644
index 0000000..dcdb320
--- /dev/null
+++ b/media/libmediaextractor/Android.bp
@@ -0,0 +1,42 @@
+cc_library_shared {
+    name: "libmediaextractor",
+
+    include_dirs: [
+        "frameworks/av/include",
+        "frameworks/av/media/libmediaextractor/include",
+    ],
+
+    export_include_dirs: ["include"],
+
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wall",
+    ],
+
+    shared_libs: [
+        "libstagefright_foundation",
+        "libutils",
+        "libcutils",
+        "liblog",
+    ],
+
+    srcs: [
+        "DataSource.cpp",
+        "MediaSource.cpp",
+        "MediaExtractor.cpp",
+    ],
+
+    clang: true,
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+}
diff --git a/media/libmediaextractor/DataSource.cpp b/media/libmediaextractor/DataSource.cpp
new file mode 100644
index 0000000..72959c6
--- /dev/null
+++ b/media/libmediaextractor/DataSource.cpp
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DataSource"
+
+#include <media/DataSource.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/MediaErrors.h>
+#include <utils/String8.h>
+
+namespace android {
+
+bool DataSource::getUInt16(off64_t offset, uint16_t *x) {
+    *x = 0;
+
+    uint8_t byte[2];
+    if (readAt(offset, byte, 2) != 2) {
+        return false;
+    }
+
+    *x = (byte[0] << 8) | byte[1];
+
+    return true;
+}
+
+bool DataSource::getUInt24(off64_t offset, uint32_t *x) {
+    *x = 0;
+
+    uint8_t byte[3];
+    if (readAt(offset, byte, 3) != 3) {
+        return false;
+    }
+
+    *x = (byte[0] << 16) | (byte[1] << 8) | byte[2];
+
+    return true;
+}
+
+bool DataSource::getUInt32(off64_t offset, uint32_t *x) {
+    *x = 0;
+
+    uint32_t tmp;
+    if (readAt(offset, &tmp, 4) != 4) {
+        return false;
+    }
+
+    *x = ntohl(tmp);
+
+    return true;
+}
+
+bool DataSource::getUInt64(off64_t offset, uint64_t *x) {
+    *x = 0;
+
+    uint64_t tmp;
+    if (readAt(offset, &tmp, 8) != 8) {
+        return false;
+    }
+
+    *x = ntoh64(tmp);
+
+    return true;
+}
+
+bool DataSource::getUInt16Var(off64_t offset, uint16_t *x, size_t size) {
+    if (size == 2) {
+        return getUInt16(offset, x);
+    }
+    if (size == 1) {
+        uint8_t tmp;
+        if (readAt(offset, &tmp, 1) == 1) {
+            *x = tmp;
+            return true;
+        }
+    }
+    return false;
+}
+
+bool DataSource::getUInt32Var(off64_t offset, uint32_t *x, size_t size) {
+    if (size == 4) {
+        return getUInt32(offset, x);
+    }
+    if (size == 2) {
+        uint16_t tmp;
+        if (getUInt16(offset, &tmp)) {
+            *x = tmp;
+            return true;
+        }
+    }
+    return false;
+}
+
+bool DataSource::getUInt64Var(off64_t offset, uint64_t *x, size_t size) {
+    if (size == 8) {
+        return getUInt64(offset, x);
+    }
+    if (size == 4) {
+        uint32_t tmp;
+        if (getUInt32(offset, &tmp)) {
+            *x = tmp;
+            return true;
+        }
+    }
+    return false;
+}
+
+status_t DataSource::getSize(off64_t *size) {
+    *size = 0;
+
+    return ERROR_UNSUPPORTED;
+}
+
+String8 DataSource::getMIMEType() const {
+    return String8("application/octet-stream");
+}
+
+}  // namespace android
diff --git a/media/libmediaextractor/MediaExtractor.cpp b/media/libmediaextractor/MediaExtractor.cpp
new file mode 100644
index 0000000..2241567
--- /dev/null
+++ b/media/libmediaextractor/MediaExtractor.cpp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaExtractor"
+#include <utils/Log.h>
+#include <pwd.h>
+
+#include <media/MediaExtractor.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+MediaExtractor::MediaExtractor() {
+    if (!LOG_NDEBUG) {
+        uid_t uid = getuid();
+        struct passwd *pw = getpwuid(uid);
+        ALOGV("extractor created in uid: %d (%s)", getuid(), pw->pw_name);
+    }
+}
+
+MediaExtractor::~MediaExtractor() {}
+
+sp<MetaData> MediaExtractor::getMetaData() {
+    return new MetaData;
+}
+
+uint32_t MediaExtractor::flags() const {
+    return CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_PAUSE | CAN_SEEK;
+}
+
+}  // namespace android
diff --git a/media/libmediaextractor/MediaSource.cpp b/media/libmediaextractor/MediaSource.cpp
new file mode 100644
index 0000000..a5d41f7
--- /dev/null
+++ b/media/libmediaextractor/MediaSource.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/MediaSource.h>
+#include <media/IMediaSource.h>
+
+namespace android {
+
+MediaSource::MediaSource() {}
+
+MediaSource::~MediaSource() {}
+
+////////////////////////////////////////////////////////////////////////////////
+
+MediaSource::ReadOptions::ReadOptions() {
+    reset();
+}
+
+void MediaSource::ReadOptions::reset() {
+    mOptions = 0;
+    mSeekTimeUs = 0;
+    mNonBlocking = false;
+}
+
+void MediaSource::ReadOptions::setNonBlocking() {
+    mNonBlocking = true;
+}
+
+void MediaSource::ReadOptions::clearNonBlocking() {
+    mNonBlocking = false;
+}
+
+bool MediaSource::ReadOptions::getNonBlocking() const {
+    return mNonBlocking;
+}
+
+void MediaSource::ReadOptions::setSeekTo(int64_t time_us, SeekMode mode) {
+    mOptions |= kSeekTo_Option;
+    mSeekTimeUs = time_us;
+    mSeekMode = mode;
+}
+
+void MediaSource::ReadOptions::clearSeekTo() {
+    mOptions &= ~kSeekTo_Option;
+    mSeekTimeUs = 0;
+    mSeekMode = SEEK_CLOSEST_SYNC;
+}
+
+bool MediaSource::ReadOptions::getSeekTo(
+        int64_t *time_us, SeekMode *mode) const {
+    *time_us = mSeekTimeUs;
+    *mode = mSeekMode;
+    return (mOptions & kSeekTo_Option) != 0;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/include/media/stagefright/DataSource.h b/media/libmediaextractor/include/media/DataSource.h
similarity index 86%
rename from media/libstagefright/include/media/stagefright/DataSource.h
rename to media/libmediaextractor/include/media/DataSource.h
index bd863ba..44f94a0 100644
--- a/media/libstagefright/include/media/stagefright/DataSource.h
+++ b/media/libmediaextractor/include/media/DataSource.h
@@ -47,17 +47,6 @@
         kIsLocalFileSource     = 16,
     };
 
-    static sp<DataSource> CreateFromURI(
-            const sp<IMediaHTTPService> &httpService,
-            const char *uri,
-            const KeyedVector<String8, String8> *headers = NULL,
-            String8 *contentType = NULL,
-            HTTPBase *httpSource = NULL);
-
-    static sp<DataSource> CreateMediaHTTP(const sp<IMediaHTTPService> &httpService);
-    static sp<DataSource> CreateFromIDataSource(const sp<IDataSource> &source);
-    static sp<DataSource> CreateFromFd(int fd, int64_t offset, int64_t length);
-
     DataSource() {}
 
     virtual status_t initCheck() const = 0;
@@ -113,7 +102,6 @@
     virtual sp<DecryptHandle> DrmInitialization(const char * /*mime*/ = NULL) {
         return NULL;
     }
-    virtual void getDrmInfo(sp<DecryptHandle> &/*handle*/, DrmManagerClient ** /*client*/) {};
 
     virtual String8 getUri() {
         return String8();
@@ -123,12 +111,6 @@
 
     virtual void close() {};
 
-    // creates an IDataSource wrapper to the DataSource.
-    virtual sp<IDataSource> asIDataSource();
-
-    // returns a pointer to IDataSource if it is wrapped.
-    virtual sp<IDataSource> getIDataSource() const;
-
 protected:
     virtual ~DataSource() {}
 
diff --git a/media/libmediaextractor/include/media/MediaExtractor.h b/media/libmediaextractor/include/media/MediaExtractor.h
new file mode 100644
index 0000000..f197b5e
--- /dev/null
+++ b/media/libmediaextractor/include/media/MediaExtractor.h
@@ -0,0 +1,189 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_EXTRACTOR_H_
+
+#define MEDIA_EXTRACTOR_H_
+
+#include <stdio.h>
+#include <vector>
+
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class DataSource;
+class IMediaSource;
+class MediaExtractorFactory;
+class MetaData;
+class Parcel;
+class String8;
+struct AMessage;
+struct MediaSource;
+typedef std::vector<uint8_t> HInterfaceToken;
+
+class MediaExtractor : public RefBase {
+public:
+    virtual size_t countTracks() = 0;
+    virtual sp<MediaSource> getTrack(size_t index) = 0;
+
+    enum GetTrackMetaDataFlags {
+        kIncludeExtensiveMetaData = 1
+    };
+    virtual sp<MetaData> getTrackMetaData(
+            size_t index, uint32_t flags = 0) = 0;
+
+    // Return container specific meta-data. The default implementation
+    // returns an empty metadata object.
+    virtual sp<MetaData> getMetaData();
+
+    enum Flags {
+        CAN_SEEK_BACKWARD  = 1,  // the "seek 10secs back button"
+        CAN_SEEK_FORWARD   = 2,  // the "seek 10secs forward button"
+        CAN_PAUSE          = 4,
+        CAN_SEEK           = 8,  // the "seek bar"
+    };
+
+    // If subclasses do _not_ override this, the default is
+    // CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK | CAN_PAUSE
+    virtual uint32_t flags() const;
+
+    // for DRM
+    virtual char* getDrmTrackInfo(size_t /*trackID*/, int * /*len*/) {
+        return NULL;
+    }
+    virtual void setUID(uid_t /*uid*/) {
+    }
+    virtual status_t setMediaCas(const HInterfaceToken &/*casToken*/) {
+        return INVALID_OPERATION;
+    }
+
+    virtual const char * name() { return "<unspecified>"; }
+
+    virtual void release() {}
+    typedef MediaExtractor* (*CreatorFunc)(
+            const sp<DataSource> &source, const sp<AMessage> &meta);
+
+    // The sniffer can optionally fill in "meta" with an AMessage containing
+    // a dictionary of values that helps the corresponding extractor initialize
+    // its state without duplicating effort already exerted by the sniffer.
+    typedef CreatorFunc (*SnifferFunc)(
+            const sp<DataSource> &source, String8 *mimeType,
+            float *confidence, sp<AMessage> *meta);
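+
+    // Illustrative sketch only (not part of this change): a sniffer matching
+    // SnifferFunc could look roughly like the following; MySniff,
+    // CreateMyExtractor and the mime string are hypothetical names.
+    //
+    //   static MediaExtractor *CreateMyExtractor(
+    //           const sp<DataSource> &source, const sp<AMessage> &meta) {
+    //       return new MyExtractor(source, meta);
+    //   }
+    //
+    //   static CreatorFunc MySniff(
+    //           const sp<DataSource> &source, String8 *mimeType,
+    //           float *confidence, sp<AMessage> *meta) {
+    //       uint8_t header[8];
+    //       if (source->readAt(0, header, sizeof(header)) != sizeof(header)) {
+    //           return NULL;
+    //       }
+    //       // ... inspect header, optionally stash findings in *meta ...
+    //       *mimeType = "video/x-my-container";
+    //       *confidence = 0.8f;
+    //       return CreateMyExtractor;
+    //   }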
+
+    typedef struct {
+        const uint8_t b[16];
+    } uuid_t;
+
+    typedef struct {
+        // version number of this structure
+        const uint32_t def_version;
+
+        // A unique identifier for this extractor.
+        // See below for a convenience macro to create this from a string.
+        uuid_t extractor_uuid;
+
+        // Version number of this extractor. When two extractors with the same
+        // uuid are encountered, the one with the largest version number will
+        // be used.
+        const uint32_t extractor_version;
+
+        // a human readable name
+        const char *extractor_name;
+
+        // the sniffer function
+        const SnifferFunc sniff;
+    } ExtractorDef;
+
+    static const uint32_t EXTRACTORDEF_VERSION = 1;
+
+    typedef ExtractorDef (*GetExtractorDef)();
+
+protected:
+    MediaExtractor();
+    virtual ~MediaExtractor();
+
+private:
+    MediaExtractor(const MediaExtractor &);
+    MediaExtractor &operator=(const MediaExtractor &);
+};
+
+// purposely not defined anywhere so that this will fail to link if
+// expressions below are not evaluated at compile time
+int invalid_uuid_string(const char *);
+
+template <typename T, size_t N>
+constexpr uint8_t _digitAt_(const T (&s)[N], const size_t n) {
+    return s[n] >= '0' && s[n] <= '9' ? s[n] - '0'
+            : s[n] >= 'a' && s[n] <= 'f' ? s[n] - 'a' + 10
+                    : s[n] >= 'A' && s[n] <= 'F' ? s[n] - 'A' + 10
+                            : invalid_uuid_string("uuid: bad digits");
+}
+
+template <typename T, size_t N>
+constexpr uint8_t _hexByteAt_(const T (&s)[N], size_t n) {
+    return (_digitAt_(s, n) << 4) + _digitAt_(s, n + 1);
+}
+
+constexpr bool _assertIsDash_(char c) {
+    return c == '-' ? true : invalid_uuid_string("Wrong format");
+}
+
+template <size_t N>
+constexpr MediaExtractor::uuid_t constUUID(const char (&s) [N]) {
+    static_assert(N == 37, "uuid: wrong length");
+    return
+            _assertIsDash_(s[8]),
+            _assertIsDash_(s[13]),
+            _assertIsDash_(s[18]),
+            _assertIsDash_(s[23]),
+            MediaExtractor::uuid_t {{
+                _hexByteAt_(s, 0),
+                _hexByteAt_(s, 2),
+                _hexByteAt_(s, 4),
+                _hexByteAt_(s, 6),
+                _hexByteAt_(s, 9),
+                _hexByteAt_(s, 11),
+                _hexByteAt_(s, 14),
+                _hexByteAt_(s, 16),
+                _hexByteAt_(s, 19),
+                _hexByteAt_(s, 21),
+                _hexByteAt_(s, 24),
+                _hexByteAt_(s, 26),
+                _hexByteAt_(s, 28),
+                _hexByteAt_(s, 30),
+                _hexByteAt_(s, 32),
+                _hexByteAt_(s, 34),
+            }};
+}
+// Convenience macro to create a uuid_t from a string literal, which should
+// be formatted as "12345678-1234-1234-1234-123456789abc", as generated by
+// e.g. https://www.uuidgenerator.net/ or the 'uuidgen' Linux command.
+// Hex digits may be upper or lower case.
+//
+// The macro call is otherwise equivalent to specifying the structure directly
+// (e.g. UUID("7d613858-5837-4a38-84c5-332d1cddee27") is the same as
+//       {{0x7d, 0x61, 0x38, 0x58, 0x58, 0x37, 0x4a, 0x38,
+//         0x84, 0xc5, 0x33, 0x2d, 0x1c, 0xdd, 0xee, 0x27}})
+
+#define UUID(str) []{ constexpr MediaExtractor::uuid_t uuid = constUUID(str); return uuid; }()
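+
+// Illustrative sketch only (not part of this change): an extractor library
+// could fill in a MediaExtractor::ExtractorDef roughly as below, reusing the
+// example UUID string above; GETEXTRACTORDEF and MySniff are hypothetical
+// placeholder names.
+//
+//   extern "C" MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
+//       return {
+//           MediaExtractor::EXTRACTORDEF_VERSION,  // def_version
+//           UUID("7d613858-5837-4a38-84c5-332d1cddee27"),
+//           1,                                     // extractor_version
+//           "My Extractor",                        // extractor_name
+//           MySniff,                               // sniff
+//       };
+//   }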
+
+
+
+}  // namespace android
+
+#endif  // MEDIA_EXTRACTOR_H_
diff --git a/media/libstagefright/include/media/stagefright/MediaSource.h b/media/libmediaextractor/include/media/MediaSource.h
similarity index 74%
rename from media/libstagefright/include/media/stagefright/MediaSource.h
rename to media/libmediaextractor/include/media/MediaSource.h
index 14adb05..98b136b 100644
--- a/media/libstagefright/include/media/stagefright/MediaSource.h
+++ b/media/libmediaextractor/include/media/MediaSource.h
@@ -20,8 +20,10 @@
 
 #include <sys/types.h>
 
-#include <media/IMediaSource.h>
+#include <binder/IMemory.h>
+#include <binder/MemoryDealer.h>
 #include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
 #include <utils/RefBase.h>
 #include <utils/Vector.h>
 
@@ -29,8 +31,9 @@
 
 class MediaBuffer;
 class MetaData;
+class IMediaSource;
 
-struct MediaSource : public BnMediaSource {
+struct MediaSource : public virtual RefBase {
     MediaSource();
 
     // To be called before any other methods on this object, except
@@ -48,6 +51,47 @@
     // Returns the format of the data output by this media source.
     virtual sp<MetaData> getFormat() = 0;
 
+    // Options that modify read() behaviour. The default is to
+    // a) not request a seek
+    // b) not be late, i.e. lateness_us = 0
+    struct ReadOptions {
+        enum SeekMode : int32_t {
+            SEEK_PREVIOUS_SYNC,
+            SEEK_NEXT_SYNC,
+            SEEK_CLOSEST_SYNC,
+            SEEK_CLOSEST,
+            SEEK_FRAME_INDEX,
+        };
+
+        ReadOptions();
+
+        // Reset everything back to defaults.
+        void reset();
+
+        void setSeekTo(int64_t time_us, SeekMode mode = SEEK_CLOSEST_SYNC);
+        void clearSeekTo();
+        bool getSeekTo(int64_t *time_us, SeekMode *mode) const;
+
+        void setNonBlocking();
+        void clearNonBlocking();
+        bool getNonBlocking() const;
+
+        // Used to clear all non-persistent options for multiple buffer reads.
+        void clearNonPersistent() {
+            clearSeekTo();
+        }
+
+    private:
+        enum Options {
+            kSeekTo_Option      = 1,
+        };
+
+        uint32_t mOptions;
+        int64_t mSeekTimeUs;
+        SeekMode mSeekMode;
+        bool mNonBlocking;
+    } __attribute__((packed)); // sent through Binder
+
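+    // Illustrative usage sketch only (not part of this change); "source" is a
+    // hypothetical sp<MediaSource>:
+    //
+    //   MediaSource::ReadOptions options;
+    //   options.setSeekTo(seekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
+    //
+    //   MediaBuffer *buffer = NULL;
+    //   status_t err = source->read(&buffer, &options);
+    //
+    //   // Drop the one-shot seek request before reading further buffers.
+    //   options.clearNonPersistent();
+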
     // Returns a new buffer of data. Call blocks until a
     // buffer is available, an error is encountered or the end of the stream
     // is reached.
@@ -66,15 +110,6 @@
         return ERROR_UNSUPPORTED;
     }
 
-    // The consumer of this media source requests that the given buffers
-    // are to be returned exclusively in response to read calls.
-    // This will be called after a successful start() and before the
-    // first read() call.
-    // Callee assumes ownership of the buffers if no error is returned.
-    virtual status_t setBuffers(const Vector<MediaBuffer *> & /* buffers */) {
-        return ERROR_UNSUPPORTED;
-    }
-
     // The consumer of this media source requests the source stops sending
     // buffers with timestamp larger than or equal to stopTimeUs. stopTimeUs
     // must be in the same time base as the startTime passed in start(). If
diff --git a/media/libmediametrics/MediaAnalyticsItem.cpp b/media/libmediametrics/MediaAnalyticsItem.cpp
index f968c09..6b063e8 100644
--- a/media/libmediametrics/MediaAnalyticsItem.cpp
+++ b/media/libmediametrics/MediaAnalyticsItem.cpp
@@ -214,12 +214,12 @@
     return mPkgName;
 }
 
-MediaAnalyticsItem &MediaAnalyticsItem::setPkgVersionCode(int32_t pkgVersionCode) {
+MediaAnalyticsItem &MediaAnalyticsItem::setPkgVersionCode(int64_t pkgVersionCode) {
     mPkgVersionCode = pkgVersionCode;
     return *this;
 }
 
-int32_t MediaAnalyticsItem::getPkgVersionCode() const {
+int64_t MediaAnalyticsItem::getPkgVersionCode() const {
     return mPkgVersionCode;
 }
 
@@ -279,8 +279,10 @@
         prop = &mProps[i];
     } else {
         if (i == mPropSize) {
-            growProps();
-            // XXX: verify success
+            if (growProps() == false) {
+                ALOGE("failed allocation for new props");
+                return NULL;
+            }
         }
         i = mPropCount++;
         prop = &mProps[i];
@@ -312,41 +314,54 @@
 // set the values
 void MediaAnalyticsItem::setInt32(MediaAnalyticsItem::Attr name, int32_t value) {
     Prop *prop = allocateProp(name);
-    prop->mType = kTypeInt32;
-    prop->u.int32Value = value;
+    if (prop != NULL) {
+        prop->mType = kTypeInt32;
+        prop->u.int32Value = value;
+    }
 }
 
 void MediaAnalyticsItem::setInt64(MediaAnalyticsItem::Attr name, int64_t value) {
     Prop *prop = allocateProp(name);
-    prop->mType = kTypeInt64;
-    prop->u.int64Value = value;
+    if (prop != NULL) {
+        prop->mType = kTypeInt64;
+        prop->u.int64Value = value;
+    }
 }
 
 void MediaAnalyticsItem::setDouble(MediaAnalyticsItem::Attr name, double value) {
     Prop *prop = allocateProp(name);
-    prop->mType = kTypeDouble;
-    prop->u.doubleValue = value;
+    if (prop != NULL) {
+        prop->mType = kTypeDouble;
+        prop->u.doubleValue = value;
+    }
 }
 
 void MediaAnalyticsItem::setCString(MediaAnalyticsItem::Attr name, const char *value) {
 
     Prop *prop = allocateProp(name);
     // any old value will be gone
-    prop->mType = kTypeCString;
-    prop->u.CStringValue = strdup(value);
+    if (prop != NULL) {
+        prop->mType = kTypeCString;
+        prop->u.CStringValue = strdup(value);
+    }
 }
 
 void MediaAnalyticsItem::setRate(MediaAnalyticsItem::Attr name, int64_t count, int64_t duration) {
     Prop *prop = allocateProp(name);
-    prop->mType = kTypeRate;
-    prop->u.rate.count = count;
-    prop->u.rate.duration = duration;
+    if (prop != NULL) {
+        prop->mType = kTypeRate;
+        prop->u.rate.count = count;
+        prop->u.rate.duration = duration;
+    }
 }
 
 
 // find/add/set fused into a single operation
 void MediaAnalyticsItem::addInt32(MediaAnalyticsItem::Attr name, int32_t value) {
     Prop *prop = allocateProp(name);
+    if (prop == NULL) {
+        return;
+    }
     switch (prop->mType) {
         case kTypeInt32:
             prop->u.int32Value += value;
@@ -361,6 +376,9 @@
 
 void MediaAnalyticsItem::addInt64(MediaAnalyticsItem::Attr name, int64_t value) {
     Prop *prop = allocateProp(name);
+    if (prop == NULL) {
+        return;
+    }
     switch (prop->mType) {
         case kTypeInt64:
             prop->u.int64Value += value;
@@ -375,6 +393,9 @@
 
 void MediaAnalyticsItem::addRate(MediaAnalyticsItem::Attr name, int64_t count, int64_t duration) {
     Prop *prop = allocateProp(name);
+    if (prop == NULL) {
+        return;
+    }
     switch (prop->mType) {
         case kTypeRate:
             prop->u.rate.count += count;
@@ -391,6 +412,9 @@
 
 void MediaAnalyticsItem::addDouble(MediaAnalyticsItem::Attr name, double value) {
     Prop *prop = allocateProp(name);
+    if (prop == NULL) {
+        return;
+    }
     switch (prop->mType) {
         case kTypeDouble:
             prop->u.doubleValue += value;
@@ -585,7 +609,7 @@
     }
 }
 
-void MediaAnalyticsItem::growProps(int increment)
+bool MediaAnalyticsItem::growProps(int increment)
 {
     if (increment <= 0) {
         increment = kGrowProps;
@@ -599,6 +623,10 @@
         }
         mProps = ni;
         mPropSize = nsize;
+        return true;
+    } else {
+        ALOGW("MediaAnalyticsItem::growProps fails");
+        return false;
     }
 }
 
@@ -612,7 +640,7 @@
     mPid = data.readInt32();
     mUid = data.readInt32();
     mPkgName = data.readCString();
-    mPkgVersionCode = data.readInt32();
+    mPkgVersionCode = data.readInt64();
     mSessionID = data.readInt64();
     mFinalized = data.readInt32();
     mTimestamp = data.readInt64();
@@ -659,7 +687,7 @@
     data->writeInt32(mPid);
     data->writeInt32(mUid);
     data->writeCString(mPkgName.c_str());
-    data->writeInt32(mPkgVersionCode);
+    data->writeInt64(mPkgVersionCode);
     data->writeInt64(mSessionID);
     data->writeInt32(mFinalized);
     data->writeInt64(mTimestamp);
@@ -738,7 +766,7 @@
 
     if (version >= PROTO_V1) {
         result.append(mPkgName);
-        snprintf(buffer, sizeof(buffer), ":%d:", mPkgVersionCode);
+        snprintf(buffer, sizeof(buffer), ":%"  PRId64 ":", mPkgVersionCode);
         result.append(buffer);
     }
 
@@ -963,32 +991,26 @@
     int nattr = incoming->mPropCount;
     for (int i = 0 ; i < nattr; i++ ) {
         Prop *iprop = &incoming->mProps[i];
-        Prop *oprop = findProp(iprop->mName);
         const char *p = iprop->mName;
         size_t len = strlen(p);
-        char semantic = p[len-1];
+
+        // should ignore a zero length name...
+        if (len == 0) {
+            continue;
+        }
+
+        Prop *oprop = findProp(iprop->mName);
 
         if (oprop == NULL) {
             // no oprop, so we insert the new one
             oprop = allocateProp(p);
-            copyProp(oprop, iprop);
-        } else {
-            // merge iprop into oprop
-            switch (semantic) {
-                case '<':       // first  aka keep old)
-                    /* nop */
-                    break;
-
-                default:        // default is 'last'
-                case '>':       // last (aka keep new)
-                    copyProp(oprop, iprop);
-                    break;
-
-                case '+':       /* sum */
-                    // XXX validate numeric types, sum in place
-                    break;
-
+            if (oprop != NULL) {
+                copyProp(oprop, iprop);
+            } else {
+                ALOGW("dropped property '%s'", iprop->mName);
             }
+        } else {
+            copyProp(oprop, iprop);
         }
     }
 
diff --git a/media/libmediametrics/include/MediaAnalyticsItem.h b/media/libmediametrics/include/MediaAnalyticsItem.h
index dd7452f..ec9b660 100644
--- a/media/libmediametrics/include/MediaAnalyticsItem.h
+++ b/media/libmediametrics/include/MediaAnalyticsItem.h
@@ -173,8 +173,8 @@
         MediaAnalyticsItem &setPkgName(AString);
         AString getPkgName() const;
 
-        MediaAnalyticsItem &setPkgVersionCode(int32_t);
-        int32_t getPkgVersionCode() const;
+        MediaAnalyticsItem &setPkgVersionCode(int64_t);
+        int64_t getPkgVersionCode() const;
 
         // our serialization code for binder calls
         int32_t writeToParcel(Parcel *);
@@ -205,7 +205,7 @@
         pid_t     mPid;
         uid_t     mUid;
         AString   mPkgName;
-        int32_t   mPkgVersionCode;
+        int64_t   mPkgVersionCode;
 
         // let's reuse a binder connection
         static sp<IMediaAnalyticsService> sAnalyticsService;
@@ -243,7 +243,7 @@
         enum {
             kGrowProps = 10
         };
-        void growProps(int increment = kGrowProps);
+        bool growProps(int increment = kGrowProps);
         size_t findPropIndex(const char *name, size_t len);
         Prop *findProp(const char *name);
         Prop *allocateProp(const char *name);
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
new file mode 100644
index 0000000..a37973b
--- /dev/null
+++ b/media/libmediaplayerservice/Android.bp
@@ -0,0 +1,78 @@
+cc_library_shared {
+
+    srcs: [
+        "ActivityManager.cpp",
+        "MediaPlayerFactory.cpp",
+        "MediaPlayerService.cpp",
+        "MediaRecorderClient.cpp",
+        "MetadataRetrieverClient.cpp",
+        "StagefrightRecorder.cpp",
+        "TestPlayerStub.cpp",
+    ],
+
+    shared_libs: [
+        "android.hardware.media.omx@1.0",
+        "libaudioclient",
+        "libbinder",
+        "libcamera_client",
+        "libcrypto",
+        "libcutils",
+        "libdl",
+        "libgui",
+        "libhidlbase",
+        "libhidlmemory",
+        "liblog",
+        "libmedia",
+        "libmedia_omx",
+        "libmediaextractor",
+        "libmediadrm",
+        "libmediametrics",
+        "libmediautils",
+        "libmemunreachable",
+        "libpowermanager",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libstagefright_httplive",
+        "libutils",
+    ],
+
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    static_libs: [
+        "libstagefright_nuplayer",
+        "libstagefright_rtsp",
+        "libstagefright_timedtext",
+    ],
+
+    export_shared_lib_headers: ["libmedia"],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/rtsp",
+        "frameworks/av/media/libstagefright/webm",
+    ],
+
+    local_include_dirs: ["include"],
+
+    cflags: [
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    name: "libmediaplayerservice",
+
+    compile_multilib: "32",
+
+    sanitize: {
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
+
+subdirs = ["*"]
+
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
deleted file mode 100644
index 1fc74a9..0000000
--- a/media/libmediaplayerservice/Android.mk
+++ /dev/null
@@ -1,73 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-#
-# libmediaplayerservice
-#
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=               \
-    ActivityManager.cpp         \
-    HDCP.cpp                    \
-    MediaPlayerFactory.cpp      \
-    MediaPlayerService.cpp      \
-    MediaRecorderClient.cpp     \
-    MetadataRetrieverClient.cpp \
-    RemoteDisplay.cpp           \
-    StagefrightRecorder.cpp     \
-    TestPlayerStub.cpp          \
-
-LOCAL_SHARED_LIBRARIES :=       \
-    libbinder                   \
-    libcrypto                   \
-    libcutils                   \
-    libdrmframework             \
-    liblog                      \
-    libdl                       \
-    libgui                      \
-    libaudioclient              \
-    libmedia                    \
-    libmediametrics             \
-    libmediadrm                 \
-    libmediautils               \
-    libmemunreachable           \
-    libstagefright              \
-    libstagefright_foundation   \
-    libstagefright_httplive     \
-    libstagefright_omx          \
-    libstagefright_wfd          \
-    libutils                    \
-    libnativewindow             \
-    libhidlbase                 \
-    android.hardware.media.omx@1.0 \
-
-LOCAL_STATIC_LIBRARIES :=       \
-    libstagefright_nuplayer     \
-    libstagefright_rtsp         \
-    libstagefright_timedtext    \
-
-LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := libmedia
-
-LOCAL_C_INCLUDES :=                                                 \
-    frameworks/av/media/libstagefright/include               \
-    frameworks/av/media/libstagefright/rtsp                  \
-    frameworks/av/media/libstagefright/wifi-display          \
-    frameworks/av/media/libstagefright/webm                  \
-    $(LOCAL_PATH)/include/media                              \
-    frameworks/av/include/camera                             \
-    frameworks/native/include/media/openmax                  \
-    frameworks/native/include/media/hardware                 \
-    external/tremolo/Tremolo                                 \
-
-LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall
-
-LOCAL_MODULE:= libmediaplayerservice
-
-LOCAL_32_BIT_ONLY := true
-
-LOCAL_SANITIZE := cfi
-LOCAL_SANITIZE_DIAG := cfi
-
-include $(BUILD_SHARED_LIBRARY)
-
-include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp
deleted file mode 100644
index afe3936..0000000
--- a/media/libmediaplayerservice/HDCP.cpp
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "HDCP"
-#include <utils/Log.h>
-
-#include "HDCP.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-
-#include <dlfcn.h>
-
-namespace android {
-
-HDCP::HDCP(bool createEncryptionModule)
-    : mIsEncryptionModule(createEncryptionModule),
-      mLibHandle(NULL),
-      mHDCPModule(NULL) {
-    mLibHandle = dlopen("libstagefright_hdcp.so", RTLD_NOW);
-
-    if (mLibHandle == NULL) {
-        ALOGE("Unable to locate libstagefright_hdcp.so");
-        return;
-    }
-
-    typedef HDCPModule *(*CreateHDCPModuleFunc)(
-            void *, HDCPModule::ObserverFunc);
-
-    CreateHDCPModuleFunc createHDCPModule =
-        mIsEncryptionModule
-            ? (CreateHDCPModuleFunc)dlsym(mLibHandle, "createHDCPModule")
-            : (CreateHDCPModuleFunc)dlsym(
-                    mLibHandle, "createHDCPModuleForDecryption");
-
-    if (createHDCPModule == NULL) {
-        ALOGE("Unable to find symbol 'createHDCPModule'.");
-    } else if ((mHDCPModule = createHDCPModule(
-                    this, &HDCP::ObserveWrapper)) == NULL) {
-        ALOGE("createHDCPModule failed.");
-    }
-}
-
-HDCP::~HDCP() {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mHDCPModule != NULL) {
-        delete mHDCPModule;
-        mHDCPModule = NULL;
-    }
-
-    if (mLibHandle != NULL) {
-        dlclose(mLibHandle);
-        mLibHandle = NULL;
-    }
-}
-
-status_t HDCP::setObserver(const sp<IHDCPObserver> &observer) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mHDCPModule == NULL) {
-        return NO_INIT;
-    }
-
-    mObserver = observer;
-
-    return OK;
-}
-
-status_t HDCP::initAsync(const char *host, unsigned port) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mHDCPModule == NULL) {
-        return NO_INIT;
-    }
-
-    return mHDCPModule->initAsync(host, port);
-}
-
-status_t HDCP::shutdownAsync() {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mHDCPModule == NULL) {
-        return NO_INIT;
-    }
-
-    return mHDCPModule->shutdownAsync();
-}
-
-uint32_t HDCP::getCaps() {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mHDCPModule == NULL) {
-        return NO_INIT;
-    }
-
-    return mHDCPModule->getCaps();
-}
-
-status_t HDCP::encrypt(
-        const void *inData, size_t size, uint32_t streamCTR,
-        uint64_t *outInputCTR, void *outData) {
-    Mutex::Autolock autoLock(mLock);
-
-    CHECK(mIsEncryptionModule);
-
-    if (mHDCPModule == NULL) {
-        *outInputCTR = 0;
-
-        return NO_INIT;
-    }
-
-    return mHDCPModule->encrypt(inData, size, streamCTR, outInputCTR, outData);
-}
-
-status_t HDCP::encryptNative(
-        const sp<GraphicBuffer> &graphicBuffer,
-        size_t offset, size_t size, uint32_t streamCTR,
-        uint64_t *outInputCTR, void *outData) {
-    Mutex::Autolock autoLock(mLock);
-
-    CHECK(mIsEncryptionModule);
-
-    if (mHDCPModule == NULL) {
-        *outInputCTR = 0;
-
-        return NO_INIT;
-    }
-
-    return mHDCPModule->encryptNative(graphicBuffer->handle,
-                    offset, size, streamCTR, outInputCTR, outData);
-}
-
-status_t HDCP::decrypt(
-        const void *inData, size_t size,
-        uint32_t streamCTR, uint64_t outInputCTR, void *outData) {
-    Mutex::Autolock autoLock(mLock);
-
-    CHECK(!mIsEncryptionModule);
-
-    if (mHDCPModule == NULL) {
-        return NO_INIT;
-    }
-
-    return mHDCPModule->decrypt(inData, size, streamCTR, outInputCTR, outData);
-}
-
-// static
-void HDCP::ObserveWrapper(void *me, int msg, int ext1, int ext2) {
-    static_cast<HDCP *>(me)->observe(msg, ext1, ext2);
-}
-
-void HDCP::observe(int msg, int ext1, int ext2) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mObserver != NULL) {
-        mObserver->notify(msg, ext1, ext2, NULL /* obj */);
-    }
-}
-
-}  // namespace android
-
diff --git a/media/libmediaplayerservice/HDCP.h b/media/libmediaplayerservice/HDCP.h
deleted file mode 100644
index 83c61b5..0000000
--- a/media/libmediaplayerservice/HDCP.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef HDCP_H_
-
-#define HDCP_H_
-
-#include <media/IHDCP.h>
-#include <utils/Mutex.h>
-
-namespace android {
-
-struct HDCP : public BnHDCP {
-    explicit HDCP(bool createEncryptionModule);
-    virtual ~HDCP();
-
-    virtual status_t setObserver(const sp<IHDCPObserver> &observer);
-    virtual status_t initAsync(const char *host, unsigned port);
-    virtual status_t shutdownAsync();
-    virtual uint32_t getCaps();
-
-    virtual status_t encrypt(
-            const void *inData, size_t size, uint32_t streamCTR,
-            uint64_t *outInputCTR, void *outData);
-
-    virtual status_t encryptNative(
-            const sp<GraphicBuffer> &graphicBuffer,
-            size_t offset, size_t size, uint32_t streamCTR,
-            uint64_t *outInputCTR, void *outData);
-
-    virtual status_t decrypt(
-            const void *inData, size_t size,
-            uint32_t streamCTR, uint64_t outInputCTR, void *outData);
-
-private:
-    Mutex mLock;
-
-    bool mIsEncryptionModule;
-
-    void *mLibHandle;
-    HDCPModule *mHDCPModule;
-    sp<IHDCPObserver> mObserver;
-
-    static void ObserveWrapper(void *me, int msg, int ext1, int ext2);
-    void observe(int msg, int ext1, int ext2);
-
-    DISALLOW_EVIL_CONSTRUCTORS(HDCP);
-};
-
-}  // namespace android
-
-#endif  // HDCP_H_
-
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index cd0a003..3209c6d 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -20,8 +20,8 @@
 #include <utils/Log.h>
 
 #include <cutils/properties.h>
+#include <media/DataSource.h>
 #include <media/IMediaPlayer.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <utils/Errors.h>
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 6e17b8d..88221ed 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -55,6 +55,7 @@
 #include <media/Metadata.h>
 #include <media/AudioTrack.h>
 #include <media/MemoryLeakTrackUtil.h>
+#include <media/stagefright/InterfaceUtils.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
@@ -77,11 +78,7 @@
 #include "TestPlayerStub.h"
 #include "nuplayer/NuPlayerDriver.h"
 
-#include <media/stagefright/omx/OMX.h>
-
-#include "HDCP.h"
 #include "HTTPBase.h"
-#include "RemoteDisplay.h"
 
 static const int kDumpLockRetries = 50;
 static const int kDumpLockSleepUs = 20000;
@@ -93,6 +90,7 @@
 using android::BAD_VALUE;
 using android::NOT_ENOUGH_DATA;
 using android::Parcel;
+using android::media::VolumeShaper;
 
 // Max number of entries in the filter.
 const int kMaxFilterSize = 64;  // I pulled that out of thin air.
@@ -337,29 +335,13 @@
     return MediaCodecList::getLocalInstance();
 }
 
-sp<IOMX> MediaPlayerService::getOMX() {
-    ALOGI("MediaPlayerService::getOMX");
-    Mutex::Autolock autoLock(mLock);
-
-    if (mOMX.get() == NULL) {
-        mOMX = new OMX;
-    }
-
-    return mOMX;
-}
-
-sp<IHDCP> MediaPlayerService::makeHDCP(bool createEncryptionModule) {
-    return new HDCP(createEncryptionModule);
-}
-
 sp<IRemoteDisplay> MediaPlayerService::listenForRemoteDisplay(
-        const String16 &opPackageName,
-        const sp<IRemoteDisplayClient>& client, const String8& iface) {
-    if (!checkPermission("android.permission.CONTROL_WIFI_DISPLAY")) {
-        return NULL;
-    }
+        const String16 &/*opPackageName*/,
+        const sp<IRemoteDisplayClient>& /*client*/,
+        const String8& /*iface*/) {
+    ALOGE("listenForRemoteDisplay is no longer supported!");
 
-    return new RemoteDisplay(opPackageName, client, iface.string());
+    return NULL;
 }
 
 status_t MediaPlayerService::AudioOutput::dump(int fd, const Vector<String16>& args) const
@@ -608,6 +590,7 @@
         free(mAudioAttributes);
     }
     clearDeathNotifiers_l();
+    mAudioDeviceUpdatedListener.clear();
 }
 
 void MediaPlayerService::Client::disconnect()
@@ -715,6 +698,17 @@
     }
 }
 
+void MediaPlayerService::Client::AudioDeviceUpdatedNotifier::onAudioDeviceUpdate(
+        audio_io_handle_t audioIo,
+        audio_port_handle_t deviceId) {
+    sp<MediaPlayerBase> listener = mListener.promote();
+    if (listener != NULL) {
+        listener->sendEvent(MEDIA_AUDIO_ROUTING_CHANGED, audioIo, deviceId);
+    } else {
+        ALOGW("listener is gone, dropping event %d", MEDIA_AUDIO_ROUTING_CHANGED);
+    }
+}
+
 void MediaPlayerService::Client::clearDeathNotifiers_l() {
     if (mExtractorDeathListener != nullptr) {
         mExtractorDeathListener->unlinkToDeath();
@@ -747,36 +741,25 @@
             new ServiceDeathNotifier(binder, p, MEDIAEXTRACTOR_PROCESS_DEATH);
     binder->linkToDeath(extractorDeathListener);
 
-    sp<ServiceDeathNotifier> codecDeathListener;
-    if (property_get_bool("persist.media.treble_omx", true)) {
-        // Treble IOmx
-        sp<IOmx> omx = IOmx::getService();
-        if (omx == nullptr) {
-            ALOGE("Treble IOmx not available");
-            return NULL;
-        }
-        codecDeathListener = new ServiceDeathNotifier(omx, p, MEDIACODEC_PROCESS_DEATH);
-        omx->linkToDeath(codecDeathListener, 0);
-    } else {
-        // Legacy IOMX
-        binder = sm->getService(String16("media.codec"));
-        if (binder == NULL) {
-            ALOGE("codec service not available");
-            return NULL;
-        }
-        codecDeathListener = new ServiceDeathNotifier(binder, p, MEDIACODEC_PROCESS_DEATH);
-        binder->linkToDeath(codecDeathListener);
+    sp<IOmx> omx = IOmx::getService();
+    if (omx == nullptr) {
+        ALOGE("IOmx service is not available");
+        return NULL;
     }
+    sp<ServiceDeathNotifier> codecDeathListener =
+            new ServiceDeathNotifier(omx, p, MEDIACODEC_PROCESS_DEATH);
+    omx->linkToDeath(codecDeathListener, 0);
 
     Mutex::Autolock lock(mLock);
 
     clearDeathNotifiers_l();
     mExtractorDeathListener = extractorDeathListener;
     mCodecDeathListener = codecDeathListener;
+    mAudioDeviceUpdatedListener = new AudioDeviceUpdatedNotifier(p);
 
     if (!p->hardwareOutput()) {
         mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
-                mPid, mAudioAttributes);
+                mPid, mAudioAttributes, mAudioDeviceUpdatedListener);
         static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
     }
 
@@ -906,7 +889,7 @@
 
 status_t MediaPlayerService::Client::setDataSource(
         const sp<IDataSource> &source) {
-    sp<DataSource> dataSource = DataSource::CreateFromIDataSource(source);
+    sp<DataSource> dataSource = CreateDataSourceFromIDataSource(source);
     player_type playerType = MediaPlayerFactory::getPlayerType(this, dataSource);
     sp<MediaPlayerBase> p = setDataSource_pre(playerType);
     if (p == NULL) {
@@ -1063,18 +1046,18 @@
     return p->setBufferingSettings(buffering);
 }
 
-status_t MediaPlayerService::Client::getDefaultBufferingSettings(
+status_t MediaPlayerService::Client::getBufferingSettings(
         BufferingSettings* buffering /* nonnull */)
 {
     sp<MediaPlayerBase> p = getPlayer();
     // TODO: create mPlayer on demand.
     if (p == 0) return UNKNOWN_ERROR;
-    status_t ret = p->getDefaultBufferingSettings(buffering);
+    status_t ret = p->getBufferingSettings(buffering);
     if (ret == NO_ERROR) {
-        ALOGV("[%d] getDefaultBufferingSettings{%s}",
+        ALOGV("[%d] getBufferingSettings{%s}",
                 mConnId, buffering->toString().string());
     } else {
-        ALOGV("[%d] getDefaultBufferingSettings returned %d", mConnId, ret);
+        ALOGE("[%d] getBufferingSettings returned %d", mConnId, ret);
     }
     return ret;
 }
@@ -1281,6 +1264,14 @@
     return p->reset();
 }
 
+status_t MediaPlayerService::Client::notifyAt(int64_t mediaTimeUs)
+{
+    ALOGV("[%d] notifyAt(%lld)", mConnId, (long long)mediaTimeUs);
+    sp<MediaPlayerBase> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->notifyAt(mediaTimeUs);
+}
+
 status_t MediaPlayerService::Client::setAudioStreamType(audio_stream_type_t type)
 {
     ALOGV("[%d] setAudioStreamType(%d)", mConnId, type);
@@ -1539,6 +1530,42 @@
     return ret;
 }
 
+status_t MediaPlayerService::Client::setOutputDevice(audio_port_handle_t deviceId)
+{
+    ALOGV("[%d] setOutputDevice", mConnId);
+    {
+        Mutex::Autolock l(mLock);
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->setOutputDevice(deviceId);
+        }
+    }
+    return NO_INIT;
+}
+
+status_t MediaPlayerService::Client::getRoutedDeviceId(audio_port_handle_t* deviceId)
+{
+    ALOGV("[%d] getRoutedDeviceId", mConnId);
+    {
+        Mutex::Autolock l(mLock);
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->getRoutedDeviceId(deviceId);
+        }
+    }
+    return NO_INIT;
+}
+
+status_t MediaPlayerService::Client::enableAudioDeviceCallback(bool enabled)
+{
+    ALOGV("[%d] enableAudioDeviceCallback, %d", mConnId, enabled);
+    {
+        Mutex::Autolock l(mLock);
+        if (mAudioOutput.get() != nullptr) {
+            return mAudioOutput->enableAudioDeviceCallback(enabled);
+        }
+    }
+    return NO_INIT;
+}
+
 #if CALLBACK_ANTAGONIZER
 const int Antagonizer::interval = 10000; // 10 msecs
 
@@ -1577,7 +1604,7 @@
 #undef LOG_TAG
 #define LOG_TAG "AudioSink"
 MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId, uid_t uid, int pid,
-        const audio_attributes_t* attr)
+        const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
     : mCallback(NULL),
       mCallbackCookie(NULL),
       mCallbackData(NULL),
@@ -1594,7 +1621,11 @@
       mSendLevel(0.0),
       mAuxEffectId(0),
       mFlags(AUDIO_OUTPUT_FLAG_NONE),
-      mVolumeHandler(new VolumeHandler())
+      mVolumeHandler(new media::VolumeHandler()),
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mDeviceCallbackEnabled(false),
+      mDeviceCallback(deviceCallback)
 {
     ALOGV("AudioOutput(%d)", sessionId);
     if (attr != NULL) {
@@ -1980,7 +2011,9 @@
                     mUid,
                     mPid,
                     mAttributes,
-                    doNotReconnect);
+                    doNotReconnect,
+                    1.0f,  // default value for maxRequiredSpeed
+                    mSelectedDeviceId);
         } else {
             // TODO: Due to buffer memory concerns, we use a max target playback speed
             // based on mPlaybackRate at the time of open (instead of kMaxRequiredSpeed),
@@ -2007,7 +2040,8 @@
                     mPid,
                     mAttributes,
                     doNotReconnect,
-                    targetSpeed);
+                    targetSpeed,
+                    mSelectedDeviceId);
         }
 
         if ((t == 0) || (t->initCheck() != NO_ERROR)) {
@@ -2101,6 +2135,10 @@
             res = mTrack->attachAuxEffect(mAuxEffectId);
         }
     }
+    mTrack->setOutputDevice(mSelectedDeviceId);
+    if (mDeviceCallbackEnabled) {
+        mTrack->addAudioDeviceCallback(mDeviceCallback.promote());
+    }
     ALOGV("updateTrack() DONE status %d", res);
     return res;
 }
@@ -2316,6 +2354,45 @@
     return NO_ERROR;
 }
 
+status_t MediaPlayerService::AudioOutput::setOutputDevice(audio_port_handle_t deviceId)
+{
+    ALOGV("setOutputDevice(%d)", deviceId);
+    Mutex::Autolock lock(mLock);
+    mSelectedDeviceId = deviceId;
+    if (mTrack != 0) {
+        return mTrack->setOutputDevice(deviceId);
+    }
+    return NO_ERROR;
+}
+
+status_t MediaPlayerService::AudioOutput::getRoutedDeviceId(audio_port_handle_t* deviceId)
+{
+    ALOGV("getRoutedDeviceId");
+    Mutex::Autolock lock(mLock);
+    if (mTrack != 0) {
+        mRoutedDeviceId = mTrack->getRoutedDeviceId();
+    }
+    *deviceId = mRoutedDeviceId;
+    return NO_ERROR;
+}
+
+status_t MediaPlayerService::AudioOutput::enableAudioDeviceCallback(bool enabled)
+{
+    ALOGV("enableAudioDeviceCallback, %d", enabled);
+    Mutex::Autolock lock(mLock);
+    mDeviceCallbackEnabled = enabled;
+    if (mTrack != 0) {
+        status_t status;
+        if (enabled) {
+            status = mTrack->addAudioDeviceCallback(mDeviceCallback.promote());
+        } else {
+            status = mTrack->removeAudioDeviceCallback(mDeviceCallback.promote());
+        }
+        return status;
+    }
+    return NO_ERROR;
+}
+
 VolumeShaper::Status MediaPlayerService::AudioOutput::applyVolumeShaper(
                 const sp<VolumeShaper::Configuration>& configuration,
                 const sp<VolumeShaper::Operation>& operation)
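
The AudioOutput routing additions above follow a cache-and-reapply pattern: setOutputDevice() and enableAudioDeviceCallback() record the request under mLock, and open()/updateTrack() re-apply the cached device and callback whenever a new AudioTrack is created. A minimal standalone sketch of that pattern, using plain C++ with a hypothetical Track type in place of the Android AudioTrack/AudioSystem classes:

    // Sketch only: Track stands in for AudioTrack, DeviceId for audio_port_handle_t.
    #include <cstdio>
    #include <memory>
    #include <mutex>

    using DeviceId = int;
    constexpr DeviceId kDeviceNone = 0;   // stands in for AUDIO_PORT_HANDLE_NONE

    struct Track {
        void setOutputDevice(DeviceId id) { std::printf("routing track to device %d\n", id); }
    };

    class Output {
    public:
        // Remember the request even when no track exists yet.
        void setOutputDevice(DeviceId id) {
            std::lock_guard<std::mutex> l(mLock);
            mSelectedDeviceId = id;
            if (mTrack) mTrack->setOutputDevice(id);
        }

        // Called whenever the track is (re)created; cached routing is re-applied.
        void onTrackCreated(std::unique_ptr<Track> t) {
            std::lock_guard<std::mutex> l(mLock);
            mTrack = std::move(t);
            if (mSelectedDeviceId != kDeviceNone) {
                mTrack->setOutputDevice(mSelectedDeviceId);
            }
        }

    private:
        std::mutex mLock;
        DeviceId mSelectedDeviceId = kDeviceNone;
        std::unique_ptr<Track> mTrack;
    };

    int main() {
        Output out;
        out.setOutputDevice(7);                        // arrives before any track exists
        out.onTrackCreated(std::make_unique<Track>()); // cached device is applied here
        return 0;
    }

The cache matters because routing requests can arrive before any track exists and must survive track re-creation.
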
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index f043f32..58efa8c 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -78,8 +78,12 @@
         class CallbackData;
 
      public:
-                                AudioOutput(audio_session_t sessionId, uid_t uid, int pid,
-                                        const audio_attributes_t * attr);
+                                AudioOutput(
+                                        audio_session_t sessionId,
+                                        uid_t uid,
+                                        int pid,
+                                        const audio_attributes_t * attr,
+                                        const sp<AudioSystem::AudioDeviceCallback>& deviceCallback);
         virtual                 ~AudioOutput();
 
         virtual bool            ready() const { return mTrack != 0; }
@@ -132,10 +136,15 @@
         virtual status_t        setParameters(const String8& keyValuePairs);
         virtual String8         getParameters(const String8& keys);
 
-        virtual VolumeShaper::Status applyVolumeShaper(
-                                        const sp<VolumeShaper::Configuration>& configuration,
-                                        const sp<VolumeShaper::Operation>& operation) override;
-        virtual sp<VolumeShaper::State> getVolumeShaperState(int id) override;
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                                        const sp<media::VolumeShaper::Configuration>& configuration,
+                                        const sp<media::VolumeShaper::Operation>& operation) override;
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) override;
+
+        // AudioRouting
+        virtual status_t        setOutputDevice(audio_port_handle_t deviceId);
+        virtual status_t        getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t        enableAudioDeviceCallback(bool enabled);
 
     private:
         static void             setMinBufferCount();
@@ -165,7 +174,11 @@
         float                   mSendLevel;
         int                     mAuxEffectId;
         audio_output_flags_t    mFlags;
-        sp<VolumeHandler>       mVolumeHandler;
+        sp<media::VolumeHandler>       mVolumeHandler;
+        audio_port_handle_t     mSelectedDeviceId;
+        audio_port_handle_t     mRoutedDeviceId;
+        bool                    mDeviceCallbackEnabled;
+        wp<AudioSystem::AudioDeviceCallback>        mDeviceCallback;
         mutable Mutex           mLock;
 
         // static variables below not protected by mutex
@@ -228,8 +241,6 @@
                                        audio_session_t audioSessionId);
 
     virtual sp<IMediaCodecList> getCodecList() const;
-    virtual sp<IOMX>            getOMX();
-    virtual sp<IHDCP>           makeHDCP(bool createEncryptionModule);
 
     virtual sp<IRemoteDisplay> listenForRemoteDisplay(const String16 &opPackageName,
             const sp<IRemoteDisplayClient>& client, const String8& iface);
@@ -309,7 +320,7 @@
         virtual status_t        setVideoSurfaceTexture(
                                         const sp<IGraphicBufferProducer>& bufferProducer);
         virtual status_t        setBufferingSettings(const BufferingSettings& buffering) override;
-        virtual status_t        getDefaultBufferingSettings(
+        virtual status_t        getBufferingSettings(
                                         BufferingSettings* buffering /* nonnull */) override;
         virtual status_t        prepareAsync();
         virtual status_t        start();
@@ -327,6 +338,7 @@
         virtual status_t        getCurrentPosition(int* msec);
         virtual status_t        getDuration(int* msec);
         virtual status_t        reset();
+        virtual status_t        notifyAt(int64_t mediaTimeUs);
         virtual status_t        setAudioStreamType(audio_stream_type_t type);
         virtual status_t        setLooping(int loop);
         virtual status_t        setVolume(float leftVolume, float rightVolume);
@@ -343,10 +355,10 @@
         virtual status_t        getRetransmitEndpoint(struct sockaddr_in* endpoint);
         virtual status_t        setNextPlayer(const sp<IMediaPlayer>& player);
 
-        virtual VolumeShaper::Status applyVolumeShaper(
-                                        const sp<VolumeShaper::Configuration>& configuration,
-                                        const sp<VolumeShaper::Operation>& operation) override;
-        virtual sp<VolumeShaper::State> getVolumeShaperState(int id) override;
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                                        const sp<media::VolumeShaper::Configuration>& configuration,
+                                        const sp<media::VolumeShaper::Operation>& operation) override;
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) override;
 
         sp<MediaPlayerBase>     createPlayer(player_type playerType);
 
@@ -375,6 +387,10 @@
         // Modular DRM
         virtual status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t>& drmSessionId);
         virtual status_t releaseDrm();
+        // AudioRouting
+        virtual status_t setOutputDevice(audio_port_handle_t deviceId);
+        virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t enableAudioDeviceCallback(bool enabled);
 
     private:
         class ServiceDeathNotifier:
@@ -404,6 +420,21 @@
             wp<MediaPlayerBase> mListener;
         };
 
+        class AudioDeviceUpdatedNotifier: public AudioSystem::AudioDeviceCallback
+        {
+        public:
+            AudioDeviceUpdatedNotifier(const sp<MediaPlayerBase>& listener) {
+                mListener = listener;
+            }
+            ~AudioDeviceUpdatedNotifier() {}
+
+            virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+                                             audio_port_handle_t deviceId);
+
+        private:
+            wp<MediaPlayerBase> mListener;
+        };
+
         void clearDeathNotifiers_l();
 
         friend class MediaPlayerService;
@@ -467,6 +498,7 @@
 
         sp<ServiceDeathNotifier> mExtractorDeathListener;
         sp<ServiceDeathNotifier> mCodecDeathListener;
+        sp<AudioDeviceUpdatedNotifier> mAudioDeviceUpdatedListener;
 #if CALLBACK_ANTAGONIZER
                     Antagonizer*                mAntagonizer;
 #endif
@@ -481,7 +513,6 @@
                 SortedVector< wp<Client> >  mClients;
                 SortedVector< wp<MediaRecorderClient> > mMediaRecorderClients;
                 int32_t                     mNextConnId;
-                sp<IOMX>                    mOMX;
 };
 
 // ----------------------------------------------------------------------------
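
The AudioDeviceUpdatedNotifier declared above holds only a weak reference (wp<MediaPlayerBase>) and promotes it before forwarding MEDIA_AUDIO_ROUTING_CHANGED, so the routing callback never extends the player's lifetime and late callbacks are dropped safely. A small sketch of the promote-before-notify pattern with std::weak_ptr; the Listener type and event value are illustrative stand-ins, not the framework types:

    // Sketch only: Listener stands in for MediaPlayerBase; the event value is a placeholder.
    #include <cstdio>
    #include <memory>

    constexpr int kRoutingChangedEvent = 1;   // placeholder, not the real MEDIA_AUDIO_ROUTING_CHANGED value

    struct Listener {
        void sendEvent(int what, int ext1, int ext2) {
            std::printf("event %d (io=%d, device=%d)\n", what, ext1, ext2);
        }
    };

    class DeviceUpdatedNotifier {
    public:
        explicit DeviceUpdatedNotifier(const std::shared_ptr<Listener>& l) : mListener(l) {}

        void onAudioDeviceUpdate(int audioIo, int deviceId) {
            // Promote the weak reference; if the player is already gone, drop the event.
            if (auto listener = mListener.lock()) {
                listener->sendEvent(kRoutingChangedEvent, audioIo, deviceId);
            } else {
                std::printf("listener is gone, dropping routing update\n");
            }
        }

    private:
        std::weak_ptr<Listener> mListener;   // does not keep the player alive
    };

    int main() {
        auto player = std::make_shared<Listener>();
        DeviceUpdatedNotifier notifier(player);
        notifier.onAudioDeviceUpdate(1, 42);  // delivered
        player.reset();
        notifier.onAudioDeviceUpdate(1, 43);  // dropped: the player no longer exists
        return 0;
    }
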
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 9b9b3bb..dcd393b 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -411,6 +411,25 @@
     }
 }
 
+MediaRecorderClient::AudioDeviceUpdatedNotifier::AudioDeviceUpdatedNotifier(
+        const sp<IMediaRecorderClient>& listener) {
+    mListener = listener;
+}
+
+MediaRecorderClient::AudioDeviceUpdatedNotifier::~AudioDeviceUpdatedNotifier() {
+}
+
+void MediaRecorderClient::AudioDeviceUpdatedNotifier::onAudioDeviceUpdate(
+        audio_io_handle_t audioIo,
+        audio_port_handle_t deviceId) {
+    sp<IMediaRecorderClient> listener = mListener.promote();
+    if (listener != NULL) {
+        listener->notify(MEDIA_RECORDER_AUDIO_ROUTING_CHANGED, audioIo, deviceId);
+    } else {
+        ALOGW("listener is gone, dropping event %d", MEDIA_RECORDER_AUDIO_ROUTING_CHANGED);
+    }
+}
+
 void MediaRecorderClient::clearDeathNotifiers_l() {
     if (mCameraDeathListener != nullptr) {
         mCameraDeathListener->unlinkToDeath();
@@ -450,27 +469,17 @@
     }
     sCameraChecked = true;
 
-    if (property_get_bool("persist.media.treble_omx", true)) {
-        // Treble IOmx
-        sp<IOmx> omx = IOmx::getService();
-        if (omx == nullptr) {
-            ALOGE("Treble IOmx not available");
-            return NO_INIT;
-        }
-        mCodecDeathListener = new ServiceDeathNotifier(omx, listener,
-                MediaPlayerService::MEDIACODEC_PROCESS_DEATH);
-        omx->linkToDeath(mCodecDeathListener, 0);
-    } else {
-        // Legacy IOMX
-        binder = sm->getService(String16("media.codec"));
-        if (binder == NULL) {
-           ALOGE("Unable to connect to media codec service");
-           return NO_INIT;
-        }
-        mCodecDeathListener = new ServiceDeathNotifier(binder, listener,
-                MediaPlayerService::MEDIACODEC_PROCESS_DEATH);
-        binder->linkToDeath(mCodecDeathListener);
+    sp<IOmx> omx = IOmx::getService();
+    if (omx == nullptr) {
+        ALOGE("IOmx service is not available");
+        return NO_INIT;
     }
+    mCodecDeathListener = new ServiceDeathNotifier(omx, listener,
+            MediaPlayerService::MEDIACODEC_PROCESS_DEATH);
+    omx->linkToDeath(mCodecDeathListener, 0);
+
+    mAudioDeviceUpdatedNotifier = new AudioDeviceUpdatedNotifier(listener);
+    mRecorder->setAudioDeviceCallback(mAudioDeviceUpdatedNotifier);
 
     return OK;
 }
@@ -492,4 +501,30 @@
     return OK;
 }
 
+status_t MediaRecorderClient::setInputDevice(audio_port_handle_t deviceId) {
+    ALOGV("setInputDevice(%d)", deviceId);
+    Mutex::Autolock lock(mLock);
+    if (mRecorder != NULL) {
+        return mRecorder->setInputDevice(deviceId);
+    }
+    return NO_INIT;
+}
+
+status_t MediaRecorderClient::getRoutedDeviceId(audio_port_handle_t* deviceId) {
+    ALOGV("getRoutedDeviceId");
+    Mutex::Autolock lock(mLock);
+    if (mRecorder != NULL) {
+        return mRecorder->getRoutedDeviceId(deviceId);
+    }
+    return NO_INIT;
+}
+
+status_t MediaRecorderClient::enableAudioDeviceCallback(bool enabled) {
+    ALOGV("enableAudioDeviceCallback: %d", enabled);
+    Mutex::Autolock lock(mLock);
+    if (mRecorder != NULL) {
+        return mRecorder->enableAudioDeviceCallback(enabled);
+    }
+    return NO_INIT;
+}
 }; // namespace android
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 711db2c..538b461 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -18,6 +18,7 @@
 #ifndef ANDROID_MEDIARECORDERCLIENT_H
 #define ANDROID_MEDIARECORDERCLIENT_H
 
+#include <media/AudioSystem.h>
 #include <media/IMediaRecorder.h>
 
 #include <android/hardware/media/omx/1.0/IOmx.h>
@@ -58,6 +59,18 @@
         wp<IMediaRecorderClient> mListener;
     };
 
+    class AudioDeviceUpdatedNotifier: public AudioSystem::AudioDeviceCallback
+    {
+    public:
+        AudioDeviceUpdatedNotifier(const sp<IMediaRecorderClient>& listener);
+        virtual ~AudioDeviceUpdatedNotifier();
+        virtual void onAudioDeviceUpdate(
+                audio_io_handle_t audioIo,
+                audio_port_handle_t deviceId);
+    private:
+        wp<IMediaRecorderClient> mListener;
+    };
+
     void clearDeathNotifiers_l();
 
 public:
@@ -91,6 +104,9 @@
     virtual     status_t   dump(int fd, const Vector<String16>& args);
     virtual     status_t   setInputSurface(const sp<PersistentSurface>& surface);
     virtual     sp<IGraphicBufferProducer> querySurfaceMediaSource();
+    virtual     status_t   setInputDevice(audio_port_handle_t deviceId);
+    virtual     status_t   getRoutedDeviceId(audio_port_handle_t* deviceId);
+    virtual     status_t   enableAudioDeviceCallback(bool enabled);
 
 private:
     friend class           MediaPlayerService;  // for accessing private constructor
@@ -103,6 +119,7 @@
 
     sp<ServiceDeathNotifier> mCameraDeathListener;
     sp<ServiceDeathNotifier> mCodecDeathListener;
+    sp<AudioDeviceUpdatedNotifier> mAudioDeviceUpdatedNotifier;
 
     pid_t                  mPid;
     Mutex                  mLock;
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 5a468f3..16ed530 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -31,10 +31,11 @@
 #include <binder/MemoryHeapBase.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
+#include <media/DataSource.h>
 #include <media/IMediaHTTPService.h>
 #include <media/MediaMetadataRetrieverInterface.h>
 #include <media/MediaPlayerInterface.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/InterfaceUtils.h>
 #include <media/stagefright/Utils.h>
 #include <private/media/VideoFrame.h>
 #include "MetadataRetrieverClient.h"
@@ -180,7 +181,7 @@
     ALOGV("setDataSource(IDataSource)");
     Mutex::Autolock lock(mLock);
 
-    sp<DataSource> dataSource = DataSource::CreateFromIDataSource(source);
+    sp<DataSource> dataSource = CreateDataSourceFromIDataSource(source);
     player_type playerType =
         MediaPlayerFactory::getPlayerType(NULL /* client */, dataSource);
     ALOGV("player type = %d", playerType);
@@ -193,6 +194,25 @@
 
 Mutex MetadataRetrieverClient::sLock;
 
+static sp<IMemory> getThumbnail(VideoFrame* frame) {
+    std::unique_ptr<VideoFrame> frameDeleter(frame);
+
+    size_t size = frame->getFlattenedSize();
+    sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
+    if (heap == NULL) {
+        ALOGE("failed to create MemoryHeapBase");
+        return NULL;
+    }
+    sp<IMemory> thumbnail = new MemoryBase(heap, 0, size);
+    if (thumbnail == NULL) {
+        ALOGE("not enough memory for VideoFrame size=%zu", size);
+        return NULL;
+    }
+    VideoFrame *frameCopy = static_cast<VideoFrame *>(thumbnail->pointer());
+    frameCopy->copyFlattened(*frame);
+    return thumbnail;
+}
+
 sp<IMemory> MetadataRetrieverClient::getFrameAtTime(
         int64_t timeUs, int option, int colorFormat, bool metaOnly)
 {
@@ -205,29 +225,55 @@
         ALOGE("retriever is not initialized");
         return NULL;
     }
-    VideoFrame *frame = mRetriever->getFrameAtTime(
-            timeUs, option, colorFormat, metaOnly);
+    VideoFrame *frame = mRetriever->getFrameAtTime(timeUs, option, colorFormat, metaOnly);
     if (frame == NULL) {
         ALOGE("failed to capture a video frame");
         return NULL;
     }
-    size_t size = frame->getFlattenedSize();
-    sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
-    if (heap == NULL) {
-        ALOGE("failed to create MemoryDealer");
-        delete frame;
+    return getThumbnail(frame);
+}
+
+sp<IMemory> MetadataRetrieverClient::getImageAtIndex(
+        int index, int colorFormat, bool metaOnly) {
+    ALOGV("getImageAtIndex: index(%d) colorFormat(%d), metaOnly(%d)",
+            index, colorFormat, metaOnly);
+    Mutex::Autolock lock(mLock);
+    Mutex::Autolock glock(sLock);
+    mThumbnail.clear();
+    if (mRetriever == NULL) {
+        ALOGE("retriever is not initialized");
         return NULL;
     }
-    mThumbnail = new MemoryBase(heap, 0, size);
-    if (mThumbnail == NULL) {
-        ALOGE("not enough memory for VideoFrame size=%zu", size);
-        delete frame;
+    VideoFrame *frame = mRetriever->getImageAtIndex(index, colorFormat, metaOnly);
+    if (frame == NULL) {
+        ALOGE("failed to extract image");
         return NULL;
     }
-    VideoFrame *frameCopy = static_cast<VideoFrame *>(mThumbnail->pointer());
-    frameCopy->copyFlattened(*frame);
-    delete frame;  // Fix memory leakage
-    return mThumbnail;
+    return getThumbnail(frame);
+}
+
+status_t MetadataRetrieverClient::getFrameAtIndex(
+            std::vector<sp<IMemory> > *frames,
+            int frameIndex, int numFrames, int colorFormat, bool metaOnly) {
+    ALOGV("getFrameAtIndex: frameIndex(%d), numFrames(%d), colorFormat(%d), metaOnly(%d)",
+            frameIndex, numFrames, colorFormat, metaOnly);
+    Mutex::Autolock lock(mLock);
+    Mutex::Autolock glock(sLock);
+    if (mRetriever == NULL) {
+        ALOGE("retriever is not initialized");
+        return INVALID_OPERATION;
+    }
+
+    std::vector<VideoFrame*> videoFrames;
+    status_t err = mRetriever->getFrameAtIndex(
+            &videoFrames, frameIndex, numFrames, colorFormat, metaOnly);
+    if (err != OK) {
+        return err;
+    }
+    for (size_t i = 0; i < videoFrames.size(); i++) {
+        frames->push_back(getThumbnail(videoFrames[i]));
+    }
+    return OK;
 }
 
 sp<IMemory> MetadataRetrieverClient::extractAlbumArt()
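
The getThumbnail() helper factored out above takes ownership of the decoded VideoFrame (via std::unique_ptr), allocates a shared-memory region of getFlattenedSize() bytes, and copies the flattened frame into it so the result can be returned across Binder; the frame itself is freed on every path. A compact ownership-and-copy sketch in plain C++, with a hypothetical Frame type standing in for VideoFrame and a heap buffer standing in for MemoryHeapBase/MemoryBase:

    // Sketch only: Frame stands in for VideoFrame; a heap vector stands in for MemoryBase.
    #include <cstdio>
    #include <cstring>
    #include <memory>
    #include <vector>

    struct Frame {
        std::vector<unsigned char> pixels;
        size_t flattenedSize() const { return pixels.size(); }
        void flattenInto(unsigned char* dst) const {
            std::memcpy(dst, pixels.data(), pixels.size());
        }
    };

    // Takes ownership of the raw frame (as the unique_ptr frameDeleter does in the diff)
    // and returns a copy the caller can hand across a process boundary.
    std::shared_ptr<std::vector<unsigned char>> makeThumbnail(Frame* rawFrame) {
        std::unique_ptr<Frame> frame(rawFrame);        // freed on every return path
        auto out = std::make_shared<std::vector<unsigned char>>(frame->flattenedSize());
        frame->flattenInto(out->data());
        return out;
    }

    int main() {
        auto* decoded = new Frame{{1, 2, 3, 4}};
        auto thumb = makeThumbnail(decoded);           // decoded is owned and released inside
        std::printf("copied %zu bytes\n", thumb->size());
        return 0;
    }
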
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index c78cd4b..f71891a 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -52,6 +52,11 @@
     virtual status_t                setDataSource(const sp<IDataSource>& source, const char *mime);
     virtual sp<IMemory>             getFrameAtTime(
             int64_t timeUs, int option, int colorFormat, bool metaOnly);
+    virtual sp<IMemory>             getImageAtIndex(
+            int index, int colorFormat, bool metaOnly);
+    virtual status_t getFrameAtIndex(
+                std::vector<sp<IMemory> > *frames,
+                int frameIndex, int numFrames, int colorFormat, bool metaOnly);
     virtual sp<IMemory>             extractAlbumArt();
     virtual const char*             extractMetadata(int keyCode);
 
diff --git a/media/libmediaplayerservice/RemoteDisplay.cpp b/media/libmediaplayerservice/RemoteDisplay.cpp
deleted file mode 100644
index 0eb4b5d..0000000
--- a/media/libmediaplayerservice/RemoteDisplay.cpp
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "RemoteDisplay.h"
-
-#include "source/WifiDisplaySource.h"
-
-#include <media/IRemoteDisplayClient.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-
-namespace android {
-
-RemoteDisplay::RemoteDisplay(
-        const String16 &opPackageName,
-        const sp<IRemoteDisplayClient> &client,
-        const char *iface)
-    : mLooper(new ALooper),
-      mNetSession(new ANetworkSession) {
-    mLooper->setName("wfd_looper");
-
-    mSource = new WifiDisplaySource(opPackageName, mNetSession, client);
-    mLooper->registerHandler(mSource);
-
-    mNetSession->start();
-    mLooper->start();
-
-    mSource->start(iface);
-}
-
-RemoteDisplay::~RemoteDisplay() {
-}
-
-status_t RemoteDisplay::pause() {
-    return mSource->pause();
-}
-
-status_t RemoteDisplay::resume() {
-    return mSource->resume();
-}
-
-status_t RemoteDisplay::dispose() {
-    mSource->stop();
-    mSource.clear();
-
-    mLooper->stop();
-    mNetSession->stop();
-
-    return OK;
-}
-
-}  // namespace android
diff --git a/media/libmediaplayerservice/RemoteDisplay.h b/media/libmediaplayerservice/RemoteDisplay.h
deleted file mode 100644
index d4573e9..0000000
--- a/media/libmediaplayerservice/RemoteDisplay.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef REMOTE_DISPLAY_H_
-
-#define REMOTE_DISPLAY_H_
-
-#include <media/IMediaPlayerService.h>
-#include <media/IRemoteDisplay.h>
-#include <media/stagefright/foundation/ABase.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-
-namespace android {
-
-struct ALooper;
-struct ANetworkSession;
-class IRemoteDisplayClient;
-struct WifiDisplaySource;
-
-struct RemoteDisplay : public BnRemoteDisplay {
-    RemoteDisplay(
-            const String16 &opPackageName,
-            const sp<IRemoteDisplayClient> &client,
-            const char *iface);
-
-    virtual status_t pause();
-    virtual status_t resume();
-    virtual status_t dispose();
-
-protected:
-    virtual ~RemoteDisplay();
-
-private:
-    sp<ALooper> mNetLooper;
-    sp<ALooper> mLooper;
-    sp<ANetworkSession> mNetSession;
-    sp<WifiDisplaySource> mSource;
-
-    DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplay);
-};
-
-}  // namespace android
-
-#endif  // REMOTE_DISPLAY_H_
-
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 89354d6..77eaefe 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -103,7 +103,9 @@
       mOutputFd(-1),
       mAudioSource(AUDIO_SOURCE_CNT),
       mVideoSource(VIDEO_SOURCE_LIST_END),
-      mStarted(false) {
+      mStarted(false),
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mDeviceCallbackEnabled(false) {
 
     ALOGV("Constructor");
 
@@ -204,7 +206,7 @@
     return OK;
 }
 
-// The client side of mediaserver asks it to creat a SurfaceMediaSource
+// The client side of mediaserver asks it to create a SurfaceMediaSource
 // and return a interface reference. The client side will use that
 // while encoding GL Frames
 sp<IGraphicBufferProducer> StagefrightRecorder::querySurfaceMediaSource() const {
@@ -1069,7 +1071,8 @@
                 mAudioChannels,
                 mSampleRate,
                 mClientUid,
-                mClientPid);
+                mClientPid,
+                mSelectedDeviceId);
 
     status_t err = audioSource->initCheck();
 
@@ -1120,6 +1123,10 @@
 
     sp<MediaCodecSource> audioEncoder =
             MediaCodecSource::Create(mLooper, format, audioSource);
+    sp<AudioSystem::AudioDeviceCallback> callback = mAudioDeviceCallback.promote();
+    if (mDeviceCallbackEnabled && callback != 0) {
+        audioSource->addAudioDeviceCallback(callback);
+    }
     mAudioSourceNode = audioSource;
 
     if (audioEncoder == NULL) {
@@ -2116,6 +2123,46 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setInputDevice(audio_port_handle_t deviceId) {
+    ALOGV("setInputDevice");
+
+    if (mSelectedDeviceId != deviceId) {
+        mSelectedDeviceId = deviceId;
+        if (mAudioSourceNode != 0) {
+            return mAudioSourceNode->setInputDevice(deviceId);
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t StagefrightRecorder::getRoutedDeviceId(audio_port_handle_t* deviceId) {
+    ALOGV("getRoutedDeviceId");
+
+    if (mAudioSourceNode != 0) {
+        status_t status = mAudioSourceNode->getRoutedDeviceId(deviceId);
+        return status;
+    }
+    return NO_INIT;
+}
+
+void StagefrightRecorder::setAudioDeviceCallback(
+        const sp<AudioSystem::AudioDeviceCallback>& callback) {
+    mAudioDeviceCallback = callback;
+}
+
+status_t StagefrightRecorder::enableAudioDeviceCallback(bool enabled) {
+    mDeviceCallbackEnabled = enabled;
+    sp<AudioSystem::AudioDeviceCallback> callback = mAudioDeviceCallback.promote();
+    if (mAudioSourceNode != 0 && callback != 0) {
+        if (enabled) {
+            return mAudioSourceNode->addAudioDeviceCallback(callback);
+        } else {
+            return mAudioSourceNode->removeAudioDeviceCallback(callback);
+        }
+    }
+    return NO_ERROR;
+}
+
 status_t StagefrightRecorder::dump(
         int fd, const Vector<String16>& args) const {
     ALOGV("dump");
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 9a6c4da..ec7e8ed 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -25,7 +25,7 @@
 
 #include <system/audio.h>
 
-#include <MetadataBufferType.h>
+#include <media/hardware/MetadataBufferType.h>
 
 namespace android {
 
@@ -72,6 +72,10 @@
     virtual status_t dump(int fd, const Vector<String16> &args) const;
     // Querying a SurfaceMediaSourcer
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const;
+    virtual status_t setInputDevice(audio_port_handle_t deviceId);
+    virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+    virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
+    virtual status_t enableAudioDeviceCallback(bool enabled);
 
 private:
     mutable Mutex mLock;
@@ -144,6 +148,10 @@
     sp<IGraphicBufferProducer> mGraphicBufferProducer;
     sp<ALooper> mLooper;
 
+    audio_port_handle_t mSelectedDeviceId;
+    bool mDeviceCallbackEnabled;
+    wp<AudioSystem::AudioDeviceCallback> mAudioDeviceCallback;
+
     static const int kMaxHighSpeedFps = 1000;
 
     status_t prepareInternal();
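
StagefrightRecorder mirrors the player-side routing support: setInputDevice() and setAudioDeviceCallback() only cache state (mSelectedDeviceId, a weak mAudioDeviceCallback, the mDeviceCallbackEnabled flag), and the cached values are applied once the AudioSource actually exists, either when the source is created or when enableAudioDeviceCallback() is toggled later. A brief sketch of the toggle-with-deferred-application idea, with a hypothetical Source type standing in for AudioSource:

    // Sketch only: Source stands in for AudioSource.
    #include <cstdio>
    #include <memory>

    struct Source {
        void addCallback()    { std::printf("device callback attached\n"); }
        void removeCallback() { std::printf("device callback detached\n"); }
    };

    class Recorder {
    public:
        // The toggle is remembered even when no source exists yet.
        void enableCallback(bool enabled) {
            mCallbackEnabled = enabled;
            if (mSource) {
                if (enabled) {
                    mSource->addCallback();
                } else {
                    mSource->removeCallback();
                }
            }
        }

        // When the source is finally created, the cached flag is applied.
        void onSourceCreated(std::unique_ptr<Source> s) {
            mSource = std::move(s);
            if (mCallbackEnabled) {
                mSource->addCallback();
            }
        }

    private:
        bool mCallbackEnabled = false;
        std::unique_ptr<Source> mSource;
    };

    int main() {
        Recorder r;
        r.enableCallback(true);                          // only remembered
        r.onSourceCreated(std::make_unique<Source>());   // attached now
        r.enableCallback(false);                         // detached immediately
        return 0;
    }
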
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 34f1fe4..4b33e61 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -146,10 +146,15 @@
         virtual status_t    setParameters(const String8& /* keyValuePairs */) { return NO_ERROR; }
         virtual String8     getParameters(const String8& /* keys */) { return String8::empty(); }
 
-        virtual VolumeShaper::Status applyVolumeShaper(
-                                    const sp<VolumeShaper::Configuration>& configuration,
-                                    const sp<VolumeShaper::Operation>& operation);
-        virtual sp<VolumeShaper::State> getVolumeShaperState(int id);
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                                    const sp<media::VolumeShaper::Configuration>& configuration,
+                                    const sp<media::VolumeShaper::Operation>& operation);
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id);
+
+        // AudioRouting
+        virtual status_t    setOutputDevice(audio_port_handle_t deviceId);
+        virtual status_t    getRoutedDeviceId(audio_port_handle_t* deviceId);
+        virtual status_t    enableAudioDeviceCallback(bool enabled);
     };
 
                         MediaPlayerBase() : mClient(0), mNotify(0) {}
@@ -180,7 +185,7 @@
     virtual status_t    setVideoSurfaceTexture(
                                 const sp<IGraphicBufferProducer>& bufferProducer) = 0;
 
-    virtual status_t    getDefaultBufferingSettings(
+    virtual status_t    getBufferingSettings(
                                 BufferingSettings* buffering /* nonnull */) {
         *buffering = BufferingSettings();
         return OK;
@@ -225,6 +230,9 @@
     virtual status_t    getCurrentPosition(int *msec) = 0;
     virtual status_t    getDuration(int *msec) = 0;
     virtual status_t    reset() = 0;
+    virtual status_t    notifyAt(int64_t /* mediaTimeUs */) {
+        return INVALID_OPERATION;
+    }
     virtual status_t    setLooping(int loop) = 0;
     virtual player_type playerType() = 0;
     virtual status_t    setParameter(int key, const Parcel &request) = 0;
diff --git a/media/libstagefright/foundation/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
similarity index 98%
rename from media/libstagefright/foundation/AWakeLock.cpp
rename to media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index d9277ac..684ba2e 100644
--- a/media/libstagefright/foundation/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -18,11 +18,11 @@
 #define LOG_TAG "AWakeLock"
 #include <utils/Log.h>
 
-#include "ADebug.h"
 #include "AWakeLock.h"
 
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <powermanager/PowerManager.h>
 
 
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AWakeLock.h b/media/libmediaplayerservice/nuplayer/AWakeLock.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AWakeLock.h
rename to media/libmediaplayerservice/nuplayer/AWakeLock.h
diff --git a/media/libmediaplayerservice/nuplayer/Android.bp b/media/libmediaplayerservice/nuplayer/Android.bp
new file mode 100644
index 0000000..645bb7a
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/Android.bp
@@ -0,0 +1,66 @@
+cc_library_static {
+
+    srcs: [
+        "AWakeLock.cpp",
+        "GenericSource.cpp",
+        "HTTPLiveSource.cpp",
+        "NuPlayer.cpp",
+        "NuPlayerCCDecoder.cpp",
+        "NuPlayerDecoder.cpp",
+        "NuPlayerDecoderBase.cpp",
+        "NuPlayerDecoderPassThrough.cpp",
+        "NuPlayerDriver.cpp",
+        "NuPlayerDrm.cpp",
+        "NuPlayerRenderer.cpp",
+        "NuPlayerStreamListener.cpp",
+        "RTSPSource.cpp",
+        "StreamingSource.cpp",
+    ],
+
+    header_libs: [
+        "media_plugin_headers",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+        "frameworks/av/media/libstagefright/httplive",
+        "frameworks/av/media/libstagefright/include",
+        "frameworks/av/media/libstagefright/mpeg2ts",
+        "frameworks/av/media/libstagefright/rtsp",
+        "frameworks/av/media/libstagefright/timedtext",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    product_variables: {
+        debuggable: {
+            cflags: [
+                "-DENABLE_STAGEFRIGHT_EXPERIMENTS",
+            ],
+        }
+    },
+
+    shared_libs: [
+        "libbinder",
+        "libui",
+        "libgui",
+        "libmedia",
+        "libmediadrm",
+        "libpowermanager",
+    ],
+
+    name: "libstagefright_nuplayer",
+
+    tags: ["eng"],
+
+    sanitize: {
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+
+}
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
deleted file mode 100644
index c582631..0000000
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ /dev/null
@@ -1,50 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=                       \
-        GenericSource.cpp               \
-        HTTPLiveSource.cpp              \
-        NuPlayer.cpp                    \
-        NuPlayerCCDecoder.cpp           \
-        NuPlayerDecoder.cpp             \
-        NuPlayerDecoderBase.cpp         \
-        NuPlayerDecoderPassThrough.cpp  \
-        NuPlayerDriver.cpp              \
-        NuPlayerDrm.cpp                 \
-        NuPlayerRenderer.cpp            \
-        NuPlayerStreamListener.cpp      \
-        RTSPSource.cpp                  \
-        StreamingSource.cpp             \
-
-LOCAL_C_INCLUDES := \
-	frameworks/av/media/libstagefright                     \
-	frameworks/av/media/libstagefright/httplive            \
-	frameworks/av/media/libstagefright/include             \
-	frameworks/av/media/libstagefright/mpeg2ts             \
-	frameworks/av/media/libstagefright/rtsp                \
-	frameworks/av/media/libstagefright/timedtext           \
-	frameworks/av/media/libmediaplayerservice              \
-	frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Werror -Wall
-
-# enable experiments only in userdebug and eng builds
-ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
-LOCAL_CFLAGS += -DENABLE_STAGEFRIGHT_EXPERIMENTS
-endif
-
-LOCAL_SHARED_LIBRARIES :=       \
-    libbinder                   \
-    libui                       \
-    libgui                      \
-    libmedia                    \
-    libmediadrm                 \
-
-LOCAL_MODULE:= libstagefright_nuplayer
-
-LOCAL_MODULE_TAGS := eng
-
-LOCAL_SANITIZE := cfi
-LOCAL_SANITIZE_DIAG := cfi
-
-include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index aa21fff..33c3094 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -23,17 +23,20 @@
 #include "AnotherPacketSource.h"
 #include <binder/IServiceManager.h>
 #include <cutils/properties.h>
-#include <media/IMediaExtractorService.h>
+#include <media/DataSource.h>
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
 #include <media/IMediaHTTPService.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/DataSourceFactory.h>
 #include <media/stagefright/FileSource.h>
+#include <media/stagefright/InterfaceUtils.h>
 #include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaClock.h>
 #include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
 #include "../../libstagefright/include/NuCachedSource2.h"
@@ -41,22 +44,26 @@
 
 namespace android {
 
-static const int kLowWaterMarkMs          = 2000;  // 2secs
-static const int kHighWaterMarkMs         = 5000;  // 5secs
-static const int kHighWaterMarkRebufferMs = 15000;  // 15secs
+static const int kInitialMarkMs        = 5000;  // 5secs
 
-static const int kLowWaterMarkKB  = 40;
-static const int kHighWaterMarkKB = 200;
+//static const int kPausePlaybackMarkMs  = 2000;  // 2secs
+static const int kResumePlaybackMarkMs = 15000;  // 15secs
 
 NuPlayer::GenericSource::GenericSource(
         const sp<AMessage> &notify,
         bool uidValid,
-        uid_t uid)
+        uid_t uid,
+        const sp<MediaClock> &mediaClock)
     : Source(notify),
       mAudioTimeUs(0),
       mAudioLastDequeueTimeUs(0),
       mVideoTimeUs(0),
       mVideoLastDequeueTimeUs(0),
+      mPrevBufferPercentage(-1),
+      mPollBufferingGeneration(0),
+      mSentPauseOnBuffering(false),
+      mAudioDataGeneration(0),
+      mVideoDataGeneration(0),
       mFetchSubtitleDataGeneration(0),
       mFetchTimedTextDataGeneration(0),
       mDurationUs(-1ll),
@@ -65,12 +72,15 @@
       mIsStreaming(false),
       mUIDValid(uidValid),
       mUID(uid),
+      mMediaClock(mediaClock),
       mFd(-1),
       mBitrate(-1ll),
       mPendingReadBufferTypes(0) {
     ALOGV("GenericSource");
+    CHECK(mediaClock != NULL);
 
-    mBufferingMonitor = new BufferingMonitor(notify);
+    mBufferingSettings.mInitialMarkMs = kInitialMarkMs;
+    mBufferingSettings.mResumePlaybackMarkMs = kResumePlaybackMarkMs;
     resetDataSource();
 }
 
@@ -79,6 +89,7 @@
 
     mHTTPService.clear();
     mHttpSource.clear();
+    mDisconnected = false;
     mUri.clear();
     mUriHeaders.clear();
     if (mFd >= 0) {
@@ -88,14 +99,7 @@
     mOffset = 0;
     mLength = 0;
     mStarted = false;
-    mStopRead = true;
-
-    if (mBufferingMonitorLooper != NULL) {
-        mBufferingMonitorLooper->unregisterHandler(mBufferingMonitor->id());
-        mBufferingMonitorLooper->stop();
-        mBufferingMonitorLooper = NULL;
-    }
-    mBufferingMonitor->stop();
+    mPreparing = false;
 
     mIsDrmProtected = false;
     mIsDrmReleased = false;
@@ -107,6 +111,7 @@
         const sp<IMediaHTTPService> &httpService,
         const char *url,
         const KeyedVector<String8, String8> *headers) {
+    Mutex::Autolock _l(mLock);
     ALOGV("setDataSource url: %s", url);
 
     resetDataSource();
@@ -125,6 +130,7 @@
 
 status_t NuPlayer::GenericSource::setDataSource(
         int fd, int64_t offset, int64_t length) {
+    Mutex::Autolock _l(mLock);
     ALOGV("setDataSource %d/%lld/%lld", fd, (long long)offset, (long long)length);
 
     resetDataSource();
@@ -139,6 +145,7 @@
 }
 
 status_t NuPlayer::GenericSource::setDataSource(const sp<DataSource>& source) {
+    Mutex::Autolock _l(mLock);
     ALOGV("setDataSource (source: %p)", source.get());
 
     resetDataSource();
@@ -147,21 +154,52 @@
 }
 
 sp<MetaData> NuPlayer::GenericSource::getFileFormatMeta() const {
+    Mutex::Autolock _l(mLock);
     return mFileMeta;
 }
 
 status_t NuPlayer::GenericSource::initFromDataSource() {
     sp<IMediaExtractor> extractor;
-    CHECK(mDataSource != NULL);
+    CHECK(mDataSource != NULL || mFd != -1);
+    sp<DataSource> dataSource = mDataSource;
+    const int fd = mFd;
+    const int64_t offset = mOffset;
+    const int64_t length = mLength;
 
-    extractor = MediaExtractor::Create(mDataSource, NULL);
+    mLock.unlock();
+    // This might take long time if data source is not reliable.
+    if (dataSource != nullptr) {
+        extractor = MediaExtractorFactory::Create(dataSource, NULL /* mime */);
+    } else {
+        extractor = MediaExtractorFactory::CreateFromFd(
+                fd, offset, length, NULL /* mime */, &dataSource);
+    }
 
-    if (extractor == NULL) {
-        ALOGE("initFromDataSource, cannot create extractor!");
+    if (dataSource == nullptr) {
+        ALOGE("initFromDataSource, failed to create data source!");
+        mLock.lock();
         return UNKNOWN_ERROR;
     }
 
-    mFileMeta = extractor->getMetaData();
+    if (extractor == NULL) {
+        ALOGE("initFromDataSource, cannot create extractor!");
+        mLock.lock();
+        return UNKNOWN_ERROR;
+    }
+
+    sp<MetaData> fileMeta = extractor->getMetaData();
+
+    size_t numtracks = extractor->countTracks();
+    if (numtracks == 0) {
+        ALOGE("initFromDataSource, source has no track!");
+        mLock.lock();
+        return UNKNOWN_ERROR;
+    }
+
+    mLock.lock();
+    mFd = -1;
+    mDataSource = dataSource;
+    mFileMeta = fileMeta;
     if (mFileMeta != NULL) {
         int64_t duration;
         if (mFileMeta->findInt64(kKeyDuration, &duration)) {
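
A recurring change in GenericSource is visible in the hunk above: initFromDataSource() now drops mLock around the potentially slow extractor work, then re-acquires it and publishes the results only afterwards; further down, the network path of onPrepareAsync() does the same and re-checks mDisconnected before storing the new data source, so a concurrent disconnect() is neither blocked nor lost. A compact sketch of that drop-lock-and-revalidate pattern in standard C++ (the Connection type is illustrative):

    // Sketch only: Connection stands in for the data source / extractor being created.
    #include <cstdio>
    #include <memory>
    #include <mutex>
    #include <string>

    struct Connection { std::string uri; };

    class Source {
    public:
        void disconnect() {
            std::lock_guard<std::mutex> l(mLock);
            mDisconnected = true;                   // can happen while prepare() is blocked
        }

        void prepare(const std::string& uri) {
            std::unique_lock<std::mutex> l(mLock);
            l.unlock();                             // don't hold the lock across slow I/O
            auto conn = std::make_unique<Connection>(Connection{uri});   // may block for a long time
            l.lock();
            if (mDisconnected) {                    // re-validate after re-acquiring the lock
                std::printf("disconnected while connecting; dropping result\n");
                return;
            }
            mConnection = std::move(conn);
            std::printf("prepared %s\n", mConnection->uri.c_str());
        }

    private:
        std::mutex mLock;
        bool mDisconnected = false;
        std::unique_ptr<Connection> mConnection;
    };

    int main() {
        Source s;
        s.prepare("http://example.com/clip.mp4");
        return 0;
    }
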
@@ -171,12 +209,6 @@
 
     int32_t totalBitrate = 0;
 
-    size_t numtracks = extractor->countTracks();
-    if (numtracks == 0) {
-        ALOGE("initFromDataSource, source has no track!");
-        return UNKNOWN_ERROR;
-    }
-
     mMimes.clear();
 
     for (size_t i = 0; i < numtracks; ++i) {
@@ -259,14 +291,23 @@
     return OK;
 }
 
-status_t NuPlayer::GenericSource::getDefaultBufferingSettings(
+status_t NuPlayer::GenericSource::getBufferingSettings(
         BufferingSettings* buffering /* nonnull */) {
-    mBufferingMonitor->getDefaultBufferingSettings(buffering);
+    {
+        Mutex::Autolock _l(mLock);
+        *buffering = mBufferingSettings;
+    }
+
+    ALOGV("getBufferingSettings{%s}", buffering->toString().string());
     return OK;
 }
 
 status_t NuPlayer::GenericSource::setBufferingSettings(const BufferingSettings& buffering) {
-    return mBufferingMonitor->setBufferingSettings(buffering);
+    ALOGV("setBufferingSettings{%s}", buffering.toString().string());
+
+    Mutex::Autolock _l(mLock);
+    mBufferingSettings = buffering;
+    return OK;
 }
 
 status_t NuPlayer::GenericSource::startSources() {
@@ -300,22 +341,11 @@
     }
 }
 
-status_t NuPlayer::GenericSource::setBuffers(
-        bool audio, Vector<MediaBuffer *> &buffers) {
-    if (mIsSecure && !audio && mVideoTrack.mSource != NULL) {
-        return mVideoTrack.mSource->setBuffers(buffers);
-    }
-    return INVALID_OPERATION;
-}
-
 bool NuPlayer::GenericSource::isStreaming() const {
+    Mutex::Autolock _l(mLock);
     return mIsStreaming;
 }
 
-void NuPlayer::GenericSource::setOffloadAudio(bool offload) {
-    mBufferingMonitor->setOffloadAudio(offload);
-}
-
 NuPlayer::GenericSource::~GenericSource() {
     ALOGV("~GenericSource");
     if (mLooper != NULL) {
@@ -326,6 +356,7 @@
 }
 
 void NuPlayer::GenericSource::prepareAsync() {
+    Mutex::Autolock _l(mLock);
     ALOGV("prepareAsync: (looper: %d)", (mLooper != NULL));
 
     if (mLooper == NULL) {
@@ -354,7 +385,7 @@
             String8 contentType;
 
             if (!strncasecmp("http://", uri, 7) || !strncasecmp("https://", uri, 8)) {
-                mHttpSource = DataSource::CreateMediaHTTP(mHTTPService);
+                mHttpSource = DataSourceFactory::CreateMediaHTTP(mHTTPService);
                 if (mHttpSource == NULL) {
                     ALOGE("Failed to create http source!");
                     notifyPreparedAndCleanup(UNKNOWN_ERROR);
@@ -362,54 +393,24 @@
                 }
             }
 
-            mDataSource = DataSource::CreateFromURI(
+            mLock.unlock();
+            // This might take long time if connection has some issue.
+            sp<DataSource> dataSource = DataSourceFactory::CreateFromURI(
                    mHTTPService, uri, &mUriHeaders, &contentType,
                    static_cast<HTTPBase *>(mHttpSource.get()));
-        } else {
-            if (property_get_bool("media.stagefright.extractremote", true) &&
-                    !FileSource::requiresDrm(mFd, mOffset, mLength, nullptr /* mime */)) {
-                sp<IBinder> binder =
-                        defaultServiceManager()->getService(String16("media.extractor"));
-                if (binder != nullptr) {
-                    ALOGD("FileSource remote");
-                    sp<IMediaExtractorService> mediaExService(
-                            interface_cast<IMediaExtractorService>(binder));
-                    sp<IDataSource> source =
-                            mediaExService->makeIDataSource(mFd, mOffset, mLength);
-                    ALOGV("IDataSource(FileSource): %p %d %lld %lld",
-                            source.get(), mFd, (long long)mOffset, (long long)mLength);
-                    if (source.get() != nullptr) {
-                        mDataSource = DataSource::CreateFromIDataSource(source);
-                        if (mDataSource != nullptr) {
-                            // Close the local file descriptor as it is not needed anymore.
-                            close(mFd);
-                            mFd = -1;
-                        }
-                    } else {
-                        ALOGW("extractor service cannot make data source");
-                    }
-                } else {
-                    ALOGW("extractor service not running");
-                }
+            mLock.lock();
+            if (!mDisconnected) {
+                mDataSource = dataSource;
             }
-            if (mDataSource == nullptr) {
-                ALOGD("FileSource local");
-                mDataSource = new FileSource(mFd, mOffset, mLength);
-            }
-            // TODO: close should always be done on mFd, see the lines following
-            // DataSource::CreateFromIDataSource above,
-            // and the FileSource constructor should dup the mFd argument as needed.
-            mFd = -1;
         }
-
-        if (mDataSource == NULL) {
+        if (mFd == -1 && mDataSource == NULL) {
             ALOGE("Failed to create data source!");
             notifyPreparedAndCleanup(UNKNOWN_ERROR);
             return;
         }
     }
 
-    if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
+    if (mDataSource != nullptr && mDataSource->flags() & DataSource::kIsCachingDataSource) {
         mCachedSource = static_cast<NuCachedSource2 *>(mDataSource.get());
     }
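
The hunk above drops mLock around the potentially slow DataSourceFactory::CreateFromURI() call and only publishes the result if disconnect() did not run in the meantime. Below is a minimal, self-contained sketch of that pattern using std::mutex; Source, createSlowDataSource() and the member names are hypothetical stand-ins, not the NuPlayer types.

// Illustrative only: std::mutex instead of Android's Mutex; Source,
// createSlowDataSource() and the members are hypothetical stand-ins.
#include <memory>
#include <mutex>
#include <string>

struct DataSource { std::string uri; };

// Stand-in for a call that may block on the network for a long time.
static std::shared_ptr<DataSource> createSlowDataSource(const std::string &uri) {
    return std::make_shared<DataSource>(DataSource{uri});
}

class Source {
public:
    void disconnect() {
        std::lock_guard<std::mutex> l(mLock);
        mDisconnected = true;   // any in-flight prepare must not publish its result
    }

    void prepare(const std::string &uri) {
        std::unique_lock<std::mutex> l(mLock);
        l.unlock();             // never hold the lock across the blocking call
        auto ds = createSlowDataSource(uri);
        l.lock();               // re-take the lock before touching shared state
        if (!mDisconnected) {   // discard the result if disconnect() won the race
            mDataSource = ds;
        }
    }

private:
    std::mutex mLock;
    bool mDisconnected = false;
    std::shared_ptr<DataSource> mDataSource;
};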
 
@@ -427,7 +428,7 @@
     }
 
     if (mVideoTrack.mSource != NULL) {
-        sp<MetaData> meta = doGetFormatMeta(false /* audio */);
+        sp<MetaData> meta = getFormatMeta_l(false /* audio */);
         sp<AMessage> msg = new AMessage;
         err = convertMetaDataToMessage(meta, &msg);
         if(err != OK) {
@@ -461,47 +462,39 @@
     }
 
     if (mIsStreaming) {
-        if (mBufferingMonitorLooper == NULL) {
-            mBufferingMonitor->prepare(mCachedSource, mDurationUs, mBitrate,
-                    mIsStreaming);
-
-            mBufferingMonitorLooper = new ALooper;
-            mBufferingMonitorLooper->setName("GSBMonitor");
-            mBufferingMonitorLooper->start();
-            mBufferingMonitorLooper->registerHandler(mBufferingMonitor);
-        }
-
-        mBufferingMonitor->ensureCacheIsFetching();
-        mBufferingMonitor->restartPollBuffering();
+        mCachedSource->resumeFetchingIfNecessary();
+        mPreparing = true;
+        schedulePollBuffering();
     } else {
         notifyPrepared();
     }
+
+    if (mAudioTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
+    }
+
+    if (mVideoTrack.mSource != NULL) {
+        postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
+    }
 }
 
 void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) {
     if (err != OK) {
-        {
-            sp<DataSource> dataSource = mDataSource;
-            sp<NuCachedSource2> cachedSource = mCachedSource;
-            sp<DataSource> httpSource = mHttpSource;
-            {
-                Mutex::Autolock _l(mDisconnectLock);
-                mDataSource.clear();
-                mCachedSource.clear();
-                mHttpSource.clear();
-            }
-        }
-        mBitrate = -1;
+        mDataSource.clear();
+        mCachedSource.clear();
+        mHttpSource.clear();
 
-        mBufferingMonitor->cancelPollBuffering();
+        mBitrate = -1;
+        mPrevBufferPercentage = -1;
+        ++mPollBufferingGeneration;
     }
     notifyPrepared(err);
 }
 
 void NuPlayer::GenericSource::start() {
+    Mutex::Autolock _l(mLock);
     ALOGI("start");
 
-    mStopRead = false;
     if (mAudioTrack.mSource != NULL) {
         postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
     }
@@ -511,30 +504,30 @@
     }
 
     mStarted = true;
-
-    (new AMessage(kWhatStart, this))->post();
 }
 
 void NuPlayer::GenericSource::stop() {
+    Mutex::Autolock _l(mLock);
     mStarted = false;
 }
 
 void NuPlayer::GenericSource::pause() {
+    Mutex::Autolock _l(mLock);
     mStarted = false;
 }
 
 void NuPlayer::GenericSource::resume() {
+    Mutex::Autolock _l(mLock);
     mStarted = true;
-
-    (new AMessage(kWhatResume, this))->post();
 }
 
 void NuPlayer::GenericSource::disconnect() {
     sp<DataSource> dataSource, httpSource;
     {
-        Mutex::Autolock _l(mDisconnectLock);
+        Mutex::Autolock _l(mLock);
         dataSource = mDataSource;
         httpSource = mHttpSource;
+        mDisconnected = true;
     }
 
     if (dataSource != NULL) {
@@ -551,7 +544,24 @@
     return OK;
 }
 
+void NuPlayer::GenericSource::sendCacheStats() {
+    int32_t kbps = 0;
+    status_t err = UNKNOWN_ERROR;
+
+    if (mCachedSource != NULL) {
+        err = mCachedSource->getEstimatedBandwidthKbps(&kbps);
+    }
+
+    if (err == OK) {
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", kWhatCacheStats);
+        notify->setInt32("bandwidth", kbps);
+        notify->post();
+    }
+}
+
 void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
+    Mutex::Autolock _l(mLock);
     switch (msg->what()) {
       case kWhatPrepareAsync:
       {
@@ -620,6 +630,8 @@
           track->mSource = source;
           track->mSource->start();
           track->mIndex = trackIndex;
+          ++mAudioDataGeneration;
+          ++mVideoDataGeneration;
 
           int64_t timeUs, actualTimeUs;
           const bool formatChange = true;
@@ -637,37 +649,6 @@
           break;
       }
 
-      case kWhatStart:
-      case kWhatResume:
-      {
-          mBufferingMonitor->restartPollBuffering();
-          break;
-      }
-
-      case kWhatGetFormat:
-      {
-          onGetFormatMeta(msg);
-          break;
-      }
-
-      case kWhatGetSelectedTrack:
-      {
-          onGetSelectedTrack(msg);
-          break;
-      }
-
-      case kWhatGetTrackInfo:
-      {
-          onGetTrackInfo(msg);
-          break;
-      }
-
-      case kWhatSelectTrack:
-      {
-          onSelectTrack(msg);
-          break;
-      }
-
       case kWhatSeek:
       {
           onSeek(msg);
@@ -680,25 +661,13 @@
           break;
       }
 
-      case kWhatPrepareDrm:
+      case kWhatPollBuffering:
       {
-          status_t status = onPrepareDrm(msg);
-          sp<AMessage> response = new AMessage;
-          response->setInt32("status", status);
-          sp<AReplyToken> replyID;
-          CHECK(msg->senderAwaitsResponse(&replyID));
-          response->postReply(replyID);
-          break;
-      }
-
-      case kWhatReleaseDrm:
-      {
-          status_t status = onReleaseDrm();
-          sp<AMessage> response = new AMessage;
-          response->setInt32("status", status);
-          sp<AReplyToken> replyID;
-          CHECK(msg->senderAwaitsResponse(&replyID));
-          response->postReply(replyID);
+          int32_t generation;
+          CHECK(msg->findInt32("generation", &generation));
+          if (generation == mPollBufferingGeneration) {
+              onPollBuffering();
+          }
           break;
       }
 
@@ -729,17 +698,20 @@
     int64_t timeUs;
     CHECK(msg->findInt64("timeUs", &timeUs));
 
-    int64_t subTimeUs;
+    int64_t subTimeUs = 0;
     readBuffer(type, timeUs, MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC /* mode */, &subTimeUs);
 
-    int64_t delayUs = subTimeUs - timeUs;
+    status_t eosResult;
+    if (!packets->hasBufferAvailable(&eosResult)) {
+        return;
+    }
+
     if (msg->what() == kWhatFetchSubtitleData) {
-        const int64_t oneSecUs = 1000000ll;
-        delayUs -= oneSecUs;
+        subTimeUs -= 1000000ll;  // send subtitle data one second earlier
     }
     sp<AMessage> msg2 = new AMessage(sendWhat, this);
     msg2->setInt32("generation", msgGeneration);
-    msg2->post(delayUs < 0 ? 0 : delayUs);
+    mMediaClock->addTimer(msg2, subTimeUs);
 }
 
 void NuPlayer::GenericSource::sendTextData(
@@ -771,8 +743,10 @@
         notify->setBuffer("buffer", buffer);
         notify->post();
 
-        const int64_t delayUs = nextSubTimeUs - subTimeUs;
-        msg->post(delayUs < 0 ? 0 : delayUs);
+        if (msg->what() == kWhatSendSubtitleData) {
+            nextSubTimeUs -= 1000000ll;  // send subtitle data one second earlier
+        }
+        mMediaClock->addTimer(msg, nextSubTimeUs);
     }
 }
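
Both subtitle hunks above stop posting messages with a wall-clock delay (delayUs) and instead register the message with MediaClock keyed on media time, so cues stay aligned with playback after seeks and rate changes. Below is an illustrative media-time timer queue in plain C++; it assumes nothing about the real MediaClock API beyond the addTimer(message, mediaTimeUs) usage visible in the diff, and MediaTimers, addTimer() and advanceTo() are hypothetical names.

// Minimal sketch of media-time-based timers, not the MediaClock API itself.
#include <cstdint>
#include <functional>
#include <map>

class MediaTimers {
public:
    // Fire |cb| when playback reaches |mediaTimeUs| (microseconds of media time).
    void addTimer(int64_t mediaTimeUs, std::function<void()> cb) {
        mTimers.emplace(mediaTimeUs, std::move(cb));
    }

    // Called whenever the playback position advances; fires all timers that are due.
    void advanceTo(int64_t nowMediaUs) {
        auto end = mTimers.upper_bound(nowMediaUs);
        for (auto it = mTimers.begin(); it != end; ++it) {
            it->second();
        }
        mTimers.erase(mTimers.begin(), end);
    }

private:
    std::multimap<int64_t, std::function<void()>> mTimers;
};

// Usage: deliver a subtitle cue one second of media time before it is due,
// mirroring the "subTimeUs -= 1000000" adjustment above.
// timers.addTimer(subTimeUs - 1000000, [] { /* post kWhatSendSubtitleData */ });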
 
@@ -808,34 +782,11 @@
 }
 
 sp<MetaData> NuPlayer::GenericSource::getFormatMeta(bool audio) {
-    sp<AMessage> msg = new AMessage(kWhatGetFormat, this);
-    msg->setInt32("audio", audio);
-
-    sp<AMessage> response;
-    sp<RefBase> format;
-    status_t err = msg->postAndAwaitResponse(&response);
-    if (err == OK && response != NULL) {
-        CHECK(response->findObject("format", &format));
-        return static_cast<MetaData*>(format.get());
-    } else {
-        return NULL;
-    }
+    Mutex::Autolock _l(mLock);
+    return getFormatMeta_l(audio);
 }
 
-void NuPlayer::GenericSource::onGetFormatMeta(const sp<AMessage>& msg) const {
-    int32_t audio;
-    CHECK(msg->findInt32("audio", &audio));
-
-    sp<AMessage> response = new AMessage;
-    sp<MetaData> format = doGetFormatMeta(audio);
-    response->setObject("format", format);
-
-    sp<AReplyToken> replyID;
-    CHECK(msg->senderAwaitsResponse(&replyID));
-    response->postReply(replyID);
-}
-
-sp<MetaData> NuPlayer::GenericSource::doGetFormatMeta(bool audio) const {
+sp<MetaData> NuPlayer::GenericSource::getFormatMeta_l(bool audio) {
     sp<IMediaSource> source = audio ? mAudioTrack.mSource : mVideoTrack.mSource;
 
     if (source == NULL) {
@@ -847,10 +798,7 @@
 
 status_t NuPlayer::GenericSource::dequeueAccessUnit(
         bool audio, sp<ABuffer> *accessUnit) {
-    if (audio && !mStarted) {
-        return -EWOULDBLOCK;
-    }
-
+    Mutex::Autolock _l(mLock);
     // If has gone through stop/releaseDrm sequence, we no longer send down any buffer b/c
     // the codec's crypto object has gone away (b/37960096).
     // Note: This will be unnecessary when stop() changes behavior and releases codec (b/35248283).
@@ -876,10 +824,32 @@
 
     status_t result = track->mPackets->dequeueAccessUnit(accessUnit);
 
-    // start pulling in more buffers if we only have one (or no) buffer left
+    // start pulling in more buffers if the cache is running low
     // so that decoder has less chance of being starved
-    if (track->mPackets->getAvailableBufferCount(&finalResult) < 2) {
-        postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+    if (!mIsStreaming) {
+        if (track->mPackets->getAvailableBufferCount(&finalResult) < 2) {
+            postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+        }
+    } else {
+        int64_t durationUs = track->mPackets->getBufferedDurationUs(&finalResult);
+        // TODO: maxRebufferingMarkMs could be larger than
+        // mBufferingSettings.mResumePlaybackMarkMs
+        int64_t restartBufferingMarkUs =
+             mBufferingSettings.mResumePlaybackMarkMs * 1000ll / 2;
+        if (finalResult == OK) {
+            if (durationUs < restartBufferingMarkUs) {
+                postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+            }
+            if (track->mPackets->getAvailableBufferCount(&finalResult) < 2
+                && !mSentPauseOnBuffering && !mPreparing) {
+                mCachedSource->resumeFetchingIfNecessary();
+                sendCacheStats();
+                mSentPauseOnBuffering = true;
+                sp<AMessage> notify = dupNotify();
+                notify->setInt32("what", kWhatPauseOnBufferingStart);
+                notify->post();
+            }
+        }
     }
 
     if (result != OK) {
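
For streaming sources, dequeueAccessUnit() above now schedules more reads once the buffered duration falls below half of mResumePlaybackMarkMs, and asks the player to pause when the packet queue is nearly empty. Here is a hedged sketch of that decision; BufferedTrack, Decision and onDequeue() are illustrative stand-ins for the real member state.

// Sketch of the dequeue-side check; the types and function are illustrative.
#include <cstdint>

struct BufferedTrack {
    int64_t bufferedDurationUs;   // duration of queued access units
    int availableBuffers;         // number of queued access units
};

struct Decision {
    bool postRead;            // schedule another readBuffer()
    bool pauseForBuffering;   // tell the player to pause until the cache refills
};

Decision onDequeue(const BufferedTrack &track, int64_t resumePlaybackMarkMs,
                   bool sentPause, bool preparing) {
    Decision d{false, false};
    // Keep the queue topped up once it drops below half of the resume mark.
    const int64_t restartMarkUs = resumePlaybackMarkMs * 1000 / 2;
    d.postRead = track.bufferedDurationUs < restartMarkUs;
    // Nearly out of buffers and not already paused or preparing: pause for buffering.
    d.pauseForBuffering = track.availableBuffers < 2 && !sentPause && !preparing;
    return d;
}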
@@ -899,7 +869,6 @@
     CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
     if (audio) {
         mAudioLastDequeueTimeUs = timeUs;
-        mBufferingMonitor->updateDequeuedBufferTime(timeUs);
     } else {
         mVideoLastDequeueTimeUs = timeUs;
     }
@@ -924,43 +893,18 @@
 }
 
 status_t NuPlayer::GenericSource::getDuration(int64_t *durationUs) {
+    Mutex::Autolock _l(mLock);
     *durationUs = mDurationUs;
     return OK;
 }
 
 size_t NuPlayer::GenericSource::getTrackCount() const {
+    Mutex::Autolock _l(mLock);
     return mSources.size();
 }
 
 sp<AMessage> NuPlayer::GenericSource::getTrackInfo(size_t trackIndex) const {
-    sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, this);
-    msg->setSize("trackIndex", trackIndex);
-
-    sp<AMessage> response;
-    sp<RefBase> format;
-    status_t err = msg->postAndAwaitResponse(&response);
-    if (err == OK && response != NULL) {
-        CHECK(response->findObject("format", &format));
-        return static_cast<AMessage*>(format.get());
-    } else {
-        return NULL;
-    }
-}
-
-void NuPlayer::GenericSource::onGetTrackInfo(const sp<AMessage>& msg) const {
-    size_t trackIndex;
-    CHECK(msg->findSize("trackIndex", &trackIndex));
-
-    sp<AMessage> response = new AMessage;
-    sp<AMessage> format = doGetTrackInfo(trackIndex);
-    response->setObject("format", format);
-
-    sp<AReplyToken> replyID;
-    CHECK(msg->senderAwaitsResponse(&replyID));
-    response->postReply(replyID);
-}
-
-sp<AMessage> NuPlayer::GenericSource::doGetTrackInfo(size_t trackIndex) const {
+    Mutex::Autolock _l(mLock);
     size_t trackCount = mSources.size();
     if (trackIndex >= trackCount) {
         return NULL;
@@ -1010,35 +954,7 @@
 }
 
 ssize_t NuPlayer::GenericSource::getSelectedTrack(media_track_type type) const {
-    sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, this);
-    msg->setInt32("type", type);
-
-    sp<AMessage> response;
-    int32_t index;
-    status_t err = msg->postAndAwaitResponse(&response);
-    if (err == OK && response != NULL) {
-        CHECK(response->findInt32("index", &index));
-        return index;
-    } else {
-        return -1;
-    }
-}
-
-void NuPlayer::GenericSource::onGetSelectedTrack(const sp<AMessage>& msg) const {
-    int32_t tmpType;
-    CHECK(msg->findInt32("type", &tmpType));
-    media_track_type type = (media_track_type)tmpType;
-
-    sp<AMessage> response = new AMessage;
-    ssize_t index = doGetSelectedTrack(type);
-    response->setInt32("index", index);
-
-    sp<AReplyToken> replyID;
-    CHECK(msg->senderAwaitsResponse(&replyID));
-    response->postReply(replyID);
-}
-
-ssize_t NuPlayer::GenericSource::doGetSelectedTrack(media_track_type type) const {
+    Mutex::Autolock _l(mLock);
     const Track *track = NULL;
     switch (type) {
     case MEDIA_TRACK_TYPE_VIDEO:
@@ -1065,38 +981,9 @@
 }
 
 status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
+    Mutex::Autolock _l(mLock);
     ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex);
-    sp<AMessage> msg = new AMessage(kWhatSelectTrack, this);
-    msg->setInt32("trackIndex", trackIndex);
-    msg->setInt32("select", select);
-    msg->setInt64("timeUs", timeUs);
 
-    sp<AMessage> response;
-    status_t err = msg->postAndAwaitResponse(&response);
-    if (err == OK && response != NULL) {
-        CHECK(response->findInt32("err", &err));
-    }
-
-    return err;
-}
-
-void NuPlayer::GenericSource::onSelectTrack(const sp<AMessage>& msg) {
-    int32_t trackIndex, select;
-    int64_t timeUs;
-    CHECK(msg->findInt32("trackIndex", &trackIndex));
-    CHECK(msg->findInt32("select", &select));
-    CHECK(msg->findInt64("timeUs", &timeUs));
-
-    sp<AMessage> response = new AMessage;
-    status_t err = doSelectTrack(trackIndex, select, timeUs);
-    response->setInt32("err", err);
-
-    sp<AReplyToken> replyID;
-    CHECK(msg->senderAwaitsResponse(&replyID));
-    response->postReply(replyID);
-}
-
-status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select, int64_t timeUs) {
     if (trackIndex >= mSources.size()) {
         return BAD_INDEX;
     }
@@ -1188,10 +1075,14 @@
 }
 
 status_t NuPlayer::GenericSource::seekTo(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
+    ALOGV("seekTo: %lld, %d", (long long)seekTimeUs, mode);
     sp<AMessage> msg = new AMessage(kWhatSeek, this);
     msg->setInt64("seekTimeUs", seekTimeUs);
     msg->setInt32("mode", mode);
 
+    // Need to call readBuffer on |mLooper| to ensure that calls to
+    // IMediaSource::read* are serialized. Note that IMediaSource::read*
+    // is called without |mLock| held, and MediaSource is not thread-safe.
     sp<AMessage> response;
     status_t err = msg->postAndAwaitResponse(&response);
     if (err == OK && response != NULL) {
@@ -1217,17 +1108,9 @@
 }
 
 status_t NuPlayer::GenericSource::doSeek(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
-    mBufferingMonitor->updateDequeuedBufferTime(-1ll);
-
-    // If the Widevine source is stopped, do not attempt to read any
-    // more buffers.
-    //
-    // TODO: revisit after widevine is removed.  May be able to
-    // combine mStopRead with mStarted.
-    if (mStopRead) {
-        return INVALID_OPERATION;
-    }
     if (mVideoTrack.mSource != NULL) {
+        ++mVideoDataGeneration;
+
         int64_t actualTimeUs;
         readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, mode, &actualTimeUs);
 
@@ -1238,7 +1121,8 @@
     }
 
     if (mAudioTrack.mSource != NULL) {
-        readBuffer(MEDIA_TRACK_TYPE_AUDIO, seekTimeUs);
+        ++mAudioDataGeneration;
+        readBuffer(MEDIA_TRACK_TYPE_AUDIO, seekTimeUs, MediaPlayerSeekMode::SEEK_CLOSEST);
         mAudioLastDequeueTimeUs = seekTimeUs;
     }
 
@@ -1252,12 +1136,8 @@
         mFetchTimedTextDataGeneration++;
     }
 
-    // If currently buffering, post kWhatBufferingEnd first, so that
-    // NuPlayer resumes. Otherwise, if cache hits high watermark
-    // before new polling happens, no one will resume the playback.
-    mBufferingMonitor->stopBufferingIfNecessary();
-    mBufferingMonitor->restartPollBuffering();
-
+    ++mPollBufferingGeneration;
+    schedulePollBuffering();
     return OK;
 }
 
@@ -1360,9 +1240,29 @@
     return ab;
 }
 
-void NuPlayer::GenericSource::postReadBuffer(media_track_type trackType) {
-    Mutex::Autolock _l(mReadBufferLock);
+int32_t NuPlayer::GenericSource::getDataGeneration(media_track_type type) const {
+    int32_t generation = -1;
+    switch (type) {
+    case MEDIA_TRACK_TYPE_VIDEO:
+        generation = mVideoDataGeneration;
+        break;
+    case MEDIA_TRACK_TYPE_AUDIO:
+        generation = mAudioDataGeneration;
+        break;
+    case MEDIA_TRACK_TYPE_TIMEDTEXT:
+        generation = mFetchTimedTextDataGeneration;
+        break;
+    case MEDIA_TRACK_TYPE_SUBTITLE:
+        generation = mFetchSubtitleDataGeneration;
+        break;
+    default:
+        break;
+    }
 
+    return generation;
+}
+
+void NuPlayer::GenericSource::postReadBuffer(media_track_type trackType) {
     if ((mPendingReadBufferTypes & (1 << trackType)) == 0) {
         mPendingReadBufferTypes |= (1 << trackType);
         sp<AMessage> msg = new AMessage(kWhatReadBuffer, this);
@@ -1375,25 +1275,13 @@
     int32_t tmpType;
     CHECK(msg->findInt32("trackType", &tmpType));
     media_track_type trackType = (media_track_type)tmpType;
+    mPendingReadBufferTypes &= ~(1 << trackType);
     readBuffer(trackType);
-    {
-        // only protect the variable change, as readBuffer may
-        // take considerable time.
-        Mutex::Autolock _l(mReadBufferLock);
-        mPendingReadBufferTypes &= ~(1 << trackType);
-    }
 }
 
 void NuPlayer::GenericSource::readBuffer(
         media_track_type trackType, int64_t seekTimeUs, MediaPlayerSeekMode mode,
         int64_t *actualTimeUs, bool formatChange) {
-    // Do not read data if Widevine source is stopped
-    //
-    // TODO: revisit after widevine is removed.  May be able to
-    // combine mStopRead with mStarted.
-    if (mStopRead) {
-        return;
-    }
     Track *track;
     size_t maxBuffers = 1;
     switch (trackType) {
@@ -1437,25 +1325,38 @@
         options.setNonBlocking();
     }
 
+    int32_t generation = getDataGeneration(trackType);
     for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
         Vector<MediaBuffer *> mediaBuffers;
         status_t err = NO_ERROR;
 
+        sp<IMediaSource> source = track->mSource;
+        mLock.unlock();
         if (couldReadMultiple) {
-            err = track->mSource->readMultiple(
+            err = source->readMultiple(
                     &mediaBuffers, maxBuffers - numBuffers, &options);
         } else {
             MediaBuffer *mbuf = NULL;
-            err = track->mSource->read(&mbuf, &options);
+            err = source->read(&mbuf, &options);
             if (err == OK && mbuf != NULL) {
                 mediaBuffers.push_back(mbuf);
             }
         }
+        mLock.lock();
 
         options.clearNonPersistent();
 
         size_t id = 0;
         size_t count = mediaBuffers.size();
+
+        // In case the track has changed while the lock was released above.
+        if (generation != getDataGeneration(trackType)) {
+            for (; id < count; ++id) {
+                mediaBuffers[id]->release();
+            }
+            break;
+        }
+
         for (; id < count; ++id) {
             int64_t timeUs;
             MediaBuffer *mbuf = mediaBuffers[id];
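
readBuffer() above releases mLock around the blocking IMediaSource::read*/readMultiple() calls and uses per-track generation counters (bumped on seek and track change) to detect that the fetched buffers have become stale. A minimal sketch of the same idea with std::atomic follows; TrackReader and readBlocking() are hypothetical, and a real implementation would release the stale MediaBuffers rather than just drop them.

// Generation-counter sketch: results fetched without the lock are discarded if
// a seek or track change happened in the meantime.
#include <atomic>
#include <cstdint>
#include <vector>

class TrackReader {
public:
    // Called on seek or track change: any read started before this point is stale.
    void invalidate() { mGeneration.fetch_add(1); }

    // Appends data only if no seek/track change happened while reading.
    bool read(std::vector<int> &out) {
        const int32_t started = mGeneration.load();
        std::vector<int> buffers = readBlocking();   // runs without any lock held
        if (started != mGeneration.load()) {
            return false;                            // stale: caller must drop |buffers|
        }
        out.insert(out.end(), buffers.begin(), buffers.end());
        return true;
    }

private:
    std::vector<int> readBlocking() { return {1, 2, 3}; }  // stand-in for IMediaSource::read*
    std::atomic<int32_t> mGeneration{0};
};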
@@ -1466,10 +1367,8 @@
             }
             if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
                 mAudioTimeUs = timeUs;
-                mBufferingMonitor->updateQueuedTime(true /* isAudio */, timeUs);
             } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
                 mVideoTimeUs = timeUs;
-                mBufferingMonitor->updateQueuedTime(false /* isAudio */, timeUs);
             }
 
             queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
@@ -1516,6 +1415,41 @@
             break;
         }
     }
+
+    if (mIsStreaming
+        && (trackType == MEDIA_TRACK_TYPE_VIDEO || trackType == MEDIA_TRACK_TYPE_AUDIO)) {
+        status_t finalResult;
+        int64_t durationUs = track->mPackets->getBufferedDurationUs(&finalResult);
+
+        // TODO: maxRebufferingMarkMs could be larger than
+        // mBufferingSettings.mResumePlaybackMarkMs
+        int64_t markUs = (mPreparing ? mBufferingSettings.mInitialMarkMs
+            : mBufferingSettings.mResumePlaybackMarkMs) * 1000ll;
+        if (finalResult == ERROR_END_OF_STREAM || durationUs >= markUs) {
+            if (mPreparing || mSentPauseOnBuffering) {
+                Track *counterTrack =
+                    (trackType == MEDIA_TRACK_TYPE_VIDEO ? &mAudioTrack : &mVideoTrack);
+                if (counterTrack->mSource != NULL) {
+                    durationUs = counterTrack->mPackets->getBufferedDurationUs(&finalResult);
+                }
+                if (finalResult == ERROR_END_OF_STREAM || durationUs >= markUs) {
+                    if (mPreparing) {
+                        notifyPrepared();
+                        mPreparing = false;
+                    } else {
+                        sendCacheStats();
+                        mSentPauseOnBuffering = false;
+                        sp<AMessage> notify = dupNotify();
+                        notify->setInt32("what", kWhatResumeOnBufferingEnd);
+                        notify->post();
+                    }
+                }
+            }
+            return;
+        }
+
+        postReadBuffer(trackType);
+    }
 }
 
 void NuPlayer::GenericSource::queueDiscontinuityIfNeeded(
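
After each read, the streaming path above decides whether to keep fetching or to report readiness: it keeps posting reads until the buffered duration reaches mInitialMarkMs (while preparing) or mResumePlaybackMarkMs (while rebuffering), and only notifies prepared/resume once the other A/V track has also crossed the mark or hit EOS. A simplified sketch of that decision follows; ReadState, Outcome and afterRead() are illustrative names and the per-track EOS handling is collapsed into a single flag.

// Simplified post-read decision for the streaming case.
#include <cstdint>

struct ReadState {
    bool preparing;              // still inside prepareAsync()
    bool sentPauseOnBuffering;   // player was told to pause for buffering
    int64_t bufferedUs;          // buffered duration on the track just read
    int64_t counterBufferedUs;   // buffered duration on the other A/V track
    bool eos;                    // end of stream reached
};

enum class Outcome { kKeepReading, kNotifyPrepared, kResumePlayback, kNothing };

Outcome afterRead(const ReadState &s, int64_t initialMarkMs, int64_t resumeMarkMs) {
    const int64_t markUs = (s.preparing ? initialMarkMs : resumeMarkMs) * 1000;
    if (!s.eos && s.bufferedUs < markUs) {
        return Outcome::kKeepReading;            // below the mark: schedule another read
    }
    // Above the mark (or at EOS): only act once the counterpart track is ready too.
    if (s.eos || s.counterBufferedUs >= markUs) {
        if (s.preparing)            return Outcome::kNotifyPrepared;
        if (s.sentPauseOnBuffering) return Outcome::kResumePlayback;
    }
    return Outcome::kNothing;
}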
@@ -1533,160 +1467,7 @@
     }
 }
 
-NuPlayer::GenericSource::BufferingMonitor::BufferingMonitor(const sp<AMessage> &notify)
-    : mNotify(notify),
-      mDurationUs(-1ll),
-      mBitrate(-1ll),
-      mIsStreaming(false),
-      mAudioTimeUs(0),
-      mVideoTimeUs(0),
-      mPollBufferingGeneration(0),
-      mPrepareBuffering(false),
-      mBuffering(false),
-      mPrevBufferPercentage(-1),
-      mOffloadAudio(false),
-      mFirstDequeuedBufferRealUs(-1ll),
-      mFirstDequeuedBufferMediaUs(-1ll),
-      mlastDequeuedBufferMediaUs(-1ll) {
-      getDefaultBufferingSettings(&mSettings);
-}
-
-NuPlayer::GenericSource::BufferingMonitor::~BufferingMonitor() {
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::getDefaultBufferingSettings(
-        BufferingSettings *buffering /* nonnull */) {
-    buffering->mInitialBufferingMode = BUFFERING_MODE_TIME_ONLY;
-    buffering->mRebufferingMode = BUFFERING_MODE_TIME_THEN_SIZE;
-    buffering->mInitialWatermarkMs = kHighWaterMarkMs;
-    buffering->mRebufferingWatermarkLowMs = kLowWaterMarkMs;
-    buffering->mRebufferingWatermarkHighMs = kHighWaterMarkRebufferMs;
-    buffering->mRebufferingWatermarkLowKB = kLowWaterMarkKB;
-    buffering->mRebufferingWatermarkHighKB = kHighWaterMarkKB;
-
-    ALOGV("BufferingMonitor::getDefaultBufferingSettings{%s}",
-            buffering->toString().string());
-}
-
-status_t NuPlayer::GenericSource::BufferingMonitor::setBufferingSettings(
-        const BufferingSettings &buffering) {
-    ALOGV("BufferingMonitor::setBufferingSettings{%s}",
-            buffering.toString().string());
-
-    Mutex::Autolock _l(mLock);
-    if (buffering.IsSizeBasedBufferingMode(buffering.mInitialBufferingMode)
-            || (buffering.IsTimeBasedBufferingMode(buffering.mRebufferingMode)
-                && buffering.mRebufferingWatermarkLowMs > buffering.mRebufferingWatermarkHighMs)
-            || (buffering.IsSizeBasedBufferingMode(buffering.mRebufferingMode)
-                && buffering.mRebufferingWatermarkLowKB > buffering.mRebufferingWatermarkHighKB)) {
-        return BAD_VALUE;
-    }
-    mSettings = buffering;
-    if (mSettings.mInitialBufferingMode == BUFFERING_MODE_NONE) {
-        mSettings.mInitialWatermarkMs = BufferingSettings::kNoWatermark;
-    }
-    if (!mSettings.IsTimeBasedBufferingMode(mSettings.mRebufferingMode)) {
-        mSettings.mRebufferingWatermarkLowMs = BufferingSettings::kNoWatermark;
-        mSettings.mRebufferingWatermarkHighMs = INT32_MAX;
-    }
-    if (!mSettings.IsSizeBasedBufferingMode(mSettings.mRebufferingMode)) {
-        mSettings.mRebufferingWatermarkLowKB = BufferingSettings::kNoWatermark;
-        mSettings.mRebufferingWatermarkHighKB = INT32_MAX;
-    }
-    return OK;
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::prepare(
-        const sp<NuCachedSource2> &cachedSource,
-        int64_t durationUs,
-        int64_t bitrate,
-        bool isStreaming) {
-    Mutex::Autolock _l(mLock);
-    prepare_l(cachedSource, durationUs, bitrate, isStreaming);
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::stop() {
-    Mutex::Autolock _l(mLock);
-    prepare_l(NULL /* cachedSource */, -1 /* durationUs */,
-            -1 /* bitrate */, false /* isStreaming */);
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::cancelPollBuffering() {
-    Mutex::Autolock _l(mLock);
-    cancelPollBuffering_l();
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::restartPollBuffering() {
-    Mutex::Autolock _l(mLock);
-    if (mIsStreaming) {
-        cancelPollBuffering_l();
-        onPollBuffering_l();
-    }
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::stopBufferingIfNecessary() {
-    Mutex::Autolock _l(mLock);
-    stopBufferingIfNecessary_l();
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::ensureCacheIsFetching() {
-    Mutex::Autolock _l(mLock);
-    ensureCacheIsFetching_l();
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::updateQueuedTime(bool isAudio, int64_t timeUs) {
-    Mutex::Autolock _l(mLock);
-    if (isAudio) {
-        mAudioTimeUs = timeUs;
-    } else {
-        mVideoTimeUs = timeUs;
-    }
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::setOffloadAudio(bool offload) {
-    Mutex::Autolock _l(mLock);
-    mOffloadAudio = offload;
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::updateDequeuedBufferTime(int64_t mediaUs) {
-    Mutex::Autolock _l(mLock);
-    if (mediaUs < 0) {
-        mFirstDequeuedBufferRealUs = -1ll;
-        mFirstDequeuedBufferMediaUs = -1ll;
-    } else if (mFirstDequeuedBufferRealUs < 0) {
-        mFirstDequeuedBufferRealUs = ALooper::GetNowUs();
-        mFirstDequeuedBufferMediaUs = mediaUs;
-    }
-    mlastDequeuedBufferMediaUs = mediaUs;
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::prepare_l(
-        const sp<NuCachedSource2> &cachedSource,
-        int64_t durationUs,
-        int64_t bitrate,
-        bool isStreaming) {
-
-    mCachedSource = cachedSource;
-    mDurationUs = durationUs;
-    mBitrate = bitrate;
-    mIsStreaming = isStreaming;
-    mAudioTimeUs = 0;
-    mVideoTimeUs = 0;
-    mPrepareBuffering = (cachedSource != NULL);
-    cancelPollBuffering_l();
-    mOffloadAudio = false;
-    mFirstDequeuedBufferRealUs = -1ll;
-    mFirstDequeuedBufferMediaUs = -1ll;
-    mlastDequeuedBufferMediaUs = -1ll;
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::cancelPollBuffering_l() {
-    mBuffering = false;
-    ++mPollBufferingGeneration;
-    mPrevBufferPercentage = -1;
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::notifyBufferingUpdate_l(int32_t percentage) {
+void NuPlayer::GenericSource::notifyBufferingUpdate(int32_t percentage) {
     // Buffering percent could go backward as it's estimated from remaining
     // data and last access time. This could cause the buffering position
     // drawn on media control to jitter slightly. Remember previously reported
@@ -1699,106 +1480,28 @@
 
     mPrevBufferPercentage = percentage;
 
-    ALOGV("notifyBufferingUpdate_l: buffering %d%%", percentage);
+    ALOGV("notifyBufferingUpdate: buffering %d%%", percentage);
 
-    sp<AMessage> msg = mNotify->dup();
-    msg->setInt32("what", kWhatBufferingUpdate);
-    msg->setInt32("percentage", percentage);
-    msg->post();
+    sp<AMessage> notify = dupNotify();
+    notify->setInt32("what", kWhatBufferingUpdate);
+    notify->setInt32("percentage", percentage);
+    notify->post();
 }
 
-void NuPlayer::GenericSource::BufferingMonitor::startBufferingIfNecessary_l() {
-    if (mPrepareBuffering) {
-        return;
-    }
-
-    if (!mBuffering) {
-        ALOGD("startBufferingIfNecessary_l");
-
-        mBuffering = true;
-
-        ensureCacheIsFetching_l();
-        sendCacheStats_l();
-
-        sp<AMessage> notify = mNotify->dup();
-        notify->setInt32("what", kWhatPauseOnBufferingStart);
-        notify->post();
-    }
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::stopBufferingIfNecessary_l() {
-    if (mPrepareBuffering) {
-        ALOGD("stopBufferingIfNecessary_l, mBuffering=%d", mBuffering);
-
-        mPrepareBuffering = false;
-
-        sp<AMessage> notify = mNotify->dup();
-        notify->setInt32("what", kWhatPrepared);
-        notify->setInt32("err", OK);
-        notify->post();
-
-        return;
-    }
-
-    if (mBuffering) {
-        ALOGD("stopBufferingIfNecessary_l");
-        mBuffering = false;
-
-        sendCacheStats_l();
-
-        sp<AMessage> notify = mNotify->dup();
-        notify->setInt32("what", kWhatResumeOnBufferingEnd);
-        notify->post();
-    }
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::sendCacheStats_l() {
-    int32_t kbps = 0;
-    status_t err = UNKNOWN_ERROR;
-
-    if (mCachedSource != NULL) {
-        err = mCachedSource->getEstimatedBandwidthKbps(&kbps);
-    }
-
-    if (err == OK) {
-        sp<AMessage> notify = mNotify->dup();
-        notify->setInt32("what", kWhatCacheStats);
-        notify->setInt32("bandwidth", kbps);
-        notify->post();
-    }
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::ensureCacheIsFetching_l() {
-    if (mCachedSource != NULL) {
-        mCachedSource->resumeFetchingIfNecessary();
-    }
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::schedulePollBuffering_l() {
+void NuPlayer::GenericSource::schedulePollBuffering() {
     sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
     msg->setInt32("generation", mPollBufferingGeneration);
     // Enquires buffering status every second.
     msg->post(1000000ll);
 }
 
-int64_t NuPlayer::GenericSource::BufferingMonitor::getLastReadPosition_l() {
-    if (mAudioTimeUs > 0) {
-        return mAudioTimeUs;
-    } else if (mVideoTimeUs > 0) {
-        return mVideoTimeUs;
-    } else {
-        return 0;
-    }
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::onPollBuffering_l() {
+void NuPlayer::GenericSource::onPollBuffering() {
     status_t finalStatus = UNKNOWN_ERROR;
     int64_t cachedDurationUs = -1ll;
     ssize_t cachedDataRemaining = -1;
 
     if (mCachedSource != NULL) {
-        cachedDataRemaining =
-                mCachedSource->approxDataRemaining(&finalStatus);
+        cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
 
         if (finalStatus == OK) {
             off64_t size;
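
notifyBufferingUpdate() above remembers the last reported percentage because the estimate derived from remaining cache data can move backwards between polls. A small self-contained sketch of the clamping follows; BufferingReporter is a hypothetical class and printf stands in for the kWhatBufferingUpdate notification.

// Sketch of the monotonic buffering-percentage reporting.
#include <algorithm>
#include <cstdio>

class BufferingReporter {
public:
    void report(int percentage) {
        // The estimate can move backwards between polls; never report a lower value.
        percentage = std::max(percentage, mPrevBufferPercentage);
        percentage = std::min(percentage, 100);   // cap at 100%
        mPrevBufferPercentage = percentage;
        std::printf("buffering %d%%\n", percentage);
    }

private:
    int mPrevBufferPercentage = -1;
};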
@@ -1816,157 +1519,49 @@
     }
 
     if (finalStatus != OK) {
-        ALOGV("onPollBuffering_l: EOS (finalStatus = %d)", finalStatus);
+        ALOGV("onPollBuffering: EOS (finalStatus = %d)", finalStatus);
 
         if (finalStatus == ERROR_END_OF_STREAM) {
-            notifyBufferingUpdate_l(100);
+            notifyBufferingUpdate(100);
         }
 
-        stopBufferingIfNecessary_l();
         return;
     }
 
     if (cachedDurationUs >= 0ll) {
         if (mDurationUs > 0ll) {
-            int64_t cachedPosUs = getLastReadPosition_l() + cachedDurationUs;
+            int64_t cachedPosUs = getLastReadPosition() + cachedDurationUs;
             int percentage = 100.0 * cachedPosUs / mDurationUs;
             if (percentage > 100) {
                 percentage = 100;
             }
 
-            notifyBufferingUpdate_l(percentage);
+            notifyBufferingUpdate(percentage);
         }
 
-        ALOGV("onPollBuffering_l: cachedDurationUs %.1f sec", cachedDurationUs / 1000000.0f);
-
-        if (mPrepareBuffering) {
-            if (cachedDurationUs > mSettings.mInitialWatermarkMs * 1000) {
-                stopBufferingIfNecessary_l();
-            }
-        } else if (mSettings.IsTimeBasedBufferingMode(mSettings.mRebufferingMode)) {
-            if (cachedDurationUs < mSettings.mRebufferingWatermarkLowMs * 1000) {
-                // Take into account the data cached in downstream components to try to avoid
-                // unnecessary pause.
-                if (mOffloadAudio && mFirstDequeuedBufferRealUs >= 0) {
-                    int64_t downStreamCacheUs =
-                        mlastDequeuedBufferMediaUs - mFirstDequeuedBufferMediaUs
-                            - (ALooper::GetNowUs() - mFirstDequeuedBufferRealUs);
-                    if (downStreamCacheUs > 0) {
-                        cachedDurationUs += downStreamCacheUs;
-                    }
-                }
-
-                if (cachedDurationUs < mSettings.mRebufferingWatermarkLowMs * 1000) {
-                    startBufferingIfNecessary_l();
-                }
-            } else if (cachedDurationUs > mSettings.mRebufferingWatermarkHighMs * 1000) {
-                stopBufferingIfNecessary_l();
-            }
-        }
-    } else if (cachedDataRemaining >= 0
-            && mSettings.IsSizeBasedBufferingMode(mSettings.mRebufferingMode)) {
-        ALOGV("onPollBuffering_l: cachedDataRemaining %zd bytes",
-                cachedDataRemaining);
-
-        if (cachedDataRemaining < (mSettings.mRebufferingWatermarkLowKB << 10)) {
-            startBufferingIfNecessary_l();
-        } else if (cachedDataRemaining > (mSettings.mRebufferingWatermarkHighKB << 10)) {
-            stopBufferingIfNecessary_l();
-        }
+        ALOGV("onPollBuffering: cachedDurationUs %.1f sec", cachedDurationUs / 1000000.0f);
     }
 
-    schedulePollBuffering_l();
-}
-
-void NuPlayer::GenericSource::BufferingMonitor::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-      case kWhatPollBuffering:
-      {
-          int32_t generation;
-          CHECK(msg->findInt32("generation", &generation));
-          Mutex::Autolock _l(mLock);
-          if (generation == mPollBufferingGeneration) {
-              onPollBuffering_l();
-          }
-          break;
-      }
-      default:
-          TRESPASS();
-          break;
-    }
+    schedulePollBuffering();
 }
 
 // Modular DRM
 status_t NuPlayer::GenericSource::prepareDrm(
-        const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId, sp<ICrypto> *crypto)
-{
+        const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId, sp<ICrypto> *outCrypto) {
+    Mutex::Autolock _l(mLock);
     ALOGV("prepareDrm");
 
-    sp<AMessage> msg = new AMessage(kWhatPrepareDrm, this);
-    // synchronous call so just passing the address but with local copies of "const" args
-    uint8_t UUID[16];
-    memcpy(UUID, uuid, sizeof(UUID));
-    Vector<uint8_t> sessionId = drmSessionId;
-    msg->setPointer("uuid", (void*)UUID);
-    msg->setPointer("drmSessionId", (void*)&sessionId);
-    msg->setPointer("crypto", (void*)crypto);
-
-    sp<AMessage> response;
-    status_t status = msg->postAndAwaitResponse(&response);
-
-    if (status == OK && response != NULL) {
-        CHECK(response->findInt32("status", &status));
-        ALOGV_IF(status == OK, "prepareDrm: mCrypto: %p (%d)", crypto->get(),
-                (*crypto != NULL ? (*crypto)->getStrongCount() : 0));
-        ALOGD("prepareDrm ret: %d ", status);
-    } else {
-        ALOGE("prepareDrm err: %d", status);
-    }
-
-    return status;
-}
-
-status_t NuPlayer::GenericSource::releaseDrm()
-{
-    ALOGV("releaseDrm");
-
-    sp<AMessage> msg = new AMessage(kWhatReleaseDrm, this);
-
-    // synchronous call to update the source states before the player proceedes with crypto cleanup
-    sp<AMessage> response;
-    status_t status = msg->postAndAwaitResponse(&response);
-
-    if (status == OK && response != NULL) {
-        ALOGD("releaseDrm ret: OK ");
-    } else {
-        ALOGE("releaseDrm err: %d", status);
-    }
-
-    return status;
-}
-
-status_t NuPlayer::GenericSource::onPrepareDrm(const sp<AMessage> &msg)
-{
-    ALOGV("onPrepareDrm ");
-
     mIsDrmProtected = false;
     mIsDrmReleased = false;
     mIsSecure = false;
 
-    uint8_t *uuid;
-    Vector<uint8_t> *drmSessionId;
-    sp<ICrypto> *outCrypto;
-    CHECK(msg->findPointer("uuid", (void**)&uuid));
-    CHECK(msg->findPointer("drmSessionId", (void**)&drmSessionId));
-    CHECK(msg->findPointer("crypto", (void**)&outCrypto));
-
     status_t status = OK;
-    sp<ICrypto> crypto = NuPlayerDrm::createCryptoAndPlugin(uuid, *drmSessionId, status);
+    sp<ICrypto> crypto = NuPlayerDrm::createCryptoAndPlugin(uuid, drmSessionId, status);
     if (crypto == NULL) {
-        ALOGE("onPrepareDrm: createCrypto failed. status: %d", status);
+        ALOGE("prepareDrm: createCrypto failed. status: %d", status);
         return status;
     }
-    ALOGV("onPrepareDrm: createCryptoAndPlugin succeeded for uuid: %s",
+    ALOGV("prepareDrm: createCryptoAndPlugin succeeded for uuid: %s",
             DrmUUID::toHexString(uuid).string());
 
     *outCrypto = crypto;
@@ -1975,14 +1570,14 @@
 
     if (mMimes.size() == 0) {
         status = UNKNOWN_ERROR;
-        ALOGE("onPrepareDrm: Unexpected. Must have at least one track. status: %d", status);
+        ALOGE("prepareDrm: Unexpected. Must have at least one track. status: %d", status);
         return status;
     }
 
     // first mime in this list is either the video track, or the first audio track
     const char *mime = mMimes[0].string();
     mIsSecure = crypto->requiresSecureDecoderComponent(mime);
-    ALOGV("onPrepareDrm: requiresSecureDecoderComponent mime: %s  isSecure: %d",
+    ALOGV("prepareDrm: requiresSecureDecoderComponent mime: %s  isSecure: %d",
             mime, mIsSecure);
 
     // Checking the member flags while in the looper to send out the notification.
@@ -1996,18 +1591,27 @@
             FLAG_CAN_SEEK_FORWARD |
             FLAG_CAN_SEEK);
 
+    if (status == OK) {
+        ALOGV("prepareDrm: mCrypto: %p (%d)", outCrypto->get(),
+                (*outCrypto != NULL ? (*outCrypto)->getStrongCount() : 0));
+        ALOGD("prepareDrm ret: %d ", status);
+    } else {
+        ALOGE("prepareDrm err: %d", status);
+    }
     return status;
 }
 
-status_t NuPlayer::GenericSource::onReleaseDrm()
-{
+status_t NuPlayer::GenericSource::releaseDrm() {
+    Mutex::Autolock _l(mLock);
+    ALOGV("releaseDrm");
+
     if (mIsDrmProtected) {
         mIsDrmProtected = false;
         // to prevent returning any more buffer after stop/releaseDrm (b/37960096)
         mIsDrmReleased = true;
-        ALOGV("onReleaseDrm: mIsDrmProtected is reset.");
+        ALOGV("releaseDrm: mIsDrmProtected is reset.");
     } else {
-        ALOGE("onReleaseDrm: mIsDrmProtected is already false.");
+        ALOGE("releaseDrm: mIsDrmProtected is already false.");
     }
 
     return OK;
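
A recurring theme in this file is replacing the post-and-await-response pattern (kWhatGetFormat, kWhatGetTrackInfo, kWhatPrepareDrm, ...) with direct calls that take the single mLock, plus _l-suffixed helpers for paths that already hold it. A minimal sketch of the resulting shape follows; Handler, getFormat() and mFormat are hypothetical names.

// Direct call under a single mutex instead of a looper round-trip.
#include <mutex>
#include <string>

class Handler {
public:
    // Callers invoke the method directly; one mutex guards the state that the
    // message handler used to own implicitly by running on a single thread.
    std::string getFormat() {
        std::lock_guard<std::mutex> l(mLock);
        return getFormat_l();
    }

private:
    // The "_l" suffix marks helpers that must be called with mLock already held.
    std::string getFormat_l() { return mFormat; }

    std::mutex mLock;
    std::string mFormat = "audio/mp4a-latm";
};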
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index 4064133..856f03b 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -24,6 +24,7 @@
 #include "ATSParser.h"
 
 #include <media/mediaplayer.h>
+#include <media/stagefright/MediaBuffer.h>
 
 namespace android {
 
@@ -35,12 +36,14 @@
 struct IMediaHTTPService;
 struct MediaSource;
 class MediaBuffer;
+struct MediaClock;
 struct NuCachedSource2;
 
 struct NuPlayer::GenericSource : public NuPlayer::Source,
                                  public MediaBufferObserver // Modular DRM
 {
-    GenericSource(const sp<AMessage> &notify, bool uidValid, uid_t uid);
+    GenericSource(const sp<AMessage> &notify, bool uidValid, uid_t uid,
+                  const sp<MediaClock> &mediaClock);
 
     status_t setDataSource(
             const sp<IMediaHTTPService> &httpService,
@@ -51,7 +54,7 @@
 
     status_t setDataSource(const sp<DataSource>& dataSource);
 
-    virtual status_t getDefaultBufferingSettings(
+    virtual status_t getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) override;
     virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
 
@@ -79,17 +82,13 @@
         int64_t seekTimeUs,
         MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
 
-    virtual status_t setBuffers(bool audio, Vector<MediaBuffer *> &buffers);
-
     virtual bool isStreaming() const;
 
-    virtual void setOffloadAudio(bool offload);
-
     // Modular DRM
     virtual void signalBufferReturned(MediaBuffer *buffer);
 
     virtual status_t prepareDrm(
-            const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId, sp<ICrypto> *crypto);
+            const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId, sp<ICrypto> *outCrypto);
 
     virtual status_t releaseDrm();
 
@@ -111,17 +110,11 @@
         kWhatSendTimedTextData,
         kWhatChangeAVSource,
         kWhatPollBuffering,
-        kWhatGetFormat,
-        kWhatGetSelectedTrack,
-        kWhatSelectTrack,
         kWhatSeek,
         kWhatReadBuffer,
         kWhatStart,
         kWhatResume,
         kWhatSecureDecodersInstantiated,
-        // Modular DRM
-        kWhatPrepareDrm,
-        kWhatReleaseDrm,
     };
 
     struct Track {
@@ -130,84 +123,6 @@
         sp<AnotherPacketSource> mPackets;
     };
 
-    // Helper to monitor buffering status. The polling happens every second.
-    // When necessary, it will send out buffering events to the player.
-    struct BufferingMonitor : public AHandler {
-    public:
-        explicit BufferingMonitor(const sp<AMessage> &notify);
-
-        void getDefaultBufferingSettings(BufferingSettings *buffering /* nonnull */);
-        status_t setBufferingSettings(const BufferingSettings &buffering);
-
-        // Set up state.
-        void prepare(const sp<NuCachedSource2> &cachedSource,
-                int64_t durationUs,
-                int64_t bitrate,
-                bool isStreaming);
-        // Stop and reset buffering monitor.
-        void stop();
-        // Cancel the current monitor task.
-        void cancelPollBuffering();
-        // Restart the monitor task.
-        void restartPollBuffering();
-        // Stop buffering task and send out corresponding events.
-        void stopBufferingIfNecessary();
-        // Make sure data source is getting data.
-        void ensureCacheIsFetching();
-        // Update media time of just extracted buffer from data source.
-        void updateQueuedTime(bool isAudio, int64_t timeUs);
-
-        // Set the offload mode.
-        void setOffloadAudio(bool offload);
-        // Update media time of last dequeued buffer which is sent to the decoder.
-        void updateDequeuedBufferTime(int64_t mediaUs);
-
-    protected:
-        virtual ~BufferingMonitor();
-        virtual void onMessageReceived(const sp<AMessage> &msg);
-
-    private:
-        enum {
-            kWhatPollBuffering,
-        };
-
-        sp<AMessage> mNotify;
-
-        sp<NuCachedSource2> mCachedSource;
-        int64_t mDurationUs;
-        int64_t mBitrate;
-        bool mIsStreaming;
-
-        int64_t mAudioTimeUs;
-        int64_t mVideoTimeUs;
-        int32_t mPollBufferingGeneration;
-        bool mPrepareBuffering;
-        bool mBuffering;
-        int32_t mPrevBufferPercentage;
-
-        mutable Mutex mLock;
-
-        BufferingSettings mSettings;
-        bool mOffloadAudio;
-        int64_t mFirstDequeuedBufferRealUs;
-        int64_t mFirstDequeuedBufferMediaUs;
-        int64_t mlastDequeuedBufferMediaUs;
-
-        void prepare_l(const sp<NuCachedSource2> &cachedSource,
-                int64_t durationUs,
-                int64_t bitrate,
-                bool isStreaming);
-        void cancelPollBuffering_l();
-        void notifyBufferingUpdate_l(int32_t percentage);
-        void startBufferingIfNecessary_l();
-        void stopBufferingIfNecessary_l();
-        void sendCacheStats_l();
-        void ensureCacheIsFetching_l();
-        int64_t getLastReadPosition_l();
-        void onPollBuffering_l();
-        void schedulePollBuffering_l();
-    };
-
     Vector<sp<IMediaSource> > mSources;
     Track mAudioTrack;
     int64_t mAudioTimeUs;
@@ -218,6 +133,13 @@
     Track mSubtitleTrack;
     Track mTimedTextTrack;
 
+    BufferingSettings mBufferingSettings;
+    int32_t mPrevBufferPercentage;
+    int32_t mPollBufferingGeneration;
+    bool mSentPauseOnBuffering;
+
+    int32_t mAudioDataGeneration;
+    int32_t mVideoDataGeneration;
     int32_t mFetchSubtitleDataGeneration;
     int32_t mFetchTimedTextDataGeneration;
     int64_t mDurationUs;
@@ -227,6 +149,7 @@
     bool mIsStreaming;
     bool mUIDValid;
     uid_t mUID;
+    const sp<MediaClock> mMediaClock;
     sp<IMediaHTTPService> mHTTPService;
     AString mUri;
     KeyedVector<String8, String8> mUriHeaders;
@@ -234,22 +157,20 @@
     int64_t mOffset;
     int64_t mLength;
 
+    bool mDisconnected;
     sp<DataSource> mDataSource;
     sp<NuCachedSource2> mCachedSource;
     sp<DataSource> mHttpSource;
     sp<MetaData> mFileMeta;
     bool mStarted;
-    bool mStopRead;
+    bool mPreparing;
     int64_t mBitrate;
-    sp<BufferingMonitor> mBufferingMonitor;
     uint32_t mPendingReadBufferTypes;
     sp<ABuffer> mGlobalTimedText;
 
-    mutable Mutex mReadBufferLock;
-    mutable Mutex mDisconnectLock;
+    mutable Mutex mLock;
 
     sp<ALooper> mLooper;
-    sp<ALooper> mBufferingMonitorLooper;
 
     void resetDataSource();
 
@@ -261,18 +182,6 @@
     void finishPrepareAsync();
     status_t startSources();
 
-    void onGetFormatMeta(const sp<AMessage>& msg) const;
-    sp<MetaData> doGetFormatMeta(bool audio) const;
-
-    void onGetTrackInfo(const sp<AMessage>& msg) const;
-    sp<AMessage> doGetTrackInfo(size_t trackIndex) const;
-
-    void onGetSelectedTrack(const sp<AMessage>& msg) const;
-    ssize_t doGetSelectedTrack(media_track_type type) const;
-
-    void onSelectTrack(const sp<AMessage>& msg);
-    status_t doSelectTrack(size_t trackIndex, bool select, int64_t timeUs);
-
     void onSeek(const sp<AMessage>& msg);
     status_t doSeek(int64_t seekTimeUs, MediaPlayerSeekMode mode);
 
@@ -310,6 +219,15 @@
     void queueDiscontinuityIfNeeded(
             bool seeking, bool formatChange, media_track_type trackType, Track *track);
 
+    void schedulePollBuffering();
+    void onPollBuffering();
+    void notifyBufferingUpdate(int32_t percentage);
+
+    void sendCacheStats();
+
+    sp<MetaData> getFormatMeta_l(bool audio);
+    int32_t getDataGeneration(media_track_type type) const;
+
     // Modular DRM
     // The source is DRM protected and is prepared for DRM.
     bool mIsDrmProtected;
@@ -318,8 +236,6 @@
     Vector<String8> mMimes;
 
     status_t checkDrmInfo();
-    status_t onPrepareDrm(const sp<AMessage> &msg);
-    status_t onReleaseDrm();
 
     DISALLOW_EVIL_CONSTRUCTORS(GenericSource);
 };
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index ad4c223..11f1bfd 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -35,7 +35,6 @@
 // default buffer prepare/ready/underflow marks
 static const int kReadyMarkMs     = 5000;  // 5 seconds
 static const int kPrepareMarkMs   = 1500;  // 1.5 seconds
-static const int kUnderflowMarkMs = 1000;  // 1 second
 
 namespace android {
 
@@ -54,7 +53,8 @@
       mFetchMetaDataGeneration(0),
       mHasMetadata(false),
       mMetadataSelected(false) {
-    getDefaultBufferingSettings(&mBufferingSettings);
+    mBufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+    mBufferingSettings.mResumePlaybackMarkMs = kReadyMarkMs;
     if (headers) {
         mExtraHeaders = *headers;
 
@@ -82,35 +82,16 @@
     }
 }
 
-status_t NuPlayer::HTTPLiveSource::getDefaultBufferingSettings(
+status_t NuPlayer::HTTPLiveSource::getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) {
-    buffering->mInitialBufferingMode = BUFFERING_MODE_TIME_ONLY;
-    buffering->mRebufferingMode = BUFFERING_MODE_TIME_ONLY;
-    buffering->mInitialWatermarkMs = kPrepareMarkMs;
-    buffering->mRebufferingWatermarkLowMs = kUnderflowMarkMs;
-    buffering->mRebufferingWatermarkHighMs = kReadyMarkMs;
+    *buffering = mBufferingSettings;
 
     return OK;
 }
 
 status_t NuPlayer::HTTPLiveSource::setBufferingSettings(const BufferingSettings& buffering) {
-    if (buffering.IsSizeBasedBufferingMode(buffering.mInitialBufferingMode)
-            || buffering.IsSizeBasedBufferingMode(buffering.mRebufferingMode)
-            || (buffering.IsTimeBasedBufferingMode(buffering.mRebufferingMode)
-                && buffering.mRebufferingWatermarkLowMs > buffering.mRebufferingWatermarkHighMs)) {
-        return BAD_VALUE;
-    }
-
     mBufferingSettings = buffering;
 
-    if (mBufferingSettings.mInitialBufferingMode == BUFFERING_MODE_NONE) {
-        mBufferingSettings.mInitialWatermarkMs = BufferingSettings::kNoWatermark;
-    }
-    if (mBufferingSettings.mRebufferingMode == BUFFERING_MODE_NONE) {
-        mBufferingSettings.mRebufferingWatermarkLowMs = BufferingSettings::kNoWatermark;
-        mBufferingSettings.mRebufferingWatermarkHighMs = INT32_MAX;
-    }
-
     if (mLiveSession != NULL) {
         mLiveSession->setBufferingSettings(mBufferingSettings);
     }
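
HTTPLiveSource now stores a plain BufferingSettings seeded with kPrepareMarkMs/kReadyMarkMs instead of the old mode/watermark matrix, and setBufferingSettings() no longer validates mode combinations. The stand-in below shows the reduced shape, using only the two marks and the HLS defaults visible in this diff; SimpleBufferingSettings is an illustrative name, not the real class.

// Illustrative two-mark settings struct (1.5 s initial, 5 s resume, per the diff).
#include <cstdint>
#include <string>

struct SimpleBufferingSettings {
    int32_t initialMarkMs = 1500;          // buffer this much before declaring "prepared"
    int32_t resumePlaybackMarkMs = 5000;   // buffer this much before resuming after an underrun

    std::string toString() const {
        return "initialMarkMs=" + std::to_string(initialMarkMs) +
               ", resumePlaybackMarkMs=" + std::to_string(resumePlaybackMarkMs);
    }
};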
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index 2866a6a..2d6c604 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -34,7 +34,7 @@
             const char *url,
             const KeyedVector<String8, String8> *headers);
 
-    virtual status_t getDefaultBufferingSettings(
+    virtual status_t getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) override;
     virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index df36046..d1e5d45 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -48,7 +48,9 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaClock.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
@@ -56,7 +58,6 @@
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 
-#include "avc_utils.h"
 
 #include "ESDS.h"
 #include <media/stagefright/Utils.h>
@@ -172,14 +173,16 @@
 
 ////////////////////////////////////////////////////////////////////////////////
 
-NuPlayer::NuPlayer(pid_t pid)
+NuPlayer::NuPlayer(pid_t pid, const sp<MediaClock> &mediaClock)
     : mUIDValid(false),
       mPID(pid),
+      mMediaClock(mediaClock),
       mSourceFlags(0),
       mOffloadAudio(false),
       mAudioDecoderGeneration(0),
       mVideoDecoderGeneration(0),
       mRendererGeneration(0),
+      mLastStartedPlayingTimeNs(0),
       mPreviousSeekTimeUs(0),
       mAudioEOS(false),
       mVideoEOS(false),
@@ -204,6 +207,7 @@
       mPausedForBuffering(false),
       mIsDrmProtected(false),
       mDataSourceType(DATA_SOURCE_TYPE_NONE) {
+    CHECK(mediaClock != NULL);
     clearFlushComplete();
 }
 
@@ -278,7 +282,7 @@
         ALOGV("setDataSourceAsync GenericSource %s", url);
 
         sp<GenericSource> genericSource =
-                new GenericSource(notify, mUIDValid, mUID);
+                new GenericSource(notify, mUIDValid, mUID, mMediaClock);
 
         status_t err = genericSource->setDataSource(httpService, url, headers);
 
@@ -301,7 +305,7 @@
     sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
 
     sp<GenericSource> source =
-            new GenericSource(notify, mUIDValid, mUID);
+            new GenericSource(notify, mUIDValid, mUID, mMediaClock);
 
     ALOGV("setDataSourceAsync fd %d/%lld/%lld source: %p",
             fd, (long long)offset, (long long)length, source.get());
@@ -322,7 +326,7 @@
     sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
     sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
 
-    sp<GenericSource> source = new GenericSource(notify, mUIDValid, mUID);
+    sp<GenericSource> source = new GenericSource(notify, mUIDValid, mUID, mMediaClock);
     status_t err = source->setDataSource(dataSource);
 
     if (err != OK) {
@@ -335,9 +339,9 @@
     mDataSourceType = DATA_SOURCE_TYPE_MEDIA;
 }
 
-status_t NuPlayer::getDefaultBufferingSettings(
+status_t NuPlayer::getBufferingSettings(
         BufferingSettings *buffering /* nonnull */) {
-    sp<AMessage> msg = new AMessage(kWhatGetDefaultBufferingSettings, this);
+    sp<AMessage> msg = new AMessage(kWhatGetBufferingSettings, this);
     sp<AMessage> response;
     status_t err = msg->postAndAwaitResponse(&response);
     if (err == OK && response != NULL) {
@@ -470,6 +474,13 @@
     (new AMessage(kWhatReset, this))->post();
 }
 
+status_t NuPlayer::notifyAt(int64_t mediaTimeUs) {
+    sp<AMessage> notify = new AMessage(kWhatNotifyTime, this);
+    notify->setInt64("timerUs", mediaTimeUs);
+    mMediaClock->addTimer(notify, mediaTimeUs);
+    return OK;
+}
+
 void NuPlayer::seekToAsync(int64_t seekTimeUs, MediaPlayerSeekMode mode, bool needNotify) {
     sp<AMessage> msg = new AMessage(kWhatSeek, this);
     msg->setInt64("seekTimeUs", seekTimeUs);
@@ -556,16 +567,16 @@
             break;
         }
 
-        case kWhatGetDefaultBufferingSettings:
+        case kWhatGetBufferingSettings:
         {
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
-            ALOGV("kWhatGetDefaultBufferingSettings");
+            ALOGV("kWhatGetBufferingSettings");
             BufferingSettings buffering;
             status_t err = OK;
             if (mSource != NULL) {
-                err = mSource->getDefaultBufferingSettings(&buffering);
+                err = mSource->getBufferingSettings(&buffering);
             } else {
                 err = INVALID_OPERATION;
             }
@@ -1299,6 +1310,8 @@
             ALOGV("kWhatReset");
 
             mResetting = true;
+            stopPlaybackTimer("kWhatReset");
+            stopRebufferingTimer(true);
 
             mDeferredActions.push_back(
                     new FlushDecoderAction(
@@ -1312,6 +1325,16 @@
             break;
         }
 
+        case kWhatNotifyTime:
+        {
+            ALOGV("kWhatNotifyTime");
+            int64_t timerUs;
+            CHECK(msg->findInt64("timerUs", &timerUs));
+
+            notifyListener(MEDIA_NOTIFY_TIME, timerUs, 0);
+            break;
+        }
+
         case kWhatSeek:
         {
             int64_t seekTimeUs;
@@ -1429,7 +1452,7 @@
         ALOGW("resume called when renderer is gone or not set");
     }
 
-    mLastStartedPlayingTimeNs = systemTime();
+    startPlaybackTimer("onresume");
 }
 
 status_t NuPlayer::onInstantiateSecureDecoders() {
@@ -1523,7 +1546,7 @@
     sp<AMessage> notify = new AMessage(kWhatRendererNotify, this);
     ++mRendererGeneration;
     notify->setInt32("generation", mRendererGeneration);
-    mRenderer = new Renderer(mAudioSink, notify, flags);
+    mRenderer = new Renderer(mAudioSink, mMediaClock, notify, flags);
     mRendererLooper = new ALooper;
     mRendererLooper->setName("NuPlayerRenderer");
     mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
@@ -1549,12 +1572,74 @@
         mAudioDecoder->setRenderer(mRenderer);
     }
 
-    mLastStartedPlayingTimeNs = systemTime();
+    startPlaybackTimer("onstart");
 
     postScanSources();
 }
 
+void NuPlayer::startPlaybackTimer(const char *where) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+    if (mLastStartedPlayingTimeNs == 0) {
+        mLastStartedPlayingTimeNs = systemTime();
+        ALOGV("startPlaybackTimer() time %20" PRId64 " (%s)",  mLastStartedPlayingTimeNs, where);
+    }
+}
+
+void NuPlayer::stopPlaybackTimer(const char *where) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+
+    ALOGV("stopPlaybackTimer()  time %20" PRId64 " (%s)", mLastStartedPlayingTimeNs, where);
+
+    if (mLastStartedPlayingTimeNs != 0) {
+        sp<NuPlayerDriver> driver = mDriver.promote();
+        if (driver != NULL) {
+            int64_t now = systemTime();
+            int64_t played = now - mLastStartedPlayingTimeNs;
+            ALOGV("stopPlaybackTimer()  log  %20" PRId64 "", played);
+
+            if (played > 0) {
+                driver->notifyMorePlayingTimeUs((played+500)/1000);
+            }
+        }
+        mLastStartedPlayingTimeNs = 0;
+    }
+}
+
+void NuPlayer::startRebufferingTimer() {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+    if (mLastStartedRebufferingTimeNs == 0) {
+        mLastStartedRebufferingTimeNs = systemTime();
+        ALOGV("startRebufferingTimer() time %20" PRId64 "",  mLastStartedRebufferingTimeNs);
+    }
+}
+
+void NuPlayer::stopRebufferingTimer(bool exitingPlayback) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+
+    ALOGV("stopRebufferTimer()  time %20" PRId64 " (exiting %d)", mLastStartedRebufferingTimeNs, exitingPlayback);
+
+    if (mLastStartedRebufferingTimeNs != 0) {
+        sp<NuPlayerDriver> driver = mDriver.promote();
+        if (driver != NULL) {
+            int64_t now = systemTime();
+            int64_t rebuffered = now - mLastStartedRebufferingTimeNs;
+            ALOGV("stopRebufferingTimer()  log  %20" PRId64 "", rebuffered);
+
+            if (rebuffered > 0) {
+                driver->notifyMoreRebufferingTimeUs((rebuffered+500)/1000);
+                if (exitingPlayback) {
+                    driver->notifyRebufferingWhenExit(true);
+                }
+            }
+        }
+        mLastStartedRebufferingTimeNs = 0;
+    }
+}
+
 void NuPlayer::onPause() {
+
+    stopPlaybackTimer("onPause");
+
     if (mPaused) {
         return;
     }
@@ -1570,13 +1655,6 @@
         ALOGW("pause called when renderer is gone or not set");
     }
 
-    sp<NuPlayerDriver> driver = mDriver.promote();
-    if (driver != NULL) {
-        int64_t now = systemTime();
-        int64_t played = now - mLastStartedPlayingTimeNs;
-
-        driver->notifyMorePlayingTimeUs((played+500)/1000);
-    }
 }
 
 bool NuPlayer::audioDecoderStillNeeded() {
@@ -2203,6 +2281,9 @@
     CHECK(mAudioDecoder == NULL);
     CHECK(mVideoDecoder == NULL);
 
+    stopPlaybackTimer("performReset");
+    stopRebufferingTimer(true);
+
     cancelPollDuration();
 
     ++mScanSourcesGeneration;
@@ -2455,6 +2536,7 @@
             if (mStarted) {
                 ALOGI("buffer low, pausing...");
 
+                startRebufferingTimer();
                 mPausedForBuffering = true;
                 onPause();
             }
@@ -2468,6 +2550,7 @@
             if (mStarted) {
                 ALOGI("buffer ready, resuming...");
 
+                stopRebufferingTimer(false);
                 mPausedForBuffering = false;
 
                 // do not resume yet if client didn't unpause
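
The new startPlaybackTimer()/stopPlaybackTimer() and startRebufferingTimer()/stopRebufferingTimer() pairs above share one pattern: record systemTime() on start (ignoring redundant starts), and on stop take the elapsed nanoseconds under mPlayingTimeLock, round them into the driver's microsecond counters with (elapsed + 500) / 1000, and clear the start timestamp. The class below is a minimal standalone sketch of that pattern in plain C++ (std::chrono in place of systemTime(), milliseconds instead of microseconds); IntervalTimer and its members are illustrative names, not part of NuPlayer.

    #include <chrono>
    #include <cstdint>
    #include <mutex>

    class IntervalTimer {
    public:
        void start() {
            std::lock_guard<std::mutex> lock(mLock);
            if (mStartedNs == 0) {            // ignore a second start() while running
                mStartedNs = nowNs();
            }
        }

        // Stops the timer and returns the elapsed interval in whole milliseconds,
        // rounded half up, or 0 if the timer was not running.
        int64_t stopMs() {
            std::lock_guard<std::mutex> lock(mLock);
            if (mStartedNs == 0) {
                return 0;
            }
            const int64_t elapsedNs = nowNs() - mStartedNs;
            mStartedNs = 0;
            return (elapsedNs + 500000) / 1000000;   // same rounding idea as (played + 500) / 1000
        }

    private:
        static int64_t nowNs() {
            using namespace std::chrono;
            return duration_cast<nanoseconds>(steady_clock::now().time_since_epoch()).count();
        }

        std::mutex mLock;
        int64_t mStartedNs = 0;   // 0 means "not running", like mLastStartedPlayingTimeNs
    };
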
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index c69835f..fda69e8 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -30,11 +30,12 @@
 struct AudioPlaybackRate;
 struct AVSyncSettings;
 class IDataSource;
+struct MediaClock;
 class MetaData;
 struct NuPlayerDriver;
 
 struct NuPlayer : public AHandler {
-    explicit NuPlayer(pid_t pid);
+    explicit NuPlayer(pid_t pid, const sp<MediaClock> &mediaClock);
 
     void setUID(uid_t uid);
 
@@ -51,7 +52,7 @@
 
     void setDataSourceAsync(const sp<DataSource> &source);
 
-    status_t getDefaultBufferingSettings(BufferingSettings* buffering /* nonnull */);
+    status_t getBufferingSettings(BufferingSettings* buffering /* nonnull */);
     status_t setBufferingSettings(const BufferingSettings& buffering);
 
     void prepareAsync();
@@ -72,6 +73,9 @@
     // Will notify the driver through "notifyResetComplete" once finished.
     void resetAsync();
 
+    // Request a notification when specified media time is reached.
+    status_t notifyAt(int64_t mediaTimeUs);
+
     // Will notify the driver through "notifySeekComplete" once finished
     // and needNotify is true.
     void seekToAsync(
@@ -139,6 +143,7 @@
         kWhatClosedCaptionNotify        = 'capN',
         kWhatRendererNotify             = 'renN',
         kWhatReset                      = 'rset',
+        kWhatNotifyTime                 = 'nfyT',
         kWhatSeek                       = 'seek',
         kWhatPause                      = 'paus',
         kWhatResume                     = 'rsme',
@@ -147,7 +152,7 @@
         kWhatGetTrackInfo               = 'gTrI',
         kWhatGetSelectedTrack           = 'gSel',
         kWhatSelectTrack                = 'selT',
-        kWhatGetDefaultBufferingSettings = 'gDBS',
+        kWhatGetBufferingSettings       = 'gBus',
         kWhatSetBufferingSettings       = 'sBuS',
         kWhatPrepareDrm                 = 'pDrm',
         kWhatReleaseDrm                 = 'rDrm',
@@ -157,6 +162,7 @@
     bool mUIDValid;
     uid_t mUID;
     pid_t mPID;
+    const sp<MediaClock> mMediaClock;
     Mutex mSourceLock;  // guard |mSource|.
     sp<Source> mSource;
     uint32_t mSourceFlags;
@@ -172,7 +178,14 @@
     int32_t mVideoDecoderGeneration;
     int32_t mRendererGeneration;
 
+    Mutex mPlayingTimeLock;
     int64_t mLastStartedPlayingTimeNs;
+    void stopPlaybackTimer(const char *where);
+    void startPlaybackTimer(const char *where);
+
+    int64_t mLastStartedRebufferingTimeNs;
+    void startRebufferingTimer();
+    void stopRebufferingTimer(bool exitingPlayback);
 
     int64_t mPreviousSeekTimeUs;
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
index 73b07bb..0a8b97f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
@@ -19,13 +19,13 @@
 #include <utils/Log.h>
 #include <inttypes.h>
 
-#include "avc_utils.h"
 #include "NuPlayerCCDecoder.h"
 
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/MediaDefs.h>
 
 namespace android {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index ac187cc..1b02adb 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -33,6 +33,7 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaDefs.h>
@@ -40,7 +41,6 @@
 #include <media/stagefright/SurfaceUtils.h>
 #include <gui/Surface.h>
 
-#include "avc_utils.h"
 #include "ATSParser.h"
 
 namespace android {
@@ -750,20 +750,28 @@
 
         buffer->meta()->setInt32("eos", true);
         reply->setInt32("eos", true);
-    } else if (mSkipRenderingUntilMediaTimeUs >= 0) {
+    }
+
+    mNumFramesTotal += !mIsAudio;
+
+    if (mSkipRenderingUntilMediaTimeUs >= 0) {
         if (timeUs < mSkipRenderingUntilMediaTimeUs) {
             ALOGV("[%s] dropping buffer at time %lld as requested.",
                      mComponentName.c_str(), (long long)timeUs);
 
             reply->post();
+            if (eos) {
+                notifyResumeCompleteIfNecessary();
+                if (mRenderer != NULL && !isDiscontinuityPending()) {
+                    mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
+                }
+            }
             return true;
         }
 
         mSkipRenderingUntilMediaTimeUs = -1;
     }
 
-    mNumFramesTotal += !mIsAudio;
-
     // wait until 1st frame comes out to signal resume complete
     notifyResumeCompleteIfNecessary();
 
@@ -937,7 +945,8 @@
                             mCurrentMaxVideoTemporalLayerId);
                 } else if (layerId > mCurrentMaxVideoTemporalLayerId) {
                     mCurrentMaxVideoTemporalLayerId = layerId;
-                } else if (layerId == 0 && mNumVideoTemporalLayerTotal > 1 && IsIDR(accessUnit)) {
+                } else if (layerId == 0 && mNumVideoTemporalLayerTotal > 1
+                        && IsIDR(accessUnit->data(), accessUnit->size())) {
                     mCurrentMaxVideoTemporalLayerId = mNumVideoTemporalLayerTotal - 1;
                 }
             }
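
The temporal-layer logic above now calls IsIDR(accessUnit->data(), accessUnit->size()) from the relocated avc_utils header instead of passing the ABuffer directly. For readers unfamiliar with that helper, the sketch below shows roughly what an IDR check over an H.264 Annex B access unit involves: walk the start codes and look for a NAL unit of type 5 (coded slice of an IDR picture). It is an illustration only, not the avc_utils implementation, and the sample byte arrays are fabricated.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    static bool isIdrAccessUnit(const uint8_t *data, size_t size) {
        for (size_t i = 0; i + 3 < size; ++i) {
            // Accept both 3-byte (00 00 01) and 4-byte (00 00 00 01) start codes.
            if (data[i] == 0x00 && data[i + 1] == 0x00 &&
                    (data[i + 2] == 0x01 ||
                     (data[i + 2] == 0x00 && i + 4 < size && data[i + 3] == 0x01))) {
                const size_t nalStart = i + (data[i + 2] == 0x01 ? 3 : 4);
                if (nalStart < size) {
                    const uint8_t nalType = data[nalStart] & 0x1f;
                    if (nalType == 5) {   // 5 == coded slice of an IDR picture
                        return true;
                    }
                }
            }
        }
        return false;
    }

    int main() {
        const uint8_t idrAu[] = {0x00, 0x00, 0x00, 0x01, 0x65, 0x88, 0x84};  // fabricated IDR slice
        const uint8_t nonIdrAu[] = {0x00, 0x00, 0x00, 0x01, 0x41, 0x9a};     // fabricated non-IDR slice
        std::printf("%d %d\n", isIdrAccessUnit(idrAu, sizeof(idrAu)),
                    isIdrAccessUnit(nonIdrAu, sizeof(nonIdrAu)));            // prints "1 0"
        return 0;
    }
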
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index dc29761..8aa06fc 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -28,6 +28,8 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/MediaClock.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
 
@@ -53,7 +55,12 @@
 static const char *kPlayerPlaying = "android.media.mediaplayer.playingMs";
 static const char *kPlayerError = "android.media.mediaplayer.err";
 static const char *kPlayerErrorCode = "android.media.mediaplayer.errcode";
+static const char *kPlayerErrorState = "android.media.mediaplayer.errstate";
 static const char *kPlayerDataSourceType = "android.media.mediaplayer.dataSource";
+//
+static const char *kPlayerRebuffering = "android.media.mediaplayer.rebufferingMs";
+static const char *kPlayerRebufferingCount = "android.media.mediaplayer.rebuffers";
+static const char *kPlayerRebufferingAtExit = "android.media.mediaplayer.rebufferExit";
 
 
 NuPlayerDriver::NuPlayerDriver(pid_t pid)
@@ -65,8 +72,12 @@
       mPositionUs(-1),
       mSeekInProgress(false),
       mPlayingTimeUs(0),
+      mRebufferingTimeUs(0),
+      mRebufferingEvents(0),
+      mRebufferingAtExit(false),
       mLooper(new ALooper),
-      mPlayer(new NuPlayer(pid)),
+      mMediaClock(new MediaClock),
+      mPlayer(new NuPlayer(pid, mMediaClock)),
       mPlayerFlags(0),
       mAnalyticsItem(NULL),
       mClientUid(-1),
@@ -76,6 +87,8 @@
     ALOGD("NuPlayerDriver(%p) created, clientPid(%d)", this, pid);
     mLooper->setName("NuPlayerDriver Looper");
 
+    mMediaClock->init();
+
     // set up an analytics record
     mAnalyticsItem = new MediaAnalyticsItem(kKeyPlayer);
     mAnalyticsItem->generateSessionID();
@@ -226,8 +239,8 @@
     return OK;
 }
 
-status_t NuPlayerDriver::getDefaultBufferingSettings(BufferingSettings* buffering) {
-    ALOGV("getDefaultBufferingSettings(%p)", this);
+status_t NuPlayerDriver::getBufferingSettings(BufferingSettings* buffering) {
+    ALOGV("getBufferingSettings(%p)", this);
     {
         Mutex::Autolock autoLock(mLock);
         if (mState == STATE_IDLE) {
@@ -235,7 +248,7 @@
         }
     }
 
-    return mPlayer->getDefaultBufferingSettings(buffering);
+    return mPlayer->getBufferingSettings(buffering);
 }
 
 status_t NuPlayerDriver::setBufferingSettings(const BufferingSettings& buffering) {
@@ -577,6 +590,12 @@
 
     mAnalyticsItem->setInt64(kPlayerPlaying, (mPlayingTimeUs+500)/1000 );
 
+    if (mRebufferingEvents != 0) {
+        mAnalyticsItem->setInt64(kPlayerRebuffering, (mRebufferingTimeUs+500)/1000 );
+        mAnalyticsItem->setInt32(kPlayerRebufferingCount, mRebufferingEvents);
+        mAnalyticsItem->setInt32(kPlayerRebufferingAtExit, mRebufferingAtExit);
+    }
+
     mAnalyticsItem->setCString(kPlayerDataSourceType, mPlayer->getDataSourceType());
 }
 
@@ -656,10 +675,18 @@
     mPositionUs = -1;
     mLooping = false;
     mPlayingTimeUs = 0;
+    mRebufferingTimeUs = 0;
+    mRebufferingEvents = 0;
+    mRebufferingAtExit = false;
 
     return OK;
 }
 
+status_t NuPlayerDriver::notifyAt(int64_t mediaTimeUs) {
+    ALOGV("notifyAt(%p), time:%lld", this, (long long)mediaTimeUs);
+    return mPlayer->notifyAt(mediaTimeUs);
+}
+
 status_t NuPlayerDriver::setLooping(int loop) {
     mLooping = loop != 0;
     return OK;
@@ -801,6 +828,17 @@
     mPlayingTimeUs += playingUs;
 }
 
+void NuPlayerDriver::notifyMoreRebufferingTimeUs(int64_t rebufferingUs) {
+    Mutex::Autolock autoLock(mLock);
+    mRebufferingTimeUs += rebufferingUs;
+    mRebufferingEvents++;
+}
+
+void NuPlayerDriver::notifyRebufferingWhenExit(bool status) {
+    Mutex::Autolock autoLock(mLock);
+    mRebufferingAtExit = status;
+}
+
 void NuPlayerDriver::notifySeekComplete() {
     ALOGV("notifySeekComplete(%p)", this);
     Mutex::Autolock autoLock(mLock);
@@ -956,6 +994,7 @@
                 if (ext2 != 0) {
                     mAnalyticsItem->setInt32(kPlayerErrorCode, ext2);
                 }
+                mAnalyticsItem->setCString(kPlayerErrorState, stateString(mState).c_str());
             }
             mAtEOS = true;
             break;
@@ -1052,4 +1091,30 @@
     return ret;
 }
 
+std::string NuPlayerDriver::stateString(State state) {
+    const char *rval = NULL;
+    char rawbuffer[16];  // allows "%d"
+
+    switch (state) {
+        case STATE_IDLE: rval = "IDLE"; break;
+        case STATE_SET_DATASOURCE_PENDING: rval = "SET_DATASOURCE_PENDING"; break;
+        case STATE_UNPREPARED: rval = "UNPREPARED"; break;
+        case STATE_PREPARING: rval = "PREPARING"; break;
+        case STATE_PREPARED: rval = "PREPARED"; break;
+        case STATE_RUNNING: rval = "RUNNING"; break;
+        case STATE_PAUSED: rval = "PAUSED"; break;
+        case STATE_RESET_IN_PROGRESS: rval = "RESET_IN_PROGRESS"; break;
+        case STATE_STOPPED: rval = "STOPPED"; break;
+        case STATE_STOPPED_AND_PREPARING: rval = "STOPPED_AND_PREPARING"; break;
+        case STATE_STOPPED_AND_PREPARED: rval = "STOPPED_AND_PREPARED"; break;
+        default:
+            // fall back to the numeric state; rawbuffer is local and copied into the returned std::string
+            snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
+            rval = rawbuffer;
+            break;
+    }
+
+    return rval;
+}
+
 }  // namespace android
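
notifyAt() is new in this patch: the driver forwards a media timestamp to NuPlayer, which registers a kWhatNotifyTime message with the shared MediaClock (mMediaClock->addTimer(notify, mediaTimeUs)) and, when the timer fires, reports MEDIA_NOTIFY_TIME to the listener. The snippet below is a simplified, standalone analogue of that idea built on std::multimap and callbacks; MediaTimeNotifier and onMediaTime() are illustrative names, not NuPlayer or MediaClock APIs.

    #include <cstdint>
    #include <functional>
    #include <map>
    #include <mutex>
    #include <utility>
    #include <vector>

    class MediaTimeNotifier {
    public:
        using Callback = std::function<void(int64_t /* mediaTimeUs */)>;

        // Analogous to notifyAt(): remember a deadline expressed in media time.
        void notifyAt(int64_t mediaTimeUs, Callback cb) {
            std::lock_guard<std::mutex> lock(mLock);
            mTimers.emplace(mediaTimeUs, std::move(cb));
        }

        // Called by whoever advances the clock; fires every callback whose
        // deadline has been reached, analogous to the kWhatNotifyTime handler.
        void onMediaTime(int64_t nowMediaUs) {
            std::vector<std::pair<int64_t, Callback>> due;
            {
                std::lock_guard<std::mutex> lock(mLock);
                const auto end = mTimers.upper_bound(nowMediaUs);
                for (auto it = mTimers.begin(); it != end; ++it) {
                    due.emplace_back(it->first, std::move(it->second));
                }
                mTimers.erase(mTimers.begin(), end);
            }
            for (auto &timer : due) {
                timer.second(timer.first);   // like notifyListener(MEDIA_NOTIFY_TIME, timerUs, 0)
            }
        }

    private:
        std::mutex mLock;
        std::multimap<int64_t, Callback> mTimers;   // media time (us) -> callback
    };
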
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index d0cf1dd..ad878f8 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -22,6 +22,7 @@
 namespace android {
 
 struct ALooper;
+struct MediaClock;
 struct NuPlayer;
 
 struct NuPlayerDriver : public MediaPlayerInterface {
@@ -45,7 +46,7 @@
     virtual status_t setVideoSurfaceTexture(
             const sp<IGraphicBufferProducer> &bufferProducer);
 
-    virtual status_t getDefaultBufferingSettings(
+    virtual status_t getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) override;
     virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
 
@@ -64,6 +65,7 @@
     virtual status_t getCurrentPosition(int *msec);
     virtual status_t getDuration(int *msec);
     virtual status_t reset();
+    virtual status_t notifyAt(int64_t mediaTimeUs) override;
     virtual status_t setLooping(int loop);
     virtual player_type playerType();
     virtual status_t invoke(const Parcel &request, Parcel *reply);
@@ -82,6 +84,8 @@
     void notifySetSurfaceComplete();
     void notifyDuration(int64_t durationUs);
     void notifyMorePlayingTimeUs(int64_t timeUs);
+    void notifyMoreRebufferingTimeUs(int64_t timeUs);
+    void notifyRebufferingWhenExit(bool status);
     void notifySeekComplete();
     void notifySeekComplete_l();
     void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
@@ -109,6 +113,8 @@
         STATE_STOPPED_AND_PREPARED,     // equivalent to PAUSED, but seek complete
     };
 
+    std::string stateString(State state);
+
     mutable Mutex mLock;
     Condition mCondition;
 
@@ -124,9 +130,13 @@
     int64_t mPositionUs;
     bool mSeekInProgress;
     int64_t mPlayingTimeUs;
+    int64_t mRebufferingTimeUs;
+    int32_t mRebufferingEvents;
+    bool mRebufferingAtExit;
     // <<<
 
     sp<ALooper> mLooper;
+    const sp<MediaClock> mMediaClock;
     const sp<NuPlayer> mPlayer;
     sp<AudioSink> mAudioSink;
     uint32_t mPlayerFlags;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index bd866cb..cc7f688 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -18,13 +18,13 @@
 #define LOG_TAG "NuPlayerRenderer"
 #include <utils/Log.h>
 
+#include "AWakeLock.h"
 #include "NuPlayerRenderer.h"
 #include <algorithm>
 #include <cutils/properties.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/foundation/AWakeLock.h>
 #include <media/stagefright/MediaClock.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
@@ -89,6 +89,7 @@
 
 NuPlayer::Renderer::Renderer(
         const sp<MediaPlayerBase::AudioSink> &sink,
+        const sp<MediaClock> &mediaClock,
         const sp<AMessage> &notify,
         uint32_t flags)
     : mAudioSink(sink),
@@ -103,11 +104,13 @@
       mAudioDrainGeneration(0),
       mVideoDrainGeneration(0),
       mAudioEOSGeneration(0),
+      mMediaClock(mediaClock),
       mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
       mAudioFirstAnchorTimeMediaUs(-1),
       mAnchorTimeMediaUs(-1),
       mAnchorNumFramesWritten(-1),
       mVideoLateByUs(0ll),
+      mNextVideoTimeMediaUs(-1),
       mHasAudio(false),
       mHasVideo(false),
       mNotifyCompleteAudio(false),
@@ -130,7 +133,7 @@
       mLastAudioBufferDrained(0),
       mUseAudioCallback(false),
       mWakeLock(new AWakeLock()) {
-    mMediaClock = new MediaClock;
+    CHECK(mediaClock != NULL);
     mPlaybackRate = mPlaybackSettings.mSpeed;
     mMediaClock->setPlaybackRate(mPlaybackRate);
 }
@@ -149,7 +152,6 @@
         flushQueue(&mVideoQueue);
     }
     mWakeLock.clear();
-    mMediaClock.clear();
     mVideoScheduler.clear();
     mNotify.clear();
     mAudioSink.clear();
@@ -300,6 +302,7 @@
 
         mMediaClock->clearAnchor();
         mVideoLateByUs = 0;
+        mNextVideoTimeMediaUs = -1;
         mSyncQueues = false;
     }
 
@@ -548,8 +551,10 @@
                 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                          (status_t)OK);
 
+                // Handle AudioTrack race when start is immediately called after flush.
                 uint32_t numFramesPendingPlayout =
-                    mNumFramesWritten - numFramesPlayed;
+                    (mNumFramesWritten > numFramesPlayed ?
+                        mNumFramesWritten - numFramesPlayed : 0);
 
                 // This is how long the audio sink will have data to
                 // play back.
@@ -1245,82 +1250,49 @@
         return;
     }
 
-    bool needRepostDrainVideoQueue = false;
-    int64_t delayUs;
     int64_t nowUs = ALooper::GetNowUs();
-    int64_t realTimeUs;
     if (mFlags & FLAG_REAL_TIME) {
-        int64_t mediaTimeUs;
-        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
-        realTimeUs = mediaTimeUs;
-    } else {
-        int64_t mediaTimeUs;
-        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+        int64_t realTimeUs;
+        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &realTimeUs));
 
-        {
-            Mutex::Autolock autoLock(mLock);
-            if (mAnchorTimeMediaUs < 0) {
-                mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
-                mAnchorTimeMediaUs = mediaTimeUs;
-                realTimeUs = nowUs;
-            } else if (!mVideoSampleReceived) {
-                // Always render the first video frame.
-                realTimeUs = nowUs;
-            } else if (mAudioFirstAnchorTimeMediaUs < 0
-                || mMediaClock->getRealTimeFor(mediaTimeUs, &realTimeUs) == OK) {
-                realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
-            } else if (mediaTimeUs - mAudioFirstAnchorTimeMediaUs >= 0) {
-                needRepostDrainVideoQueue = true;
-                realTimeUs = nowUs;
-            } else {
-                realTimeUs = nowUs;
-            }
-        }
-        if (!mHasAudio) {
-            // smooth out videos >= 10fps
-            mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
-        }
+        realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
 
-        // Heuristics to handle situation when media time changed without a
-        // discontinuity. If we have not drained an audio buffer that was
-        // received after this buffer, repost in 10 msec. Otherwise repost
-        // in 500 msec.
-        delayUs = realTimeUs - nowUs;
-        int64_t postDelayUs = -1;
-        if (delayUs > 500000) {
-            postDelayUs = 500000;
-            if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
-                postDelayUs = 10000;
-            }
-        } else if (needRepostDrainVideoQueue) {
-            // CHECK(mPlaybackRate > 0);
-            // CHECK(mAudioFirstAnchorTimeMediaUs >= 0);
-            // CHECK(mediaTimeUs - mAudioFirstAnchorTimeMediaUs >= 0);
-            postDelayUs = mediaTimeUs - mAudioFirstAnchorTimeMediaUs;
-            postDelayUs /= mPlaybackRate;
-        }
+        int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
 
-        if (postDelayUs >= 0) {
-            msg->setWhat(kWhatPostDrainVideoQueue);
-            msg->post(postDelayUs);
-            mVideoScheduler->restart();
-            ALOGI("possible video time jump of %dms (%lld : %lld) or uninitialized media clock,"
-                    " retrying in %dms",
-                    (int)(delayUs / 1000), (long long)mediaTimeUs,
-                    (long long)mAudioFirstAnchorTimeMediaUs, (int)(postDelayUs / 1000));
-            mDrainVideoQueuePending = true;
-            return;
-        }
+        int64_t delayUs = realTimeUs - nowUs;
+
+        ALOGW_IF(delayUs > 500000, "unusually high delayUs: %lld", (long long)delayUs);
+        // post 2 display refreshes before rendering is due
+        msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
+
+        mDrainVideoQueuePending = true;
+        return;
     }
 
-    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
-    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
+    int64_t mediaTimeUs;
+    CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
 
-    delayUs = realTimeUs - nowUs;
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mAnchorTimeMediaUs < 0) {
+            mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
+            mAnchorTimeMediaUs = mediaTimeUs;
+        }
+    }
+    mNextVideoTimeMediaUs = mediaTimeUs + 100000;
+    if (!mHasAudio) {
+        // smooth out videos >= 10fps
+        mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+    }
 
-    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
-    // post 2 display refreshes before rendering is due
-    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
+    if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
+        msg->post();
+    } else {
+        int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
+
+        // post 2 display refreshes before rendering is due
+        mMediaClock->addTimer(msg, mediaTimeUs, -twoVsyncsUs);
+    }
 
     mDrainVideoQueuePending = true;
 }
@@ -1354,6 +1326,7 @@
 
         realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
     }
+    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
 
     bool tooLate = false;
 
@@ -1440,6 +1413,14 @@
     if (audio) {
         // Video might outlive audio. Clear anchor to enable video only case.
         mAnchorTimeMediaUs = -1;
+        mHasAudio = false;
+        if (mNextVideoTimeMediaUs >= 0) {
+            int64_t mediaUs = 0;
+            mMediaClock->getMediaTime(ALooper::GetNowUs(), &mediaUs);
+            if (mNextVideoTimeMediaUs > mediaUs) {
+                mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+            }
+        }
     }
 }
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index f58b79c..a047975 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -36,6 +36,7 @@
         FLAG_OFFLOAD_AUDIO = 2,
     };
     Renderer(const sp<MediaPlayerBase::AudioSink> &sink,
+             const sp<MediaClock> &mediaClock,
              const sp<AMessage> &notify,
              uint32_t flags = 0);
 
@@ -165,7 +166,7 @@
     int32_t mVideoDrainGeneration;
     int32_t mAudioEOSGeneration;
 
-    sp<MediaClock> mMediaClock;
+    const sp<MediaClock> mMediaClock;
     float mPlaybackRate; // audio track rate
 
     AudioPlaybackRate mPlaybackSettings;
@@ -176,6 +177,7 @@
     int64_t mAnchorTimeMediaUs;
     int64_t mAnchorNumFramesWritten;
     int64_t mVideoLateByUs;
+    int64_t mNextVideoTimeMediaUs;
     bool mHasAudio;
     bool mHasVideo;
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 8ba9c0d..9f5ef78 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -66,7 +66,7 @@
         : mNotify(notify) {
     }
 
-    virtual status_t getDefaultBufferingSettings(
+    virtual status_t getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) = 0;
     virtual status_t setBufferingSettings(const BufferingSettings& buffering) = 0;
 
@@ -121,10 +121,6 @@
         return INVALID_OPERATION;
     }
 
-    virtual status_t setBuffers(bool /* audio */, Vector<MediaBuffer *> &/* buffers */) {
-        return INVALID_OPERATION;
-    }
-
     virtual bool isRealTime() const {
         return false;
     }
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 8b3d0dc..851217b 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -62,7 +62,8 @@
       mSeekGeneration(0),
       mEOSTimeoutAudio(0),
       mEOSTimeoutVideo(0) {
-    getDefaultBufferingSettings(&mBufferingSettings);
+    mBufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+    mBufferingSettings.mResumePlaybackMarkMs = kOverflowMarkMs;
     if (headers) {
         mExtraHeaders = *headers;
 
@@ -84,32 +85,17 @@
     }
 }
 
-status_t NuPlayer::RTSPSource::getDefaultBufferingSettings(
+status_t NuPlayer::RTSPSource::getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) {
-    buffering->mInitialBufferingMode = BUFFERING_MODE_TIME_ONLY;
-    buffering->mRebufferingMode = BUFFERING_MODE_TIME_ONLY;
-    buffering->mInitialWatermarkMs = kPrepareMarkMs;
-    buffering->mRebufferingWatermarkLowMs = kUnderflowMarkMs;
-    buffering->mRebufferingWatermarkHighMs = kOverflowMarkMs;
-
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    *buffering = mBufferingSettings;
     return OK;
 }
 
 status_t NuPlayer::RTSPSource::setBufferingSettings(const BufferingSettings& buffering) {
-    if (mLooper == NULL) {
-        mBufferingSettings = buffering;
-        return OK;
-    }
-
-    sp<AMessage> msg = new AMessage(kWhatSetBufferingSettings, this);
-    writeToAMessage(msg, buffering);
-    sp<AMessage> response;
-    status_t err = msg->postAndAwaitResponse(&response);
-    if (err == OK && response != NULL) {
-        CHECK(response->findInt32("err", &err));
-    }
-
-    return err;
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    mBufferingSettings = buffering;
+    return OK;
 }
 
 void NuPlayer::RTSPSource::prepareAsync() {
@@ -356,8 +342,17 @@
         }
         int64_t bufferedDurationUs = src->getBufferedDurationUs(&finalResult);
 
+        int64_t initialMarkUs;
+        int64_t maxRebufferingMarkUs;
+        {
+            Mutex::Autolock _l(mBufferingSettingsLock);
+            initialMarkUs = mBufferingSettings.mInitialMarkMs * 1000ll;
+            // TODO: maxRebufferingMarkUs could be larger than
+            // mBufferingSettings.mResumePlaybackMarkMs * 1000ll.
+            maxRebufferingMarkUs = mBufferingSettings.mResumePlaybackMarkMs * 1000ll;
+        }
         // isFinished when duration is 0 checks for EOS result only
-        if (bufferedDurationUs > mBufferingSettings.mInitialWatermarkMs * 1000
+        if (bufferedDurationUs > initialMarkUs
                 || src->isFinished(/* duration */ 0)) {
             ++preparedCount;
         }
@@ -366,15 +361,15 @@
             ++overflowCount;
             ++finishedCount;
         } else {
-            if (bufferedDurationUs < mBufferingSettings.mRebufferingWatermarkLowMs * 1000) {
+            // TODO: redefine kUnderflowMarkMs to a fair value.
+            if (bufferedDurationUs < kUnderflowMarkMs * 1000) {
                 ++underflowCount;
             }
-            if (bufferedDurationUs > mBufferingSettings.mRebufferingWatermarkHighMs * 1000) {
+            if (bufferedDurationUs > maxRebufferingMarkUs) {
                 ++overflowCount;
             }
             int64_t startServerMarkUs =
-                    (mBufferingSettings.mRebufferingWatermarkLowMs
-                        + mBufferingSettings.mRebufferingWatermarkHighMs) / 2 * 1000ll;
+                    (kUnderflowMarkMs * 1000ll + maxRebufferingMarkUs) / 2;
             if (bufferedDurationUs < startServerMarkUs) {
                 ++startCount;
             }
@@ -512,36 +507,6 @@
     } else if (msg->what() == kWhatSignalEOS) {
         onSignalEOS(msg);
         return;
-    } else if (msg->what() == kWhatSetBufferingSettings) {
-        sp<AReplyToken> replyID;
-        CHECK(msg->senderAwaitsResponse(&replyID));
-
-        BufferingSettings buffering;
-        readFromAMessage(msg, &buffering);
-
-        status_t err = OK;
-        if (buffering.IsSizeBasedBufferingMode(buffering.mInitialBufferingMode)
-                || buffering.IsSizeBasedBufferingMode(buffering.mRebufferingMode)
-                || (buffering.mRebufferingWatermarkLowMs > buffering.mRebufferingWatermarkHighMs
-                    && buffering.IsTimeBasedBufferingMode(buffering.mRebufferingMode))) {
-            err = BAD_VALUE;
-        } else {
-            if (buffering.mInitialBufferingMode == BUFFERING_MODE_NONE) {
-                buffering.mInitialWatermarkMs = BufferingSettings::kNoWatermark;
-            }
-            if (buffering.mRebufferingMode == BUFFERING_MODE_NONE) {
-                buffering.mRebufferingWatermarkLowMs = BufferingSettings::kNoWatermark;
-                buffering.mRebufferingWatermarkHighMs = INT32_MAX;
-            }
-
-            mBufferingSettings = buffering;
-        }
-
-        sp<AMessage> response = new AMessage;
-        response->setInt32("err", err);
-        response->postReply(replyID);
-
-        return;
     }
 
     CHECK_EQ(msg->what(), kWhatNotify);
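
The RTSP source no longer round-trips buffering settings through a kWhatSetBufferingSettings message; getBufferingSettings() and setBufferingSettings() now copy the whole BufferingSettings struct while holding mBufferingSettingsLock, and the polling code snapshots the marks under the same lock before using them. A minimal sketch of that copy-under-lock pattern, with illustrative names:

    #include <mutex>

    struct Settings {                       // stand-in for BufferingSettings
        int initialMarkMs = 0;
        int resumePlaybackMarkMs = 0;
    };

    class SettingsHolder {
    public:
        Settings get() const {
            std::lock_guard<std::mutex> lock(mLock);
            return mSettings;               // copied out while the lock is held
        }

        void set(const Settings &settings) {
            std::lock_guard<std::mutex> lock(mLock);
            mSettings = settings;           // replaced atomically with respect to get()
        }

    private:
        mutable std::mutex mLock;
        Settings mSettings;
    };
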
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index 0812991..03fce08 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -40,7 +40,7 @@
             uid_t uid = 0,
             bool isSDP = false);
 
-    virtual status_t getDefaultBufferingSettings(
+    virtual status_t getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) override;
     virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
 
@@ -71,7 +71,6 @@
         kWhatPerformSeek     = 'seek',
         kWhatPollBuffering   = 'poll',
         kWhatSignalEOS       = 'eos ',
-        kWhatSetBufferingSettings = 'sBuS',
     };
 
     enum State {
@@ -109,6 +108,8 @@
     bool mBuffering;
     bool mInPreparationPhase;
     bool mEOSPending;
+
+    Mutex mBufferingSettingsLock;
     BufferingSettings mBufferingSettings;
 
     sp<ALooper> mLooper;
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index fc0803b..b3da53f 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -24,10 +24,11 @@
 #include "AnotherPacketSource.h"
 #include "NuPlayerStreamListener.h"
 
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/foundation/MediaKeys.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
 
@@ -51,19 +52,14 @@
     }
 }
 
-status_t NuPlayer::StreamingSource::getDefaultBufferingSettings(
+status_t NuPlayer::StreamingSource::getBufferingSettings(
         BufferingSettings *buffering /* nonnull */) {
     *buffering = BufferingSettings();
     return OK;
 }
 
 status_t NuPlayer::StreamingSource::setBufferingSettings(
-        const BufferingSettings &buffering) {
-    if (buffering.mInitialBufferingMode != BUFFERING_MODE_NONE
-            || buffering.mRebufferingMode != BUFFERING_MODE_NONE) {
-        return BAD_VALUE;
-    }
-
+        const BufferingSettings & /* buffering */) {
     return OK;
 }
 
@@ -119,7 +115,7 @@
             int32_t mask;
             if (extra != NULL
                     && extra->findInt32(
-                        IStreamListener::kKeyDiscontinuityMask, &mask)) {
+                        kIStreamListenerKeyDiscontinuityMask, &mask)) {
                 if (mask == 0) {
                     ALOGE("Client specified an illegal discontinuity type.");
                     setError(ERROR_UNSUPPORTED);
@@ -147,7 +143,7 @@
                     int64_t mediaTimeUs;
                     memcpy(&mediaTimeUs, &buffer[2], sizeof(mediaTimeUs));
 
-                    extra->setInt64(IStreamListener::kKeyMediaTimeUs, mediaTimeUs);
+                    extra->setInt64(kATSParserKeyMediaTimeUs, mediaTimeUs);
                 }
 
                 mTSParser->signalDiscontinuity(
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h
index 2e1d2b3..76d1d0b 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.h
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h
@@ -32,7 +32,7 @@
             const sp<AMessage> &notify,
             const sp<IStreamSource> &source);
 
-    virtual status_t getDefaultBufferingSettings(
+    virtual status_t getBufferingSettings(
             BufferingSettings* buffering /* nonnull */) override;
     virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
 
diff --git a/media/libmediaplayerservice/tests/Android.bp b/media/libmediaplayerservice/tests/Android.bp
new file mode 100644
index 0000000..d6c1d27
--- /dev/null
+++ b/media/libmediaplayerservice/tests/Android.bp
@@ -0,0 +1,24 @@
+cc_test {
+
+    name: "DrmSessionManager_test",
+
+    tags: ["tests"],
+
+    srcs: ["DrmSessionManager_test.cpp"],
+
+    shared_libs: [
+        "liblog",
+        "libmediaplayerservice",
+        "libmediadrm",
+        "libutils",
+        "android.hardware.drm@1.0",
+    ],
+
+    compile_multilib: "32",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+}
diff --git a/media/libmediaplayerservice/tests/Android.mk b/media/libmediaplayerservice/tests/Android.mk
deleted file mode 100644
index 0b9b85f..0000000
--- a/media/libmediaplayerservice/tests/Android.mk
+++ /dev/null
@@ -1,28 +0,0 @@
-# Build the unit tests.
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := DrmSessionManager_test
-
-LOCAL_MODULE_TAGS := tests
-
-LOCAL_SRC_FILES := \
-	DrmSessionManager_test.cpp \
-
-LOCAL_SHARED_LIBRARIES := \
-	liblog \
-	libmediaplayerservice \
-	libmediadrm \
-	libutils \
-	android.hardware.drm@1.0 \
-
-LOCAL_C_INCLUDES := \
-	frameworks/av/include \
-	frameworks/av/media/libmediaplayerservice \
-
-LOCAL_CFLAGS += -Werror -Wall
-
-LOCAL_32_BIT_ONLY := true
-
-include $(BUILD_NATIVE_TEST)
-
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 4220b77..a4df38d 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -41,11 +41,8 @@
         "AudioBufferProviderSource.cpp",
         "AudioStreamInSource.cpp",
         "AudioStreamOutSink.cpp",
-        "NBLog.cpp",
-        "PerformanceAnalysis.cpp",
         "Pipe.cpp",
         "PipeReader.cpp",
-        "ReportPerformance.cpp",
         "SourceAudioBufferProvider.cpp",
     ],
 
diff --git a/media/libnbaio/OWNERS b/media/libnbaio/OWNERS
index f9cb567..eece71f 100644
--- a/media/libnbaio/OWNERS
+++ b/media/libnbaio/OWNERS
@@ -1 +1,2 @@
 gkasten@google.com
+hunga@google.com
diff --git a/media/libnbaio/PerformanceAnalysis.cpp b/media/libnbaio/PerformanceAnalysis.cpp
deleted file mode 100644
index fb3bddc..0000000
--- a/media/libnbaio/PerformanceAnalysis.cpp
+++ /dev/null
@@ -1,374 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-#define LOG_TAG "PerformanceAnalysis"
-// #define LOG_NDEBUG 0
-
-#include <algorithm>
-#include <climits>
-#include <deque>
-#include <iostream>
-#include <math.h>
-#include <numeric>
-#include <vector>
-#include <stdarg.h>
-#include <stdint.h>
-#include <stdio.h>
-#include <string.h>
-#include <sys/prctl.h>
-#include <time.h>
-#include <new>
-#include <audio_utils/roundup.h>
-#include <media/nbaio/NBLog.h>
-#include <media/nbaio/PerformanceAnalysis.h>
-#include <media/nbaio/ReportPerformance.h>
-// #include <utils/CallStack.h> // used to print callstack
-#include <utils/Log.h>
-#include <utils/String8.h>
-
-#include <queue>
-#include <utility>
-
-namespace android {
-
-namespace ReportPerformance {
-
-PerformanceAnalysis::PerformanceAnalysis() {
-    // These variables will be (FIXME) learned from the data
-    kPeriodMs = 4; // typical buffer period (mode)
-    // average number of Ms spent processing buffer
-    kPeriodMsCPU = static_cast<int>(kPeriodMs * kRatio);
-}
-
-// converts a time series into a map. key: buffer period length. value: count
-static std::map<int, int> buildBuckets(const std::vector<int64_t> &samples) {
-    // TODO allow buckets of variable resolution
-    std::map<int, int> buckets;
-    for (size_t i = 1; i < samples.size(); ++i) {
-        ++buckets[deltaMs(samples[i - 1], samples[i])];
-    }
-    return buckets;
-}
-
-static int widthOf(int x) {
-    int width = 0;
-    while (x > 0) {
-        ++width;
-        x /= 10;
-    }
-    return width;
-}
-
-// Given a series of audio processing wakeup timestamps,
-// buckets the time intervals into a histogram, searches for
-// outliers, analyzes the outlier series for unexpectedly
-// small or large values and stores these as peaks, and flushes
-// the timestamp series from memory.
-void PerformanceAnalysis::processAndFlushTimeStampSeries() {
-    // 1) analyze the series to store all outliers and their exact timestamps:
-    storeOutlierData(mTimeStampSeries);
-
-    // 2) detect peaks in the outlier series
-    detectPeaks();
-
-    // 3) compute its histogram, append to mRecentHists and clear the time series
-    mRecentHists.emplace_back(static_cast<timestamp>(mTimeStampSeries[0]),
-                              buildBuckets(mTimeStampSeries));
-    // do not let mRecentHists exceed capacity
-    // ALOGD("mRecentHists size: %d", static_cast<int>(mRecentHists.size()));
-    if (mRecentHists.size() >= kRecentHistsCapacity) {
-        //  ALOGD("popped back mRecentHists");
-        mRecentHists.pop_front();
-    }
-    mTimeStampSeries.clear();
-}
-
-// forces short-term histogram storage to avoid adding idle audio time interval
-// to buffer period data
-void PerformanceAnalysis::handleStateChange() {
-    ALOGD("handleStateChange");
-    processAndFlushTimeStampSeries();
-    return;
-}
-
-// Takes a single buffer period timestamp entry information and stores it in a
-// temporary series of timestamps. Once the series is full, the data is analyzed,
-// stored, and emptied.
-void PerformanceAnalysis::logTsEntry(int64_t ts) {
-    // TODO might want to filter excessively high outliers, which are usually caused
-    // by the thread being inactive.
-    // Store time series data for each reader in order to bucket it once there
-    // is enough data. Then, write to recentHists as a histogram.
-    mTimeStampSeries.push_back(ts);
-    // if length of the time series has reached kShortHistSize samples,
-    // analyze the data and flush the timestamp series from memory
-    if (mTimeStampSeries.size() >= kShortHistSize) {
-        processAndFlushTimeStampSeries();
-    }
-}
-
-// When the short-term histogram array mRecentHists has reached capacity,
-// merge histograms for data compression and store them in mLongTermHists
-// clears mRecentHists
-// TODO: have logTsEntry write directly to mLongTermHists, discard mRecentHists,
-// start a new histogram when a peak occurs
-void PerformanceAnalysis::processAndFlushRecentHists() {
-
-    // Buckets is used to aggregate short-term histograms.
-    Histogram buckets;
-    timestamp startingTs = mRecentHists[0].first;
-
-    for (const auto &shortHist: mRecentHists) {
-        // If the time between starting and ending timestamps has reached the maximum,
-        // add the current histogram (buckets) to the long-term histogram buffer,
-        // clear buckets, and start a new long-term histogram aggregation process.
-        if (deltaMs(startingTs, shortHist.first) >= kMaxHistTimespanMs) {
-            mLongTermHists.emplace_back(startingTs, std::move(buckets));
-            buckets.clear();
-            startingTs = shortHist.first;
-            // When memory is full, delete oldest histogram
-            // TODO use a circular buffer
-            if (mLongTermHists.size() >= kLongTermHistsCapacity) {
-                mLongTermHists.pop_front();
-            }
-        }
-
-        // add current histogram to buckets
-        for (const auto &countPair : shortHist.second) {
-            buckets[countPair.first] += countPair.second;
-        }
-    }
-    mRecentHists.clear();
-    // TODO: decide when/where to call writeToFile
-    // TODO: add a thread-specific extension to the file name
-    static const char* const kName = (const char *) "/data/misc/audioserver/sample_results.txt";
-    writeToFile(mOutlierData, mLongTermHists, kName, false);
-}
-
-// Given a series of outlier intervals (mOutlier data),
-// looks for changes in distribution (peaks), which can be either positive or negative.
-// The function sets the mean to the starting value and sigma to 0, and updates
-// them as long as no peak is detected. When a value is more than 'threshold'
-// standard deviations from the mean, a peak is detected and the mean and sigma
-// are set to the peak value and 0.
-void PerformanceAnalysis::detectPeaks() {
-    if (mOutlierData.empty()) {
-        return;
-    }
-
-    // compute mean of the distribution. Used to check whether a value is large
-    const double kTypicalDiff = std::accumulate(
-        mOutlierData.begin(), mOutlierData.end(), 0,
-        [](auto &a, auto &b){return a + b.first;}) / mOutlierData.size();
-    // ALOGD("typicalDiff %f", kTypicalDiff);
-
-    // iterator at the beginning of a sequence, or updated to the most recent peak
-    std::deque<std::pair<uint64_t, uint64_t>>::iterator start = mOutlierData.begin();
-    // the mean and standard deviation are updated every time a peak is detected
-    // initialize first time. The mean from the previous sequence is stored
-    // for the next sequence. Here, they are initialized for the first time.
-    if (mPeakDetectorMean < 0) {
-        mPeakDetectorMean = static_cast<double>(start->first);
-        mPeakDetectorSd = 0;
-    }
-    auto sqr = [](auto x){ return x * x; };
-    for (auto it = mOutlierData.begin(); it != mOutlierData.end(); ++it) {
-        // no surprise occurred:
-        // the new element is a small number of standard deviations from the mean
-        if ((fabs(it->first - mPeakDetectorMean) < kStddevThreshold * mPeakDetectorSd) ||
-             // or: right after peak has been detected, the delta is smaller than average
-            (mPeakDetectorSd == 0 && fabs(it->first - mPeakDetectorMean) < kTypicalDiff)) {
-            // update the mean and sd:
-            // count number of elements (distance between start iterator and current)
-            const int kN = std::distance(start, it) + 1;
-            // usual formulas for mean and sd
-            mPeakDetectorMean = std::accumulate(start, it + 1, 0.0,
-                                   [](auto &a, auto &b){return a + b.first;}) / kN;
-            mPeakDetectorSd = sqrt(std::accumulate(start, it + 1, 0.0,
-                      [=](auto &a, auto &b){ return a + sqr(b.first - mPeakDetectorMean);})) /
-                      ((kN > 1)? kN - 1 : kN); // kN - 1: mean is correlated with variance
-        }
-        // surprising value: store peak timestamp and reset mean, sd, and start iterator
-        else {
-            mPeakTimestamps.emplace_back(it->second);
-            // TODO: remove pop_front once a circular buffer is in place
-            if (mPeakTimestamps.size() >= kPeakSeriesSize) {
-                mPeakTimestamps.pop_front();
-            }
-            mPeakDetectorMean = static_cast<double>(it->first);
-            mPeakDetectorSd = 0;
-            start = it;
-        }
-    }
-    return;
-}
-
-// Called by LogTsEntry. The input is a vector of timestamps.
-// Finds outliers and writes to mOutlierdata.
-// Each value in mOutlierdata consists of: <outlier timestamp, time elapsed since previous outlier>.
-// e.g. timestamps (ms) 1, 4, 5, 16, 18, 28 will produce pairs (4, 5), (13, 18).
-// This function is applied to the time series before it is converted into a histogram.
-void PerformanceAnalysis::storeOutlierData(const std::vector<int64_t> &timestamps) {
-    if (timestamps.size() < 1) {
-        return;
-    }
-    // first pass: need to initialize
-    if (mElapsed == 0) {
-        mPrevNs = timestamps[0];
-    }
-    for (const auto &ts: timestamps) {
-        const uint64_t diffMs = static_cast<uint64_t>(deltaMs(mPrevNs, ts));
-        if (diffMs >= static_cast<uint64_t>(kOutlierMs)) {
-            mOutlierData.emplace_back(mElapsed, static_cast<uint64_t>(mPrevNs));
-            // Remove oldest value if the vector is full
-            // TODO: remove pop_front once circular buffer is in place
-            // FIXME: make sure kShortHistSize is large enough that that data will never be lost
-            // before being written to file or to a FIFO
-            if (mOutlierData.size() >= kOutlierSeriesSize) {
-                mOutlierData.pop_front();
-            }
-            mElapsed = 0;
-        }
-        mElapsed += diffMs;
-        mPrevNs = ts;
-    }
-}
-
-
-// FIXME: delete this temporary test code, recycled for various new functions
-void PerformanceAnalysis::testFunction() {
-    // produces values (4: 5000000), (13: 18000000)
-    // ns timestamps of buffer periods
-    const std::vector<int64_t>kTempTestData = {1000000, 4000000, 5000000,
-                                               16000000, 18000000, 28000000};
-    PerformanceAnalysis::storeOutlierData(kTempTestData);
-    for (const auto &outlier: mOutlierData) {
-        ALOGE("PerformanceAnalysis test %lld: %lld",
-              static_cast<long long>(outlier.first), static_cast<long long>(outlier.second));
-    }
-    detectPeaks();
-}
-
-// TODO Make it return a std::string instead of modifying body --> is this still relevant?
-// TODO consider changing all ints to uint32_t or uint64_t
-// TODO: move this to ReportPerformance, probably make it a friend function of PerformanceAnalysis
-void PerformanceAnalysis::reportPerformance(String8 *body, int maxHeight) {
-    if (mRecentHists.size() < 1) {
-        ALOGD("reportPerformance: mRecentHists is empty");
-        return;
-    }
-    ALOGD("reportPerformance: hists size %d", static_cast<int>(mRecentHists.size()));
-    // TODO: more elaborate data analysis
-    std::map<int, int> buckets;
-    for (const auto &shortHist: mRecentHists) {
-        for (const auto &countPair : shortHist.second) {
-            buckets[countPair.first] += countPair.second;
-        }
-    }
-
-    // underscores and spaces length corresponds to maximum width of histogram
-    static const int kLen = 40;
-    std::string underscores(kLen, '_');
-    std::string spaces(kLen, ' ');
-
-    auto it = buckets.begin();
-    int maxDelta = it->first;
-    int maxCount = it->second;
-    // Compute maximum values
-    while (++it != buckets.end()) {
-        if (it->first > maxDelta) {
-            maxDelta = it->first;
-        }
-        if (it->second > maxCount) {
-            maxCount = it->second;
-        }
-    }
-    int height = log2(maxCount) + 1; // maxCount > 0, safe to call log2
-    const int leftPadding = widthOf(1 << height);
-    const int colWidth = std::max(std::max(widthOf(maxDelta) + 1, 3), leftPadding + 2);
-    int scalingFactor = 1;
-    // scale data if it exceeds maximum height
-    if (height > maxHeight) {
-        scalingFactor = (height + maxHeight) / maxHeight;
-        height /= scalingFactor;
-    }
-    body->appendFormat("\n%*s", leftPadding + 11, "Occurrences");
-    // write histogram label line with bucket values
-    body->appendFormat("\n%s", " ");
-    body->appendFormat("%*s", leftPadding, " ");
-    for (auto const &x : buckets) {
-        body->appendFormat("%*d", colWidth, x.second);
-    }
-    // write histogram ascii art
-    body->appendFormat("\n%s", " ");
-    for (int row = height * scalingFactor; row >= 0; row -= scalingFactor) {
-        const int value = 1 << row;
-        body->appendFormat("%.*s", leftPadding, spaces.c_str());
-        for (auto const &x : buckets) {
-          body->appendFormat("%.*s%s", colWidth - 1, spaces.c_str(), x.second < value ? " " : "|");
-        }
-        body->appendFormat("\n%s", " ");
-    }
-    // print x-axis
-    const int columns = static_cast<int>(buckets.size());
-    body->appendFormat("%*c", leftPadding, ' ');
-    body->appendFormat("%.*s", (columns + 1) * colWidth, underscores.c_str());
-    body->appendFormat("\n%s", " ");
-
-    // write footer with bucket labels
-    body->appendFormat("%*s", leftPadding, " ");
-    for (auto const &x : buckets) {
-        body->appendFormat("%*d", colWidth, x.first);
-    }
-    body->appendFormat("%.*s%s", colWidth, spaces.c_str(), "ms\n");
-
-    // Now report glitches
-    body->appendFormat("\ntime elapsed between glitches and glitch timestamps\n");
-    for (const auto &outlier: mOutlierData) {
-        body->appendFormat("%lld: %lld\n", static_cast<long long>(outlier.first),
-                           static_cast<long long>(outlier.second));
-    }
-
-}
-
-
-// Produces a log warning if the timing of recent buffer periods caused a glitch
-// Computes sum of running window of three buffer periods
-// Checks whether the buffer periods leave enough CPU time for the next one
-// e.g. if a buffer period is expected to be 4 ms and a buffer requires 3 ms of CPU time,
-// here are some glitch cases:
-// 4 + 4 + 6 ; 5 + 4 + 5; 2 + 2 + 10
-// TODO: develop this code to track changes in histogram distribution in addition
-// to / instead of glitches.
-void PerformanceAnalysis::alertIfGlitch(const std::vector<int64_t> &samples) {
-    std::deque<int> periods(kNumBuff, kPeriodMs);
-    for (size_t i = 2; i < samples.size(); ++i) { // skip first time entry
-        periods.push_front(deltaMs(samples[i - 1], samples[i]));
-        periods.pop_back();
-        // TODO: check that all glitch cases are covered
-        if (std::accumulate(periods.begin(), periods.end(), 0) > kNumBuff * kPeriodMs +
-            kPeriodMs - kPeriodMsCPU) {
-                ALOGW("A glitch occurred");
-                periods.assign(kNumBuff, kPeriodMs);
-        }
-    }
-    return;
-}
-
-} // namespace ReportPerformance
-
-}   // namespace android
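
The deleted storeOutlierData() above documents its behaviour with an example: timestamps 1, 4, 5, 16, 18, 28 ms produce the pairs (4, 5) and (13, 18), where each pair is <time elapsed since the previous outlier, timestamp just before the gap>. The standalone program below reproduces that trace with an assumed 7 ms outlier threshold (the real kOutlierMs lives in the removed header and may differ):

    #include <cstdint>
    #include <cstdio>
    #include <utility>
    #include <vector>

    int main() {
        const std::vector<int64_t> tsMs = {1, 4, 5, 16, 18, 28};
        const int64_t kOutlierMs = 7;                        // assumed threshold

        std::vector<std::pair<int64_t, int64_t>> outliers;   // <elapsed, timestamp>
        int64_t elapsed = 0;
        int64_t prev = tsMs.front();
        for (int64_t ts : tsMs) {
            const int64_t diff = ts - prev;
            if (diff >= kOutlierMs) {
                outliers.emplace_back(elapsed, prev);
                elapsed = 0;
            }
            elapsed += diff;
            prev = ts;
        }
        for (const auto &o : outliers) {
            std::printf("%lld: %lld\n", (long long)o.first, (long long)o.second);
        }
        return 0;                                            // prints "4: 5" and "13: 18"
    }
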
diff --git a/media/libnbaio/ReportPerformance.cpp b/media/libnbaio/ReportPerformance.cpp
deleted file mode 100644
index dc50ada..0000000
--- a/media/libnbaio/ReportPerformance.cpp
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "ReportPerformance"
-
-#include <fstream>
-#include <iostream>
-#include <queue>
-#include <stdarg.h>
-#include <stdint.h>
-#include <stdio.h>
-#include <string.h>
-#include <sys/prctl.h>
-#include <utility>
-#include <media/nbaio/NBLog.h>
-#include <media/nbaio/PerformanceAnalysis.h>
-#include <media/nbaio/ReportPerformance.h>
-// #include <utils/CallStack.h> // used to print callstack
-#include <utils/Log.h>
-#include <utils/String8.h>
-
-namespace android {
-
-namespace ReportPerformance {
-
-// Writes outlier intervals, timestamps, and histograms spanning long time intervals to a file.
-// TODO: format the data efficiently and write different types of data to different files
-void writeToFile(std::deque<std::pair<outlierInterval, timestamp>> &outlierData,
-                                    std::deque<std::pair<timestamp, Histogram>> &hists,
-                                    const char * kName,
-                                    bool append) {
-    ALOGD("writing performance data to file");
-    if (outlierData.empty() || hists.empty()) {
-        return;
-    }
-
-    std::ofstream ofs;
-    ofs.open(kName, append ? std::ios::app : std::ios::trunc);
-    if (!ofs.is_open()) {
-        ALOGW("couldn't open file %s", kName);
-        return;
-    }
-    ofs << "Outlier data: interval and timestamp\n";
-    for (const auto &outlier : outlierData) {
-        ofs << outlier.first << ": " << outlier.second << "\n";
-    }
-    ofs << "Histogram data\n";
-    for (const auto &hist : hists) {
-        ofs << "\ttimestamp\n";
-        ofs << hist.first << "\n";
-        ofs << "\tbuckets and counts\n";
-        for (const auto &bucket : hist.second) {
-            ofs << bucket.first << ": " << bucket.second << "\n";
-        }
-    }
-    ofs.close();
-}
-
-} // namespace ReportPerformance
-
-}   // namespace android
diff --git a/media/libnbaio/include/media/nbaio/NBLog.h b/media/libnbaio/include/media/nbaio/NBLog.h
deleted file mode 100644
index 3e48ee1..0000000
--- a/media/libnbaio/include/media/nbaio/NBLog.h
+++ /dev/null
@@ -1,595 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Non-blocking event logger intended for safe communication between processes via shared memory
-
-#ifndef ANDROID_MEDIA_NBLOG_H
-#define ANDROID_MEDIA_NBLOG_H
-
-#include <binder/IMemory.h>
-#include <audio_utils/fifo.h>
-#include <utils/Mutex.h>
-#include <utils/threads.h>
-
-#include <map>
-#include <deque>
-#include <set>
-#include <vector>
-
-namespace android {
-
-class String8;
-
-class NBLog {
-
-public:
-
-typedef uint64_t log_hash_t;
-
-// FIXME Everything needed for client (writer API and registration) should be isolated
-//       from the rest of the implementation.
-class Writer;
-class Reader;
-
-enum Event : uint8_t {
-    EVENT_RESERVED,
-    EVENT_STRING,               // ASCII string, not NUL-terminated
-    // TODO: make timestamp optional
-    EVENT_TIMESTAMP,            // clock_gettime(CLOCK_MONOTONIC)
-    EVENT_INTEGER,              // integer value entry
-    EVENT_FLOAT,                // floating point value entry
-    EVENT_PID,                  // process ID and process name
-    EVENT_AUTHOR,               // author index (present in merged logs) tracks entry's original log
-    EVENT_START_FMT,            // logFormat start event: entry includes format string, following
-                                // entries contain format arguments
-    EVENT_HASH,                 // unique HASH of log origin, originates from hash of file name
-                                // and line number
-    EVENT_HISTOGRAM_ENTRY_TS,   // single datum for timestamp histogram
-    EVENT_AUDIO_STATE,          // audio on/off event: logged upon FastMixer::onStateChange() call
-    EVENT_END_FMT,              // end of logFormat argument list
-
-    EVENT_UPPER_BOUND,          // to check for invalid events
-};
-
-private:
-
-// ---------------------------------------------------------------------------
-// API for handling format entry operations
-
-// a formatted entry has the following structure:
-//    * START_FMT entry, containing the format string
-//    * TIMESTAMP entry
-//    * HASH entry
-//    * author entry of the thread that generated it (optional, present in merged log)
-//    * format arg1
-//    * format arg2
-//    * ...
-//    * END_FMT entry
-
-// entry representation in memory
-struct entry {
-    const uint8_t type;
-    const uint8_t length;
-    const uint8_t data[0];
-};
-
-// entry tail representation (after data)
-struct ending {
-    uint8_t length;
-    uint8_t next[0];
-};
-
-// entry iterator
-class EntryIterator {
-public:
-    EntryIterator();
-    explicit EntryIterator(const uint8_t *entry);
-    EntryIterator(const EntryIterator &other);
-
-    // dereference underlying entry
-    const entry&    operator*() const;
-    const entry*    operator->() const;
-    // advance to next entry
-    EntryIterator&       operator++(); // ++i
-    // back to previous entry
-    EntryIterator&       operator--(); // --i
-    EntryIterator        next() const;
-    EntryIterator        prev() const;
-    bool            operator!=(const EntryIterator &other) const;
-    int             operator-(const EntryIterator &other) const;
-
-    bool            hasConsistentLength() const;
-    void            copyTo(std::unique_ptr<audio_utils_fifo_writer> &dst) const;
-    void            copyData(uint8_t *dst) const;
-
-    template<typename T>
-    inline const T& payload() {
-        return *reinterpret_cast<const T *>(ptr + offsetof(entry, data));
-    }
-
-    inline operator const uint8_t*() const {
-        return ptr;
-    }
-
-private:
-    const uint8_t  *ptr;
-};
-
-class AbstractEntry {
-public:
-
-    // Entry starting in the given pointer
-    explicit AbstractEntry(const uint8_t *entry);
-    virtual ~AbstractEntry() {}
-
-    // build concrete entry of appropriate class from pointer
-    static std::unique_ptr<AbstractEntry> buildEntry(const uint8_t *ptr);
-
-    // get format entry timestamp
-    // TODO consider changing to uint64_t
-    virtual int64_t      timestamp() const = 0;
-
-    // get format entry's unique id
-    virtual log_hash_t   hash() const = 0;
-
-    // entry's author index (-1 if none present)
-    // a Merger has a vector of Readers, author simply points to the index of the
-    // Reader that originated the entry
-    // TODO consider changing to uint32_t
-    virtual int          author() const = 0;
-
-    // copy entry, adding author before timestamp, returns iterator to end of entry
-    virtual EntryIterator    copyWithAuthor(std::unique_ptr<audio_utils_fifo_writer> &dst,
-                                       int author) const = 0;
-
-protected:
-    // copies ordinary entry from src to dst, and returns length of entry
-    // size_t      copyEntry(audio_utils_fifo_writer *dst, const iterator &it);
-    const uint8_t  *mEntry;
-};
-
-class FormatEntry : public AbstractEntry {
-public:
-    // explicit FormatEntry(const EntryIterator &it);
-    explicit FormatEntry(const uint8_t *ptr) : AbstractEntry(ptr) {}
-    virtual ~FormatEntry() {}
-
-    EntryIterator begin() const;
-
-    // Entry's format string
-    const   char* formatString() const;
-
-    // Enrty's format string length
-            size_t      formatStringLength() const;
-
-    // Format arguments (excluding format string, timestamp and author)
-            EntryIterator    args() const;
-
-    // get format entry timestamp
-    virtual int64_t     timestamp() const override;
-
-    // get format entry's unique id
-    virtual log_hash_t  hash() const override;
-
-    // entry's author index (-1 if none present)
-    // a Merger has a vector of Readers, author simply points to the index of the
-    // Reader that originated the entry
-    virtual int         author() const override;
-
-    // copy entry, adding author before timestamp, returns size of original entry
-    virtual EntryIterator    copyWithAuthor(std::unique_ptr<audio_utils_fifo_writer> &dst,
-                                       int author) const override;
-
-};
-
-class HistogramEntry : public AbstractEntry {
-public:
-    explicit HistogramEntry(const uint8_t *ptr) : AbstractEntry(ptr) {
-    }
-    virtual ~HistogramEntry() {}
-
-    virtual int64_t     timestamp() const override;
-
-    virtual log_hash_t  hash() const override;
-
-    virtual int         author() const override;
-
-    virtual EntryIterator    copyWithAuthor(std::unique_ptr<audio_utils_fifo_writer> &dst,
-                                       int author) const override;
-
-};
-
-// ---------------------------------------------------------------------------
-
-// representation of a single log entry in private memory
-struct Entry {
-    Entry(Event event, const void *data, size_t length)
-        : mEvent(event), mLength(length), mData(data) { }
-    /*virtual*/ ~Entry() { }
-
-    // used during writing to format Entry information as follows: [type][length][data ... ][length]
-    int     copyEntryDataAt(size_t offset) const;
-
-private:
-    friend class Writer;
-    Event       mEvent;     // event type
-    uint8_t     mLength;    // length of additional data, 0 <= mLength <= kMaxLength
-    const void *mData;      // event type-specific data
-    static const size_t kMaxLength = 255;
-public:
-    // mEvent, mLength, mData[...], duplicate mLength
-    static const size_t kOverhead = sizeof(entry) + sizeof(ending);
-    // endind length of previous entry
-    static const size_t kPreviousLengthOffset = - sizeof(ending) +
-                                                offsetof(ending, length);
-};
-
-struct HistTsEntry {
-    log_hash_t hash;
-    int64_t ts;
-}; //TODO __attribute__((packed));
-
-struct HistTsEntryWithAuthor {
-    log_hash_t hash;
-    int64_t ts;
-    int author;
-}; //TODO __attribute__((packed));
-
-using StateTsEntryWithAuthor = HistTsEntryWithAuthor;
-
-struct HistIntEntry {
-    log_hash_t hash;
-    int value;
-}; //TODO __attribute__((packed));
-
-// representation of a single log entry in shared memory
-//  byte[0]             mEvent
-//  byte[1]             mLength
-//  byte[2]             mData[0]
-//  ...
-//  byte[2+i]           mData[i]
-//  ...
-//  byte[2+mLength-1]   mData[mLength-1]
-//  byte[2+mLength]     duplicate copy of mLength to permit reverse scan
-//  byte[3+mLength]     start of next log entry
-
-    static void    appendInt(String8 *body, const void *data);
-    static void    appendFloat(String8 *body, const void *data);
-    static void    appendPID(String8 *body, const void *data, size_t length);
-    static void    appendTimestamp(String8 *body, const void *data);
-    static size_t  fmtEntryLength(const uint8_t *data);
-    static String8 bufferDump(const uint8_t *buffer, size_t size);
-    static String8 bufferDump(const EntryIterator &it);
-public:
-
-// Located in shared memory, must be POD.
-// Exactly one process must explicitly call the constructor or use placement new.
-// Since this is a POD, the destructor is empty and unnecessary to call it explicitly.
-struct Shared {
-    Shared() /* mRear initialized via default constructor */ { }
-    /*virtual*/ ~Shared() { }
-
-    audio_utils_fifo_index  mRear;  // index one byte past the end of most recent Entry
-    char    mBuffer[0];             // circular buffer for entries
-};
-
-public:
-
-// ---------------------------------------------------------------------------
-
-// FIXME Timeline was intended to wrap Writer and Reader, but isn't actually used yet.
-// For now it is just a namespace for sharedSize().
-class Timeline : public RefBase {
-public:
-#if 0
-    Timeline(size_t size, void *shared = NULL);
-    virtual ~Timeline();
-#endif
-
-    // Input parameter 'size' is the desired size of the timeline in byte units.
-    // Returns the size rounded up to a power-of-2, plus the constant size overhead for indices.
-    static size_t sharedSize(size_t size);
-
-#if 0
-private:
-    friend class    Writer;
-    friend class    Reader;
-
-    const size_t    mSize;      // circular buffer size in bytes, must be a power of 2
-    bool            mOwn;       // whether I own the memory at mShared
-    Shared* const   mShared;    // pointer to shared memory
-#endif
-};
-
-// ---------------------------------------------------------------------------
-
-// Writer is thread-safe with respect to Reader, but not with respect to multiple threads
-// calling Writer methods.  If you need multi-thread safety for writing, use LockedWriter.
-class Writer : public RefBase {
-public:
-    Writer();                   // dummy nop implementation without shared memory
-
-    // Input parameter 'size' is the desired size of the timeline in byte units.
-    // The size of the shared memory must be at least Timeline::sharedSize(size).
-    Writer(void *shared, size_t size);
-    Writer(const sp<IMemory>& iMemory, size_t size);
-
-    virtual ~Writer();
-
-    // FIXME needs comments, and some should be private
-    virtual void    log(const char *string);
-    virtual void    logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3)));
-    virtual void    logvf(const char *fmt, va_list ap);
-    virtual void    logTimestamp();
-    virtual void    logTimestamp(const int64_t ts);
-    virtual void    logInteger(const int x);
-    virtual void    logFloat(const float x);
-    virtual void    logPID();
-    virtual void    logFormat(const char *fmt, log_hash_t hash, ...);
-    virtual void    logVFormat(const char *fmt, log_hash_t hash, va_list ap);
-    virtual void    logStart(const char *fmt);
-    virtual void    logEnd();
-    virtual void    logHash(log_hash_t hash);
-    virtual void    logEventHistTs(Event event, log_hash_t hash);
-
-    virtual bool    isEnabled() const;
-
-    // return value for all of these is the previous isEnabled()
-    virtual bool    setEnabled(bool enabled);   // but won't enable if no shared memory
-            bool    enable()    { return setEnabled(true); }
-            bool    disable()   { return setEnabled(false); }
-
-    sp<IMemory>     getIMemory() const  { return mIMemory; }
-
-private:
-    // 0 <= length <= kMaxLength
-    // writes a single Entry to the FIFO
-    void    log(Event event, const void *data, size_t length);
-    // checks validity of an event before calling log above this one
-    void    log(const Entry *entry, bool trusted = false);
-
-    Shared* const   mShared;    // raw pointer to shared memory
-    sp<IMemory>     mIMemory;   // ref-counted version, initialized in constructor and then const
-    audio_utils_fifo * const mFifo;                 // FIFO itself,
-                                                    // non-NULL unless constructor fails
-    audio_utils_fifo_writer * const mFifoWriter;    // used to write to FIFO,
-                                                    // non-NULL unless dummy constructor used
-    bool            mEnabled;   // whether to actually log
-
-    // cached pid and process name to use in %p format specifier
-    // total tag length is mPidTagSize and process name is not zero terminated
-    char   *mPidTag;
-    size_t  mPidTagSize;
-};
-
-// ---------------------------------------------------------------------------
-
-// Similar to Writer, but safe for multiple threads to call concurrently
-class LockedWriter : public Writer {
-public:
-    LockedWriter();
-    LockedWriter(void *shared, size_t size);
-
-    virtual void    log(const char *string);
-    virtual void    logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3)));
-    virtual void    logvf(const char *fmt, va_list ap);
-    virtual void    logTimestamp();
-    virtual void    logTimestamp(const int64_t ts);
-    virtual void    logInteger(const int x);
-    virtual void    logFloat(const float x);
-    virtual void    logPID();
-    virtual void    logStart(const char *fmt);
-    virtual void    logEnd();
-    virtual void    logHash(log_hash_t hash);
-
-    virtual bool    isEnabled() const;
-    virtual bool    setEnabled(bool enabled);
-
-private:
-    mutable Mutex   mLock;
-};
-
-// ---------------------------------------------------------------------------
-
-class Reader : public RefBase {
-public:
-
-    // A snapshot of a readers buffer
-    // This is raw data. No analysis has been done on it
-    class Snapshot {
-    public:
-        Snapshot() : mData(NULL), mLost(0) {}
-
-        Snapshot(size_t bufferSize) : mData(new uint8_t[bufferSize]) {}
-
-        ~Snapshot() { delete[] mData; }
-
-        // copy of the buffer
-        uint8_t *data() const { return mData; }
-
-        // amount of data lost (given by audio_utils_fifo_reader)
-        size_t   lost() const { return mLost; }
-
-        // iterator to beginning of readable segment of snapshot
-        // data between begin and end has valid entries
-        EntryIterator begin() { return mBegin; }
-
-        // iterator to end of readable segment of snapshot
-        EntryIterator end() { return mEnd; }
-
-    private:
-        friend class Reader;
-        uint8_t              *mData;
-        size_t                mLost;
-        EntryIterator mBegin;
-        EntryIterator mEnd;
-    };
-
-    // Input parameter 'size' is the desired size of the timeline in byte units.
-    // The size of the shared memory must be at least Timeline::sharedSize(size).
-    Reader(const void *shared, size_t size);
-    Reader(const sp<IMemory>& iMemory, size_t size);
-
-    virtual ~Reader();
-
-    // get snapshot of readers fifo buffer, effectively consuming the buffer
-    std::unique_ptr<Snapshot> getSnapshot();
-    // dump a particular snapshot of the reader
-    // TODO: move dump to PerformanceAnalysis. Model/view/controller design
-    void     dump(int fd, size_t indent, Snapshot & snap);
-    // dump the current content of the reader's buffer (call getSnapshot() and previous dump())
-    void     dump(int fd, size_t indent = 0);
-    bool     isIMemory(const sp<IMemory>& iMemory) const;
-
-private:
-
-    static const std::set<Event> startingTypes;
-    static const std::set<Event> endingTypes;
-    /*const*/ Shared* const mShared;    // raw pointer to shared memory, actually const but not
-                                        // declared as const because audio_utils_fifo() constructor
-    sp<IMemory> mIMemory;       // ref-counted version, assigned only in constructor
-    int     mFd;                // file descriptor
-    int     mIndent;            // indentation level
-    audio_utils_fifo * const mFifo;                 // FIFO itself,
-                                                    // non-NULL unless constructor fails
-    audio_utils_fifo_reader * const mFifoReader;    // used to read from FIFO,
-                                                    // non-NULL unless constructor fails
-
-    // TODO: it might be clearer, instead of a direct map from source location to vector of
-    // timestamps, if we instead first mapped from source location to an object that
-    // represented that location. And one_of its fields would be a vector of timestamps.
-    // That would allow us to record other information about the source location beyond timestamps.
-    void    dumpLine(const String8& timestamp, String8& body);
-
-    EntryIterator   handleFormat(const FormatEntry &fmtEntry,
-                                         String8 *timestamp,
-                                         String8 *body);
-    // dummy method for handling absent author entry
-    virtual void handleAuthor(const AbstractEntry& /*fmtEntry*/, String8* /*body*/) {}
-
-    // Searches for the last entry of type <type> in the range [front, back)
-    // back has to be entry-aligned. Returns nullptr if none enconuntered.
-    static const uint8_t *findLastEntryOfTypes(const uint8_t *front, const uint8_t *back,
-                                         const std::set<Event> &types);
-
-    static const size_t kSquashTimestamp = 5; // squash this many or more adjacent timestamps
-};
-
-// Wrapper for a reader with a name. Contains a pointer to the reader and a pointer to the name
-class NamedReader {
-public:
-    NamedReader() { mName[0] = '\0'; } // for Vector
-    NamedReader(const sp<NBLog::Reader>& reader, const char *name) :
-        mReader(reader)
-        { strlcpy(mName, name, sizeof(mName)); }
-    ~NamedReader() { }
-    const sp<NBLog::Reader>&  reader() const { return mReader; }
-    const char*               name() const { return mName; }
-
-private:
-    sp<NBLog::Reader>   mReader;
-    static const size_t kMaxName = 32;
-    char                mName[kMaxName];
-};
-
-// ---------------------------------------------------------------------------
-
-class Merger : public RefBase {
-public:
-    Merger(const void *shared, size_t size);
-
-    virtual ~Merger() {}
-
-    void addReader(const NamedReader &reader);
-    // TODO add removeReader
-    void merge();
-    // FIXME This is returning a reference to a shared variable that needs a lock
-    const std::vector<NamedReader>& getNamedReaders() const;
-private:
-    // vector of the readers the merger is supposed to merge from.
-    // every reader reads from a writer's buffer
-    // FIXME Needs to be protected by a lock
-    std::vector<NamedReader> mNamedReaders;
-
-    // TODO Need comments on all of these
-    Shared * const mShared;
-    std::unique_ptr<audio_utils_fifo> mFifo;
-    std::unique_ptr<audio_utils_fifo_writer> mFifoWriter;
-};
-
-class MergeReader : public Reader {
-public:
-    MergeReader(const void *shared, size_t size, Merger &merger);
-private:
-    // FIXME Needs to be protected by a lock,
-    //       because even though our use of it is read-only there may be asynchronous updates
-    const std::vector<NamedReader>& mNamedReaders;
-    // handle author entry by looking up the author's name and appending it to the body
-    // returns number of bytes read from fmtEntry
-    void handleAuthor(const AbstractEntry &fmtEntry, String8 *body);
-};
-
-// MergeThread is a thread that contains a Merger. It works as a retriggerable one-shot:
-// when triggered, it awakes for a lapse of time, during which it periodically merges; if
-// retriggered, the timeout is reset.
-// The thread is triggered on AudioFlinger binder activity.
-class MergeThread : public Thread {
-public:
-    MergeThread(Merger &merger);
-    virtual ~MergeThread() override;
-
-    // Reset timeout and activate thread to merge periodically if it's idle
-    void wakeup();
-
-    // Set timeout period until the merging thread goes idle again
-    void setTimeoutUs(int time);
-
-private:
-    virtual bool threadLoop() override;
-
-    // the merger who actually does the work of merging the logs
-    Merger&     mMerger;
-
-    // mutex for the condition variable
-    Mutex       mMutex;
-
-    // condition variable to activate merging on timeout >= 0
-    Condition   mCond;
-
-    // time left until the thread blocks again (in microseconds)
-    int         mTimeoutUs;
-
-    // merging period when the thread is awake
-    static const int  kThreadSleepPeriodUs = 1000000 /*1s*/;
-
-    // initial timeout value when triggered
-    static const int  kThreadWakeupPeriodUs = 3000000 /*3s*/;
-};
-
-};  // class NBLog
-
-// TODO put somewhere else
-static inline int64_t get_monotonic_ns() {
-    timespec ts;
-    if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
-        return (uint64_t) ts.tv_sec * 1000 * 1000 * 1000 + ts.tv_nsec;
-    }
-    return 0; // should not happen.
-}
-
-}   // namespace android
-
-#endif  // ANDROID_MEDIA_NBLOG_H
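
The entry layout documented in the header above ([type][length][data...][length], with the length duplicated after the data) is what allows the EntryIterator to walk the buffer in both directions. A standalone sketch of that traversal follows, with made-up helper names and toy data; it is an illustration only, not the NBLog iterator itself.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Step over one entry going forward: type byte + length byte + data + trailing length.
    static size_t forwardStep(const uint8_t *p)  { return 2 + p[1] + 1; }
    // Step back over the previous entry by reading its trailing length copy.
    static size_t backwardStep(const uint8_t *p) { return 2 + p[-1] + 1; }

    int main() {
        // Two toy entries: type 1 with two data bytes, type 2 with one data byte.
        const std::vector<uint8_t> buf = {1, 2, 0xAA, 0xBB, 2,   2, 1, 0xCC, 1};
        for (const uint8_t *p = buf.data(); p < buf.data() + buf.size(); p += forwardStep(p)) {
            printf("forward: type=%d length=%d\n", p[0], p[1]);
        }
        for (const uint8_t *p = buf.data() + buf.size(); p > buf.data(); ) {
            p -= backwardStep(p);
            printf("reverse: type=%d length=%d\n", p[0], p[1]);
        }
        return 0;
    }
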
diff --git a/media/libnbaio/include/media/nbaio/PerformanceAnalysis.h b/media/libnbaio/include/media/nbaio/PerformanceAnalysis.h
deleted file mode 100644
index b0dc148..0000000
--- a/media/libnbaio/include/media/nbaio/PerformanceAnalysis.h
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Non-blocking event logger intended for safe communication between processes via shared memory
-
-#ifndef ANDROID_MEDIA_PERFORMANCEANALYSIS_H
-#define ANDROID_MEDIA_PERFORMANCEANALYSIS_H
-
-#include <map>
-#include <deque>
-#include <vector>
-#include "NBLog.h"
-#include "ReportPerformance.h"
-
-namespace android {
-
-namespace ReportPerformance {
-
-class PerformanceAnalysis {
-    // This class stores and analyzes audio processing wakeup timestamps from NBLog
-    // FIXME: currently, all performance data is stored in deques. Need to add a mutex.
-    // FIXME: continue this way until analysis is done in a separate thread. Then, use
-    // the fifo writer utilities.
-public:
-
-    PerformanceAnalysis();
-
-    // Given a series of audio processing wakeup timestamps,
-    // compresses and and analyzes the data, and flushes
-    // the timestamp series from memory.
-    void processAndFlushTimeStampSeries();
-
-    // Called when an audio on/off event is read from the buffer,
-    // e.g. EVENT_AUDIO_STATE.
-    // calls flushTimeStampSeries on the data up to the event,
-    // effectively discarding the idle audio time interval
-    void handleStateChange();
-
-    // When the short-term histogram array mRecentHists has reached capacity,
-    // merges histograms for data compression and stores them in mLongTermHists
-    void processAndFlushRecentHists();
-
-    // Writes wakeup timestamp entry to log and runs analysis
-    // TODO: make this thread safe. Each thread should have its own instance
-    // of PerformanceAnalysis.
-    void logTsEntry(timestamp_raw ts);
-
-    // FIXME: make peakdetector and storeOutlierData a single function
-    // Input: mOutlierData. Looks at time elapsed between outliers
-    // finds significant changes in the distribution
-    // writes timestamps of significant changes to mPeakTimestamps
-    void detectPeaks();
-
-    // runs analysis on timestamp series before it is converted to a histogram
-    // finds outliers
-    // writes to mOutlierData <time elapsed since previous outlier, outlier timestamp>
-    void storeOutlierData(const std::vector<timestamp_raw> &timestamps);
-
-    // input: series of short histograms. Generates a string of analysis of the buffer periods
-    // TODO: WIP write more detailed analysis
-    // FIXME: move this data visualization to a separate class. Model/view/controller
-    void reportPerformance(String8 *body, int maxHeight = 10);
-
-    // TODO: delete this. temp for testing
-    void testFunction();
-
-    // This function used to detect glitches in a time series
-    // TODO incorporate this into the analysis (currently unused)
-    void alertIfGlitch(const std::vector<timestamp_raw> &samples);
-
-private:
-
-    // stores outlier analysis: <elapsed time between outliers in ms, outlier timestamp>
-    std::deque<std::pair<outlierInterval, timestamp>> mOutlierData;
-
-    // stores each timestamp at which a peak was detected
-    // a peak is a moment at which the average outlier interval changed significantly
-    std::deque<timestamp> mPeakTimestamps;
-
-    // TODO: turn these into circular buffers for better data flow
-    // FIFO of small histograms
-    // stores fixed-size short buffer period histograms with timestamp of first sample
-    std::deque<std::pair<timestamp, Histogram>> mRecentHists;
-
-    // FIFO of small histograms
-    // stores fixed-size long-term buffer period histograms with timestamp of first sample
-    std::deque<std::pair<timestamp, Histogram>> mLongTermHists;
-
-    // vector of timestamps, collected from NBLog for a (TODO) specific thread
-    // when a vector reaches its maximum size, the data is processed and flushed
-    std::vector<timestamp_raw> mTimeStampSeries;
-
-    static const int kMsPerSec = 1000;
-
-    // Parameters used when detecting outliers
-    // TODO: learn some of these from the data, delete unused ones
-    // FIXME: decide whether to make kPeriodMs static.
-    static const int kNumBuff = 3; // number of buffers considered in local history
-    int kPeriodMs; // current period length is ideally 4 ms
-    static const int kOutlierMs = 7; // values greater or equal to this cause glitches
-    // DAC processing time for 4 ms buffer
-    static constexpr double kRatio = 0.75; // estimate of CPU time as ratio of period length
-    int kPeriodMsCPU; // compute based on kPeriodLen and kRatio
-
-    // Peak detection: number of standard deviations from mean considered a significant change
-    static const int kStddevThreshold = 5;
-
-    // capacity allocated to data structures
-    // TODO: adjust all of these values
-    static const int kRecentHistsCapacity = 100; // number of short-term histograms stored in memory
-    static const int kShortHistSize = 50; // number of samples in a short-term histogram
-    static const int kOutlierSeriesSize = 100; // number of values stored in outlier array
-    static const int kPeakSeriesSize = 100; // number of values stored in peak array
-    static const int kLongTermHistsCapacity = 20; // number of long-term histogram stored in memory
-    // maximum elapsed time between first and last timestamp of a long-term histogram
-    static const int kMaxHistTimespanMs = 5 * kMsPerSec;
-
-    // these variables are stored in-class to ensure continuity while analyzing the timestamp
-    // series one short sequence at a time: the variables are not re-initialized every time.
-    // FIXME: create inner class for these variables and decide which other ones to add to it
-    double mPeakDetectorMean = -1;
-    double mPeakDetectorSd = -1;
-    // variables for storeOutlierData
-    uint64_t mElapsed = 0;
-    int64_t mPrevNs = -1;
-
-};
-
-} // namespace ReportPerformance
-
-}   // namespace android
-
-#endif  // ANDROID_MEDIA_PERFORMANCEANALYSIS_H
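
The outlier bookkeeping described in the removed header, pairs of <elapsed time since the previous outlier, timestamp at which the outlier starts>, can be illustrated with the fixed kOutlierMs = 7 threshold it declared. The sketch below is standalone and uses the same 1, 4, 5, 16, 18, 28 timestamp sequence that appears as an example in the new PerformanceAnalysis.cpp later in this change.

    #include <cstdint>
    #include <cstdio>
    #include <deque>
    #include <utility>

    int main() {
        const int kOutlierMs = 7;                      // intervals >= this count as outliers
        const int64_t tsMs[] = {1, 4, 5, 16, 18, 28};  // wakeup timestamps, in ms
        // <elapsed ms since the previous outlier, timestamp at which the outlier starts>
        std::deque<std::pair<uint64_t, uint64_t>> outlierData;
        uint64_t elapsed = 0;
        for (size_t i = 1; i < sizeof(tsMs) / sizeof(tsMs[0]); ++i) {
            const int64_t diff = tsMs[i] - tsMs[i - 1];
            if (diff >= kOutlierMs) {
                outlierData.emplace_front(elapsed, tsMs[i - 1]);
                elapsed = 0;
            }
            elapsed += diff;
        }
        for (const auto &o : outlierData) {
            printf("(%llu, %llu)\n", (unsigned long long) o.first, (unsigned long long) o.second);
        }
        return 0;  // prints (13, 18) then (4, 5), newest first
    }
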
diff --git a/media/libnbaio/include/media/nbaio/ReportPerformance.h b/media/libnbaio/include/media/nbaio/ReportPerformance.h
deleted file mode 100644
index 27d2810..0000000
--- a/media/libnbaio/include/media/nbaio/ReportPerformance.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_MEDIA_REPORTPERFORMANCE_H
-#define ANDROID_MEDIA_REPORTPERFORMANCE_H
-
-#include <deque>
-#include <map>
-#include <vector>
-
-namespace android {
-
-// This class is used by reportPerformance function
-// TODO move reportPerformance function to ReportPerformance.cpp
-class String8;
-
-namespace ReportPerformance {
-
-// stores a histogram: key: observed buffer period. value: count
-// TODO: unsigned, unsigned
-using Histogram = std::map<int, int>;
-
-using outlierInterval = uint64_t;
-// int64_t timestamps are converted to uint64_t in PerformanceAnalysis::storeOutlierData,
-// and all analysis functions use uint64_t.
-using timestamp = uint64_t;
-using timestamp_raw = int64_t;
-
-// FIXME: decide whether to use 64 or 32 bits
-// TODO: the code has a mix of typedef and using. Standardize to one or the other.
-typedef uint64_t log_hash_t;
-
-static inline int deltaMs(int64_t ns1, int64_t ns2) {
-    return (ns2 - ns1) / (1000 * 1000);
-}
-
-static inline uint32_t log2(uint32_t x) {
-    // This works for x > 0
-    return 31 - __builtin_clz(x);
-}
-
-// Writes outlier intervals, timestamps, and histograms spanning long time
-// intervals to a file.
-void writeToFile(std::deque<std::pair<outlierInterval, timestamp>> &outlierData,
-                 std::deque<std::pair<timestamp, Histogram>> &hists,
-                 const char * kName,
-                 bool append);
-
-} // namespace ReportPerformance
-
-}   // namespace android
-
-#endif  // ANDROID_MEDIA_REPORTPERFORMANCE_H
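
For clarity, the two helpers removed above behave as follows: deltaMs truncates a nanosecond interval to whole milliseconds, and log2 returns floor(log2(x)) via __builtin_clz and is only valid for x > 0. A minimal usage sketch (the demo namespace exists only to keep this self-contained):

    #include <cstdint>
    #include <cstdio>

    namespace demo {
    static inline int deltaMs(int64_t ns1, int64_t ns2) { return (ns2 - ns1) / (1000 * 1000); }
    static inline uint32_t log2(uint32_t x) { return 31 - __builtin_clz(x); }  // x must be > 0
    }

    int main() {
        printf("%d\n", demo::deltaMs(1000000000LL, 1004500000LL));  // 4 (4.5 ms truncated)
        printf("%u\n", demo::log2(1u));                             // 0
        printf("%u\n", demo::log2(1023u));                          // 9
        printf("%u\n", demo::log2(1024u));                          // 10
        return 0;
    }
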
diff --git a/media/libnblog/Android.bp b/media/libnblog/Android.bp
new file mode 100644
index 0000000..74aaf77
--- /dev/null
+++ b/media/libnblog/Android.bp
@@ -0,0 +1,28 @@
+cc_library_shared {
+
+    name: "libnblog",
+
+    srcs: [
+        "NBLog.cpp",
+        "PerformanceAnalysis.cpp",
+        "ReportPerformance.cpp",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    include_dirs: ["system/media/audio_utils/include"],
+
+    export_include_dirs: ["include"],
+
+}
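
A hypothetical client would add "libnblog" to its shared_libs and pick up the headers from the exported include directory as <media/nblog/...>. The sketch below assumes the writer API is carried over unchanged from the old libnbaio header (Timeline::sharedSize, the Writer(void*, size_t) constructor, Shared placement-constructed by exactly one process); the buffer size is arbitrary.

    #include <cstdlib>
    #include <new>
    #include <media/nblog/NBLog.h>

    int main() {
        const size_t kLogSize = 4096;  // requested timeline size in bytes (arbitrary)
        const size_t kSharedSize = android::NBLog::Timeline::sharedSize(kLogSize);
        void *shared = calloc(1, kSharedSize);  // stand-in for a real IMemory region
        if (shared == nullptr) return 1;
        new (shared) android::NBLog::Shared();  // exactly one process constructs Shared
        android::NBLog::Writer writer(shared, kLogSize);
        writer.setEnabled(true);
        writer.logf("hello from %s", "libnblog");
        writer.logTimestamp();
        free(shared);
        return 0;
    }
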
diff --git a/media/libnbaio/NBLog.cpp b/media/libnblog/NBLog.cpp
similarity index 86%
rename from media/libnbaio/NBLog.cpp
rename to media/libnblog/NBLog.cpp
index 827cba9..d6fa3e3 100644
--- a/media/libnbaio/NBLog.cpp
+++ b/media/libnblog/NBLog.cpp
@@ -12,88 +12,16 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
+ *
+ *
  */
 
-/*
-* Documentation: Workflow summary for histogram data processing:
-* For more details on FIFO, please see system/media/audio_utils; doxygen
-* TODO: add this documentation to doxygen once it is further developed
-* 1) Writing buffer period timestamp to the circular buffer
-* onWork()
-*     Called every period length (e.g., 4ms)
-*     Calls LOG_HIST_TS
-* LOG_HIST_TS
-*     Hashes file name and line number, and writes single timestamp to buffer
-*     calls NBLOG::Writer::logEventHistTS once
-* NBLOG::Writer::logEventHistTS
-*     calls NBLOG::Writer::log on hash and current timestamp
-*     time is in CLOCK_MONOTONIC converted to ns
-* NBLOG::Writer::log(Event, const void*, size_t)
-*     Initializes Entry, a struct containing one log entry
-*     Entry contains the event type (mEvent), data length (mLength),
-*     and data pointer (mData)
-*     TODO: why mLength (max length of buffer data)  must be <= kMaxLength = 255?
-*     calls NBLOG::Writer::log(Entry *, bool)
-* NBLog::Writer::log(Entry *, bool)
-*     Calls copyEntryDataAt to format data as follows in temp array:
-*     [type][length][data ... ][length]
-*     calls audio_utils_fifo_writer.write on temp
-* audio_utils_fifo_writer.write
-*     calls obtain(), memcpy (reference in doxygen)
-*     returns number of frames written
-* ssize_t audio_utils_fifo_reader::obtain
-*     Determines readable buffer section via pointer arithmetic on reader
-*     and writer pointers
-* Similarly, LOG_AUDIO_STATE() is called by onStateChange whenever audio is
-* turned on or off, and writes this notification to the FIFO.
-*
-* 2) reading the data from shared memory
-* Thread::threadloop()
-*     TODO: add description?
-* NBLog::MergeThread::threadLoop()
-*     calls NBLog::Merger::merge
-* NBLog::Merger::merge
-*     Merges snapshots sorted by timestamp
-*     for each reader in vector of class NamedReader,
-*     callsNamedReader::reader()->getSnapshot
-*     TODO: check whether the rest of this function is relevant
-* NBLog::Reader::getSnapshot
-*     copies snapshot of reader's fifo buffer into its own buffer
-*     calls mFifoReader->obtain to find readable data
-*     sets snapshot.begin() and .end() iterators to boundaries of valid entries
-*     moves the fifo reader index to after the last entry read
-*     in this case, the buffer is in shared memory. in (4), the buffer is private
-*
-* 3) reading the data from private buffer
-* MediaLogService::dump
-*     calls NBLog::Reader::dump(CONSOLE)
-*     The private buffer contains all logs for all readers in shared memory
-* NBLog::Reader::dump(int)
-*     calls getSnapshot on the current reader
-*     calls dump(int, size_t, Snapshot)
-* NBLog::Reader::dump(int, size, snapshot)
-*     iterates through snapshot's events and switches based on their type
-*     (string, timestamp, etc...)
-*     In the case of EVENT_HISTOGRAM_ENTRY_TS, adds a list of timestamp sequences
-*     (histogram entry) to NBLog::mHists
-*     TODO: add every HISTOGRAM_ENTRY_TS to two
-*     circular buffers: one short-term and one long-term (can add even longer-term
-*     structures in the future). When dump is called, print everything currently
-*     in the buffer.
-* NBLog::drawHistogram
-*     input: timestamp array
-*     buckets this to a histogram and prints
-*
-*/
-
 #define LOG_TAG "NBLog"
-// #define LOG_NDEBUG 0
 
 #include <algorithm>
 #include <climits>
 #include <deque>
 #include <fstream>
-// #include <inttypes.h>
 #include <iostream>
 #include <math.h>
 #include <numeric>
@@ -106,10 +34,10 @@
 #include <time.h>
 #include <new>
 #include <audio_utils/roundup.h>
-#include <media/nbaio/NBLog.h>
-#include <media/nbaio/PerformanceAnalysis.h>
-#include <media/nbaio/ReportPerformance.h>
-// #include <utils/CallStack.h> // used to print callstack
+#include <media/nblog/NBLog.h>
+#include <media/nblog/PerformanceAnalysis.h>
+#include <media/nblog/ReportPerformance.h>
+#include <utils/CallStack.h>
 #include <utils/Log.h>
 #include <utils/String8.h>
 
@@ -761,14 +689,15 @@
 // ---------------------------------------------------------------------------
 
 const std::set<NBLog::Event> NBLog::Reader::startingTypes {NBLog::Event::EVENT_START_FMT,
-                                                           NBLog::Event::EVENT_HISTOGRAM_ENTRY_TS};
+        NBLog::Event::EVENT_HISTOGRAM_ENTRY_TS,
+        NBLog::Event::EVENT_AUDIO_STATE};
 const std::set<NBLog::Event> NBLog::Reader::endingTypes   {NBLog::Event::EVENT_END_FMT,
-                                                           NBLog::Event::EVENT_HISTOGRAM_ENTRY_TS,
-                                                           NBLog::Event::EVENT_AUDIO_STATE};
+        NBLog::Event::EVENT_HISTOGRAM_ENTRY_TS,
+        NBLog::Event::EVENT_AUDIO_STATE};
 
 NBLog::Reader::Reader(const void *shared, size_t size)
-    : mShared((/*const*/ Shared *) shared), /*mIMemory*/
-      mFd(-1), mIndent(0),
+    : mFd(-1), mIndent(0), mLost(0),
+      mShared((/*const*/ Shared *) shared), /*mIMemory*/
       mFifo(mShared != NULL ?
         new audio_utils_fifo(size, sizeof(uint8_t),
             mShared->mBuffer, mShared->mRear, NULL /*throttlesFront*/) : NULL),
@@ -806,6 +735,9 @@
     return nullptr; // no entry found
 }
 
+// Copies content of a Reader FIFO into its Snapshot
+// The Snapshot has the same raw data, but represented as a sequence of entries
+// and an EntryIterator making it possible to process the data.
 std::unique_ptr<NBLog::Reader::Snapshot> NBLog::Reader::getSnapshot()
 {
     if (mFifoReader == NULL) {
@@ -871,24 +803,11 @@
 
 }
 
-// TODO: move this to PerformanceAnalysis
-// TODO: make call to dump periodic so that data in shared FIFO does not get overwritten
-void NBLog::Reader::dump(int fd, size_t indent, NBLog::Reader::Snapshot &snapshot)
+// Takes raw content of the local merger FIFO, processes log entries, and
+// writes the data to a map of PerformanceAnalysis objects, keyed by thread ID.
+void NBLog::MergeReader::getAndProcessSnapshot(NBLog::Reader::Snapshot &snapshot)
 {
-    mFd = fd;
-    mIndent = indent;
     String8 timestamp, body;
-    // FIXME: this is not thread safe
-    // TODO: need a separate instance of performanceAnalysis for each thread
-    // used to store data and to call analysis functions
-    static ReportPerformance::PerformanceAnalysis performanceAnalysis;
-    size_t lost = snapshot.lost() + (snapshot.begin() - EntryIterator(snapshot.data()));
-    if (lost > 0) {
-        body.appendFormat("warning: lost %zu bytes worth of events", lost);
-        // TODO timestamp empty here, only other choice to wait for the first timestamp event in the
-        //      log to push it out.  Consider keeping the timestamp/body between calls to copyEntryDataAt().
-        dumpLine(timestamp, body);
-    }
 
     for (auto entry = snapshot.begin(); entry != snapshot.end();) {
         switch (entry->type) {
@@ -903,12 +822,22 @@
             memcpy(&hash, &(data->hash), sizeof(hash));
             int64_t ts;
             memcpy(&ts, &data->ts, sizeof(ts));
-            performanceAnalysis.logTsEntry(ts);
+            // TODO: hash for histogram ts and audio state need to match
+            // and correspond to audio production source file location
+            mThreadPerformanceAnalysis[data->author][0 /*hash*/].logTsEntry(ts);
             ++entry;
             break;
         }
         case EVENT_AUDIO_STATE: {
-            performanceAnalysis.handleStateChange();
+            HistTsEntryWithAuthor *data = (HistTsEntryWithAuthor *) (entry->data);
+            // TODO: These memcpy calls are here to avoid an unaligned memory access crash.
+            // There's probably a more efficient way to do it
+            log_hash_t hash;
+            memcpy(&hash, &(data->hash), sizeof(hash));
+            // TODO: remove ts if unused
+            int64_t ts;
+            memcpy(&ts, &data->ts, sizeof(ts));
+            mThreadPerformanceAnalysis[data->author][0 /*hash*/].handleStateChange();
             ++entry;
             break;
         }
@@ -923,19 +852,25 @@
             break;
         }
     }
-    performanceAnalysis.reportPerformance(&body);
+    // FIXME: decide whether to print the warnings here or elsewhere
     if (!body.isEmpty()) {
         dumpLine(timestamp, body);
     }
 }
 
-void NBLog::Reader::dump(int fd, size_t indent)
+void NBLog::MergeReader::getAndProcessSnapshot()
 {
-    // get a snapshot, dump it
+    // get a snapshot, process it
     std::unique_ptr<Snapshot> snap = getSnapshot();
-    dump(fd, indent, *snap);
+    getAndProcessSnapshot(*snap);
 }
 
+void NBLog::MergeReader::dump(int fd, int indent) {
+    // TODO: add a mutex around media.log dump
+    ReportPerformance::dump(fd, indent, mThreadPerformanceAnalysis);
+}
+
+// Writes a string to the console
 void NBLog::Reader::dumpLine(const String8 &timestamp, String8 &body)
 {
     if (mFd >= 0) {
@@ -1094,6 +1029,7 @@
       {}
 
 void NBLog::Merger::addReader(const NBLog::NamedReader &reader) {
+
     // FIXME This is called by binder thread in MediaLogService::registerWriter
     //       but the access to shared variable mNamedReaders is not yet protected by a lock.
     mNamedReaders.push_back(reader);
@@ -1117,7 +1053,7 @@
     return i1.ts > i2.ts || (i1.ts == i2.ts && i1.index > i2.index);
 }
 
-// Merge registered readers, sorted by timestamp
+// Merge registered readers, sorted by timestamp, and write data to a single FIFO in local memory
 void NBLog::Merger::merge() {
     // FIXME This is called by merge thread
     //       but the access to shared variable mNamedReaders is not yet protected by a lock.
@@ -1174,8 +1110,9 @@
 
 // ---------------------------------------------------------------------------
 
-NBLog::MergeThread::MergeThread(NBLog::Merger &merger)
+NBLog::MergeThread::MergeThread(NBLog::Merger &merger, NBLog::MergeReader &mergeReader)
     : mMerger(merger),
+      mMergeReader(mergeReader),
       mTimeoutUs(0) {}
 
 NBLog::MergeThread::~MergeThread() {
@@ -1197,7 +1134,12 @@
         mTimeoutUs -= kThreadSleepPeriodUs;
     }
     if (doMerge) {
+        // Merge data from all the readers
         mMerger.merge();
+        // Process the data collected by mMerger and write it to PerformanceAnalysis
+        // FIXME: decide whether to call getAndProcessSnapshot every time
+        // or whether to have a separate thread that calls it with a lower frequency
+        mMergeReader.getAndProcessSnapshot();
     }
     return true;
 }
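
MergeThread, as described above, is a retriggerable one-shot: wakeup() re-arms a timeout of kThreadWakeupPeriodUs, and while the timeout is positive the loop merges and processes a snapshot once per kThreadSleepPeriodUs. The standalone sketch below shows just that timeout bookkeeping; it is single-threaded for brevity (the real thread blocks on a condition variable while idle), and OneShotMergerSketch is a made-up name.

    #include <cstdio>
    #include <mutex>

    class OneShotMergerSketch {
    public:
        void wakeup() {  // retrigger: reset the timeout to the full wakeup period
            std::lock_guard<std::mutex> lock(mMutex);
            mTimeoutUs = kWakeupPeriodUs;
        }
        bool loopOnce() {  // one pass of the merge loop; returns true if it merged
            bool doMerge;
            {
                std::lock_guard<std::mutex> lock(mMutex);
                doMerge = mTimeoutUs > 0;
                if (doMerge) {
                    mTimeoutUs -= kSleepPeriodUs;  // burn one sleep period of awake time
                }
            }
            if (doMerge) {
                printf("merge(); getAndProcessSnapshot();\n");
            }
            return doMerge;
        }
    private:
        static const int kSleepPeriodUs = 1000000;   // merge every 1 s while awake
        static const int kWakeupPeriodUs = 3000000;  // stay awake 3 s after each trigger
        std::mutex mMutex;
        int mTimeoutUs = 0;
    };

    int main() {
        OneShotMergerSketch m;
        m.wakeup();  // e.g. triggered by AudioFlinger binder activity
        for (int i = 0; i < 5; ++i) {
            if (!m.loopOnce()) {
                printf("idle\n");  // the timeout lapsed after three merges
            }
        }
        return 0;
    }
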
diff --git a/media/libnblog/PerformanceAnalysis.cpp b/media/libnblog/PerformanceAnalysis.cpp
new file mode 100644
index 0000000..f09e93d
--- /dev/null
+++ b/media/libnblog/PerformanceAnalysis.cpp
@@ -0,0 +1,378 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#define LOG_TAG "PerformanceAnalysis"
+// #define LOG_NDEBUG 0
+
+#include <algorithm>
+#include <climits>
+#include <deque>
+#include <iostream>
+#include <math.h>
+#include <numeric>
+#include <vector>
+#include <stdarg.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/prctl.h>
+#include <time.h>
+#include <new>
+#include <audio_utils/roundup.h>
+#include <media/nblog/NBLog.h>
+#include <media/nblog/PerformanceAnalysis.h>
+#include <media/nblog/ReportPerformance.h>
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+#include <queue>
+#include <utility>
+
+namespace android {
+
+namespace ReportPerformance {
+
+// Given an audio processing wakeup timestamp, buckets the time interval
+// since the previous timestamp into a histogram, searches for
+// outliers, analyzes the outlier series for unexpectedly
+// small or large values and stores these as peaks
+void PerformanceAnalysis::logTsEntry(timestamp ts) {
+    // after a state change, start a new series and do not
+    // record time intervals in-between
+    if (mBufferPeriod.mPrevTs == 0) {
+        mBufferPeriod.mPrevTs = ts;
+        return;
+    }
+
+    // calculate time interval between current and previous timestamp
+    const msInterval diffMs = static_cast<msInterval>(
+        deltaMs(mBufferPeriod.mPrevTs, ts));
+
+    const int diffJiffy = deltaJiffy(mBufferPeriod.mPrevTs, ts);
+
+    // old versus new weight ratio when updating the buffer period mean
+    static constexpr double exponentialWeight = 0.999;
+    // update buffer period mean with exponential weighting
+    mBufferPeriod.mMean = (mBufferPeriod.mMean < 0) ? diffMs :
+            exponentialWeight * mBufferPeriod.mMean + (1.0 - exponentialWeight) * diffMs;
+    // set mOutlierFactor to a smaller value for the fastmixer thread
+    const int kFastMixerMax = 10;
+    // NormalMixer times vary much more than FastMixer times.
+    // TODO: mOutlierFactor values are set empirically based on what appears to be
+    // an outlier. Learn these values from the data.
+    mBufferPeriod.mOutlierFactor = mBufferPeriod.mMean < kFastMixerMax ? 1.8 : 2.0;
+    // set outlier threshold
+    mBufferPeriod.mOutlier = mBufferPeriod.mMean * mBufferPeriod.mOutlierFactor;
+
+    // Check whether the time interval between the current timestamp
+    // and the previous one is long enough to count as an outlier
+    const bool isOutlier = detectAndStoreOutlier(diffMs);
+    // If an outlier was found, check whether it was a peak
+    if (isOutlier) {
+        /*bool isPeak =*/ detectAndStorePeak(
+            mOutlierData[0].first, mOutlierData[0].second);
+        // TODO: decide whether to insert a new empty histogram if a peak
+        //       occurred at the current timestamp
+        // TODO: remove isPeak if unused to avoid "unused variable" error
+    }
+
+    // Insert a histogram into mHists if it is empty, or
+    // close the current histogram and insert a new empty one
+    // if the current histogram has spanned its maximum time interval.
+    if (mHists.empty() ||
+        deltaMs(mHists[0].first, ts) >= kMaxLength.HistTimespanMs) {
+        mHists.emplace_front(ts, std::map<int, int>());
+        // When memory is full, delete oldest histogram
+        // TODO: use a circular buffer
+        if (mHists.size() >= kMaxLength.Hists) {
+            mHists.resize(kMaxLength.Hists);
+        }
+    }
+    // add current time intervals to histogram
+    ++mHists[0].second[diffJiffy];
+    // update previous timestamp
+    mBufferPeriod.mPrevTs = ts;
+}
+
+
+// Resets the previous timestamp after an audio on/off state change so that the
+// idle audio time interval is not added to the buffer period data
+void PerformanceAnalysis::handleStateChange() {
+    mBufferPeriod.mPrevTs = 0;
+    return;
+}
+
+
+// Checks whether the time interval between two outliers is far enough from
+// a typical delta to be considered a peak.
+// looks for changes in distribution (peaks), which can be either positive or negative.
+// The function sets the mean to the starting value and sigma to 0, and updates
+// them as long as no peak is detected. When a value is more than 'threshold'
+// standard deviations from the mean, a peak is detected and the mean and sigma
+// are set to the peak value and 0.
+bool PerformanceAnalysis::detectAndStorePeak(msInterval diff, timestamp ts) {
+    bool isPeak = false;
+    if (mOutlierData.empty()) {
+        return false;
+    }
+    // Update mean of the distribution
+    // TypicalDiff is used to check whether a value is unusually large
+    // when we cannot use standard deviations from the mean because the sd is set to 0.
+    mOutlierDistribution.mTypicalDiff = (mOutlierDistribution.mTypicalDiff *
+            (mOutlierData.size() - 1) + diff) / mOutlierData.size();
+
+    // Initialize short-term mean at start of program
+    if (mOutlierDistribution.mMean == 0) {
+        mOutlierDistribution.mMean = diff;
+    }
+    // Update length of current sequence of outliers
+    mOutlierDistribution.mN++;
+
+    // Check whether a large deviation from the mean occurred.
+    // If the standard deviation has been reset to zero, the comparison is
+    // instead to the mean of the full mOutlierInterval sequence.
+    if ((fabs(diff - mOutlierDistribution.mMean) <
+            mOutlierDistribution.kMaxDeviation * mOutlierDistribution.mSd) ||
+            (mOutlierDistribution.mSd == 0 &&
+            fabs(diff - mOutlierDistribution.mMean) <
+            mOutlierDistribution.mTypicalDiff)) {
+        // update the mean and sd using online algorithm
+        // https://en.wikipedia.org/wiki/
+        // Algorithms_for_calculating_variance#Online_algorithm
+        mOutlierDistribution.mN++;
+        const double kDelta = diff - mOutlierDistribution.mMean;
+        mOutlierDistribution.mMean += kDelta / mOutlierDistribution.mN;
+        const double kDelta2 = diff - mOutlierDistribution.mMean;
+        mOutlierDistribution.mM2 += kDelta * kDelta2;
+        mOutlierDistribution.mSd = (mOutlierDistribution.mN < 2) ? 0 :
+                sqrt(mOutlierDistribution.mM2 / (mOutlierDistribution.mN - 1));
+    } else {
+        // new value is far from the mean:
+        // store peak timestamp and reset mean, sd, and short-term sequence
+        isPeak = true;
+        mPeakTimestamps.emplace_front(ts);
+        // if mPeaks has reached capacity, delete oldest data
+        // Note: this means that mOutlierDistribution values do not exactly
+        // match the data we have in mPeakTimestamps, but this is not an issue
+        // in practice for estimating future peaks.
+        // TODO: turn this into a circular buffer
+        if (mPeakTimestamps.size() >= kMaxLength.Peaks) {
+            mPeakTimestamps.resize(kMaxLength.Peaks);
+        }
+        mOutlierDistribution.mMean = 0;
+        mOutlierDistribution.mSd = 0;
+        mOutlierDistribution.mN = 0;
+        mOutlierDistribution.mM2 = 0;
+    }
+    return isPeak;
+}
+
+
+// Determines whether the difference between a timestamp and the previous
+// one is beyond a threshold. If yes, stores the timestamp as an outlier
+// and writes to mOutlierData in the following format:
+// Time elapsed since previous outlier: Timestamp of start of outlier
+// e.g. timestamps (ms) 1, 4, 5, 16, 18, 28 will produce pairs (4, 5), (13, 18).
+// TODO: learn what timestamp sequences correlate with glitches instead of
+// manually designing a heuristic.
+bool PerformanceAnalysis::detectAndStoreOutlier(const msInterval diffMs) {
+    bool isOutlier = false;
+    if (diffMs >= mBufferPeriod.mOutlier) {
+        isOutlier = true;
+        mOutlierData.emplace_front(
+                mOutlierDistribution.mElapsed, mBufferPeriod.mPrevTs);
+        // Remove oldest value if the vector is full
+        // TODO: turn this into a circular buffer
+        // TODO: make sure kShortHistSize is large enough that data will never be lost
+        // before being written to file or to a FIFO
+        if (mOutlierData.size() >= kMaxLength.Outliers) {
+            mOutlierData.resize(kMaxLength.Outliers);
+        }
+        mOutlierDistribution.mElapsed = 0;
+    }
+    mOutlierDistribution.mElapsed += diffMs;
+    return isOutlier;
+}
+
+static int widthOf(int x) {
+    int width = 0;
+    if (x < 0) {
+        width++;
+        x = x == INT_MIN ? INT_MAX : -x;
+    }
+    // assert (x >= 0)
+    do {
+        ++width;
+        x /= 10;
+    } while (x > 0);
+    return width;
+}
+
+// computes the column width required for a specific histogram value
+inline int numberWidth(double number, int leftPadding) {
+    // Added values account for whitespaces needed around numbers, and for the
+    // dot and decimal digit not accounted for by widthOf
+    return std::max(std::max(widthOf(static_cast<int>(number)) + 3, 2), leftPadding + 1);
+}
+
+// rounds value to precision based on log-distance from mean
+__attribute__((no_sanitize("signed-integer-overflow")))
+inline double logRound(double x, double mean) {
+    // Larger values decrease range of high resolution and prevent overflow
+    // of a histogram on the console.
+    // The following formula adjusts kBase based on the buffer period length.
+    // Different threads have buffer periods ranging from 2 to 40. The
+    // formula below maps buffer period 2 to kBase = ~1, 4 to ~2, 20 to ~3, 40 to ~4.
+    // TODO: tighten this for higher means, the data still overflows
+    const double kBase = log(mean) / log(2.2);
+    const double power = floor(
+        log(abs(x - mean) / mean) / log(kBase)) + 2;
+    // do not round values close to the mean
+    if (power < 1) {
+        return x;
+    }
+    const int factor = static_cast<int>(pow(10, power));
+    return (static_cast<int>(x) * factor) / factor;
+}
+
+// TODO Make it return a std::string instead of modifying body
+// TODO: move this to ReportPerformance, probably make it a friend function
+// of PerformanceAnalysis
+void PerformanceAnalysis::reportPerformance(String8 *body, int author, log_hash_t hash,
+                                            int maxHeight) {
+    if (mHists.empty()) {
+        return;
+    }
+
+    // ms of active audio in displayed histogram
+    double elapsedMs = 0;
+    // starting timestamp of histogram
+    timestamp startingTs = mHists[0].first;
+
+    // histogram which stores .1 precision ms counts instead of Jiffy multiple counts
+    std::map<double, int> buckets;
+    for (const auto &shortHist: mHists) {
+        for (const auto &countPair : shortHist.second) {
+            const double ms = static_cast<double>(countPair.first) / kJiffyPerMs;
+            buckets[logRound(ms, mBufferPeriod.mMean)] += countPair.second;
+            elapsedMs += ms * countPair.second;
+        }
+    }
+
+    // underscores and spaces length corresponds to maximum width of histogram
+    static const int kLen = 200;
+    std::string underscores(kLen, '_');
+    std::string spaces(kLen, ' ');
+
+    auto it = buckets.begin();
+    double maxDelta = it->first;
+    int maxCount = it->second;
+    // Compute maximum values
+    while (++it != buckets.end()) {
+        if (it->first > maxDelta) {
+            maxDelta = it->first;
+        }
+        if (it->second > maxCount) {
+            maxCount = it->second;
+        }
+    }
+    int height = log2(maxCount) + 1; // maxCount > 0, safe to call log2
+    const int leftPadding = widthOf(1 << height);
+    const int bucketWidth = numberWidth(maxDelta, leftPadding);
+    int scalingFactor = 1;
+    // scale data if it exceeds maximum height
+    if (height > maxHeight) {
+        scalingFactor = (height + maxHeight) / maxHeight;
+        height /= scalingFactor;
+    }
+    body->appendFormat("\n%*s %3.2f %s", leftPadding + 11,
+            "Occurrences in", (elapsedMs / kMsPerSec), "seconds of audio:");
+    body->appendFormat("\n%*s%d, %lld, %lld\n", leftPadding + 11,
+            "Thread, hash, starting timestamp: ", author,
+            static_cast<long long int>(hash), static_cast<long long int>(startingTs));
+    // write the occurrence count for each bucket above its histogram column
+    body->appendFormat("\n%s", " ");
+    body->appendFormat("%*s", leftPadding, " ");
+    for (auto const &x : buckets) {
+        const int colWidth = numberWidth(x.first, leftPadding);
+        body->appendFormat("%*d", colWidth, x.second);
+    }
+    // write histogram ascii art
+    body->appendFormat("\n%s", " ");
+    for (int row = height * scalingFactor; row >= 0; row -= scalingFactor) {
+        const int value = 1 << row;
+        body->appendFormat("%.*s", leftPadding, spaces.c_str());
+        for (auto const &x : buckets) {
+            const int colWidth = numberWidth(x.first, leftPadding);
+            body->appendFormat("%.*s%s", colWidth - 1,
+                               spaces.c_str(), x.second < value ? " " : "|");
+        }
+        body->appendFormat("\n%s", " ");
+    }
+    // print x-axis
+    const int columns = static_cast<int>(buckets.size());
+    body->appendFormat("%*c", leftPadding, ' ');
+    body->appendFormat("%.*s", (columns + 1) * bucketWidth, underscores.c_str());
+    body->appendFormat("\n%s", " ");
+
+    // write footer with bucket labels
+    body->appendFormat("%*s", leftPadding, " ");
+    for (auto const &x : buckets) {
+        const int colWidth = numberWidth(x.first, leftPadding);
+        body->appendFormat("%*.*f", colWidth, 1, x.first);
+    }
+    body->appendFormat("%.*s%s", bucketWidth, spaces.c_str(), "ms\n");
+
+    // Now report glitches
+    body->appendFormat("\ntime elapsed between glitches and glitch timestamps:\n");
+    for (const auto &outlier: mOutlierData) {
+        body->appendFormat("%lld: %lld\n", static_cast<long long>(outlier.first),
+                           static_cast<long long>(outlier.second));
+    }
+}
+
+//------------------------------------------------------------------------------
+
+// writes a summary of performance into the specified file descriptor
+void dump(int fd, int indent, PerformanceAnalysisMap &threadPerformanceAnalysis) {
+    String8 body;
+    const char* const kDirectory = "/data/misc/audioserver/";
+    for (auto & thread : threadPerformanceAnalysis) {
+        for (auto & hash: thread.second) {
+            PerformanceAnalysis& curr = hash.second;
+            // write performance data to console
+            curr.reportPerformance(&body, thread.first, hash.first);
+            if (!body.isEmpty()) {
+                dumpLine(fd, indent, body);
+                body.clear();
+            }
+            // write to file
+            writeToFile(curr.mHists, curr.mOutlierData, curr.mPeakTimestamps,
+                        kDirectory, false, thread.first, hash.first);
+        }
+    }
+}
+
+
+// Writes a string to the specified file descriptor
+void dumpLine(int fd, int indent, const String8 &body) {
+    dprintf(fd, "%.*s%s \n", indent, "", body.string());
+}
+
+} // namespace ReportPerformance
+
+}   // namespace android
diff --git a/media/libnblog/ReportPerformance.cpp b/media/libnblog/ReportPerformance.cpp
new file mode 100644
index 0000000..827e731
--- /dev/null
+++ b/media/libnblog/ReportPerformance.cpp
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ReportPerformance"
+
+#include <fstream>
+#include <iostream>
+#include <queue>
+#include <stdarg.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+#include <sstream>
+#include <sys/prctl.h>
+#include <sys/time.h>
+#include <utility>
+#include <media/nblog/NBLog.h>
+#include <media/nblog/PerformanceAnalysis.h>
+#include <media/nblog/ReportPerformance.h>
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+namespace android {
+
+namespace ReportPerformance {
+
+
+// TODO: use a function like this to extract logic from writeToFile
+// https://stackoverflow.com/a/9279620
+
+// Writes outlier intervals, timestamps, and histograms spanning long time intervals to file.
+// TODO: write data in binary format
+void writeToFile(const std::deque<std::pair<timestamp, Histogram>> &hists,
+                 const std::deque<std::pair<msInterval, timestamp>> &outlierData,
+                 const std::deque<timestamp> &peakTimestamps,
+                 const char * directory, bool append, int author, log_hash_t hash) {
+
+    // TODO: remove old files, implement rotating files as in AudioFlinger.cpp
+
+    if (outlierData.empty() && hists.empty() && peakTimestamps.empty()) {
+        ALOGW("No data, returning.");
+        return;
+    }
+
+    std::stringstream outlierName;
+    std::stringstream histogramName;
+    std::stringstream peakName;
+
+    // get current time
+    char currTime[16]; //YYYYMMDDHHMMSS + '\0' + one unused
+    struct timeval tv;
+    gettimeofday(&tv, NULL);
+    struct tm tm;
+    localtime_r(&tv.tv_sec, &tm);
+    strftime(currTime, sizeof(currTime), "%Y%m%d%H%M%S", &tm);
+
+    // generate file names
+    std::stringstream common;
+    common << author << "_" << hash << "_" << currTime << ".csv";
+
+    histogramName << directory << "histograms_" << common.str();
+    outlierName << directory << "outliers_" << common.str();
+    peakName << directory << "peaks_" << common.str();
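+    // e.g. (illustrative) with directory "/data/misc/audioserver/", author 1, hash 13579 and
+    // current time 20170901123000, histogramName becomes
+    // "/data/misc/audioserver/histograms_1_13579_20170901123000.csv"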
+
+    std::ofstream hfs;
+    hfs.open(histogramName.str(), append ? std::ios::app : std::ios::trunc);
+    if (!hfs.is_open()) {
+        ALOGW("couldn't open file %s", histogramName.str().c_str());
+        return;
+    }
+    // each histogram is written as a line where the first value is the timestamp and
+    // subsequent values are pairs of buckets and counts. Each value is separated
+    // by a comma, and each histogram is separated by a newline.
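+    // e.g. one line might read (illustrative values): 73215648921, 2.1, 340, 2.2, 7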
+    for (auto hist = hists.begin(); hist != hists.end(); ++hist) {
+        hfs << hist->first << ", ";
+        for (auto bucket = hist->second.begin(); bucket != hist->second.end(); ++bucket) {
+            hfs << bucket->first / static_cast<double>(kJiffyPerMs)
+                << ", " << bucket->second;
+            if (std::next(bucket) != end(hist->second)) {
+                hfs << ", ";
+            }
+        }
+        if (std::next(hist) != end(hists)) {
+            hfs << "\n";
+        }
+    }
+    hfs.close();
+
+    std::ofstream ofs;
+    ofs.open(outlierName.str(), append ? std::ios::app : std::ios::trunc);
+    if (!ofs.is_open()) {
+        ALOGW("couldn't open file %s", outlierName.str().c_str());
+        return;
+    }
+    // outliers are written as pairs separated by newlines, where each
+    // pair's values are separated by a comma
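+    // e.g. one line might read (illustrative values): 1000.5, 73215648921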
+    for (const auto &outlier : outlierData) {
+        ofs << outlier.first << ", " << outlier.second << "\n";
+    }
+    ofs.close();
+
+    std::ofstream pfs;
+    pfs.open(peakName.str(), append ? std::ios::app : std::ios::trunc);
+    if (!pfs.is_open()) {
+        ALOGW("couldn't open file %s", peakName.str().c_str());
+        return;
+    }
+    // peaks are simply timestamps separated by commas
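+    // e.g. (illustrative values): 73215648921, 73219867000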
+    for (auto peak = peakTimestamps.begin(); peak != peakTimestamps.end(); ++peak) {
+        pfs << *peak;
+        if (std::next(peak) != end(peakTimestamps)) {
+            pfs << ", ";
+        }
+    }
+    pfs.close();
+}
+
+} // namespace ReportPerformance
+
+}   // namespace android
diff --git a/media/libnblog/include/media/nblog/NBLog.h b/media/libnblog/include/media/nblog/NBLog.h
new file mode 100644
index 0000000..fb6f179
--- /dev/null
+++ b/media/libnblog/include/media/nblog/NBLog.h
@@ -0,0 +1,613 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Non-blocking event logger intended for safe communication between processes via shared memory
+
+#ifndef ANDROID_MEDIA_NBLOG_H
+#define ANDROID_MEDIA_NBLOG_H
+
+#include <deque>
+#include <map>
+#include <set>
+#include <vector>
+
+#include <audio_utils/fifo.h>
+#include <binder/IMemory.h>
+#include <media/nblog/PerformanceAnalysis.h>
+#include <media/nblog/ReportPerformance.h>
+#include <utils/Mutex.h>
+#include <utils/threads.h>
+
+namespace android {
+
+class String8;
+
+class NBLog {
+
+public:
+
+    using log_hash_t = ReportPerformance::log_hash_t;
+
+    // FIXME Everything needed for client (writer API and registration) should be isolated
+    //       from the rest of the implementation.
+    class Writer;
+    class Reader;
+
+    enum Event : uint8_t {
+        EVENT_RESERVED,
+        EVENT_STRING,               // ASCII string, not NUL-terminated
+                                    // TODO: make timestamp optional
+        EVENT_TIMESTAMP,            // clock_gettime(CLOCK_MONOTONIC)
+        EVENT_INTEGER,              // integer value entry
+        EVENT_FLOAT,                // floating point value entry
+        EVENT_PID,                  // process ID and process name
+        EVENT_AUTHOR,               // author index (present in merged logs) tracks entry's
+                                    // original log
+        EVENT_START_FMT,            // logFormat start event: entry includes format string,
+                                    // following entries contain format arguments
+        EVENT_HASH,                 // unique HASH of log origin, originates from hash of file name
+                                    // and line number
+        EVENT_HISTOGRAM_ENTRY_TS,   // single datum for timestamp histogram
+        EVENT_AUDIO_STATE,          // audio on/off event: logged on FastMixer::onStateChange call
+        EVENT_END_FMT,              // end of logFormat argument list
+
+        EVENT_UPPER_BOUND,          // to check for invalid events
+    };
+
+private:
+
+    // ---------------------------------------------------------------------------
+    // API for handling format entry operations
+
+    // a formatted entry has the following structure:
+    //    * START_FMT entry, containing the format string
+    //    * TIMESTAMP entry
+    //    * HASH entry
+    //    * author entry of the thread that generated it (optional, present in merged log)
+    //    * format arg1
+    //    * format arg2
+    //    * ...
+    //    * END_FMT entry
+
+    // entry representation in memory
+    struct entry {
+        const uint8_t type;
+        const uint8_t length;
+        const uint8_t data[0];
+    };
+
+    // entry tail representation (after data)
+    struct ending {
+        uint8_t length;
+        uint8_t next[0];
+    };
+
+    // entry iterator
+    class EntryIterator {
+    public:
+        EntryIterator();
+        explicit EntryIterator(const uint8_t *entry);
+        EntryIterator(const EntryIterator &other);
+
+        // dereference underlying entry
+        const entry&    operator*() const;
+        const entry*    operator->() const;
+        // advance to next entry
+        EntryIterator&       operator++(); // ++i
+        // back to previous entry
+        EntryIterator&       operator--(); // --i
+        EntryIterator        next() const;
+        EntryIterator        prev() const;
+        bool            operator!=(const EntryIterator &other) const;
+        int             operator-(const EntryIterator &other) const;
+
+        bool            hasConsistentLength() const;
+        void            copyTo(std::unique_ptr<audio_utils_fifo_writer> &dst) const;
+        void            copyData(uint8_t *dst) const;
+
+        template<typename T>
+        inline const T& payload() {
+            return *reinterpret_cast<const T *>(ptr + offsetof(entry, data));
+        }
+
+        inline operator const uint8_t*() const {
+            return ptr;
+        }
+
+    private:
+        const uint8_t  *ptr;
+    };
+
+    class AbstractEntry {
+    public:
+
+        // Entry starting in the given pointer
+        explicit AbstractEntry(const uint8_t *entry);
+        virtual ~AbstractEntry() {}
+
+        // build concrete entry of appropriate class from pointer
+        static std::unique_ptr<AbstractEntry> buildEntry(const uint8_t *ptr);
+
+        // get format entry timestamp
+        virtual int64_t      timestamp() const = 0;
+
+        // get format entry's unique id
+        virtual log_hash_t   hash() const = 0;
+
+        // entry's author index (-1 if none present)
+        // a Merger has a vector of Readers, author simply points to the index of the
+        // Reader that originated the entry
+        // TODO consider changing to uint32_t
+        virtual int          author() const = 0;
+
+        // copy entry, adding author before timestamp, returns iterator to end of entry
+        virtual EntryIterator    copyWithAuthor(std::unique_ptr<audio_utils_fifo_writer> &dst,
+                                                int author) const = 0;
+
+    protected:
+        // copies ordinary entry from src to dst, and returns length of entry
+        // size_t      copyEntry(audio_utils_fifo_writer *dst, const iterator &it);
+        const uint8_t  *mEntry;
+    };
+
+    class FormatEntry : public AbstractEntry {
+    public:
+        // explicit FormatEntry(const EntryIterator &it);
+        explicit FormatEntry(const uint8_t *ptr) : AbstractEntry(ptr) {}
+        virtual ~FormatEntry() {}
+
+        EntryIterator begin() const;
+
+        // Entry's format string
+        const   char* formatString() const;
+
+        // Entry's format string length
+        size_t      formatStringLength() const;
+
+        // Format arguments (excluding format string, timestamp and author)
+        EntryIterator    args() const;
+
+        // get format entry timestamp
+        virtual int64_t     timestamp() const override;
+
+        // get format entry's unique id
+        virtual log_hash_t  hash() const override;
+
+        // entry's author index (-1 if none present)
+        // a Merger has a vector of Readers, author simply points to the index of the
+        // Reader that originated the entry
+        virtual int         author() const override;
+
+        // copy entry, adding author before timestamp, returns size of original entry
+        virtual EntryIterator    copyWithAuthor(std::unique_ptr<audio_utils_fifo_writer> &dst,
+                                                int author) const override;
+
+    };
+
+    class HistogramEntry : public AbstractEntry {
+    public:
+        explicit HistogramEntry(const uint8_t *ptr) : AbstractEntry(ptr) {
+        }
+        virtual ~HistogramEntry() {}
+
+        virtual int64_t     timestamp() const override;
+
+        virtual log_hash_t  hash() const override;
+
+        virtual int         author() const override;
+
+        virtual EntryIterator    copyWithAuthor(std::unique_ptr<audio_utils_fifo_writer> &dst,
+                                                int author) const override;
+
+    };
+
+    // ---------------------------------------------------------------------------
+
+    // representation of a single log entry in private memory
+    struct Entry {
+        Entry(Event event, const void *data, size_t length)
+            : mEvent(event), mLength(length), mData(data) { }
+        /*virtual*/ ~Entry() { }
+
+        // used during writing to format Entry information as follows:
+        // [type][length][data ... ][length]
+        int     copyEntryDataAt(size_t offset) const;
+
+    private:
+        friend class Writer;
+        Event       mEvent;     // event type
+        uint8_t     mLength;    // length of additional data, 0 <= mLength <= kMaxLength
+        const void *mData;      // event type-specific data
+        static const size_t kMaxLength = 255;
+    public:
+        // mEvent, mLength, mData[...], duplicate mLength
+        static const size_t kOverhead = sizeof(entry) + sizeof(ending);
+        // ending length of previous entry
+        static const ssize_t kPreviousLengthOffset = - sizeof(ending) +
+            offsetof(ending, length);
+    };
+
+    struct HistTsEntry {
+        log_hash_t hash;
+        int64_t ts;
+    }; //TODO __attribute__((packed));
+
+    struct HistTsEntryWithAuthor {
+        log_hash_t hash;
+        int64_t ts;
+        int author;
+    }; //TODO __attribute__((packed));
+
+    struct HistIntEntry {
+        log_hash_t hash;
+        int value;
+    }; //TODO __attribute__((packed));
+
+    // representation of a single log entry in shared memory
+    //  byte[0]             mEvent
+    //  byte[1]             mLength
+    //  byte[2]             mData[0]
+    //  ...
+    //  byte[2+i]           mData[i]
+    //  ...
+    //  byte[2+mLength-1]   mData[mLength-1]
+    //  byte[2+mLength]     duplicate copy of mLength to permit reverse scan
+    //  byte[3+mLength]     start of next log entry
+
+    static void    appendInt(String8 *body, const void *data);
+    static void    appendFloat(String8 *body, const void *data);
+    static void    appendPID(String8 *body, const void *data, size_t length);
+    static void    appendTimestamp(String8 *body, const void *data);
+    static size_t  fmtEntryLength(const uint8_t *data);
+    static String8 bufferDump(const uint8_t *buffer, size_t size);
+    static String8 bufferDump(const EntryIterator &it);
+public:
+
+    // Located in shared memory, must be POD.
+    // Exactly one process must explicitly call the constructor or use placement new.
+    // Since this is a POD, the destructor is empty and there is no need to call it explicitly.
+    struct Shared {
+        Shared() /* mRear initialized via default constructor */ { }
+        /*virtual*/ ~Shared() { }
+
+        audio_utils_fifo_index  mRear;  // index one byte past the end of most recent Entry
+        char    mBuffer[0];             // circular buffer for entries
+    };
+
+public:
+
+    // ---------------------------------------------------------------------------
+
+    // FIXME Timeline was intended to wrap Writer and Reader, but isn't actually used yet.
+    // For now it is just a namespace for sharedSize().
+    class Timeline : public RefBase {
+    public:
+#if 0
+        Timeline(size_t size, void *shared = NULL);
+        virtual ~Timeline();
+#endif
+
+        // Input parameter 'size' is the desired size of the timeline in byte units.
+        // Returns the size rounded up to a power-of-2, plus the constant size overhead for indices.
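+        // For example, a requested size of 1000 would be rounded up to 1024, plus the
+        // fixed overhead for the indices.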
+        static size_t sharedSize(size_t size);
+
+#if 0
+    private:
+        friend class    Writer;
+        friend class    Reader;
+
+        const size_t    mSize;      // circular buffer size in bytes, must be a power of 2
+        bool            mOwn;       // whether I own the memory at mShared
+        Shared* const   mShared;    // pointer to shared memory
+#endif
+    };
+
+    // ---------------------------------------------------------------------------
+
+    // Writer is thread-safe with respect to Reader, but not with respect to multiple threads
+    // calling Writer methods.  If you need multi-thread safety for writing, use LockedWriter.
+    class Writer : public RefBase {
+    public:
+        Writer();                   // dummy nop implementation without shared memory
+
+        // Input parameter 'size' is the desired size of the timeline in byte units.
+        // The size of the shared memory must be at least Timeline::sharedSize(size).
+        Writer(void *shared, size_t size);
+        Writer(const sp<IMemory>& iMemory, size_t size);
+
+        virtual ~Writer();
+
+        // FIXME needs comments, and some should be private
+        virtual void    log(const char *string);
+        virtual void    logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3)));
+        virtual void    logvf(const char *fmt, va_list ap);
+        virtual void    logTimestamp();
+        virtual void    logTimestamp(const int64_t ts);
+        virtual void    logInteger(const int x);
+        virtual void    logFloat(const float x);
+        virtual void    logPID();
+        virtual void    logFormat(const char *fmt, log_hash_t hash, ...);
+        virtual void    logVFormat(const char *fmt, log_hash_t hash, va_list ap);
+        virtual void    logStart(const char *fmt);
+        virtual void    logEnd();
+        virtual void    logHash(log_hash_t hash);
+        virtual void    logEventHistTs(Event event, log_hash_t hash);
+
+        virtual bool    isEnabled() const;
+
+        // return value for all of these is the previous isEnabled()
+        virtual bool    setEnabled(bool enabled);   // but won't enable if no shared memory
+        bool    enable()    { return setEnabled(true); }
+        bool    disable()   { return setEnabled(false); }
+
+        sp<IMemory>     getIMemory() const  { return mIMemory; }
+
+    private:
+        // 0 <= length <= kMaxLength
+        // writes a single Entry to the FIFO
+        void    log(Event event, const void *data, size_t length);
+        // checks validity of an event before calling log above this one
+        void    log(const Entry *entry, bool trusted = false);
+
+        Shared* const   mShared;    // raw pointer to shared memory
+        sp<IMemory>     mIMemory;   // ref-counted version, initialized in constructor
+                                    // and then const
+        audio_utils_fifo * const mFifo;                 // FIFO itself, non-NULL
+                                                        // unless constructor fails
+        audio_utils_fifo_writer * const mFifoWriter;    // used to write to FIFO, non-NULL
+                                                        // unless dummy constructor used
+        bool            mEnabled;   // whether to actually log
+
+        // cached pid and process name to use in %p format specifier
+        // total tag length is mPidTagSize and process name is not zero terminated
+        char   *mPidTag;
+        size_t  mPidTagSize;
+    };
+
+    // ---------------------------------------------------------------------------
+
+    // Similar to Writer, but safe for multiple threads to call concurrently
+    class LockedWriter : public Writer {
+    public:
+        LockedWriter();
+        LockedWriter(void *shared, size_t size);
+
+        virtual void    log(const char *string);
+        virtual void    logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3)));
+        virtual void    logvf(const char *fmt, va_list ap);
+        virtual void    logTimestamp();
+        virtual void    logTimestamp(const int64_t ts);
+        virtual void    logInteger(const int x);
+        virtual void    logFloat(const float x);
+        virtual void    logPID();
+        virtual void    logStart(const char *fmt);
+        virtual void    logEnd();
+        virtual void    logHash(log_hash_t hash);
+
+        virtual bool    isEnabled() const;
+        virtual bool    setEnabled(bool enabled);
+
+    private:
+        mutable Mutex   mLock;
+    };
+
+    // ---------------------------------------------------------------------------
+
+    class Reader : public RefBase {
+    public:
+        // A snapshot of a reader's buffer
+        // This is raw data; no analysis has been done on it
+        class Snapshot {
+        public:
+            Snapshot() : mData(NULL), mLost(0) {}
+
+            Snapshot(size_t bufferSize) : mData(new uint8_t[bufferSize]) {}
+
+            ~Snapshot() { delete[] mData; }
+
+            // copy of the buffer
+            uint8_t *data() const { return mData; }
+
+            // amount of data lost (given by audio_utils_fifo_reader)
+            size_t   lost() const { return mLost; }
+
+            // iterator to beginning of readable segment of snapshot
+            // data between begin and end has valid entries
+            EntryIterator begin() { return mBegin; }
+
+            // iterator to end of readable segment of snapshot
+            EntryIterator end() { return mEnd; }
+
+        private:
+            friend class MergeReader;
+            friend class Reader;
+            uint8_t              *mData;
+            size_t                mLost;
+            EntryIterator mBegin;
+            EntryIterator mEnd;
+        };
+
+        // Input parameter 'size' is the desired size of the timeline in byte units.
+        // The size of the shared memory must be at least Timeline::sharedSize(size).
+        Reader(const void *shared, size_t size);
+        Reader(const sp<IMemory>& iMemory, size_t size);
+
+        virtual ~Reader();
+
+        // get snapshot of the reader's FIFO buffer, effectively consuming the buffer
+        std::unique_ptr<Snapshot> getSnapshot();
+
+        bool     isIMemory(const sp<IMemory>& iMemory) const;
+
+    protected:
+        // print a summary of the performance to the console
+        void    dumpLine(const String8& timestamp, String8& body);
+        EntryIterator   handleFormat(const FormatEntry &fmtEntry,
+                                     String8 *timestamp,
+                                     String8 *body);
+        int mFd;                // file descriptor
+        int mIndent;            // indentation level
+        int mLost;              // bytes of data lost before buffer was read
+
+    private:
+        static const std::set<Event> startingTypes;
+        static const std::set<Event> endingTypes;
+
+        sp<IMemory> mIMemory;       // ref-counted version, assigned only in constructor
+
+        /*const*/ Shared* const mShared;    // raw pointer to shared memory; actually const, but not
+                                            // declared const because of the audio_utils_fifo() constructor
+        audio_utils_fifo * const mFifo;                 // FIFO itself,
+                                                        // non-NULL unless constructor fails
+        audio_utils_fifo_reader * const mFifoReader;    // used to read from FIFO,
+                                                        // non-NULL unless constructor fails
+
+        // Searches for the last entry of type <type> in the range [front, back)
+        // back has to be entry-aligned. Returns nullptr if none encountered.
+        static const uint8_t *findLastEntryOfTypes(const uint8_t *front, const uint8_t *back,
+                                                   const std::set<Event> &types);
+
+        // dummy method for handling absent author entry
+        virtual void handleAuthor(const AbstractEntry& /*fmtEntry*/, String8* /*body*/) {}
+    };
+
+    // Wrapper for a reader with a name. Contains a pointer to the reader and a copy of the name
+    class NamedReader {
+    public:
+        NamedReader() { mName[0] = '\0'; } // for Vector
+        NamedReader(const sp<NBLog::Reader>& reader, const char *name) :
+            mReader(reader)
+            { strlcpy(mName, name, sizeof(mName)); }
+        ~NamedReader() { }
+        const sp<NBLog::Reader>&  reader() const { return mReader; }
+        const char*               name() const { return mName; }
+
+    private:
+        sp<NBLog::Reader>   mReader;
+        static const size_t kMaxName = 32;
+        char                mName[kMaxName];
+    };
+
+    // ---------------------------------------------------------------------------
+
+    // This class is used to read data from each thread's individual FIFO in shared memory
+    // and write it to a single FIFO in local memory.
+    class Merger : public RefBase {
+    public:
+        Merger(const void *shared, size_t size);
+
+        virtual ~Merger() {}
+
+        void addReader(const NamedReader &reader);
+        // TODO add removeReader
+        void merge();
+
+        // FIXME This is returning a reference to a shared variable that needs a lock
+        const std::vector<NamedReader>& getNamedReaders() const;
+
+    private:
+        // vector of the readers the merger is supposed to merge from.
+        // every reader reads from a writer's buffer
+        // FIXME Needs to be protected by a lock
+        std::vector<NamedReader> mNamedReaders;
+
+        Shared * const mShared; // raw pointer to shared memory
+        std::unique_ptr<audio_utils_fifo> mFifo; // FIFO itself
+        std::unique_ptr<audio_utils_fifo_writer> mFifoWriter; // used to write to FIFO
+    };
+
+    // This class has a pointer to the FIFO in local memory which stores the merged
+    // data collected by NBLog::Merger from all NamedReaders. It is used to process
+    // this data and write the result to PerformanceAnalysis.
+    class MergeReader : public Reader {
+    public:
+        MergeReader(const void *shared, size_t size, Merger &merger);
+
+        void dump(int fd, int indent = 0);
+        // process a particular snapshot of the reader
+        void getAndProcessSnapshot(Snapshot & snap);
+        // call getSnapshot of the content of the reader's buffer and process the data
+        void getAndProcessSnapshot();
+
+    private:
+        // FIXME Needs to be protected by a lock,
+        //       because even though our use of it is read-only there may be asynchronous updates
+        const std::vector<NamedReader>& mNamedReaders;
+
+        // analyzes, compresses and stores the merged data
+        // contains a separate instance for every author (thread), and for every source file
+        // location within each author
+        ReportPerformance::PerformanceAnalysisMap mThreadPerformanceAnalysis;
+
+        // handle author entry by looking up the author's name and appending it
+        // to the body
+        void handleAuthor(const AbstractEntry &fmtEntry, String8 *body);
+    };
+
+    // MergeThread is a thread that contains a Merger. It works as a retriggerable one-shot:
+    // when triggered, it awakes for a lapse of time, during which it periodically merges; if
+    // retriggered, the timeout is reset.
+    // The thread is triggered on AudioFlinger binder activity.
+    class MergeThread : public Thread {
+    public:
+        MergeThread(Merger &merger, MergeReader &mergeReader);
+        virtual ~MergeThread() override;
+
+        // Reset timeout and activate thread to merge periodically if it's idle
+        void wakeup();
+
+        // Set timeout period until the merging thread goes idle again
+        void setTimeoutUs(int time);
+
+    private:
+        virtual bool threadLoop() override;
+
+        // the merger who actually does the work of merging the logs
+        Merger&     mMerger;
+
+        // the MergeReader used to process data merged by mMerger
+        MergeReader& mMergeReader;
+
+        // mutex for the condition variable
+        Mutex       mMutex;
+
+        // condition variable to activate merging on timeout >= 0
+        Condition   mCond;
+
+        // time left until the thread blocks again (in microseconds)
+        int         mTimeoutUs;
+
+        // merging period when the thread is awake
+        static const int  kThreadSleepPeriodUs = 1000000 /*1s*/;
+
+        // initial timeout value when triggered
+        static const int  kThreadWakeupPeriodUs = 3000000 /*3s*/;
+    };
+
+};  // class NBLog
+
+// TODO put somewhere else
+static inline int64_t get_monotonic_ns() {
+    timespec ts;
+    if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
+        return (uint64_t) ts.tv_sec * 1000 * 1000 * 1000 + ts.tv_nsec;
+    }
+    return 0; // should not happen.
+}
+
+}   // namespace android
+
+#endif  // ANDROID_MEDIA_NBLOG_H
diff --git a/media/libnblog/include/media/nblog/PerformanceAnalysis.h b/media/libnblog/include/media/nblog/PerformanceAnalysis.h
new file mode 100644
index 0000000..ddfe9d6
--- /dev/null
+++ b/media/libnblog/include/media/nblog/PerformanceAnalysis.h
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_PERFORMANCEANALYSIS_H
+#define ANDROID_MEDIA_PERFORMANCEANALYSIS_H
+
+#include <deque>
+#include <map>
+#include <vector>
+
+#include <media/nblog/ReportPerformance.h>
+
+namespace android {
+
+namespace ReportPerformance {
+
+class PerformanceAnalysis;
+
+// a map of PerformanceAnalysis instances
+// The outer key is for the thread, the inner key for the source file location.
+using PerformanceAnalysisMap = std::map<int, std::map<log_hash_t, PerformanceAnalysis>>;
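+// e.g. (illustrative) threadPerformanceAnalysis[threadId][sourceLocationHash] is the
+// PerformanceAnalysis instance for one thread and one source file location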
+
+class PerformanceAnalysis {
+    // This class stores and analyzes audio processing wakeup timestamps from NBLog
+    // FIXME: currently, all performance data is stored in deques. Turn these into circular
+    // buffers.
+    // TODO: add a mutex.
+public:
+
+    PerformanceAnalysis() {};
+
+    friend void dump(int fd, int indent,
+                     PerformanceAnalysisMap &threadPerformanceAnalysis);
+
+    // Called in the case of an audio on/off event, e.g., EVENT_AUDIO_STATE.
+    // Used to discard idle time intervals
+    void handleStateChange();
+
+    // Writes wakeup timestamp entry to log and runs analysis
+    void logTsEntry(timestamp ts);
+
+    // FIXME: make peakdetector and storeOutlierData a single function
+    // Input: mOutlierData. Looks at the time elapsed between outliers,
+    // finds significant changes in the distribution,
+    // and writes the timestamps of significant changes to mPeakTimestamps.
+    bool detectAndStorePeak(msInterval delta, timestamp ts);
+
+    // stores timestamps of intervals above a threshold: these are assumed outliers.
+    // writes to mOutlierData <time elapsed since previous outlier, outlier timestamp>
+    bool detectAndStoreOutlier(const msInterval diffMs);
+
+    // Generates a string analysis of the buffer periods and appends it to *body
+    // FIXME: move this data visualization to a separate class. Model/view/controller
+    void reportPerformance(String8 *body, int author, log_hash_t hash,
+                           int maxHeight = 10);
+
+private:
+
+    // TODO use a circular buffer for the deques and vectors below
+
+    // stores outlier analysis:
+    // <elapsed time between outliers in ms, outlier beginning timestamp>
+    std::deque<std::pair<msInterval, timestamp>> mOutlierData;
+
+    // stores each timestamp at which a peak was detected
+    // a peak is a moment at which the average outlier interval changed significantly
+    std::deque<timestamp> mPeakTimestamps;
+
+    // stores buffer period histograms with timestamp of first sample
+    std::deque<std::pair<timestamp, Histogram>> mHists;
+
+    // Parameters used when detecting outliers
+    struct BufferPeriod {
+        double    mMean = -1;          // average time between audio processing wakeups
+        double    mOutlierFactor = -1; // values > mMean * mOutlierFactor are outliers
+        double    mOutlier = -1;       // this is set to mMean * mOutlierFactor
+        timestamp mPrevTs = -1;        // previous timestamp
+    } mBufferPeriod;
+
+    // capacity allocated to data structures
+    struct MaxLength {
+        size_t Hists; // number of histograms stored in memory
+        size_t Outliers; // number of values stored in outlier array
+        size_t Peaks; // number of values stored in peak array
+        int HistTimespanMs; // maximum histogram timespan
+    };
+    // These values allow for 10 hours of data, with a glitch and a peak
+    // as often as every 3 seconds
+    static constexpr MaxLength kMaxLength = {.Hists = 60, .Outliers = 12000,
+            .Peaks = 12000, .HistTimespanMs = 10 * kSecPerMin * kMsPerSec };
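+    // 60 histograms * 10 minutes each = 10 hours; 12000 outliers (or peaks)
+    // over 36000 seconds = one every 3 seconds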
+
+    // these variables ensure continuity while analyzing the timestamp
+    // series one sample at a time.
+    // TODO: change this to a running variance/mean class
+    struct OutlierDistribution {
+        msInterval mMean = 0;         // sample mean since previous peak
+        msInterval mSd = 0;           // sample sd since previous peak
+        msInterval mElapsed = 0;      // time since previous detected outlier
+        const int  kMaxDeviation = 5; // threshold in standard deviations from the mean
+        msInterval mTypicalDiff = 0;  // global mean of outliers
+        double     mN = 0;            // length of sequence since the last peak
+        double     mM2 = 0;           // used to calculate sd
+    } mOutlierDistribution;
+};
+
+void dump(int fd, int indent, PerformanceAnalysisMap &threadPerformanceAnalysis);
+void dumpLine(int fd, int indent, const String8 &body);
+
+} // namespace ReportPerformance
+
+}   // namespace android
+
+#endif  // ANDROID_MEDIA_PERFORMANCEANALYSIS_H
diff --git a/media/libnblog/include/media/nblog/ReportPerformance.h b/media/libnblog/include/media/nblog/ReportPerformance.h
new file mode 100644
index 0000000..ec0842f
--- /dev/null
+++ b/media/libnblog/include/media/nblog/ReportPerformance.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_REPORTPERFORMANCE_H
+#define ANDROID_MEDIA_REPORTPERFORMANCE_H
+
+#include <deque>
+#include <map>
+#include <vector>
+
+namespace android {
+
+// The String8 class is used by the reportPerformance() function
+class String8;
+
+namespace ReportPerformance {
+
+constexpr int kMsPerSec = 1000;
+constexpr int kSecPerMin = 60;
+
+constexpr int kJiffyPerMs = 10; // time unit for histogram as a multiple of milliseconds
+
+// stores a histogram: key = observed buffer period (as a multiple of a jiffy), value = count
+using Histogram = std::map<int, int>;
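+// e.g. (illustrative) {{21, 300}, {22, 7}} records 300 buffer periods of 2.1 ms and
+// 7 of 2.2 ms, given kJiffyPerMs == 10 (one jiffy == 0.1 ms)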
+
+using msInterval = double;
+using jiffyInterval = double;
+
+using timestamp = int64_t;
+
+using log_hash_t = uint64_t;
+
+static inline int deltaMs(int64_t ns1, int64_t ns2) {
+    return (ns2 - ns1) / (1000 * 1000);
+}
+
+static inline int deltaJiffy(int64_t ns1, int64_t ns2) {
+    return (kJiffyPerMs * (ns2 - ns1)) / (1000 * 1000);
+}
+
+static inline uint32_t log2(uint32_t x) {
+    // This works for x > 0
+    return 31 - __builtin_clz(x);
+}
+
+// Writes outlier intervals, timestamps, peaks timestamps, and histograms to a file.
+void writeToFile(const std::deque<std::pair<timestamp, Histogram>> &hists,
+                 const std::deque<std::pair<msInterval, timestamp>> &outlierData,
+                 const std::deque<timestamp> &peakTimestamps,
+                 const char * kDirectory, bool append, int author, log_hash_t hash);
+
+} // namespace ReportPerformance
+
+}   // namespace android
+
+#endif  // ANDROID_MEDIA_REPORTPERFORMANCE_H
diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp
index 8b1e1c3..281af47 100644
--- a/media/libstagefright/AACWriter.cpp
+++ b/media/libstagefright/AACWriter.cpp
@@ -30,8 +30,8 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
+#include <media/MediaSource.h>
 #include <media/mediarecorder.h>
 
 namespace android {
@@ -67,7 +67,7 @@
 }
 
 
-status_t AACWriter::addSource(const sp<IMediaSource> &source) {
+status_t AACWriter::addSource(const sp<MediaSource> &source) {
     if (mInitCheck != OK) {
         return mInitCheck;
     }
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index a2eb9a8..d9fdfe3 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -28,8 +28,7 @@
 
 #include <media/stagefright/ACodec.h>
 
-#include <binder/MemoryDealer.h>
-
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -55,7 +54,6 @@
 #include <media/openmax/OMX_IndexExt.h>
 #include <media/openmax/OMX_AsString.h>
 
-#include "include/avc_utils.h"
 #include "include/ACodecBufferChannel.h"
 #include "include/DataConverter.h"
 #include "include/SecureBuffer.h"
@@ -575,8 +573,6 @@
     memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
 
     changeState(mUninitializedState);
-
-    mTrebleFlag = false;
 }
 
 ACodec::~ACodec() {
@@ -828,11 +824,7 @@
 status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
 
-    if (getTrebleFlag()) {
-        CHECK(mAllocator[portIndex] == NULL);
-    } else {
-        CHECK(mDealer[portIndex] == NULL);
-    }
+    CHECK(mAllocator[portIndex] == NULL);
     CHECK(mBuffers[portIndex].isEmpty());
 
     status_t err;
@@ -874,7 +866,10 @@
                 }
             }
 
-            size_t alignment = MemoryDealer::getAllocationAlignment();
+            size_t alignment = 32; // This is the value currently returned by
+                                   // MemoryDealer::getAllocationAlignment().
+                                   // TODO: Fix this when Treble has
+                                   // MemoryHeap/MemoryDealer.
 
             ALOGV("[%s] Allocating %u buffers of size %zu (from %u using %s) on %s port",
                     mComponentName.c_str(),
@@ -896,18 +891,15 @@
             }
 
             if (mode != IOMX::kPortModePresetSecureBuffer) {
-                if (getTrebleFlag()) {
-                    mAllocator[portIndex] = TAllocator::getService("ashmem");
-                    if (mAllocator[portIndex] == nullptr) {
-                        ALOGE("hidl allocator on port %d is null",
-                                (int)portIndex);
-                        return NO_MEMORY;
-                    }
-                } else {
-                    size_t totalSize = def.nBufferCountActual *
-                            (alignedSize + alignedConvSize);
-                    mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");
+                mAllocator[portIndex] = TAllocator::getService("ashmem");
+                if (mAllocator[portIndex] == nullptr) {
+                    ALOGE("hidl allocator on port %d is null",
+                            (int)portIndex);
+                    return NO_MEMORY;
                 }
+                // TODO: When Treble has MemoryHeap/MemoryDealer, we should
+                // specify the heap size to be
+                // def.nBufferCountActual * (alignedSize + alignedConvSize).
             }
 
             const sp<AMessage> &format =
@@ -936,23 +928,55 @@
                             : new SecureBuffer(format, native_handle, bufSize);
                     info.mCodecData = info.mData;
                 } else {
-                    if (getTrebleFlag()) {
+                    bool success;
+                    auto transStatus = mAllocator[portIndex]->allocate(
+                            bufSize,
+                            [&success, &hidlMemToken](
+                                    bool s,
+                                    hidl_memory const& m) {
+                                success = s;
+                                hidlMemToken = m;
+                            });
+
+                    if (!transStatus.isOk()) {
+                        ALOGE("hidl's AshmemAllocator failed at the "
+                                "transport: %s",
+                                transStatus.description().c_str());
+                        return NO_MEMORY;
+                    }
+                    if (!success) {
+                        return NO_MEMORY;
+                    }
+                    hidlMem = mapMemory(hidlMemToken);
+                    if (hidlMem == nullptr) {
+                        return NO_MEMORY;
+                    }
+                    err = mOMXNode->useBuffer(
+                            portIndex, hidlMemToken, &info.mBufferID);
+
+                    if (mode == IOMX::kPortModeDynamicANWBuffer) {
+                        VideoNativeMetadata* metaData = (VideoNativeMetadata*)(
+                                (void*)hidlMem->getPointer());
+                        metaData->nFenceFd = -1;
+                    }
+
+                    info.mCodecData = new SharedMemoryBuffer(
+                            format, hidlMem);
+                    info.mCodecRef = hidlMem;
+
+                    // if we require conversion, allocate conversion buffer for client use;
+                    // otherwise, reuse codec buffer
+                    if (mConverter[portIndex] != NULL) {
+                        CHECK_GT(conversionBufferSize, (size_t)0);
                         bool success;
-                        auto transStatus = mAllocator[portIndex]->allocate(
-                                bufSize,
+                        mAllocator[portIndex]->allocate(
+                                conversionBufferSize,
                                 [&success, &hidlMemToken](
                                         bool s,
                                         hidl_memory const& m) {
                                     success = s;
                                     hidlMemToken = m;
                                 });
-
-                        if (!transStatus.isOk()) {
-                            ALOGE("hidl's AshmemAllocator failed at the "
-                                    "transport: %s",
-                                    transStatus.description().c_str());
-                            return NO_MEMORY;
-                        }
                         if (!success) {
                             return NO_MEMORY;
                         }
@@ -960,67 +984,8 @@
                         if (hidlMem == nullptr) {
                             return NO_MEMORY;
                         }
-                        err = mOMXNode->useBuffer(
-                                portIndex, hidlMemToken, &info.mBufferID);
-                    } else {
-                        mem = mDealer[portIndex]->allocate(bufSize);
-                        if (mem == NULL || mem->pointer() == NULL) {
-                            return NO_MEMORY;
-                        }
-
-                        err = mOMXNode->useBuffer(
-                                portIndex, mem, &info.mBufferID);
-                    }
-
-                    if (mode == IOMX::kPortModeDynamicANWBuffer) {
-                        VideoNativeMetadata* metaData = (VideoNativeMetadata*)(
-                                getTrebleFlag() ?
-                                (void*)hidlMem->getPointer() : mem->pointer());
-                        metaData->nFenceFd = -1;
-                    }
-
-                    if (getTrebleFlag()) {
-                        info.mCodecData = new SharedMemoryBuffer(
-                                format, hidlMem);
-                        info.mCodecRef = hidlMem;
-                    } else {
-                        info.mCodecData = new SharedMemoryBuffer(
-                                format, mem);
-                        info.mCodecRef = mem;
-                    }
-
-                    // if we require conversion, allocate conversion buffer for client use;
-                    // otherwise, reuse codec buffer
-                    if (mConverter[portIndex] != NULL) {
-                        CHECK_GT(conversionBufferSize, (size_t)0);
-                        if (getTrebleFlag()) {
-                            bool success;
-                            mAllocator[portIndex]->allocate(
-                                    conversionBufferSize,
-                                    [&success, &hidlMemToken](
-                                            bool s,
-                                            hidl_memory const& m) {
-                                        success = s;
-                                        hidlMemToken = m;
-                                    });
-                            if (!success) {
-                                return NO_MEMORY;
-                            }
-                            hidlMem = mapMemory(hidlMemToken);
-                            if (hidlMem == nullptr) {
-                                return NO_MEMORY;
-                            }
-                            info.mData = new SharedMemoryBuffer(format, hidlMem);
-                            info.mMemRef = hidlMem;
-                        } else {
-                            mem = mDealer[portIndex]->allocate(
-                                    conversionBufferSize);
-                            if (mem == NULL|| mem->pointer() == NULL) {
-                                return NO_MEMORY;
-                            }
-                            info.mData = new SharedMemoryBuffer(format, mem);
-                            info.mMemRef = mem;
-                        }
+                        info.mData = new SharedMemoryBuffer(format, hidlMem);
+                        info.mMemRef = hidlMem;
                     } else {
                         info.mData = info.mCodecData;
                         info.mMemRef = info.mCodecRef;
@@ -1581,11 +1546,7 @@
         }
     }
 
-    if (getTrebleFlag()) {
-        mAllocator[portIndex].clear();
-    } else {
-        mDealer[portIndex].clear();
-    }
+    mAllocator[portIndex].clear();
     return err;
 }
 
@@ -3781,6 +3742,8 @@
     } else {
         mFps = (double)framerate;
     }
+    // propagate framerate to the output so that the muxer has it
+    outputFormat->setInt32("frame-rate", (int32_t)mFps);
 
     video_def->xFramerate = (OMX_U32)(mFps * 65536);
     video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
@@ -6052,7 +6015,7 @@
             }
 #if 0
             if (mCodec->mNativeWindow == NULL) {
-                if (IsIDR(info->mData)) {
+                if (IsIDR(info->mData->data(), info->mData->size())) {
                     ALOGI("IDR frame");
                 }
             }
@@ -6249,13 +6212,8 @@
 
     if (mDeathNotifier != NULL) {
         if (mCodec->mOMXNode != NULL) {
-            if (mCodec->getTrebleFlag()) {
-                auto tOmxNode = mCodec->mOMXNode->getHalInterface();
-                tOmxNode->unlinkToDeath(mDeathNotifier);
-            } else {
-                sp<IBinder> binder = IInterface::asBinder(mCodec->mOMXNode);
-                binder->unlinkToDeath(mDeathNotifier);
-            }
+            auto tOmxNode = mCodec->mOMXNode->getHalInterface();
+            tOmxNode->unlinkToDeath(mDeathNotifier);
         }
         mDeathNotifier.clear();
     }
@@ -6403,8 +6361,7 @@
         componentName = matchingCodecs[matchIndex];
 
         OMXClient client;
-        bool trebleFlag;
-        if (client.connect(owners[matchIndex].c_str(), &trebleFlag) != OK) {
+        if (client.connect(owners[matchIndex].c_str()) != OK) {
             mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
             return false;
         }
@@ -6417,7 +6374,6 @@
         androidSetThreadPriority(tid, prevPriority);
 
         if (err == OK) {
-            mCodec->setTrebleFlag(trebleFlag);
             break;
         } else {
             ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
@@ -6439,17 +6395,9 @@
     }
 
     mDeathNotifier = new DeathNotifier(notify);
-    if (mCodec->getTrebleFlag()) {
-        auto tOmxNode = omxNode->getHalInterface();
-        if (!tOmxNode->linkToDeath(mDeathNotifier, 0)) {
-            mDeathNotifier.clear();
-        }
-    } else {
-        if (IInterface::asBinder(omxNode)->linkToDeath(mDeathNotifier) != OK) {
-            // This was a local binder, if it dies so do we, we won't care
-            // about any notifications in the afterlife.
-            mDeathNotifier.clear();
-        }
+    auto tOmxNode = omxNode->getHalInterface();
+    if (!tOmxNode->linkToDeath(mDeathNotifier, 0)) {
+        mDeathNotifier.clear();
     }
 
     notify = new AMessage(kWhatOMXMessageList, mCodec);
@@ -7856,11 +7804,7 @@
                             mCodec->mBuffers[kPortIndexOutput].size());
                     err = FAILED_TRANSACTION;
                 } else {
-                    if (mCodec->getTrebleFlag()) {
-                        mCodec->mAllocator[kPortIndexOutput].clear();
-                    } else {
-                        mCodec->mDealer[kPortIndexOutput].clear();
-                    }
+                    mCodec->mAllocator[kPortIndexOutput].clear();
                 }
 
                 if (err == OK) {
@@ -8462,12 +8406,4 @@
     return OK;
 }
 
-void ACodec::setTrebleFlag(bool trebleFlag) {
-    mTrebleFlag = trebleFlag;
-}
-
-bool ACodec::getTrebleFlag() const {
-    return mTrebleFlag;
-}
-
 }  // namespace android
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index 961b57f..910abc6 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -25,8 +25,8 @@
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
+#include <media/MediaSource.h>
 #include <media/mediarecorder.h>
 
 namespace android {
@@ -54,7 +54,7 @@
     return mInitCheck;
 }
 
-status_t AMRWriter::addSource(const sp<IMediaSource> &source) {
+status_t AMRWriter::addSource(const sp<MediaSource> &source) {
     if (mInitCheck != OK) {
         return mInitCheck;
     }
diff --git a/media/libstagefright/AVIExtractor.cpp b/media/libstagefright/AVIExtractor.cpp
index 5a6211e..406074b 100644
--- a/media/libstagefright/AVIExtractor.cpp
+++ b/media/libstagefright/AVIExtractor.cpp
@@ -18,14 +18,14 @@
 #define LOG_TAG "AVIExtractor"
 #include <utils/Log.h>
 
-#include "include/avc_utils.h"
 #include "include/AVIExtractor.h"
 
 #include <binder/ProcessState.h>
+#include <media/DataSource.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index ac4e819..484e310 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -4,37 +4,57 @@
     vendor_available: true,
 }
 
+cc_library_static {
+    name: "libstagefright_esds",
+
+    srcs: ["ESDS.cpp"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+
+    shared_libs: ["libmedia"],
+}
+
 cc_library_shared {
     name: "libstagefright",
 
     srcs: [
         "ACodec.cpp",
         "ACodecBufferChannel.cpp",
-        "AACExtractor.cpp",
         "AACWriter.cpp",
-        "AMRExtractor.cpp",
         "AMRWriter.cpp",
         "AudioPlayer.cpp",
         "AudioSource.cpp",
         "BufferImpl.cpp",
+        "CCodec.cpp",
+        "CCodecBufferChannel.cpp",
         "CodecBase.cpp",
         "CallbackDataSource.cpp",
+        "CallbackMediaSource.cpp",
         "CameraSource.cpp",
         "CameraSourceTimeLapse.cpp",
         "DataConverter.cpp",
-        "DataSource.cpp",
+        "DataSourceFactory.cpp",
         "DataURISource.cpp",
-        "ESDS.cpp",
         "FileSource.cpp",
-        "FLACExtractor.cpp",
+        "FrameDecoder.cpp",
         "FrameRenderTracker.cpp",
         "HTTPBase.cpp",
         "HevcUtils.cpp",
-        "ItemTable.cpp",
+        "InterfaceUtils.cpp",
         "JPEGSource.cpp",
-        "MP3Extractor.cpp",
         "MPEG2TSWriter.cpp",
-        "MPEG4Extractor.cpp",
         "MPEG4Writer.cpp",
         "MediaAdapter.cpp",
         "MediaClock.cpp",
@@ -42,19 +62,16 @@
         "MediaCodecList.cpp",
         "MediaCodecListOverrides.cpp",
         "MediaCodecSource.cpp",
-        "MediaExtractor.cpp",
+        "MediaExtractorFactory.cpp",
         "MediaSync.cpp",
-        "MidiExtractor.cpp",
         "http/MediaHTTP.cpp",
         "MediaMuxer.cpp",
-        "MediaSource.cpp",
         "NuCachedSource2.cpp",
         "NuMediaExtractor.cpp",
         "OMXClient.cpp",
         "OmxInfoBuilder.cpp",
-        "OggExtractor.cpp",
-        "SampleIterator.cpp",
-        "SampleTable.cpp",
+        "RemoteMediaExtractor.cpp",
+        "RemoteMediaSource.cpp",
         "SimpleDecodingSource.cpp",
         "SkipCutBuffer.cpp",
         "StagefrightMediaScanner.cpp",
@@ -63,65 +80,59 @@
         "SurfaceUtils.cpp",
         "ThrottledSource.cpp",
         "Utils.cpp",
-        "VBRISeeker.cpp",
         "VideoFrameScheduler.cpp",
-        "WAVExtractor.cpp",
-        "XINGSeeker.cpp",
-        "avc_utils.cpp",
     ],
 
     shared_libs: [
         "libaudioutils",
         "libbinder",
         "libcamera_client",
-        "libcrypto",
         "libcutils",
         "libdl",
         "libdrmframework",
-        "libexpat",
         "libgui",
+        "libion",
         "liblog",
         "libmedia",
+        "libmedia_omx",
         "libaudioclient",
+        "libmediaextractor",
         "libmediametrics",
         "libmediautils",
         "libnetd_client",
-        "libsonivox",
         "libui",
         "libutils",
-        "libvorbisidec",
-        "libmediadrm",
-        "libnativewindow",
-
         "libmedia_helper",
-        "libstagefright_omx_utils",
-        "libstagefright_flacdec",
+        "libstagefright_codec2",
         "libstagefright_foundation",
+        "libstagefright_omx",
+        "libstagefright_omx_utils",
         "libstagefright_xmlparser",
         "libdl",
         "libRScpp",
         "libhidlbase",
         "libhidlmemory",
         "android.hidl.allocator@1.0",
-        "android.hidl.memory@1.0",
-        "android.hidl.token@1.0-utils",
-        "android.hardware.cas@1.0",
         "android.hardware.cas.native@1.0",
         "android.hardware.media.omx@1.0",
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.mapper@2.0",
     ],
 
     static_libs: [
         "libstagefright_color_conversion",
         "libyuv_static",
-        "libstagefright_matroska",
         "libstagefright_mediafilter",
         "libstagefright_webm",
         "libstagefright_timedtext",
         "libvpx",
         "libwebm",
-        "libstagefright_mpeg2ts",
+        "libstagefright_mpeg2support",
+        "libstagefright_esds",
         "libstagefright_id3",
         "libFLAC",
+
+        "libstagefright_codec2_vndk",
     ],
 
     export_shared_lib_headers: [
@@ -159,7 +170,92 @@
     },
 }
 
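+// Subset of the libstagefright sources (data source/extractor plumbing and
+// utilities, no codecs) packaged separately for the player2 stack.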
+cc_library_shared {
+    name: "libstagefright_player2",
+
+    srcs: [
+        "CallbackDataSource.cpp",
+        "CallbackMediaSource.cpp",
+        "DataSourceFactory.cpp",
+        "DataURISource.cpp",
+        "FileSource.cpp",
+        "HTTPBase.cpp",
+        "HevcUtils.cpp",
+        "InterfaceUtils.cpp",
+        "MediaClock.cpp",
+        "MediaExtractorFactory.cpp",
+        "NuCachedSource2.cpp",
+        "RemoteMediaExtractor.cpp",
+        "RemoteMediaSource.cpp",
+        "SurfaceUtils.cpp",
+        "Utils.cpp",
+        "VideoFrameScheduler.cpp",
+        "http/MediaHTTP.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "libdrmframework",
+        "libgui",
+        "liblog",
+        "libmedia_omx",
+        "libmedia_player2_util",
+        "libaudioclient",
+        "libmediaextractor",
+        "libmediametrics",
+        "libmediautils",
+        "libnetd_client",
+        "libui",
+        "libutils",
+        "libmedia_helper",
+        "libstagefright_foundation",
+        "libdl",
+    ],
+
+    static_libs: [
+        "libstagefright_esds",
+        "libstagefright_id3",
+        "libstagefright_mpeg2support",
+        "libstagefright_timedtext",
+    ],
+
+    export_shared_lib_headers: [
+        "libmedia_player2_util",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    product_variables: {
+        debuggable: {
+            // enable experiments only in userdebug and eng builds
+            cflags: ["-DENABLE_STAGEFRIGHT_EXPERIMENTS"],
+        },
+    },
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        diag: {
+            cfi: true,
+        },
+    },
+}
+
 subdirs = [
+    "codec2",
     "codecs/*",
     "colorconversion",
     "filters",
@@ -168,13 +264,11 @@
     "http",
     "httplive",
     "id3",
-    "matroska",
     "mpeg2ts",
     "omx",
     "rtsp",
     "tests",
     "timedtext",
     "webm",
-    "wifi-display",
     "xmlparser",
 ]
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index b3fb8d4..16ea5b5 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -23,6 +23,7 @@
 
 #include <binder/IPCThreadState.h>
 #include <media/AudioTrack.h>
+#include <media/MediaSource.h>
 #include <media/openmax/OMX_Audio.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALookup.h>
@@ -30,7 +31,6 @@
 #include <media/stagefright/AudioPlayer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
 
@@ -67,7 +67,7 @@
     }
 }
 
-void AudioPlayer::setSource(const sp<IMediaSource> &source) {
+void AudioPlayer::setSource(const sp<MediaSource> &source) {
     CHECK(mSource == NULL);
     mSource = source;
 }
@@ -363,7 +363,7 @@
     // When offloading, the OMX component is not used so this hack
     // is not needed
     if (!useOffload()) {
-        wp<IMediaSource> tmp = mSource;
+        wp<MediaSource> tmp = mSource;
         mSource.clear();
         while (tmp.promote() != NULL) {
             usleep(1000);
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index f2b1f10..b8da980 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -52,7 +52,7 @@
 AudioSource::AudioSource(
         audio_source_t inputSource, const String16 &opPackageName,
         uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
-        uid_t uid, pid_t pid)
+        uid_t uid, pid_t pid, audio_port_handle_t selectedDeviceId)
     : mStarted(false),
       mSampleRate(sampleRate),
       mOutSampleRate(outSampleRate > 0 ? outSampleRate : sampleRate),
@@ -101,7 +101,9 @@
                     AudioRecord::TRANSFER_DEFAULT,
                     AUDIO_INPUT_FLAG_NONE,
                     uid,
-                    pid);
+                    pid,
+                    NULL /*pAttributes*/,
+                    selectedDeviceId);
         mInitCheck = mRecord->initCheck();
         if (mInitCheck != OK) {
             mRecord.clear();
@@ -465,4 +467,35 @@
     return value;
 }
 
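+// Routing support: forward device selection, query, and callback registration to
+// the underlying AudioRecord, or return NO_INIT if it was never created.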
+status_t AudioSource::setInputDevice(audio_port_handle_t deviceId) {
+    if (mRecord != 0) {
+        return mRecord->setInputDevice(deviceId);
+    }
+    return NO_INIT;
+}
+
+status_t AudioSource::getRoutedDeviceId(audio_port_handle_t* deviceId) {
+    if (mRecord != 0) {
+        *deviceId = mRecord->getRoutedDeviceId();
+        return NO_ERROR;
+    }
+    return NO_INIT;
+}
+
+status_t AudioSource::addAudioDeviceCallback(
+        const sp<AudioSystem::AudioDeviceCallback>& callback) {
+    if (mRecord != 0) {
+        return mRecord->addAudioDeviceCallback(callback);
+    }
+    return NO_INIT;
+}
+
+status_t AudioSource::removeAudioDeviceCallback(
+        const sp<AudioSystem::AudioDeviceCallback>& callback) {
+    if (mRecord != 0) {
+        return mRecord->removeAudioDeviceCallback(callback);
+    }
+    return NO_INIT;
+}
+
 }  // namespace android
diff --git a/media/libstagefright/BufferImpl.cpp b/media/libstagefright/BufferImpl.cpp
index fee3739..9fb6d34 100644
--- a/media/libstagefright/BufferImpl.cpp
+++ b/media/libstagefright/BufferImpl.cpp
@@ -24,11 +24,14 @@
 #include <media/ICrypto.h>
 #include <utils/NativeHandle.h>
 
+#include "include/Codec2Buffer.h"
 #include "include/SecureBuffer.h"
 #include "include/SharedMemoryBuffer.h"
 
 namespace android {
 
+// SharedMemoryBuffer
+
 SharedMemoryBuffer::SharedMemoryBuffer(const sp<AMessage> &format, const sp<IMemory> &mem)
     : MediaCodecBuffer(format, new ABuffer(mem->pointer(), mem->size())),
       mMemory(mem) {
@@ -39,6 +42,8 @@
       mTMemory(mem) {
 }
 
+// SecureBuffer
+
 SecureBuffer::SecureBuffer(const sp<AMessage> &format, const void *ptr, size_t size)
     : MediaCodecBuffer(format, new ABuffer(nullptr, size)),
       mPointer(ptr) {
@@ -59,4 +64,28 @@
     return ICrypto::kDestinationTypeNativeHandle;
 }
 
+// Codec2Buffer
+
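+// Maps the linear block for writing and wraps the mapped region in an
+// ABuffer-backed MediaCodecBuffer; returns nullptr if mapping fails.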
+// static
+sp<Codec2Buffer> Codec2Buffer::allocate(
+        const sp<AMessage> &format, const std::shared_ptr<C2LinearBlock> &block) {
+    C2WriteView writeView(block->map().get());
+    if (writeView.error() != C2_OK) {
+        return nullptr;
+    }
+    return new Codec2Buffer(format, new ABuffer(writeView.base(), writeView.capacity()), block);
+}
+
+C2ConstLinearBlock Codec2Buffer::share() {
+    return mBlock->share(offset(), size(), C2Fence());
+}
+
+Codec2Buffer::Codec2Buffer(
+        const sp<AMessage> &format,
+        const sp<ABuffer> &buffer,
+        const std::shared_ptr<C2LinearBlock> &block)
+    : MediaCodecBuffer(format, buffer),
+      mBlock(block) {
+}
+
 }  // namespace android
diff --git a/media/libstagefright/CCodec.cpp b/media/libstagefright/CCodec.cpp
new file mode 100644
index 0000000..0103abd
--- /dev/null
+++ b/media/libstagefright/CCodec.cpp
@@ -0,0 +1,611 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CCodec"
+#include <utils/Log.h>
+
+#include <thread>
+
+#include <C2PlatformSupport.h>
+
+#include <gui/Surface.h>
+#include <media/stagefright/CCodec.h>
+
+#include "include/CCodecBufferChannel.h"
+
+using namespace std::chrono_literals;
+
+namespace android {
+
+namespace {
+
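+// Process-wide watchdog: every kWatchIntervalUs it walks the registered codecs
+// and asks each one to release itself if it is stuck past its posted deadline.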
+class CCodecWatchdog : public AHandler {
+private:
+    enum {
+        kWhatRegister,
+        kWhatWatch,
+    };
+    constexpr static int64_t kWatchIntervalUs = 3000000;  // 3 secs
+
+public:
+    static sp<CCodecWatchdog> getInstance() {
+        Mutexed<sp<CCodecWatchdog>>::Locked instance(sInstance);
+        if (*instance == nullptr) {
+            *instance = new CCodecWatchdog;
+            (*instance)->init();
+        }
+        return *instance;
+    }
+
+    ~CCodecWatchdog() = default;
+
+    void registerCodec(CCodec *codec) {
+        sp<AMessage> msg = new AMessage(kWhatRegister, this);
+        msg->setPointer("codec", codec);
+        msg->post();
+    }
+
+protected:
+    void onMessageReceived(const sp<AMessage> &msg) {
+        switch (msg->what()) {
+            case kWhatRegister: {
+                void *ptr = nullptr;
+                CHECK(msg->findPointer("codec", &ptr));
+                Mutexed<std::list<wp<CCodec>>>::Locked codecs(mCodecs);
+                codecs->emplace_back((CCodec *)ptr);
+                break;
+            }
+
+            case kWhatWatch: {
+                Mutexed<std::list<wp<CCodec>>>::Locked codecs(mCodecs);
+                for (auto it = codecs->begin(); it != codecs->end(); ) {
+                    sp<CCodec> codec = it->promote();
+                    if (codec == nullptr) {
+                        it = codecs->erase(it);
+                        continue;
+                    }
+                    codec->initiateReleaseIfStuck();
+                    ++it;
+                }
+                msg->post(kWatchIntervalUs);
+                break;
+            }
+
+            default: {
+                TRESPASS("CCodecWatchdog: unrecognized message");
+            }
+        }
+    }
+
+private:
+    CCodecWatchdog() : mLooper(new ALooper) {}
+
+    void init() {
+        mLooper->setName("CCodecWatchdog");
+        mLooper->registerHandler(this);
+        mLooper->start();
+        (new AMessage(kWhatWatch, this))->post(kWatchIntervalUs);
+    }
+
+    static Mutexed<sp<CCodecWatchdog>> sInstance;
+
+    sp<ALooper> mLooper;
+    Mutexed<std::list<wp<CCodec>>> mCodecs;
+};
+
+Mutexed<sp<CCodecWatchdog>> CCodecWatchdog::sInstance;
+
+class CCodecListener : public C2Component::Listener {
+public:
+    CCodecListener(const std::shared_ptr<CCodecBufferChannel> &channel)
+        : mChannel(channel) {
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> component,
+            std::vector<std::unique_ptr<C2Work>> workItems) override {
+        (void) component;
+        mChannel->onWorkDone(std::move(workItems));
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> component,
+            std::vector<std::shared_ptr<C2SettingResult>> settingResult) override {
+        // TODO
+        (void) component;
+        (void) settingResult;
+    }
+
+    virtual void onError_nb(std::weak_ptr<C2Component> component, uint32_t errorCode) override {
+        // TODO
+        (void) component;
+        (void) errorCode;
+    }
+
+private:
+    std::shared_ptr<CCodecBufferChannel> mChannel;
+};
+
+}  // namespace
+
+CCodec::CCodec()
+    : mChannel(new CCodecBufferChannel([this] (status_t err, enum ActionCode actionCode) {
+          mCallback->onError(err, actionCode);
+      })) {
+    CCodecWatchdog::getInstance()->registerCodec(this);
+}
+
+CCodec::~CCodec() {
+}
+
+std::shared_ptr<BufferChannelBase> CCodec::getBufferChannel() {
+    return mChannel;
+}
+
+void CCodec::initiateAllocateComponent(const sp<AMessage> &msg) {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != RELEASED) {
+            mCallback->onError(INVALID_OPERATION, ACTION_CODE_FATAL);
+            return;
+        }
+        state->mState = ALLOCATING;
+    }
+
+    AString componentName;
+    if (!msg->findString("componentName", &componentName)) {
+        // TODO: choose a componentName appropriate for the media type
+    }
+
+    sp<AMessage> allocMsg(new AMessage(kWhatAllocate, this));
+    allocMsg->setString("componentName", componentName);
+    allocMsg->post();
+}
+
+void CCodec::allocate(const AString &componentName) {
+    // TODO: use C2ComponentStore to create component
+    mListener.reset(new CCodecListener(mChannel));
+
+    std::shared_ptr<C2Component> comp;
+    c2_status_t err = GetCodec2PlatformComponentStore()->createComponent(
+            componentName.c_str(), &comp);
+    if (err != C2_OK) {
+        Mutexed<State>::Locked state(mState);
+        state->mState = RELEASED;
+        state.unlock();
+        mCallback->onError(err, ACTION_CODE_FATAL);
+        state.lock();
+        return;
+    }
+    comp->setListener_vb(mListener, C2_MAY_BLOCK);
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != ALLOCATING) {
+            state->mState = RELEASED;
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = ALLOCATED;
+        state->mComp = comp;
+    }
+    mChannel->setComponent(comp);
+    mCallback->onComponentAllocated(comp->intf()->getName().c_str());
+}
+
+void CCodec::initiateConfigureComponent(const sp<AMessage> &format) {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != ALLOCATED) {
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            return;
+        }
+    }
+
+    sp<AMessage> msg(new AMessage(kWhatConfigure, this));
+    msg->setMessage("format", format);
+    msg->post();
+}
+
+void CCodec::configure(const sp<AMessage> &msg) {
+    sp<AMessage> inputFormat(new AMessage);
+    sp<AMessage> outputFormat(new AMessage);
+    status_t err = [=]() -> status_t {
+        AString mime;
+        if (!msg->findString("mime", &mime)) {
+            return BAD_VALUE;
+        }
+
+        int32_t encoder;
+        if (!msg->findInt32("encoder", &encoder)) {
+            encoder = false;
+        }
+
+        sp<RefBase> obj;
+        if (msg->findObject("native-window", &obj)) {
+            sp<Surface> surface = static_cast<Surface *>(obj.get());
+            setSurface(surface);
+        }
+
+        // XXX: hack
+        bool audio = mime.startsWithIgnoreCase("audio/");
+        if (encoder) {
+            outputFormat->setString("mime", mime);
+            inputFormat->setString("mime", AStringPrintf("%s/raw", audio ? "audio" : "video"));
+            if (audio) {
+                inputFormat->setInt32("channel-count", 1);
+                inputFormat->setInt32("sample-rate", 44100);
+                outputFormat->setInt32("channel-count", 1);
+                outputFormat->setInt32("sample-rate", 44100);
+            }
+        } else {
+            inputFormat->setString("mime", mime);
+            outputFormat->setString("mime", AStringPrintf("%s/raw", audio ? "audio" : "video"));
+            if (audio) {
+                outputFormat->setInt32("channel-count", 2);
+                outputFormat->setInt32("sample-rate", 44100);
+            }
+        }
+
+        // TODO
+
+        return OK;
+    }();
+    if (err != OK) {
+        mCallback->onError(err, ACTION_CODE_FATAL);
+        return;
+    }
+
+    {
+        Mutexed<Formats>::Locked formats(mFormats);
+        formats->mInputFormat = inputFormat;
+        formats->mOutputFormat = outputFormat;
+    }
+    mCallback->onComponentConfigured(inputFormat, outputFormat);
+}
+
+
+void CCodec::initiateCreateInputSurface() {
+    // TODO
+}
+
+void CCodec::initiateSetInputSurface(const sp<PersistentSurface> &surface) {
+    // TODO
+    (void) surface;
+}
+
+void CCodec::initiateStart() {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != ALLOCATED) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = STARTING;
+    }
+
+    (new AMessage(kWhatStart, this))->post();
+}
+
+void CCodec::start() {
+    std::shared_ptr<C2Component> comp;
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != STARTING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        comp = state->mComp;
+    }
+    c2_status_t err = comp->start();
+    if (err != C2_OK) {
+        // TODO: convert err into status_t
+        mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+        return;
+    }
+    sp<AMessage> inputFormat;
+    sp<AMessage> outputFormat;
+    {
+        Mutexed<Formats>::Locked formats(mFormats);
+        inputFormat = formats->mInputFormat;
+        outputFormat = formats->mOutputFormat;
+    }
+    mChannel->start(inputFormat, outputFormat);
+
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != STARTING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = RUNNING;
+    }
+    mCallback->onStartCompleted();
+}
+
+void CCodec::initiateShutdown(bool keepComponentAllocated) {
+    if (keepComponentAllocated) {
+        initiateStop();
+    } else {
+        initiateRelease();
+    }
+}
+
+void CCodec::initiateStop() {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == ALLOCATED
+                || state->mState == RELEASED
+                || state->mState == STOPPING
+                || state->mState == RELEASING) {
+            // We're already stopped, released, or doing it right now.
+            state.unlock();
+            mCallback->onStopCompleted();
+            state.lock();
+            return;
+        }
+        state->mState = STOPPING;
+    }
+
+    (new AMessage(kWhatStop, this))->post();
+}
+
+void CCodec::stop() {
+    std::shared_ptr<C2Component> comp;
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == RELEASING) {
+            state.unlock();
+            // Release is already in progress; nothing left to stop here.
+            mCallback->onStopCompleted();
+            state.lock();
+            return;
+        } else if (state->mState != STOPPING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        comp = state->mComp;
+    }
+    mChannel->stop();
+    status_t err = comp->stop();
+    if (err != C2_OK) {
+        // TODO: convert err into status_t
+        mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+    }
+
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == STOPPING) {
+            state->mState = ALLOCATED;
+        }
+    }
+    mCallback->onStopCompleted();
+}
+
+void CCodec::initiateRelease(bool sendCallback /* = true */) {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == RELEASED || state->mState == RELEASING) {
+            // We're already released or doing it right now.
+            if (sendCallback) {
+                state.unlock();
+                mCallback->onReleaseCompleted();
+                state.lock();
+            }
+            return;
+        }
+        if (state->mState == ALLOCATING) {
+            state->mState = RELEASING;
+            // With the altered state allocate() would fail and clean up.
+            if (sendCallback) {
+                state.unlock();
+                mCallback->onReleaseCompleted();
+                state.lock();
+            }
+            return;
+        }
+        state->mState = RELEASING;
+    }
+
+    std::thread([this, sendCallback] { release(sendCallback); }).detach();
+}
+
+void CCodec::release(bool sendCallback) {
+    std::shared_ptr<C2Component> comp;
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == RELEASED) {
+            if (sendCallback) {
+                state.unlock();
+                mCallback->onReleaseCompleted();
+                state.lock();
+            }
+            return;
+        }
+        comp = state->mComp;
+    }
+    mChannel->stop();
+    comp->release();
+
+    {
+        Mutexed<State>::Locked state(mState);
+        state->mState = RELEASED;
+        state->mComp.reset();
+    }
+    if (sendCallback) {
+        mCallback->onReleaseCompleted();
+    }
+}
+
+status_t CCodec::setSurface(const sp<Surface> &surface) {
+    return mChannel->setSurface(surface);
+}
+
+void CCodec::signalFlush() {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != RUNNING) {
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            return;
+        }
+        state->mState = FLUSHING;
+    }
+
+    (new AMessage(kWhatFlush, this))->post();
+}
+
+void CCodec::flush() {
+    std::shared_ptr<C2Component> comp;
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != FLUSHING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        comp = state->mComp;
+    }
+
+    mChannel->stop();
+
+    std::list<std::unique_ptr<C2Work>> flushedWork;
+    c2_status_t err = comp->flush_sm(C2Component::FLUSH_COMPONENT, &flushedWork);
+    if (err != C2_OK) {
+        // TODO: convert err into status_t
+        mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+    }
+
+    mChannel->flush(flushedWork);
+
+    {
+        Mutexed<State>::Locked state(mState);
+        state->mState = FLUSHED;
+    }
+    mCallback->onFlushCompleted();
+}
+
+void CCodec::signalResume() {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != FLUSHED) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = RESUMING;
+    }
+
+    mChannel->start(nullptr, nullptr);
+
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != RESUMING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = RUNNING;
+    }
+}
+
+void CCodec::signalSetParameters(const sp<AMessage> &msg) {
+    // TODO
+    (void) msg;
+}
+
+void CCodec::signalEndOfInputStream() {
+}
+
+void CCodec::signalRequestIDRFrame() {
+    // TODO
+}
+
+void CCodec::onMessageReceived(const sp<AMessage> &msg) {
+    TimePoint now = std::chrono::steady_clock::now();
+    switch (msg->what()) {
+        case kWhatAllocate: {
+            // C2ComponentStore::createComponent() should return within 100ms.
+            setDeadline(now + 150ms);
+            AString componentName;
+            CHECK(msg->findString("componentName", &componentName));
+            allocate(componentName);
+            break;
+        }
+        case kWhatConfigure: {
+            // C2Component::commit_sm() should return within 5ms.
+            setDeadline(now + 50ms);
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+            configure(format);
+            break;
+        }
+        case kWhatStart: {
+            // C2Component::start() should return within 500ms.
+            setDeadline(now + 550ms);
+            start();
+            break;
+        }
+        case kWhatStop: {
+            // C2Component::stop() should return within 500ms.
+            setDeadline(now + 550ms);
+            stop();
+            break;
+        }
+        case kWhatFlush: {
+            // C2Component::flush_sm() should return within 5ms.
+            setDeadline(now + 50ms);
+            flush();
+            break;
+        }
+        default: {
+            ALOGE("unrecognized message");
+            break;
+        }
+    }
+    setDeadline(TimePoint::max());
+}
+
+void CCodec::setDeadline(const TimePoint &newDeadline) {
+    Mutexed<TimePoint>::Locked deadline(mDeadline);
+    *deadline = newDeadline;
+}
+
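+// Called from the watchdog: if the deadline posted for the current operation has
+// already passed, report a fatal error and release the component.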
+void CCodec::initiateReleaseIfStuck() {
+    {
+        Mutexed<TimePoint>::Locked deadline(mDeadline);
+        if (*deadline >= std::chrono::steady_clock::now()) {
+            // We're not stuck.
+            return;
+        }
+    }
+
+    mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+    initiateRelease();
+}
+
+}  // namespace android
diff --git a/media/libstagefright/CCodecBufferChannel.cpp b/media/libstagefright/CCodecBufferChannel.cpp
new file mode 100644
index 0000000..eea9c78
--- /dev/null
+++ b/media/libstagefright/CCodecBufferChannel.cpp
@@ -0,0 +1,980 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "CCodecBufferChannel"
+#include <utils/Log.h>
+
+#include <numeric>
+#include <thread>
+
+#include <C2AllocatorGralloc.h>
+#include <C2PlatformSupport.h>
+
+#include <android/hardware/cas/native/1.0/IDescrambler.h>
+#include <binder/MemoryDealer.h>
+#include <gui/Surface.h>
+#include <media/openmax/OMX_Core.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/MediaCodecBuffer.h>
+#include <system/window.h>
+
+#include "include/CCodecBufferChannel.h"
+#include "include/Codec2Buffer.h"
+#include "include/SecureBuffer.h"
+#include "include/SharedMemoryBuffer.h"
+
+namespace android {
+
+using hardware::hidl_handle;
+using hardware::hidl_string;
+using hardware::hidl_vec;
+using namespace hardware::cas::V1_0;
+using namespace hardware::cas::native::V1_0;
+
+namespace {
+
+// TODO: get this info from component
+const static size_t kMinBufferArraySize = 16;
+
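+// Returns the index of the first entry in |buffers| matching |pred|; if there is
+// none and the vector is still below |maxSize|, grows it by one slot and returns
+// the new index. Returns -1 when the vector is already full.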
+template <class T>
+ssize_t findBufferSlot(
+        std::vector<T> *buffers,
+        size_t maxSize,
+        std::function<bool(const T&)> pred) {
+    auto it = std::find_if(buffers->begin(), buffers->end(), pred);
+    if (it == buffers->end()) {
+        if (buffers->size() < maxSize) {
+            buffers->emplace_back();
+            return buffers->size() - 1;
+        } else {
+            return -1;
+        }
+    }
+    return std::distance(buffers->begin(), it);
+}
+
+sp<Codec2Buffer> allocateLinearBuffer(
+        const std::shared_ptr<C2BlockPool> &pool,
+        const sp<AMessage> &format,
+        size_t size,
+        const C2MemoryUsage &usage) {
+    std::shared_ptr<C2LinearBlock> block;
+
+    status_t err = pool->fetchLinearBlock(
+            size,
+            usage,
+            &block);
+    if (err != OK) {
+        return nullptr;
+    }
+
+    return Codec2Buffer::allocate(format, block);
+}
+
+class LinearBuffer : public C2Buffer {
+public:
+    explicit LinearBuffer(C2ConstLinearBlock block) : C2Buffer({ block }) {}
+};
+
+class InputBuffersArray : public CCodecBufferChannel::InputBuffers {
+public:
+    InputBuffersArray() = default;
+
+    void add(
+            size_t index,
+            const sp<MediaCodecBuffer> &clientBuffer,
+            const std::shared_ptr<C2Buffer> &compBuffer,
+            bool available) {
+        if (mBufferArray.size() <= index) {
+            mBufferArray.resize(index + 1);
+        }
+        mBufferArray[index].clientBuffer = clientBuffer;
+        mBufferArray[index].compBuffer = compBuffer;
+        mBufferArray[index].available = available;
+    }
+
+    bool isArrayMode() final { return true; }
+
+    std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
+        return nullptr;
+    }
+
+    void getArray(Vector<sp<MediaCodecBuffer>> *array) final {
+        array->clear();
+        for (const auto &entry : mBufferArray) {
+            array->push(entry.clientBuffer);
+        }
+    }
+
+    bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
+        for (size_t i = 0; i < mBufferArray.size(); ++i) {
+            if (mBufferArray[i].available) {
+                mBufferArray[i].available = false;
+                *index = i;
+                *buffer = mBufferArray[i].clientBuffer;
+                return true;
+            }
+        }
+        return false;
+    }
+
+    std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
+        for (size_t i = 0; i < mBufferArray.size(); ++i) {
+            if (!mBufferArray[i].available && mBufferArray[i].clientBuffer == buffer) {
+                mBufferArray[i].available = true;
+                return std::move(mBufferArray[i].compBuffer);
+            }
+        }
+        return nullptr;
+    }
+
+    void flush() override {
+        for (size_t i = 0; i < mBufferArray.size(); ++i) {
+            mBufferArray[i].available = true;
+            mBufferArray[i].compBuffer.reset();
+        }
+    }
+
+private:
+    struct Entry {
+        sp<MediaCodecBuffer> clientBuffer;
+        std::shared_ptr<C2Buffer> compBuffer;
+        bool available;
+    };
+
+    std::vector<Entry> mBufferArray;
+};
+
+class LinearInputBuffers : public CCodecBufferChannel::InputBuffers {
+public:
+    using CCodecBufferChannel::InputBuffers::InputBuffers;
+
+    bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
+        *buffer = nullptr;
+        ssize_t ret = findBufferSlot<wp<Codec2Buffer>>(
+                &mBuffers, kMinBufferArraySize,
+                [] (const auto &elem) { return elem.promote() == nullptr; });
+        if (ret < 0) {
+            return false;
+        }
+        // TODO: proper max input size and usage
+        // TODO: read usage from intf
+        C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+        sp<Codec2Buffer> newBuffer = allocateLinearBuffer(mPool, mFormat, 65536, usage);
+        if (newBuffer == nullptr) {
+            return false;
+        }
+        mBuffers[ret] = newBuffer;
+        *index = ret;
+        *buffer = newBuffer;
+        return true;
+    }
+
+    std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
+        auto it = std::find(mBuffers.begin(), mBuffers.end(), buffer);
+        if (it == mBuffers.end()) {
+            return nullptr;
+        }
+        sp<Codec2Buffer> codecBuffer = it->promote();
+        // We got an sp<> reference from the caller, so this should never happen.
+        CHECK(codecBuffer != nullptr);
+        return std::make_shared<LinearBuffer>(codecBuffer->share());
+    }
+
+    void flush() override {
+    }
+
+    std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
+        std::unique_ptr<InputBuffersArray> array(new InputBuffersArray);
+        // TODO
+        const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+        for (size_t i = 0; i < size; ++i) {
+            sp<Codec2Buffer> clientBuffer =
+                    (i < mBuffers.size()) ? mBuffers[i].promote() : nullptr;
+            bool available = false;
+            if (clientBuffer == nullptr) {
+                // TODO: proper max input size
+                // TODO: read usage from intf
+                C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+                clientBuffer = allocateLinearBuffer(mPool, mFormat, 65536, usage);
+                available = true;
+            }
+            array->add(
+                    i,
+                    clientBuffer,
+                    std::make_shared<LinearBuffer>(clientBuffer->share()),
+                    available);
+        }
+        return std::move(array);
+    }
+
+private:
+    // Buffers we passed to the client. The index of a buffer matches what
+    // was passed in BufferCallback::onInputBufferAvailable().
+    std::vector<wp<Codec2Buffer>> mBuffers;
+};
+
+// TODO: stub
+class GraphicInputBuffers : public CCodecBufferChannel::InputBuffers {
+public:
+    using CCodecBufferChannel::InputBuffers::InputBuffers;
+
+    bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
+        (void)index;
+        (void)buffer;
+        return false;
+    }
+
+    std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
+        (void)buffer;
+        return nullptr;
+    }
+
+    void flush() override {
+    }
+
+    std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
+        return nullptr;
+    }
+};
+
+class OutputBuffersArray : public CCodecBufferChannel::OutputBuffers {
+public:
+    using CCodecBufferChannel::OutputBuffers::OutputBuffers;
+
+    void add(
+            size_t index,
+            const sp<MediaCodecBuffer> &clientBuffer,
+            const std::shared_ptr<C2Buffer> &compBuffer,
+            bool available) {
+        if (mBufferArray.size() <= index) {
+            mBufferArray.resize(index + 1);
+        }
+        mBufferArray[index].clientBuffer = clientBuffer;
+        mBufferArray[index].compBuffer = compBuffer;
+        mBufferArray[index].available = available;
+    }
+
+    bool isArrayMode() final { return true; }
+
+    std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() final {
+        return nullptr;
+    }
+
+    bool registerBuffer(
+            const std::shared_ptr<C2Buffer> &buffer,
+            size_t *index,
+            sp<MediaCodecBuffer> *codecBuffer) final {
+        for (size_t i = 0; i < mBufferArray.size(); ++i) {
+            if (mBufferArray[i].available && copy(buffer, mBufferArray[i].clientBuffer)) {
+                *index = i;
+                *codecBuffer = mBufferArray[i].clientBuffer;
+                mBufferArray[i].compBuffer = buffer;
+                mBufferArray[i].available = false;
+                return true;
+            }
+        }
+        return false;
+    }
+
+    bool registerCsd(
+            const C2StreamCsdInfo::output *csd,
+            size_t *index,
+            sp<MediaCodecBuffer> *codecBuffer) final {
+        for (size_t i = 0; i < mBufferArray.size(); ++i) {
+            if (mBufferArray[i].available
+                    && mBufferArray[i].clientBuffer->capacity() >= csd->flexCount()) {
+                memcpy(mBufferArray[i].clientBuffer->base(), csd->m.value, csd->flexCount());
+                *index = i;
+                *codecBuffer = mBufferArray[i].clientBuffer;
+                mBufferArray[i].available = false;
+                return true;
+            }
+        }
+        return false;
+    }
+
+    std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) final {
+        for (size_t i = 0; i < mBufferArray.size(); ++i) {
+            if (!mBufferArray[i].available && mBufferArray[i].clientBuffer == buffer) {
+                mBufferArray[i].available = true;
+                return std::move(mBufferArray[i].compBuffer);
+            }
+        }
+        return nullptr;
+    }
+
+    void flush(
+            const std::list<std::unique_ptr<C2Work>> &flushedWork) override {
+        (void) flushedWork;
+        for (size_t i = 0; i < mBufferArray.size(); ++i) {
+            mBufferArray[i].available = true;
+            mBufferArray[i].compBuffer.reset();
+        }
+    }
+
+    virtual bool copy(
+            const std::shared_ptr<C2Buffer> &buffer,
+            const sp<MediaCodecBuffer> &clientBuffer) = 0;
+
+    void getArray(Vector<sp<MediaCodecBuffer>> *array) final {
+        array->clear();
+        for (const auto &entry : mBufferArray) {
+            array->push(entry.clientBuffer);
+        }
+    }
+
+private:
+    struct Entry {
+        sp<MediaCodecBuffer> clientBuffer;
+        std::shared_ptr<C2Buffer> compBuffer;
+        bool available;
+    };
+
+    std::vector<Entry> mBufferArray;
+};
+
+class LinearOutputBuffersArray : public OutputBuffersArray {
+public:
+    using OutputBuffersArray::OutputBuffersArray;
+
+    bool copy(
+            const std::shared_ptr<C2Buffer> &buffer,
+            const sp<MediaCodecBuffer> &clientBuffer) final {
+        if (!buffer) {
+            clientBuffer->setRange(0u, 0u);
+            return true;
+        }
+        C2ReadView view = buffer->data().linearBlocks().front().map().get();
+        if (clientBuffer->capacity() < view.capacity()) {
+            return false;
+        }
+        clientBuffer->setRange(0u, view.capacity());
+        memcpy(clientBuffer->data(), view.data(), view.capacity());
+        return true;
+    }
+};
+
+class GraphicOutputBuffersArray : public OutputBuffersArray {
+public:
+    using OutputBuffersArray::OutputBuffersArray;
+
+    bool copy(
+            const std::shared_ptr<C2Buffer> &buffer,
+            const sp<MediaCodecBuffer> &clientBuffer) final {
+        if (!buffer) {
+            clientBuffer->setRange(0u, 0u);
+            return true;
+        }
+        clientBuffer->setRange(0u, 1u);
+        return true;
+    }
+};
+
+// Flexible in the sense that it does not have a fixed array size.
+class FlexOutputBuffers : public CCodecBufferChannel::OutputBuffers {
+public:
+    using CCodecBufferChannel::OutputBuffers::OutputBuffers;
+
+    bool registerBuffer(
+            const std::shared_ptr<C2Buffer> &buffer,
+            size_t *index,
+            sp<MediaCodecBuffer> *codecBuffer) override {
+        *codecBuffer = nullptr;
+        ssize_t ret = findBufferSlot<BufferInfo>(
+                &mBuffers,
+                std::numeric_limits<size_t>::max(),
+                [] (const auto &elem) { return elem.clientBuffer.promote() == nullptr; });
+        if (ret < 0) {
+            return false;
+        }
+        sp<MediaCodecBuffer> newBuffer = new MediaCodecBuffer(
+                mFormat,
+                convert(buffer));
+        mBuffers[ret] = { newBuffer, buffer };
+        *index = ret;
+        *codecBuffer = newBuffer;
+        return true;
+    }
+
+    bool registerCsd(
+            const C2StreamCsdInfo::output *csd,
+            size_t *index,
+            sp<MediaCodecBuffer> *codecBuffer) final {
+        *codecBuffer = nullptr;
+        ssize_t ret = findBufferSlot<BufferInfo>(
+                &mBuffers,
+                std::numeric_limits<size_t>::max(),
+                [] (const auto &elem) { return elem.clientBuffer.promote() == nullptr; });
+        if (ret < 0) {
+            return false;
+        }
+        sp<MediaCodecBuffer> newBuffer = new MediaCodecBuffer(
+                mFormat,
+                ABuffer::CreateAsCopy(csd->m.value, csd->flexCount()));
+        mBuffers[ret] = { newBuffer, nullptr };
+        *index = ret;
+        *codecBuffer = newBuffer;
+        return true;
+    }
+
+    std::shared_ptr<C2Buffer> releaseBuffer(
+            const sp<MediaCodecBuffer> &buffer) override {
+        auto it = std::find_if(
+                mBuffers.begin(), mBuffers.end(),
+                [buffer] (const auto &elem) {
+                    return elem.clientBuffer.promote() == buffer;
+                });
+        if (it == mBuffers.end()) {
+            return nullptr;
+        }
+        return std::move(it->bufferRef);
+    }
+
+    void flush(
+            const std::list<std::unique_ptr<C2Work>> &flushedWork) override {
+        (void) flushedWork;
+        // This is a no-op by default unless we're in array mode, where we need to keep
+        // track of the flushed work.
+    }
+
+    virtual sp<ABuffer> convert(const std::shared_ptr<C2Buffer> &buffer) = 0;
+
+protected:
+    struct BufferInfo {
+        // wp<> of MediaCodecBuffer for MediaCodec.
+        wp<MediaCodecBuffer> clientBuffer;
+        // Buffer reference to hold while clientBuffer is in use by the client.
+        std::shared_ptr<C2Buffer> bufferRef;
+    };
+    // Buffers we passed to the client. The index of a buffer matches what
+    // was passed in BufferCallback::onOutputBufferAvailable().
+    std::vector<BufferInfo> mBuffers;
+};
+
+class LinearOutputBuffers : public FlexOutputBuffers {
+public:
+    using FlexOutputBuffers::FlexOutputBuffers;
+
+    virtual sp<ABuffer> convert(const std::shared_ptr<C2Buffer> &buffer) override {
+        if (buffer == nullptr) {
+            return new ABuffer(nullptr, 0);
+        }
+        if (buffer->data().type() != C2BufferData::LINEAR) {
+            // We expect linear output buffers from the component.
+            return nullptr;
+        }
+        if (buffer->data().linearBlocks().size() != 1u) {
+            // We expect one and only one linear block from the component.
+            return nullptr;
+        }
+        C2ReadView view = buffer->data().linearBlocks().front().map().get();
+        if (view.error() != C2_OK) {
+            // Mapping the linear block failed
+            return nullptr;
+        }
+        return new ABuffer(
+                // XXX: the data is supposed to be read-only, but we don't have a
+                // const equivalent of ABuffer...
+                const_cast<uint8_t *>(view.data()),
+                view.capacity());
+    }
+
+    std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() override {
+        std::unique_ptr<OutputBuffersArray> array(new LinearOutputBuffersArray);
+
+        const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+        for (size_t i = 0; i < size; ++i) {
+            sp<MediaCodecBuffer> clientBuffer =
+                    (i < mBuffers.size()) ? mBuffers[i].clientBuffer.promote() : nullptr;
+            std::shared_ptr<C2Buffer> compBuffer =
+                    (i < mBuffers.size()) ? mBuffers[i].bufferRef : nullptr;
+            bool available = false;
+            if (clientBuffer == nullptr) {
+                // TODO: proper max input size
+                clientBuffer = new MediaCodecBuffer(mFormat, new ABuffer(65536));
+                available = true;
+                compBuffer.reset();
+            }
+            array->add(i, clientBuffer, compBuffer, available);
+        }
+        return std::move(array);
+    }
+};
+
+class GraphicOutputBuffers : public FlexOutputBuffers {
+public:
+    using FlexOutputBuffers::FlexOutputBuffers;
+
+    sp<ABuffer> convert(const std::shared_ptr<C2Buffer> &buffer) override {
+        return buffer ? new ABuffer(nullptr, 1) : new ABuffer(nullptr, 0);
+    }
+
+    std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() override {
+        std::unique_ptr<OutputBuffersArray> array(new GraphicOutputBuffersArray);
+
+        const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+        for (size_t i = 0; i < size; ++i) {
+            sp<MediaCodecBuffer> clientBuffer =
+                    (i < mBuffers.size()) ? mBuffers[i].clientBuffer.promote() : nullptr;
+            std::shared_ptr<C2Buffer> compBuffer =
+                    (i < mBuffers.size()) ? mBuffers[i].bufferRef : nullptr;
+            bool available = false;
+            if (clientBuffer == nullptr) {
+                clientBuffer = new MediaCodecBuffer(mFormat, new ABuffer(nullptr, 1));
+                available = true;
+                compBuffer.reset();
+            }
+            array->add(i, clientBuffer, compBuffer, available);
+        }
+        return std::move(array);
+    }
+};
+
+}  // namespace
+
+CCodecBufferChannel::QueueGuard::QueueGuard(
+        CCodecBufferChannel::QueueSync &sync) : mSync(sync) {
+    std::unique_lock<std::mutex> l(mSync.mMutex);
+    // At this point it's guaranteed that mSync is not under state transition,
+    // as we are holding its mutex.
+    if (mSync.mCount == -1) {
+        mRunning = false;
+    } else {
+        ++mSync.mCount;
+        mRunning = true;
+    }
+}
+
+CCodecBufferChannel::QueueGuard::~QueueGuard() {
+    if (mRunning) {
+        // We are not holding the mutex at this point so that QueueSync::stop() can
+        // keep holding the lock until mCount reaches zero.
+        --mSync.mCount;
+    }
+}
+
+void CCodecBufferChannel::QueueSync::start() {
+    std::unique_lock<std::mutex> l(mMutex);
+    // If stopped, it goes to running state; otherwise no-op.
+    int32_t expected = -1;
+    (void)mCount.compare_exchange_strong(expected, 0);
+}
+
+void CCodecBufferChannel::QueueSync::stop() {
+    std::unique_lock<std::mutex> l(mMutex);
+    if (mCount == -1) {
+        // no-op
+        return;
+    }
+    // Holding mutex here blocks creation of additional QueueGuard objects, so
+    // mCount can only decrement. In other words, threads that acquired the lock
+    // are allowed to finish execution but additional threads trying to acquire
+    // the lock at this point will block, and then get a QueueGuard in the
+    // STOPPED state.
+    int32_t expected = 0;
+    while (!mCount.compare_exchange_weak(expected, -1)) {
+        std::this_thread::yield();
+        // compare_exchange_weak() overwrites |expected| on failure, so reset it to
+        // keep waiting for the count to drop back to zero.
+        expected = 0;
+    }
+}
+
+CCodecBufferChannel::CCodecBufferChannel(
+        const std::function<void(status_t, enum ActionCode)> &onError)
+    : mOnError(onError),
+      mFrameIndex(0u),
+      mFirstValidFrameIndex(0u) {
+}
+
+CCodecBufferChannel::~CCodecBufferChannel() {
+    if (mCrypto != nullptr && mDealer != nullptr && mHeapSeqNum >= 0) {
+        mCrypto->unsetHeap(mHeapSeqNum);
+    }
+}
+
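+// Queries the component for its input/output stream formats, picks linear or
+// graphic buffer handling for each side, and fetches a block pool for the input
+// buffers.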
+void CCodecBufferChannel::setComponent(const std::shared_ptr<C2Component> &component) {
+    mComponent = component;
+    C2StreamFormatConfig::input inputFormat(0u);
+    C2StreamFormatConfig::output outputFormat(0u);
+    c2_status_t err = mComponent->intf()->query_vb(
+            { &inputFormat, &outputFormat },
+            {},
+            C2_DONT_BLOCK,
+            nullptr);
+    if (err != C2_OK) {
+        // TODO: error
+        return;
+    }
+
+    {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+
+        bool graphic = (inputFormat.value == C2FormatVideo);
+        if (graphic) {
+            buffers->reset(new GraphicInputBuffers);
+        } else {
+            buffers->reset(new LinearInputBuffers);
+        }
+
+        ALOGV("graphic = %s", graphic ? "true" : "false");
+        std::shared_ptr<C2BlockPool> pool;
+        err = GetCodec2BlockPool(
+                graphic ? C2BlockPool::BASIC_GRAPHIC : C2BlockPool::BASIC_LINEAR,
+                component,
+                &pool);
+        if (err == C2_OK) {
+            (*buffers)->setPool(pool);
+        } else {
+            // TODO: error
+        }
+    }
+
+    {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+
+        bool graphic = (outputFormat.value == C2FormatVideo);
+        if (graphic) {
+            buffers->reset(new GraphicOutputBuffers);
+        } else {
+            buffers->reset(new LinearOutputBuffers);
+        }
+    }
+}
+
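+// Wraps one client input buffer into a C2Work item (mapping the "eos"/"csd" meta
+// entries to C2 flags) and queues it to the component without blocking.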
+status_t CCodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
+    QueueGuard guard(mSync);
+    if (!guard.isRunning()) {
+        ALOGW("No more buffers should be queued in the current state.");
+        return -ENOSYS;
+    }
+
+    int64_t timeUs;
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+    int32_t flags = 0;
+    int32_t tmp = 0;
+    if (buffer->meta()->findInt32("eos", &tmp) && tmp) {
+        flags |= C2BufferPack::FLAG_END_OF_STREAM;
+        ALOGV("input EOS");
+    }
+    if (buffer->meta()->findInt32("csd", &tmp) && tmp) {
+        flags |= C2BufferPack::FLAG_CODEC_CONFIG;
+    }
+    std::unique_ptr<C2Work> work(new C2Work);
+    work->input.flags = (C2BufferPack::flags_t)flags;
+    work->input.ordinal.timestamp = timeUs;
+    work->input.ordinal.frame_index = mFrameIndex++;
+    work->input.buffers.clear();
+    {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        work->input.buffers.push_back((*buffers)->releaseBuffer(buffer));
+    }
+    // TODO: fill infos
+
+    work->worklets.clear();
+    work->worklets.emplace_back(new C2Worklet);
+
+    std::list<std::unique_ptr<C2Work>> items;
+    items.push_back(std::move(work));
+    return mComponent->queue_nb(&items);
+}
+
+status_t CCodecBufferChannel::queueSecureInputBuffer(
+        const sp<MediaCodecBuffer> &buffer, bool secure, const uint8_t *key,
+        const uint8_t *iv, CryptoPlugin::Mode mode, CryptoPlugin::Pattern pattern,
+        const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,
+        AString *errorDetailMsg) {
+    // TODO
+    (void) buffer;
+    (void) secure;
+    (void) key;
+    (void) iv;
+    (void) mode;
+    (void) pattern;
+    (void) subSamples;
+    (void) numSubSamples;
+    (void) errorDetailMsg;
+    return -ENOSYS;
+}
+
+status_t CCodecBufferChannel::renderOutputBuffer(
+        const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
+    ALOGV("renderOutputBuffer");
+
+    std::shared_ptr<C2Buffer> c2Buffer;
+    {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        c2Buffer = (*buffers)->releaseBuffer(buffer);
+    }
+    if (c2Buffer == nullptr) {
+        // The buffer may already have been released, e.g. by a flush.
+        ALOGE("renderOutputBuffer: no matching output buffer found");
+        return UNKNOWN_ERROR;
+    }
+
+    Mutexed<sp<Surface>>::Locked surface(mSurface);
+    if (*surface == nullptr) {
+        ALOGE("no surface");
+        return OK;
+    }
+
+    std::list<C2ConstGraphicBlock> blocks = c2Buffer->data().graphicBlocks();
+    if (blocks.size() != 1u) {
+        ALOGE("# of graphic blocks expected to be 1, but %zu", blocks.size());
+        return UNKNOWN_ERROR;
+    }
+
+    native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(blocks.front().handle());
+    sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(
+            grallocHandle,
+            GraphicBuffer::CLONE_HANDLE,
+            blocks.front().width(),
+            blocks.front().height(),
+            HAL_PIXEL_FORMAT_YV12,
+            // TODO
+            1,
+            (uint64_t)GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+            // TODO
+            blocks.front().width()));
+    native_handle_delete(grallocHandle);
+
+    status_t result = (*surface)->attachBuffer(graphicBuffer.get());
+    if (result != OK) {
+        ALOGE("attachBuffer failed: %d", result);
+        return result;
+    }
+
+    // TODO: read and set crop
+
+    result = native_window_set_buffers_timestamp((*surface).get(), timestampNs);
+    ALOGW_IF(result != OK, "failed to set buffer timestamp: %d", result);
+
+    // TODO: fix after C2Fence implementation
+#if 0
+    const C2Fence &fence = blocks.front().fence();
+    result = ((ANativeWindow *)(*surface).get())->queueBuffer(
+            (*surface).get(), graphicBuffer.get(), fence.valid() ? fence.fd() : -1);
+#else
+    result = ((ANativeWindow *)(*surface).get())->queueBuffer(
+            (*surface).get(), graphicBuffer.get(), -1);
+#endif
+    if (result != OK) {
+        ALOGE("queueBuffer failed: %d", result);
+        return result;
+    }
+
+    return OK;
+}
+
+status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
+    ALOGV("discardBuffer: %p", buffer.get());
+    {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        (void)(*buffers)->releaseBuffer(buffer);
+    }
+    {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        (void)(*buffers)->releaseBuffer(buffer);
+    }
+    return OK;
+}
+
+void CCodecBufferChannel::getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
+    array->clear();
+    Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+
+    if (!(*buffers)->isArrayMode()) {
+        *buffers = (*buffers)->toArrayMode();
+    }
+
+    (*buffers)->getArray(array);
+}
+
+void CCodecBufferChannel::getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
+    array->clear();
+    Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+
+    if (!(*buffers)->isArrayMode()) {
+        *buffers = (*buffers)->toArrayMode();
+    }
+
+    (*buffers)->getArray(array);
+}
+
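+// Records the negotiated formats, re-enables queuing, and primes the input side
+// by handing kMinBufferArraySize input buffers to the client.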
+void CCodecBufferChannel::start(const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
+    if (inputFormat != nullptr) {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        (*buffers)->setFormat(inputFormat);
+    }
+    if (outputFormat != nullptr) {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        (*buffers)->setFormat(outputFormat);
+    }
+
+    mSync.start();
+    // TODO: use proper buffer depth instead of this random value
+    for (size_t i = 0; i < kMinBufferArraySize; ++i) {
+        size_t index;
+        sp<MediaCodecBuffer> buffer;
+        {
+            Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+            if (!(*buffers)->requestNewBuffer(&index, &buffer)) {
+                buffers.unlock();
+                ALOGE("start: cannot allocate memory");
+                mOnError(NO_MEMORY, ACTION_CODE_FATAL);
+                buffers.lock();
+                return;
+            }
+        }
+        mCallback->onInputBufferAvailable(index, buffer);
+    }
+}
+
+void CCodecBufferChannel::stop() {
+    mSync.stop();
+    mFirstValidFrameIndex = mFrameIndex.load();
+}
+
+void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
+    {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        (*buffers)->flush();
+    }
+    {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        (*buffers)->flush(flushedWork);
+    }
+}
+
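+// For each completed work item: recycle one input slot back to the client, drop
+// work left over from a previous generation, then register any CSD and/or output
+// buffer and report it through onOutputBufferAvailable().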
+void CCodecBufferChannel::onWorkDone(std::vector<std::unique_ptr<C2Work>> workItems) {
+    for (const auto &work : workItems) {
+        sp<MediaCodecBuffer> inBuffer;
+        size_t index;
+        {
+            Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+            if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
+                ALOGW("no new buffer available");
+                inBuffer = nullptr;
+            }
+        }
+        if (inBuffer != nullptr) {
+            mCallback->onInputBufferAvailable(index, inBuffer);
+        }
+
+        if (work->result != OK) {
+            ALOGE("work failed to complete: %d", work->result);
+            mOnError(work->result, ACTION_CODE_FATAL);
+            return;
+        }
+
+        // NOTE: MediaCodec usage supposedly has only one worklet
+        if (work->worklets.size() != 1u) {
+            ALOGE("incorrect number of worklets: %zu", work->worklets.size());
+            mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            continue;
+        }
+
+        const std::unique_ptr<C2Worklet> &worklet = work->worklets.front();
+        if (worklet->output.ordinal.frame_index < mFirstValidFrameIndex) {
+            // Discard frames from previous generation.
+            continue;
+        }
+        // NOTE: MediaCodec usage supposedly has only one output stream.
+        if (worklet->output.buffers.size() != 1u) {
+            ALOGE("incorrect number of output buffers: %zu", worklet->output.buffers.size());
+            mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            continue;
+        }
+
+        const std::shared_ptr<C2Buffer> &buffer = worklet->output.buffers[0];
+        const C2StreamCsdInfo::output *csdInfo = nullptr;
+        if (buffer) {
+            // TODO: transfer infos() into buffer metadata
+        }
+        for (const auto &info : worklet->output.infos) {
+            if (info->coreIndex() == C2StreamCsdInfo::output::CORE_INDEX) {
+                ALOGV("csd found");
+                csdInfo = static_cast<const C2StreamCsdInfo::output *>(info.get());
+            }
+        }
+
+        int32_t flags = 0;
+        if (worklet->output.flags & C2BufferPack::FLAG_END_OF_STREAM) {
+            flags |= MediaCodec::BUFFER_FLAG_EOS;
+            ALOGV("output EOS");
+        }
+
+        sp<MediaCodecBuffer> outBuffer;
+        if (csdInfo != nullptr) {
+            Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+            if ((*buffers)->registerCsd(csdInfo, &index, &outBuffer)) {
+                outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp);
+                outBuffer->meta()->setInt32("flags", flags | MediaCodec::BUFFER_FLAG_CODECCONFIG);
+                ALOGV("csd index = %zu", index);
+
+                buffers.unlock();
+                mCallback->onOutputBufferAvailable(index, outBuffer);
+                buffers.lock();
+            } else {
+                ALOGE("unable to register output buffer");
+                buffers.unlock();
+                mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                buffers.lock();
+                continue;
+            }
+        }
+
+        if (!buffer && !flags) {
+            ALOGV("Not reporting output buffer");
+            continue;
+        }
+
+        {
+            Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+            if (!(*buffers)->registerBuffer(buffer, &index, &outBuffer)) {
+                ALOGE("unable to register output buffer");
+
+                buffers.unlock();
+                mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                buffers.lock();
+                continue;
+            }
+        }
+
+        outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp);
+        outBuffer->meta()->setInt32("flags", flags);
+        ALOGV("index = %zu", index);
+        mCallback->onOutputBufferAvailable(index, outBuffer);
+    }
+}
+
+status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface) {
+    if (newSurface != nullptr) {
+        newSurface->setScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+    }
+
+    Mutexed<sp<Surface>>::Locked surface(mSurface);
+//    if (newSurface == nullptr) {
+//        if (*surface != nullptr) {
+//            ALOGW("cannot unset a surface");
+//            return INVALID_OPERATION;
+//        }
+//        return OK;
+//    }
+//
+//    if (*surface == nullptr) {
+//        ALOGW("component was not configured with a surface");
+//        return INVALID_OPERATION;
+//    }
+
+    *surface = newSurface;
+    return OK;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/CallbackDataSource.cpp b/media/libstagefright/CallbackDataSource.cpp
index 6dfe2de..f479644 100644
--- a/media/libstagefright/CallbackDataSource.cpp
+++ b/media/libstagefright/CallbackDataSource.cpp
@@ -113,10 +113,6 @@
     return mIDataSource->DrmInitialization(mime);
 }
 
-sp<IDataSource> CallbackDataSource::getIDataSource() const {
-    return mIDataSource;
-}
-
 TinyCacheSource::TinyCacheSource(const sp<DataSource>& source)
     : mSource(source), mCachedOffset(0), mCachedSize(0) {
     mName = String8::format("TinyCacheSource(%s)", mSource->toString().string());
@@ -194,8 +190,4 @@
     return mSource->DrmInitialization(mime);
 }
 
-sp<IDataSource> TinyCacheSource::getIDataSource() const {
-    return mSource->getIDataSource();
-}
-
 } // namespace android
diff --git a/media/libstagefright/CallbackMediaSource.cpp b/media/libstagefright/CallbackMediaSource.cpp
new file mode 100644
index 0000000..6811882
--- /dev/null
+++ b/media/libstagefright/CallbackMediaSource.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/CallbackMediaSource.h>
+#include <media/IMediaSource.h>
+
+namespace android {
+
+CallbackMediaSource::CallbackMediaSource(const sp<IMediaSource> &source)
+    :mSource(source) {}
+
+CallbackMediaSource::~CallbackMediaSource() {}
+
+status_t CallbackMediaSource::start(MetaData *params) {
+    return mSource->start(params);
+}
+
+status_t CallbackMediaSource::stop() {
+    return mSource->stop();
+}
+
+sp<MetaData> CallbackMediaSource::getFormat() {
+    return mSource->getFormat();
+}
+
+status_t CallbackMediaSource::read(MediaBuffer **buffer, const ReadOptions *options) {
+    return mSource->read(buffer, reinterpret_cast<const ReadOptions*>(options));
+}
+
+status_t CallbackMediaSource::pause() {
+    return mSource->pause();
+}
+
+}  // namespace android
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
deleted file mode 100644
index c22053e..0000000
--- a/media/libstagefright/DataSource.cpp
+++ /dev/null
@@ -1,246 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-//#define LOG_NDEBUG 0
-#define LOG_TAG "DataSource"
-
-#include "include/CallbackDataSource.h"
-#include "include/HTTPBase.h"
-#include "include/NuCachedSource2.h"
-
-#include <media/IDataSource.h>
-#include <media/IMediaHTTPConnection.h>
-#include <media/IMediaHTTPService.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/DataURISource.h>
-#include <media/stagefright/FileSource.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaHTTP.h>
-#include <media/stagefright/RemoteDataSource.h>
-#include <media/stagefright/Utils.h>
-#include <utils/String8.h>
-
-#include <cutils/properties.h>
-
-#include <private/android_filesystem_config.h>
-
-namespace android {
-
-bool DataSource::getUInt16(off64_t offset, uint16_t *x) {
-    *x = 0;
-
-    uint8_t byte[2];
-    if (readAt(offset, byte, 2) != 2) {
-        return false;
-    }
-
-    *x = (byte[0] << 8) | byte[1];
-
-    return true;
-}
-
-bool DataSource::getUInt24(off64_t offset, uint32_t *x) {
-    *x = 0;
-
-    uint8_t byte[3];
-    if (readAt(offset, byte, 3) != 3) {
-        return false;
-    }
-
-    *x = (byte[0] << 16) | (byte[1] << 8) | byte[2];
-
-    return true;
-}
-
-bool DataSource::getUInt32(off64_t offset, uint32_t *x) {
-    *x = 0;
-
-    uint32_t tmp;
-    if (readAt(offset, &tmp, 4) != 4) {
-        return false;
-    }
-
-    *x = ntohl(tmp);
-
-    return true;
-}
-
-bool DataSource::getUInt64(off64_t offset, uint64_t *x) {
-    *x = 0;
-
-    uint64_t tmp;
-    if (readAt(offset, &tmp, 8) != 8) {
-        return false;
-    }
-
-    *x = ntoh64(tmp);
-
-    return true;
-}
-
-bool DataSource::getUInt16Var(off64_t offset, uint16_t *x, size_t size) {
-    if (size == 2) {
-        return getUInt16(offset, x);
-    }
-    if (size == 1) {
-        uint8_t tmp;
-        if (readAt(offset, &tmp, 1) == 1) {
-            *x = tmp;
-            return true;
-        }
-    }
-    return false;
-}
-
-bool DataSource::getUInt32Var(off64_t offset, uint32_t *x, size_t size) {
-    if (size == 4) {
-        return getUInt32(offset, x);
-    }
-    if (size == 2) {
-        uint16_t tmp;
-        if (getUInt16(offset, &tmp)) {
-            *x = tmp;
-            return true;
-        }
-    }
-    return false;
-}
-
-bool DataSource::getUInt64Var(off64_t offset, uint64_t *x, size_t size) {
-    if (size == 8) {
-        return getUInt64(offset, x);
-    }
-    if (size == 4) {
-        uint32_t tmp;
-        if (getUInt32(offset, &tmp)) {
-            *x = tmp;
-            return true;
-        }
-    }
-    return false;
-}
-
-status_t DataSource::getSize(off64_t *size) {
-    *size = 0;
-
-    return ERROR_UNSUPPORTED;
-}
-
-sp<IDataSource> DataSource::getIDataSource() const {
-    return nullptr;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-// static
-sp<DataSource> DataSource::CreateFromURI(
-        const sp<IMediaHTTPService> &httpService,
-        const char *uri,
-        const KeyedVector<String8, String8> *headers,
-        String8 *contentType,
-        HTTPBase *httpSource) {
-    if (contentType != NULL) {
-        *contentType = "";
-    }
-
-    sp<DataSource> source;
-    if (!strncasecmp("file://", uri, 7)) {
-        source = new FileSource(uri + 7);
-    } else if (!strncasecmp("http://", uri, 7) || !strncasecmp("https://", uri, 8)) {
-        if (httpService == NULL) {
-            ALOGE("Invalid http service!");
-            return NULL;
-        }
-
-        if (httpSource == NULL) {
-            sp<IMediaHTTPConnection> conn = httpService->makeHTTPConnection();
-            if (conn == NULL) {
-                ALOGE("Failed to make http connection from http service!");
-                return NULL;
-            }
-            httpSource = new MediaHTTP(conn);
-        }
-
-        String8 cacheConfig;
-        bool disconnectAtHighwatermark = false;
-        KeyedVector<String8, String8> nonCacheSpecificHeaders;
-        if (headers != NULL) {
-            nonCacheSpecificHeaders = *headers;
-            NuCachedSource2::RemoveCacheSpecificHeaders(
-                    &nonCacheSpecificHeaders,
-                    &cacheConfig,
-                    &disconnectAtHighwatermark);
-        }
-
-        if (httpSource->connect(uri, &nonCacheSpecificHeaders) != OK) {
-            ALOGE("Failed to connect http source!");
-            return NULL;
-        }
-
-        if (contentType != NULL) {
-            *contentType = httpSource->getMIMEType();
-        }
-
-        source = NuCachedSource2::Create(
-                httpSource,
-                cacheConfig.isEmpty() ? NULL : cacheConfig.string(),
-                disconnectAtHighwatermark);
-    } else if (!strncasecmp("data:", uri, 5)) {
-        source = DataURISource::Create(uri);
-    } else {
-        // Assume it's a filename.
-        source = new FileSource(uri);
-    }
-
-    if (source == NULL || source->initCheck() != OK) {
-        return NULL;
-    }
-
-    return source;
-}
-
-sp<DataSource> DataSource::CreateFromFd(int fd, int64_t offset, int64_t length) {
-    sp<FileSource> source = new FileSource(fd, offset, length);
-    return source->initCheck() != OK ? nullptr : source;
-}
-
-sp<DataSource> DataSource::CreateMediaHTTP(const sp<IMediaHTTPService> &httpService) {
-    if (httpService == NULL) {
-        return NULL;
-    }
-
-    sp<IMediaHTTPConnection> conn = httpService->makeHTTPConnection();
-    if (conn == NULL) {
-        return NULL;
-    } else {
-        return new MediaHTTP(conn);
-    }
-}
-
-sp<DataSource> DataSource::CreateFromIDataSource(const sp<IDataSource> &source) {
-    return new TinyCacheSource(new CallbackDataSource(source));
-}
-
-String8 DataSource::getMIMEType() const {
-    return String8("application/octet-stream");
-}
-
-sp<IDataSource> DataSource::asIDataSource() {
-    return RemoteDataSource::wrap(sp<DataSource>(this));
-}
-
-}  // namespace android
diff --git a/media/libstagefright/DataSourceFactory.cpp b/media/libstagefright/DataSourceFactory.cpp
new file mode 100644
index 0000000..54bf0cc
--- /dev/null
+++ b/media/libstagefright/DataSourceFactory.cpp
@@ -0,0 +1,117 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DataSource"
+
+#include "include/HTTPBase.h"
+#include "include/NuCachedSource2.h"
+
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/DataSourceFactory.h>
+#include <media/stagefright/DataURISource.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/MediaHTTP.h>
+#include <utils/String8.h>
+
+namespace android {
+
+// static
+sp<DataSource> DataSourceFactory::CreateFromURI(
+        const sp<MediaHTTPService> &httpService,
+        const char *uri,
+        const KeyedVector<String8, String8> *headers,
+        String8 *contentType,
+        HTTPBase *httpSource) {
+    if (contentType != NULL) {
+        *contentType = "";
+    }
+
+    sp<DataSource> source;
+    if (!strncasecmp("file://", uri, 7)) {
+        source = new FileSource(uri + 7);
+    } else if (!strncasecmp("http://", uri, 7) || !strncasecmp("https://", uri, 8)) {
+        if (httpService == NULL) {
+            ALOGE("Invalid http service!");
+            return NULL;
+        }
+
+        if (httpSource == NULL) {
+            sp<MediaHTTPConnection> conn = httpService->makeHTTPConnection();
+            if (conn == NULL) {
+                ALOGE("Failed to make http connection from http service!");
+                return NULL;
+            }
+            httpSource = new MediaHTTP(conn);
+        }
+
+        String8 cacheConfig;
+        bool disconnectAtHighwatermark = false;
+        KeyedVector<String8, String8> nonCacheSpecificHeaders;
+        if (headers != NULL) {
+            nonCacheSpecificHeaders = *headers;
+            NuCachedSource2::RemoveCacheSpecificHeaders(
+                    &nonCacheSpecificHeaders,
+                    &cacheConfig,
+                    &disconnectAtHighwatermark);
+        }
+
+        if (httpSource->connect(uri, &nonCacheSpecificHeaders) != OK) {
+            ALOGE("Failed to connect http source!");
+            return NULL;
+        }
+
+        if (contentType != NULL) {
+            *contentType = httpSource->getMIMEType();
+        }
+
+        source = NuCachedSource2::Create(
+                httpSource,
+                cacheConfig.isEmpty() ? NULL : cacheConfig.string(),
+                disconnectAtHighwatermark);
+    } else if (!strncasecmp("data:", uri, 5)) {
+        source = DataURISource::Create(uri);
+    } else {
+        // Assume it's a filename.
+        source = new FileSource(uri);
+    }
+
+    if (source == NULL || source->initCheck() != OK) {
+        return NULL;
+    }
+
+    return source;
+}
+
+sp<DataSource> DataSourceFactory::CreateFromFd(int fd, int64_t offset, int64_t length) {
+    sp<FileSource> source = new FileSource(fd, offset, length);
+    return source->initCheck() != OK ? nullptr : source;
+}
+
+sp<DataSource> DataSourceFactory::CreateMediaHTTP(const sp<MediaHTTPService> &httpService) {
+    if (httpService == NULL) {
+        return NULL;
+    }
+
+    sp<MediaHTTPConnection> conn = httpService->makeHTTPConnection();
+    if (conn == NULL) {
+        return NULL;
+    } else {
+        return new MediaHTTP(conn);
+    }
+}
+
+}  // namespace android
diff --git a/media/libstagefright/ESDS.cpp b/media/libstagefright/ESDS.cpp
index c31720d..ea059e8 100644
--- a/media/libstagefright/ESDS.cpp
+++ b/media/libstagefright/ESDS.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "ESDS"
 #include <utils/Log.h>
 
-#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 
 #include "include/ESDS.h"
 
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index 97d8988..eef5314 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -194,12 +194,6 @@
     return mDecryptHandle;
 }
 
-void FileSource::getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client) {
-    handle = mDecryptHandle;
-
-    *client = mDrmManagerClient;
-}
-
 ssize_t FileSource::readAtDRM(off64_t offset, void *data, size_t size) {
     size_t DRM_CACHE_SIZE = 1024;
     if (mDrmBuf == NULL) {
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
new file mode 100644
index 0000000..b529940
--- /dev/null
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -0,0 +1,614 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FrameDecoder"
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+#include <gui/Surface.h>
+
+#include "include/FrameDecoder.h"
+#include <media/ICrypto.h>
+#include <media/IMediaSource.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/ColorConverter.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
+#include <private/media/VideoFrame.h>
+
+namespace android {
+
+static const int64_t kBufferTimeOutUs = 30000ll; // 30 msec
+static const size_t kRetryCount = 20; // must be >0
+
+VideoFrame *FrameDecoder::allocVideoFrame(
+        int32_t width, int32_t height, bool metaOnly) {
+    int32_t rotationAngle;
+    if (!mTrackMeta->findInt32(kKeyRotation, &rotationAngle)) {
+        rotationAngle = 0;  // By default, no rotation
+    }
+
+    uint32_t type;
+    const void *iccData;
+    size_t iccSize;
+    if (!mTrackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)){
+        iccData = NULL;
+        iccSize = 0;
+    }
+
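+    // Pick display dimensions: scale the width by the sample aspect ratio when
+    // present, otherwise use an explicit display size from the track metadata,
+    // falling back to the coded size.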
+    int32_t sarWidth, sarHeight;
+    int32_t displayWidth, displayHeight;
+    if (mTrackMeta->findInt32(kKeySARWidth, &sarWidth)
+            && mTrackMeta->findInt32(kKeySARHeight, &sarHeight)
+            && sarHeight != 0) {
+        displayWidth = (width * sarWidth) / sarHeight;
+        displayHeight = height;
+    } else if (mTrackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
+                && mTrackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
+                && displayWidth > 0 && displayHeight > 0
+                && width > 0 && height > 0) {
+        ALOGV("found display size %dx%d", displayWidth, displayHeight);
+    } else {
+        displayWidth = width;
+        displayHeight = height;
+    }
+
+    return new VideoFrame(width, height, displayWidth, displayHeight,
+            rotationAngle, mDstBpp, !metaOnly, iccData, iccSize);
+}
+
+bool FrameDecoder::setDstColorFormat(android_pixel_format_t colorFormat) {
+    switch (colorFormat) {
+        case HAL_PIXEL_FORMAT_RGB_565:
+        {
+            mDstFormat = OMX_COLOR_Format16bitRGB565;
+            mDstBpp = 2;
+            return true;
+        }
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        {
+            mDstFormat = OMX_COLOR_Format32BitRGBA8888;
+            mDstBpp = 4;
+            return true;
+        }
+        case HAL_PIXEL_FORMAT_BGRA_8888:
+        {
+            mDstFormat = OMX_COLOR_Format32bitBGRA8888;
+            mDstBpp = 4;
+            return true;
+        }
+        default:
+        {
+            ALOGE("Unsupported color format: %d", colorFormat);
+            break;
+        }
+    }
+    return false;
+}
+
+VideoFrame* FrameDecoder::extractFrame(
+        int64_t frameTimeUs, int option, int colorFormat, bool metaOnly) {
+    if (!setDstColorFormat((android_pixel_format_t)colorFormat)) {
+        return NULL;
+    }
+
+    if (metaOnly) {
+        int32_t width, height;
+        CHECK(trackMeta()->findInt32(kKeyWidth, &width));
+        CHECK(trackMeta()->findInt32(kKeyHeight, &height));
+        return allocVideoFrame(width, height, true);
+    }
+
+    status_t err = extractInternal(frameTimeUs, 1, option);
+    if (err != OK) {
+        return NULL;
+    }
+
+    return mFrames.size() > 0 ? mFrames[0].release() : NULL;
+}
+
+status_t FrameDecoder::extractFrames(
+        int64_t frameTimeUs, size_t numFrames, int option, int colorFormat,
+        std::vector<VideoFrame*>* frames) {
+    if (!setDstColorFormat((android_pixel_format_t)colorFormat)) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    status_t err = extractInternal(frameTimeUs, numFrames, option);
+    if (err != OK) {
+        return err;
+    }
+
+    for (size_t i = 0; i < mFrames.size(); i++) {
+        frames->push_back(mFrames[i].release());
+    }
+    return OK;
+}
+
+status_t FrameDecoder::extractInternal(
+        int64_t frameTimeUs, size_t numFrames, int option) {
+
+    MediaSource::ReadOptions options;
+    sp<AMessage> videoFormat = onGetFormatAndSeekOptions(
+            frameTimeUs, numFrames, option, &options);
+    if (videoFormat == NULL) {
+        ALOGE("video format or seek mode not supported");
+        return ERROR_UNSUPPORTED;
+    }
+
+    status_t err;
+    sp<ALooper> looper = new ALooper;
+    looper->start();
+    sp<MediaCodec> decoder = MediaCodec::CreateByComponentName(
+            looper, mComponentName, &err);
+    if (decoder.get() == NULL || err != OK) {
+        ALOGW("Failed to instantiate decoder [%s]", mComponentName.c_str());
+        return (decoder.get() == NULL) ? NO_MEMORY : err;
+    }
+
+    err = decoder->configure(videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
+    if (err != OK) {
+        ALOGW("configure returned error %d (%s)", err, asString(err));
+        decoder->release();
+        return err;
+    }
+
+    err = decoder->start();
+    if (err != OK) {
+        ALOGW("start returned error %d (%s)", err, asString(err));
+        decoder->release();
+        return err;
+    }
+
+    err = mSource->start();
+    if (err != OK) {
+        ALOGW("source failed to start: %d (%s)", err, asString(err));
+        decoder->release();
+        return err;
+    }
+
+    Vector<sp<MediaCodecBuffer> > inputBuffers;
+    err = decoder->getInputBuffers(&inputBuffers);
+    if (err != OK) {
+        ALOGW("failed to get input buffers: %d (%s)", err, asString(err));
+        decoder->release();
+        mSource->stop();
+        return err;
+    }
+
+    Vector<sp<MediaCodecBuffer> > outputBuffers;
+    err = decoder->getOutputBuffers(&outputBuffers);
+    if (err != OK) {
+        ALOGW("failed to get output buffers: %d (%s)", err, asString(err));
+        decoder->release();
+        mSource->stop();
+        return err;
+    }
+
+    sp<AMessage> outputFormat = NULL;
+    bool haveMoreInputs = true;
+    size_t index, offset, size;
+    int64_t timeUs;
+    size_t retriesLeft = kRetryCount;
+    bool done = false;
+    bool firstSample = true;
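+    // Alternate between queueing one input sample and draining decoder output
+    // until the requested number of frames has been decoded, the stream ends,
+    // or an unrecoverable error/timeout occurs.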
+    do {
+        size_t inputIndex = -1;
+        int64_t ptsUs = 0ll;
+        uint32_t flags = 0;
+        sp<MediaCodecBuffer> codecBuffer = NULL;
+
+        while (haveMoreInputs) {
+            err = decoder->dequeueInputBuffer(&inputIndex, kBufferTimeOutUs);
+            if (err != OK) {
+                ALOGW("Timed out waiting for input");
+                if (retriesLeft) {
+                    err = OK;
+                }
+                break;
+            }
+            codecBuffer = inputBuffers[inputIndex];
+
+            MediaBuffer *mediaBuffer = NULL;
+
+            err = mSource->read(&mediaBuffer, &options);
+            options.clearSeekTo();
+            if (err != OK) {
+                ALOGW("Input Error or EOS");
+                haveMoreInputs = false;
+                if (!firstSample && err == ERROR_END_OF_STREAM) {
+                    err = OK;
+                }
+                break;
+            }
+
+            if (mediaBuffer->range_length() > codecBuffer->capacity()) {
+                ALOGE("buffer size (%zu) too large for codec input size (%zu)",
+                        mediaBuffer->range_length(), codecBuffer->capacity());
+                haveMoreInputs = false;
+                err = BAD_VALUE;
+            } else {
+                codecBuffer->setRange(0, mediaBuffer->range_length());
+
+                CHECK(mediaBuffer->meta_data()->findInt64(kKeyTime, &ptsUs));
+                memcpy(codecBuffer->data(),
+                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
+                        mediaBuffer->range_length());
+
+                onInputReceived(codecBuffer, mediaBuffer->meta_data(), firstSample, &flags);
+                firstSample = false;
+            }
+
+            mediaBuffer->release();
+            break;
+        }
+
+        if (haveMoreInputs && inputIndex < inputBuffers.size()) {
+            ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
+                    codecBuffer->size(), ptsUs, flags);
+
+            err = decoder->queueInputBuffer(
+                    inputIndex,
+                    codecBuffer->offset(),
+                    codecBuffer->size(),
+                    ptsUs,
+                    flags);
+
+            if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+                haveMoreInputs = false;
+            }
+
+            // we don't expect an output from a codec config buffer
+            if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+                continue;
+            }
+        }
+
+        while (err == OK) {
+            // wait for a decoded buffer
+            err = decoder->dequeueOutputBuffer(
+                    &index,
+                    &offset,
+                    &size,
+                    &timeUs,
+                    &flags,
+                    kBufferTimeOutUs);
+
+            if (err == INFO_FORMAT_CHANGED) {
+                ALOGV("Received format change");
+                err = decoder->getOutputFormat(&outputFormat);
+            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+                ALOGV("Output buffers changed");
+                err = decoder->getOutputBuffers(&outputBuffers);
+            } else {
+                if (err == -EAGAIN /* INFO_TRY_AGAIN_LATER */ && --retriesLeft > 0) {
+                    ALOGV("Timed-out waiting for output.. retries left = %zu", retriesLeft);
+                    err = OK;
+                } else if (err == OK) {
+                    // If we're seeking with CLOSEST option and obtained a valid targetTimeUs
+                    // from the extractor, decode to the specified frame. Otherwise we're done.
+                    ALOGV("Received an output buffer, timeUs=%lld", (long long)timeUs);
+                    sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);
+
+                    err = onOutputReceived(videoFrameBuffer, outputFormat, timeUs, &done);
+
+                    decoder->releaseOutputBuffer(index);
+                } else {
+                    ALOGW("Received error %d (%s) instead of output", err, asString(err));
+                    done = true;
+                }
+                break;
+            }
+        }
+    } while (err == OK && !done);
+
+    mSource->stop();
+    decoder->release();
+
+    if (err != OK) {
+        ALOGE("failed to get video frame (err %d)", err);
+    }
+
+    return err;
+}
+
+sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
+        int64_t frameTimeUs, size_t numFrames, int seekMode, MediaSource::ReadOptions *options) {
+    mSeekMode = static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
+    if (mSeekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
+            mSeekMode > MediaSource::ReadOptions::SEEK_FRAME_INDEX) {
+        ALOGE("Unknown seek mode: %d", mSeekMode);
+        return NULL;
+    }
+    mNumFrames = numFrames;
+
+    const char *mime;
+    if (!trackMeta()->findCString(kKeyMIMEType, &mime)) {
+        ALOGE("Could not find mime type");
+        return NULL;
+    }
+
+    mIsAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
+            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+
+    if (frameTimeUs < 0) {
+        int64_t thumbNailTime;
+        if (!trackMeta()->findInt64(kKeyThumbnailTime, &thumbNailTime)
+                || thumbNailTime < 0) {
+            thumbNailTime = 0;
+        }
+        options->setSeekTo(thumbNailTime, mSeekMode);
+    } else {
+        options->setSeekTo(frameTimeUs, mSeekMode);
+    }
+
+    sp<AMessage> videoFormat;
+    if (convertMetaDataToMessage(trackMeta(), &videoFormat) != OK) {
+        ALOGE("b/23680780");
+        ALOGW("Failed to convert meta data to message");
+        return NULL;
+    }
+
+    // TODO: Use Flexible color instead
+    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+
+    // For the thumbnail extraction case, try to allocate a single buffer in both
+    // the input and output ports, if seeking to a sync frame. NOTE: This request
+    // may fail if the component requires more than that for decoding.
+    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
+            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
+    if (!isSeekingClosest) {
+        videoFormat->setInt32("android._num-input-buffers", 1);
+        videoFormat->setInt32("android._num-output-buffers", 1);
+    }
+    return videoFormat;
+}
+
+status_t VideoFrameDecoder::onInputReceived(
+        const sp<MediaCodecBuffer> &codecBuffer,
+        const sp<MetaData> &sampleMeta, bool firstSample, uint32_t *flags) {
+    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
+            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
+
+    if (firstSample && isSeekingClosest) {
+        sampleMeta->findInt64(kKeyTargetTime, &mTargetTimeUs);
+        ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
+    }
+
+    if (mIsAvcOrHevc && !isSeekingClosest
+            && IsIDR(codecBuffer->data(), codecBuffer->size())) {
+        // Only need to decode one IDR frame, unless we're seeking with CLOSEST
+        // option, in which case we need to actually decode to targetTimeUs.
+        *flags |= MediaCodec::BUFFER_FLAG_EOS;
+    }
+    return OK;
+}
+
+status_t VideoFrameDecoder::onOutputReceived(
+        const sp<MediaCodecBuffer> &videoFrameBuffer,
+        const sp<AMessage> &outputFormat,
+        int64_t timeUs, bool *done) {
+    bool shouldOutput = (mTargetTimeUs < 0ll) || (timeUs >= mTargetTimeUs);
+
+    // If this is not the target frame, skip color convert.
+    if (!shouldOutput) {
+        *done = false;
+        return OK;
+    }
+
+    *done = (++mNumFramesDecoded >= mNumFrames);
+
+    if (outputFormat == NULL) {
+        return ERROR_MALFORMED;
+    }
+
+    int32_t width, height;
+    CHECK(outputFormat->findInt32("width", &width));
+    CHECK(outputFormat->findInt32("height", &height));
+
+    int32_t crop_left, crop_top, crop_right, crop_bottom;
+    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
+        crop_left = crop_top = 0;
+        crop_right = width - 1;
+        crop_bottom = height - 1;
+    }
+
+    VideoFrame *frame = allocVideoFrame(
+            (crop_right - crop_left + 1),
+            (crop_bottom - crop_top + 1),
+            false /*metaOnly*/);
+    addFrame(frame);
+
+    int32_t srcFormat;
+    CHECK(outputFormat->findInt32("color-format", &srcFormat));
+
+    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
+
+    if (converter.isValid()) {
+        converter.convert(
+                (const uint8_t *)videoFrameBuffer->data(),
+                width, height,
+                crop_left, crop_top, crop_right, crop_bottom,
+                frame->mData,
+                frame->mWidth,
+                frame->mHeight,
+                crop_left, crop_top, crop_right, crop_bottom);
+        return OK;
+    }
+
+    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
+                srcFormat, dstFormat());
+    return ERROR_UNSUPPORTED;
+}
+
+sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
+        int64_t frameTimeUs, size_t /*numFrames*/,
+        int /*seekMode*/, MediaSource::ReadOptions *options) {
+    sp<MetaData> overrideMeta;
+    if (frameTimeUs < 0) {
+        uint32_t type;
+        const void *data;
+        size_t size;
+        int64_t thumbNailTime = 0;
+        int32_t thumbnailWidth, thumbnailHeight;
+
+        // if we have a stand-alone thumbnail, set up the override meta,
+        // and set seekTo time to -1.
+        if (trackMeta()->findInt32(kKeyThumbnailWidth, &thumbnailWidth)
+         && trackMeta()->findInt32(kKeyThumbnailHeight, &thumbnailHeight)
+         && trackMeta()->findData(kKeyThumbnailHVCC, &type, &data, &size)){
+            overrideMeta = new MetaData(*(trackMeta()));
+            overrideMeta->remove(kKeyDisplayWidth);
+            overrideMeta->remove(kKeyDisplayHeight);
+            overrideMeta->setInt32(kKeyWidth, thumbnailWidth);
+            overrideMeta->setInt32(kKeyHeight, thumbnailHeight);
+            overrideMeta->setData(kKeyHVCC, type, data, size);
+            thumbNailTime = -1ll;
+            ALOGV("thumbnail: %dx%d", thumbnailWidth, thumbnailHeight);
+        }
+        options->setSeekTo(thumbNailTime);
+    } else {
+        options->setSeekTo(frameTimeUs);
+    }
+
+    mGridRows = mGridCols = 1;
+    if (overrideMeta == NULL) {
+        // check if we're dealing with a tiled heif
+        int32_t gridWidth, gridHeight, gridRows, gridCols;
+        if (trackMeta()->findInt32(kKeyGridWidth, &gridWidth) && gridWidth > 0
+         && trackMeta()->findInt32(kKeyGridHeight, &gridHeight) && gridHeight > 0
+         && trackMeta()->findInt32(kKeyGridRows, &gridRows) && gridRows > 0
+         && trackMeta()->findInt32(kKeyGridCols, &gridCols) && gridCols > 0) {
+            int32_t width, height;
+            CHECK(trackMeta()->findInt32(kKeyWidth, &width));
+            CHECK(trackMeta()->findInt32(kKeyHeight, &height));
+
+            if (width <= gridWidth * gridCols && height <= gridHeight * gridRows) {
+                ALOGV("grid: %dx%d, size: %dx%d, picture size: %dx%d",
+                        gridCols, gridRows, gridWidth, gridHeight, width, height);
+
+                overrideMeta = new MetaData(*(trackMeta()));
+                overrideMeta->setInt32(kKeyWidth, gridWidth);
+                overrideMeta->setInt32(kKeyHeight, gridHeight);
+                mGridCols = gridCols;
+                mGridRows = gridRows;
+            } else {
+                ALOGE("bad grid: %dx%d, size: %dx%d, picture size: %dx%d",
+                        gridCols, gridRows, gridWidth, gridHeight, width, height);
+            }
+        }
+        if (overrideMeta == NULL) {
+            overrideMeta = trackMeta();
+        }
+    }
+
+    sp<AMessage> videoFormat;
+    if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
+        ALOGE("b/23680780");
+        ALOGW("Failed to convert meta data to message");
+        return NULL;
+    }
+
+    // TODO: Use Flexible color instead
+    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+
+    if ((mGridRows == 1) && (mGridCols == 1)) {
+        videoFormat->setInt32("android._num-input-buffers", 1);
+        videoFormat->setInt32("android._num-output-buffers", 1);
+    }
+    return videoFormat;
+}
+
+status_t ImageDecoder::onOutputReceived(
+        const sp<MediaCodecBuffer> &videoFrameBuffer,
+        const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
+    if (outputFormat == NULL) {
+        return ERROR_MALFORMED;
+    }
+
+    int32_t width, height;
+    CHECK(outputFormat->findInt32("width", &width));
+    CHECK(outputFormat->findInt32("height", &height));
+
+    int32_t imageWidth, imageHeight;
+    CHECK(trackMeta()->findInt32(kKeyWidth, &imageWidth));
+    CHECK(trackMeta()->findInt32(kKeyHeight, &imageHeight));
+
+    if (mFrame == NULL) {
+        mFrame = allocVideoFrame(imageWidth, imageHeight, false /*metaOnly*/);
+
+        addFrame(mFrame);
+    }
+
+    int32_t srcFormat;
+    CHECK(outputFormat->findInt32("color-format", &srcFormat));
+
+    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
+
+    int32_t dstLeft, dstTop, dstRight, dstBottom;
+    int32_t numTiles = mGridRows * mGridCols;
+
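+    // Place this tile at its row-major position within the full output image.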
+    dstLeft = mTilesDecoded % mGridCols * width;
+    dstTop = mTilesDecoded / mGridCols * height;
+    dstRight = dstLeft + width - 1;
+    dstBottom = dstTop + height - 1;
+
+    int32_t crop_left, crop_top, crop_right, crop_bottom;
+    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
+        crop_left = crop_top = 0;
+        crop_right = width - 1;
+        crop_bottom = height - 1;
+    }
+
+    // apply crop on bottom-right
+    // TODO: need to move this into the color converter itself.
+    if (dstRight >= imageWidth) {
+        crop_right = imageWidth - dstLeft - 1;
+        dstRight = dstLeft + crop_right;
+    }
+    if (dstBottom >= imageHeight) {
+        crop_bottom = imageHeight - dstTop - 1;
+        dstBottom = dstTop + crop_bottom;
+    }
+
+    *done = (++mTilesDecoded >= numTiles);
+
+    if (converter.isValid()) {
+        converter.convert(
+                (const uint8_t *)videoFrameBuffer->data(),
+                width, height,
+                crop_left, crop_top, crop_right, crop_bottom,
+                mFrame->mData,
+                mFrame->mWidth,
+                mFrame->mHeight,
+                dstLeft, dstTop, dstRight, dstBottom);
+        return OK;
+    }
+
+    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
+                srcFormat, dstFormat());
+    return ERROR_UNSUPPORTED;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/HevcUtils.cpp b/media/libstagefright/HevcUtils.cpp
index 7d463a9..91deca5 100644
--- a/media/libstagefright/HevcUtils.cpp
+++ b/media/libstagefright/HevcUtils.cpp
@@ -21,12 +21,12 @@
 #include <utility>
 
 #include "include/HevcUtils.h"
-#include "include/avc_utils.h"
 
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
 
diff --git a/media/libstagefright/InterfaceUtils.cpp b/media/libstagefright/InterfaceUtils.cpp
new file mode 100644
index 0000000..cf9fdf8
--- /dev/null
+++ b/media/libstagefright/InterfaceUtils.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "include/CallbackDataSource.h"
+
+#include <media/stagefright/CallbackMediaSource.h>
+#include <media/stagefright/InterfaceUtils.h>
+#include <media/stagefright/RemoteDataSource.h>
+#include <media/stagefright/RemoteMediaExtractor.h>
+#include <media/stagefright/RemoteMediaSource.h>
+
+namespace android {
+
+sp<DataSource> CreateDataSourceFromIDataSource(const sp<IDataSource> &source) {
+    if (source == nullptr) {
+        return nullptr;
+    }
+    return new TinyCacheSource(new CallbackDataSource(source));
+}
+
+sp<IDataSource> CreateIDataSourceFromDataSource(const sp<DataSource> &source) {
+    if (source == nullptr) {
+        return nullptr;
+    }
+    return RemoteDataSource::wrap(source);
+}
+
+sp<IMediaExtractor> CreateIMediaExtractorFromMediaExtractor(const sp<MediaExtractor> &extractor) {
+    if (extractor == nullptr) {
+        return nullptr;
+    }
+    return RemoteMediaExtractor::wrap(extractor);
+}
+
+sp<MediaSource> CreateMediaSourceFromIMediaSource(const sp<IMediaSource> &source) {
+    if (source == nullptr) {
+        return nullptr;
+    }
+    return new CallbackMediaSource(source);
+}
+
+sp<IMediaSource> CreateIMediaSourceFromMediaSource(const sp<MediaSource> &source) {
+    if (source == nullptr) {
+        return nullptr;
+    }
+    return RemoteMediaSource::wrap(source);
+}
+
+}  // namespace android
diff --git a/media/libstagefright/JPEGSource.cpp b/media/libstagefright/JPEGSource.cpp
index bafa4b2..ee3aedb 100644
--- a/media/libstagefright/JPEGSource.cpp
+++ b/media/libstagefright/JPEGSource.cpp
@@ -18,8 +18,8 @@
 #define LOG_TAG "JPEGSource"
 #include <utils/Log.h>
 
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/JPEGSource.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 03ea959..4c85b0d 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -16,18 +16,18 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "MPEG2TSWriter"
-#include <media/stagefright/foundation/ADebug.h>
 
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 #include <arpa/inet.h>
 
 #include "include/ESDS.h"
@@ -35,7 +35,7 @@
 namespace android {
 
 struct MPEG2TSWriter::SourceInfo : public AHandler {
-    explicit SourceInfo(const sp<IMediaSource> &source);
+    explicit SourceInfo(const sp<MediaSource> &source);
 
     void start(const sp<AMessage> &notify, const sp<MetaData> &params);
     void stop();
@@ -69,7 +69,7 @@
         kWhatRead  = 'read',
     };
 
-    sp<IMediaSource> mSource;
+    sp<MediaSource> mSource;
     sp<ALooper> mLooper;
     sp<AMessage> mNotify;
 
@@ -91,7 +91,7 @@
     DISALLOW_EVIL_CONSTRUCTORS(SourceInfo);
 };
 
-MPEG2TSWriter::SourceInfo::SourceInfo(const sp<IMediaSource> &source)
+MPEG2TSWriter::SourceInfo::SourceInfo(const sp<MediaSource> &source)
     : mSource(source),
       mLooper(new ALooper),
       mEOSReceived(false),
@@ -499,7 +499,7 @@
     }
 }
 
-status_t MPEG2TSWriter::addSource(const sp<IMediaSource> &source) {
+status_t MPEG2TSWriter::addSource(const sp<MediaSource> &source) {
     CHECK(!mStarted);
 
     sp<MetaData> meta = source->getFormat();
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 7786c4d..8db00f0 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -32,23 +32,24 @@
 
 #include <functional>
 
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/Utils.h>
 #include <media/mediarecorder.h>
 #include <cutils/properties.h>
 
 #include "include/ESDS.h"
 #include "include/HevcUtils.h"
-#include "include/avc_utils.h"
 
 #ifndef __predict_false
 #define __predict_false(exp) __builtin_expect((exp) != 0, 0)
@@ -100,7 +101,7 @@
 
 class MPEG4Writer::Track {
 public:
-    Track(MPEG4Writer *owner, const sp<IMediaSource> &source, size_t trackId);
+    Track(MPEG4Writer *owner, const sp<MediaSource> &source, size_t trackId);
 
     ~Track();
 
@@ -111,14 +112,18 @@
 
     int64_t getDurationUs() const;
     int64_t getEstimatedTrackSizeBytes() const;
+    int32_t getMetaSizeIncrease() const;
     void writeTrackHeader(bool use32BitOffset = true);
     int64_t getMinCttsOffsetTimeUs();
     void bufferChunk(int64_t timestampUs);
     bool isAvc() const { return mIsAvc; }
     bool isHevc() const { return mIsHevc; }
+    bool isHeic() const { return mIsHeic; }
     bool isAudio() const { return mIsAudio; }
     bool isMPEG4() const { return mIsMPEG4; }
+    bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic; }
     void addChunkOffset(off64_t offset);
+    void addItemOffsetAndSize(off64_t offset, size_t size);
     int32_t getTrackId() const { return mTrackId; }
     status_t dump(int fd, const Vector<String16>& args) const;
     static const char *getFourCCForMime(const char *mime);
@@ -271,7 +276,7 @@
 
     MPEG4Writer *mOwner;
     sp<MetaData> mMeta;
-    sp<IMediaSource> mSource;
+    sp<MediaSource> mSource;
     volatile bool mDone;
     volatile bool mPaused;
     volatile bool mResumed;
@@ -280,6 +285,7 @@
     bool mIsHevc;
     bool mIsAudio;
     bool mIsVideo;
+    bool mIsHeic;
     bool mIsMPEG4;
     bool mGotStartKeyFrame;
     bool mIsMalformed;
@@ -346,6 +352,16 @@
     int64_t mPreviousTrackTimeUs;
     int64_t mTrackEveryTimeDurationUs;
 
+    int32_t mRotation;
+
+    Vector<uint16_t> mProperties;
+    Vector<uint16_t> mDimgRefs;
+    int32_t mIsPrimary;
+    int32_t mWidth, mHeight;
+    int32_t mGridWidth, mGridHeight;
+    int32_t mGridRows, mGridCols;
+    size_t mNumTiles, mTileIndex;
+
     // Update the audio track's drift information.
     void updateDriftTime(const sp<MetaData>& meta);
 
@@ -385,7 +401,6 @@
 
     // Simple validation on the codec specific data
     status_t checkCodecSpecificData() const;
-    int32_t mRotation;
 
     void updateTrackSizeEstimate();
     void addOneStscTableEntry(size_t chunkId, size_t sampleId);
@@ -473,13 +488,18 @@
     mUse32BitOffset = true;
     mOffset = 0;
     mMdatOffset = 0;
-    mMoovBoxBuffer = NULL;
-    mMoovBoxBufferOffset = 0;
-    mWriteMoovBoxToMemory = false;
+    mInMemoryCache = NULL;
+    mInMemoryCacheOffset = 0;
+    mInMemoryCacheSize = 0;
+    mWriteBoxToMemory = false;
     mFreeBoxOffset = 0;
     mStreamableFile = false;
-    mEstimatedMoovBoxSize = 0;
     mTimeScale = -1;
+    mHasFileLevelMeta = false;
+    mHasMoovBox = false;
+    mPrimaryItemId = 0;
+    mAssociationEntryCount = 0;
+    mNumGrids = 0;
 
     // Following variables only need to be set for the first recording session.
     // And they will stay the same for all the recording sessions.
@@ -566,13 +586,15 @@
         }
     } else if (!strncasecmp(mime, "application/", 12)) {
         return "mett";
+    } else if (!strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
+        return "heic";
     } else {
         ALOGE("Track (%s) other than video/audio/metadata is not supported", mime);
     }
     return NULL;
 }
 
-status_t MPEG4Writer::addSource(const sp<IMediaSource> &source) {
+status_t MPEG4Writer::addSource(const sp<MediaSource> &source) {
     Mutex::Autolock l(mLock);
     if (mStarted) {
         ALOGE("Attempt to add source AFTER recording is started");
@@ -594,6 +616,9 @@
     Track *track = new Track(this, source, 1 + mTracks.size());
     mTracks.push_back(track);
 
+    mHasMoovBox |= !track->isHeic();
+    mHasFileLevelMeta |= track->isHeic();
+
     return OK;
 }
 
@@ -655,6 +680,32 @@
 #endif
 }
 
+int64_t MPEG4Writer::estimateFileLevelMetaSize() {
+    // base meta size
+    int64_t metaSize =     12  // meta fullbox header
+                         + 33  // hdlr box
+                         + 14  // pitm box
+                         + 16  // iloc box (fixed size portion)
+                         + 14  // iinf box (fixed size portion)
+                         + 32  // iprp box (fixed size portion)
+                         + 8   // idat box (when empty)
+                         + 12  // iref box (when empty)
+                         ;
+
+    for (List<Track *>::iterator it = mTracks.begin();
+         it != mTracks.end(); ++it) {
+        if ((*it)->isHeic()) {
+            metaSize += (*it)->getMetaSizeIncrease();
+        }
+    }
+
+    ALOGV("estimated meta size: %lld", (long long) metaSize);
+
+    // Need at least 8-byte padding at the end, otherwise the left-over
+    // free box may become malformed.
+    return metaSize + 8;
+}
+
 int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) {
     // This implementation is highly experimental/heurisitic.
     //
@@ -714,7 +765,11 @@
     ALOGI("limits: %" PRId64 "/%" PRId64 " bytes/us, bit rate: %d bps and the"
          " estimated moov size %" PRId64 " bytes",
          mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size);
-    return factor * size;
+
+    int64_t estimatedSize = factor * size;
+    CHECK_GE(estimatedSize, 8);
+
+    return estimatedSize;
 }
 
 status_t MPEG4Writer::start(MetaData *param) {
@@ -796,63 +851,70 @@
          mMaxFileSizeLimitBytes >= kMinStreamableFileSizeInBytes);
 
     /*
-     * mWriteMoovBoxToMemory is true if the amount of data in moov box is
-     * smaller than the reserved free space at the beginning of a file, AND
-     * when the content of moov box is constructed. Note that video/audio
-     * frame data is always written to the file but not in the memory.
+     * mWriteBoxToMemory is true if the amount of data in a file-level meta or
+     * moov box is smaller than the reserved free space at the beginning of a
+     * file, AND when the content of the box is constructed. Note that video/
+     * audio frame data is always written to the file but not in the memory.
      *
-     * Before stop()/reset() is called, mWriteMoovBoxToMemory is always
+     * Before stop()/reset() is called, mWriteBoxToMemory is always
      * false. When reset() is called at the end of a recording session,
-     * Moov box needs to be constructed.
+     * file-level meta and/or moov box needs to be constructed.
      *
-     * 1) Right before a moov box is constructed, mWriteMoovBoxToMemory
-     * to set to mStreamableFile so that if
-     * the file is intended to be streamable, it is set to true;
-     * otherwise, it is set to false. When the value is set to false,
-     * all the content of the moov box is written immediately to
+     * 1) Right before the box is constructed, mWriteBoxToMemory is set to
+     * mStreamableFile so that if the file is intended to be streamable, it
+     * is set to true; otherwise, it is set to false. When the value is set
+     * to false, all the content of that box is written immediately to
      * the end of the file. When the value is set to true, all the
-     * content of the moov box is written to an in-memory cache,
-     * mMoovBoxBuffer, util the following condition happens. Note
+     * content of that box is written to an in-memory cache,
+     * mInMemoryCache, until the following condition happens. Note
      * that the size of the in-memory cache is the same as the
      * reserved free space at the beginning of the file.
      *
-     * 2) While the data of the moov box is written to an in-memory
+     * 2) While the data of the box is written to an in-memory
      * cache, the data size is checked against the reserved space.
-     * If the data size surpasses the reserved space, subsequent moov
-     * data could no longer be hold in the in-memory cache. This also
+     * If the data size surpasses the reserved space, subsequent box data
+     * could no longer be held in the in-memory cache. This also
      * indicates that the reserved space was too small. At this point,
-     * _all_ moov data must be written to the end of the file.
-     * mWriteMoovBoxToMemory must be set to false to direct the write
+     * _all_ subsequent box data must be written to the end of the file.
+     * mWriteBoxToMemory must be set to false to direct the write
      * to the file.
      *
-     * 3) If the data size in moov box is smaller than the reserved
-     * space after moov box is completely constructed, the in-memory
-     * cache copy of the moov box is written to the reserved free
-     * space. Thus, immediately after the moov is completedly
-     * constructed, mWriteMoovBoxToMemory is always set to false.
+     * 3) If the data size in the box is smaller than the reserved
+     * space after the box is completely constructed, the in-memory
+     * cache copy of the box is written to the reserved free space.
+     * mWriteBoxToMemory is always set to false after all boxes that
+     * use the in-memory cache have been constructed.
      */
-    mWriteMoovBoxToMemory = false;
-    mMoovBoxBuffer = NULL;
-    mMoovBoxBufferOffset = 0;
+    mWriteBoxToMemory = false;
+    mInMemoryCache = NULL;
+    mInMemoryCacheOffset = 0;
+
+
+    ALOGV("muxer starting: mHasMoovBox %d, mHasFileLevelMeta %d",
+            mHasMoovBox, mHasFileLevelMeta);
 
     writeFtypBox(param);
 
     mFreeBoxOffset = mOffset;
 
-    if (mEstimatedMoovBoxSize == 0) {
+    if (mInMemoryCacheSize == 0) {
         int32_t bitRate = -1;
-        if (param) {
-            param->findInt32(kKeyBitRate, &bitRate);
+        if (mHasFileLevelMeta) {
+            mInMemoryCacheSize += estimateFileLevelMetaSize();
         }
-        mEstimatedMoovBoxSize = estimateMoovBoxSize(bitRate);
+        if (mHasMoovBox) {
+            if (param) {
+                param->findInt32(kKeyBitRate, &bitRate);
+            }
+            mInMemoryCacheSize += estimateMoovBoxSize(bitRate);
+        }
     }
-    CHECK_GE(mEstimatedMoovBoxSize, 8);
     if (mStreamableFile) {
         // Reserve a 'free' box only for streamable file
         lseek64(mFd, mFreeBoxOffset, SEEK_SET);
-        writeInt32(mEstimatedMoovBoxSize);
+        writeInt32(mInMemoryCacheSize);
         write("free", 4);
-        mMdatOffset = mFreeBoxOffset + mEstimatedMoovBoxSize;
+        mMdatOffset = mFreeBoxOffset + mInMemoryCacheSize;
     } else {
         mMdatOffset = mOffset;
     }
@@ -964,8 +1026,8 @@
     mFd = -1;
     mInitCheck = NO_INIT;
     mStarted = false;
-    free(mMoovBoxBuffer);
-    mMoovBoxBuffer = NULL;
+    free(mInMemoryCache);
+    mInMemoryCache = NULL;
 }
 
 void MPEG4Writer::finishCurrentSession() {
@@ -1008,13 +1070,18 @@
     status_t err = OK;
     int64_t maxDurationUs = 0;
     int64_t minDurationUs = 0x7fffffffffffffffLL;
+    int32_t nonImageTrackCount = 0;
     for (List<Track *>::iterator it = mTracks.begin();
-         it != mTracks.end(); ++it) {
+        it != mTracks.end(); ++it) {
         status_t status = (*it)->stop(stopSource);
         if (err == OK && status != OK) {
             err = status;
         }
 
+        // skip image tracks
+        if ((*it)->isHeic()) continue;
+        nonImageTrackCount++;
+
         int64_t durationUs = (*it)->getDurationUs();
         if (durationUs > maxDurationUs) {
             maxDurationUs = durationUs;
@@ -1024,7 +1091,7 @@
         }
     }
 
-    if (mTracks.size() > 1) {
+    if (nonImageTrackCount > 1) {
         ALOGD("Duration from tracks range is [%" PRId64 ", %" PRId64 "] us",
             minDurationUs, maxDurationUs);
     }
@@ -1050,45 +1117,43 @@
     }
     lseek64(mFd, mOffset, SEEK_SET);
 
-    // Construct moov box now
-    mMoovBoxBufferOffset = 0;
-    mWriteMoovBoxToMemory = mStreamableFile;
-    if (mWriteMoovBoxToMemory) {
+    // Construct file-level meta and moov box now
+    mInMemoryCacheOffset = 0;
+    mWriteBoxToMemory = mStreamableFile;
+    if (mWriteBoxToMemory) {
         // There is no need to allocate in-memory cache
-        // for moov box if the file is not streamable.
+        // if the file is not streamable.
 
-        mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
-        CHECK(mMoovBoxBuffer != NULL);
-    }
-    writeMoovBox(maxDurationUs);
-
-    // mWriteMoovBoxToMemory could be set to false in
-    // MPEG4Writer::write() method
-    if (mWriteMoovBoxToMemory) {
-        mWriteMoovBoxToMemory = false;
-        // Content of the moov box is saved in the cache, and the in-memory
-        // moov box needs to be written to the file in a single shot.
-
-        CHECK_LE(mMoovBoxBufferOffset + 8, mEstimatedMoovBoxSize);
-
-        // Moov box
-        lseek64(mFd, mFreeBoxOffset, SEEK_SET);
-        mOffset = mFreeBoxOffset;
-        write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset);
-
-        // Free box
-        lseek64(mFd, mOffset, SEEK_SET);
-        writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset);
-        write("free", 4);
-    } else {
-        ALOGI("The mp4 file will not be streamable.");
+        mInMemoryCache = (uint8_t *) malloc(mInMemoryCacheSize);
+        CHECK(mInMemoryCache != NULL);
     }
 
-    // Free in-memory cache for moov box
-    if (mMoovBoxBuffer != NULL) {
-        free(mMoovBoxBuffer);
-        mMoovBoxBuffer = NULL;
-        mMoovBoxBufferOffset = 0;
+    if (mHasFileLevelMeta) {
+        writeFileLevelMetaBox();
+        if (mWriteBoxToMemory) {
+            writeCachedBoxToFile("meta");
+        } else {
+            ALOGI("The file meta box is written at the end.");
+        }
+    }
+
+    if (mHasMoovBox) {
+        writeMoovBox(maxDurationUs);
+        // mWriteBoxToMemory could be set to false in
+        // MPEG4Writer::write() method
+        if (mWriteBoxToMemory) {
+            writeCachedBoxToFile("moov");
+        } else {
+            ALOGI("The mp4 file will not be streamable.");
+        }
+    }
+    mWriteBoxToMemory = false;
+
+    // Free in-memory cache for box writing
+    if (mInMemoryCache != NULL) {
+        free(mInMemoryCache);
+        mInMemoryCache = NULL;
+        mInMemoryCacheOffset = 0;
     }
 
     CHECK(mBoxes.empty());
@@ -1097,6 +1162,42 @@
     return err;
 }
 
+/*
+ * Writes the currently cached box into the file.
+ *
+ * Must be called while mWriteBoxToMemory is true, and leaves
+ * mWriteBoxToMemory unchanged on return. After the call, the remaining
+ * cache size is reduced by the amount written and the buffer offset is
+ * reset to the beginning of the cache.
+ */
+void MPEG4Writer::writeCachedBoxToFile(const char *type) {
+    CHECK(mWriteBoxToMemory);
+
+    mWriteBoxToMemory = false;
+    // Content of the box is saved in the cache, and the in-memory
+    // box needs to be written to the file in a single shot.
+
+    CHECK_LE(mInMemoryCacheOffset + 8, mInMemoryCacheSize);
+
+    // Cached box
+    lseek64(mFd, mFreeBoxOffset, SEEK_SET);
+    mOffset = mFreeBoxOffset;
+    write(mInMemoryCache, 1, mInMemoryCacheOffset);
+
+    // Free box
+    lseek64(mFd, mOffset, SEEK_SET);
+    mFreeBoxOffset = mOffset;
+    writeInt32(mInMemoryCacheSize - mInMemoryCacheOffset);
+    write("free", 4);
+
+    // Rewind buffering to the beginning, and restore mWriteBoxToMemory flag
+    mInMemoryCacheSize -= mInMemoryCacheOffset;
+    mInMemoryCacheOffset = 0;
+    mWriteBoxToMemory = true;
+
+    ALOGV("dumped out %s box, estimated size remaining %lld",
+            type, (long long)mInMemoryCacheSize);
+}
+
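As a reading aid, here is a minimal standalone sketch of the reserved-space bookkeeping that writeCachedBoxToFile() performs; the struct, sizes and offsets are hypothetical, and only the offset/size arithmetic is modeled, not the real file I/O:

    #include <cstdint>
    #include <cstdio>

    // Models the reserved region at the head of the file: each flushed box
    // consumes cacheOffset bytes and the remainder is re-labelled 'free'.
    struct ReservedRegion {
        int64_t freeBoxOffset;   // where the next cached box lands (mFreeBoxOffset)
        int64_t cacheSize;       // bytes still reserved (mInMemoryCacheSize)

        void flushCachedBox(const char *type, int64_t cacheOffset) {
            std::printf("%s box at offset %lld, new 'free' box size %lld\n",
                        type, (long long)freeBoxOffset,
                        (long long)(cacheSize - cacheOffset));
            freeBoxOffset += cacheOffset;   // next box starts right after this one
            cacheSize     -= cacheOffset;   // remaining reservation shrinks
        }
    };

    int main() {
        ReservedRegion r{32, 4096};      // hypothetical start offset and reservation
        r.flushCachedBox("meta", 1200);  // file-level meta is flushed first
        r.flushCachedBox("moov", 2500);  // moov follows into the shrunken region
        return 0;
    }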
 uint32_t MPEG4Writer::getMpeg4Time() {
     time_t now = time(NULL);
     // MP4 file uses time counting seconds since midnight, Jan. 1, 1904
@@ -1141,14 +1242,16 @@
     if (mAreGeoTagsAvailable) {
         writeUdtaBox();
     }
-    writeMetaBox();
+    writeMoovLevelMetaBox();
     // Loop through all the tracks to get the global time offset if there is
     // any ctts table appears in a video track.
     int64_t minCttsOffsetTimeUs = kMaxCttsOffsetTimeUs;
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        minCttsOffsetTimeUs =
-            std::min(minCttsOffsetTimeUs, (*it)->getMinCttsOffsetTimeUs());
+        if (!(*it)->isHeic()) {
+            minCttsOffsetTimeUs =
+                std::min(minCttsOffsetTimeUs, (*it)->getMinCttsOffsetTimeUs());
+        }
     }
     ALOGI("Ajust the moov start time from %lld us -> %lld us",
             (long long)mStartTimestampUs,
@@ -1158,7 +1261,9 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        (*it)->writeTrackHeader(mUse32BitOffset);
+        if (!(*it)->isHeic()) {
+            (*it)->writeTrackHeader(mUse32BitOffset);
+        }
     }
     endBox();  // moov
 }
@@ -1167,17 +1272,31 @@
     beginBox("ftyp");
 
     int32_t fileType;
-    if (param && param->findInt32(kKeyFileType, &fileType) &&
-        fileType != OUTPUT_FORMAT_MPEG_4) {
+    if (!param || !param->findInt32(kKeyFileType, &fileType)) {
+        fileType = OUTPUT_FORMAT_MPEG_4;
+    }
+    if (fileType != OUTPUT_FORMAT_MPEG_4 && fileType != OUTPUT_FORMAT_HEIF) {
         writeFourcc("3gp4");
         writeInt32(0);
         writeFourcc("isom");
         writeFourcc("3gp4");
     } else {
-        writeFourcc("mp42");
+        // Only write "heic" as major brand if the client specified HEIF
+        // AND we actually received some HEIC image tracks.
+        if (fileType == OUTPUT_FORMAT_HEIF && mHasFileLevelMeta) {
+            writeFourcc("heic");
+        } else {
+            writeFourcc("mp42");
+        }
         writeInt32(0);
-        writeFourcc("isom");
-        writeFourcc("mp42");
+        if (mHasFileLevelMeta) {
+            writeFourcc("mif1");
+            writeFourcc("heic");
+        }
+        if (mHasMoovBox) {
+            writeFourcc("isom");
+            writeFourcc("mp42");
+        }
     }
 
     endBox();
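For quick reference, the brand combinations the rewritten ftyp logic can emit, read directly off the branches above (mHasFileLevelMeta and mHasMoovBox are presumably set when HEIC image tracks and audio/video tracks are added, respectively):

    fileType           hasFileLevelMeta  hasMoovBox  major  compatible brands
    non-MP4/HEIF       (any)             (any)       3gp4   isom, 3gp4
    MPEG_4 (or unset)  no                yes         mp42   isom, mp42
    MPEG_4             yes               yes         mp42   mif1, heic, isom, mp42
    HEIF               no                yes         mp42   isom, mp42
    HEIF               yes               no          heic   mif1, heic
    HEIF               yes               yes         heic   mif1, heic, isom, mp42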
@@ -1224,15 +1343,21 @@
     mLock.unlock();
 }
 
-off64_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
+off64_t MPEG4Writer::addSample_l(
+        MediaBuffer *buffer, bool usePrefix, size_t *bytesWritten) {
     off64_t old_offset = mOffset;
 
-    ::write(mFd,
-          (const uint8_t *)buffer->data() + buffer->range_offset(),
-          buffer->range_length());
+    if (usePrefix) {
+        addMultipleLengthPrefixedSamples_l(buffer);
+    } else {
+        ::write(mFd,
+              (const uint8_t *)buffer->data() + buffer->range_offset(),
+              buffer->range_length());
 
-    mOffset += buffer->range_length();
+        mOffset += buffer->range_length();
+    }
 
+    *bytesWritten = mOffset - old_offset;
     return old_offset;
 }
 
@@ -1250,9 +1375,7 @@
     }
 }
 
-off64_t MPEG4Writer::addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer) {
-    off64_t old_offset = mOffset;
-
+void MPEG4Writer::addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer) {
     const size_t kExtensionNALSearchRange = 64; // bytes to look for non-VCL NALUs
 
     const uint8_t *dataStart = (const uint8_t *)buffer->data() + buffer->range_offset();
@@ -1277,13 +1400,9 @@
     buffer->set_range(buffer->range_offset() + currentNalOffset,
             buffer->range_length() - currentNalOffset);
     addLengthPrefixedSample_l(buffer);
-
-    return old_offset;
 }
 
-off64_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
-    off64_t old_offset = mOffset;
-
+void MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
     size_t length = buffer->range_length();
 
     if (mUse4ByteNalLength) {
@@ -1311,40 +1430,35 @@
         ::write(mFd, (const uint8_t *)buffer->data() + buffer->range_offset(), length);
         mOffset += length + 2;
     }
-
-    return old_offset;
 }
 
 size_t MPEG4Writer::write(
         const void *ptr, size_t size, size_t nmemb) {
 
     const size_t bytes = size * nmemb;
-    if (mWriteMoovBoxToMemory) {
+    if (mWriteBoxToMemory) {
 
-        off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
-        if (moovBoxSize > mEstimatedMoovBoxSize) {
-            // The reserved moov box at the beginning of the file
-            // is not big enough. Moov box should be written to
-            // the end of the file from now on, but not to the
-            // in-memory cache.
+        off64_t boxSize = 8 + mInMemoryCacheOffset + bytes;
+        if (boxSize > mInMemoryCacheSize) {
+            // The reserved free space at the beginning of the file is not big
+            // enough. Boxes should be written to the end of the file from now
+            // on, but not to the in-memory cache.
 
-            // We write partial moov box that is in the memory to
-            // the file first.
+            // We write partial box that is in the memory to the file first.
             for (List<off64_t>::iterator it = mBoxes.begin();
                  it != mBoxes.end(); ++it) {
                 (*it) += mOffset;
             }
             lseek64(mFd, mOffset, SEEK_SET);
-            ::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset);
+            ::write(mFd, mInMemoryCache, mInMemoryCacheOffset);
             ::write(mFd, ptr, bytes);
-            mOffset += (bytes + mMoovBoxBufferOffset);
+            mOffset += (bytes + mInMemoryCacheOffset);
 
-            // All subsequent moov box content will be written
-            // to the end of the file.
-            mWriteMoovBoxToMemory = false;
+            // All subsequent boxes will be written to the end of the file.
+            mWriteBoxToMemory = false;
         } else {
-            memcpy(mMoovBoxBuffer + mMoovBoxBufferOffset, ptr, bytes);
-            mMoovBoxBufferOffset += bytes;
+            memcpy(mInMemoryCache + mInMemoryCacheOffset, ptr, bytes);
+            mInMemoryCacheOffset += bytes;
         }
     } else {
         ::write(mFd, ptr, size * nmemb);
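A worked example of the overflow check above, with hypothetical numbers: if mInMemoryCacheSize is 4096, mInMemoryCacheOffset is 4000 and a 100-byte write arrives, then 8 + 4000 + 100 = 4108 exceeds 4096 (the extra 8 bytes leave room for the trailing 'free' box header), so the 4000 cached bytes plus the new 100 are written at the current end of the file, mWriteBoxToMemory is cleared, and the reserved region at the head of the file stays behind as a wasted 'free' box.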
@@ -1354,8 +1468,8 @@
 }
 
 void MPEG4Writer::beginBox(uint32_t id) {
-    mBoxes.push_back(mWriteMoovBoxToMemory?
-            mMoovBoxBufferOffset: mOffset);
+    mBoxes.push_back(mWriteBoxToMemory?
+            mInMemoryCacheOffset: mOffset);
 
     writeInt32(0);
     writeInt32(id);
@@ -1364,8 +1478,8 @@
 void MPEG4Writer::beginBox(const char *fourcc) {
     CHECK_EQ(strlen(fourcc), 4u);
 
-    mBoxes.push_back(mWriteMoovBoxToMemory?
-            mMoovBoxBufferOffset: mOffset);
+    mBoxes.push_back(mWriteBoxToMemory?
+            mInMemoryCacheOffset: mOffset);
 
     writeInt32(0);
     writeFourcc(fourcc);
@@ -1377,9 +1491,9 @@
     off64_t offset = *--mBoxes.end();
     mBoxes.erase(--mBoxes.end());
 
-    if (mWriteMoovBoxToMemory) {
-       int32_t x = htonl(mMoovBoxBufferOffset - offset);
-       memcpy(mMoovBoxBuffer + offset, &x, 4);
+    if (mWriteBoxToMemory) {
+        int32_t x = htonl(mInMemoryCacheOffset - offset);
+        memcpy(mInMemoryCache + offset, &x, 4);
     } else {
         lseek64(mFd, offset, SEEK_SET);
         writeInt32(mOffset - offset);
@@ -1538,7 +1652,7 @@
     if (mMaxFileSizeLimitBytes == 0) {
         return false;
     }
-    int64_t nTotalBytesEstimate = static_cast<int64_t>(mEstimatedMoovBoxSize);
+    int64_t nTotalBytesEstimate = static_cast<int64_t>(mInMemoryCacheSize);
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
         nTotalBytesEstimate += (*it)->getEstimatedTrackSizeBytes();
@@ -1561,7 +1675,7 @@
         return false;
     }
 
-    int64_t nTotalBytesEstimate = static_cast<int64_t>(mEstimatedMoovBoxSize);
+    int64_t nTotalBytesEstimate = static_cast<int64_t>(mInMemoryCacheSize);
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
         nTotalBytesEstimate += (*it)->getEstimatedTrackSizeBytes();
@@ -1583,7 +1697,7 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
-        if ((*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
+        if (!(*it)->isHeic() && (*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
             return true;
         }
     }
@@ -1626,7 +1740,7 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 MPEG4Writer::Track::Track(
-        MPEG4Writer *owner, const sp<IMediaSource> &source, size_t trackId)
+        MPEG4Writer *owner, const sp<MediaSource> &source, size_t trackId)
     : mOwner(owner),
       mMeta(source->getFormat()),
       mSource(source),
@@ -1655,7 +1769,16 @@
       mGotAllCodecSpecificData(false),
       mReachedEOS(false),
       mStartTimestampUs(-1),
-      mRotation(0) {
+      mRotation(0),
+      mIsPrimary(0),
+      mWidth(0),
+      mHeight(0),
+      mGridWidth(0),
+      mGridHeight(0),
+      mGridRows(0),
+      mGridCols(0),
+      mNumTiles(1),
+      mTileIndex(0) {
     getCodecSpecificDataFromInputFormatIfPossible();
 
     const char *mime;
@@ -1664,6 +1787,7 @@
     mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
     mIsAudio = !strncasecmp(mime, "audio/", 6);
     mIsVideo = !strncasecmp(mime, "video/", 6);
+    mIsHeic = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
     mIsMPEG4 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC);
 
@@ -1675,7 +1799,27 @@
         }
     }
 
-    setTimeScale();
+    if (!mIsHeic) {
+        setTimeScale();
+    } else {
+        CHECK(mMeta->findInt32(kKeyWidth, &mWidth) && (mWidth > 0));
+        CHECK(mMeta->findInt32(kKeyHeight, &mHeight) && (mHeight > 0));
+
+        int32_t gridWidth, gridHeight, gridRows, gridCols;
+        if (mMeta->findInt32(kKeyGridWidth, &gridWidth) && (gridWidth > 0) &&
+            mMeta->findInt32(kKeyGridHeight, &gridHeight) && (gridHeight > 0) &&
+            mMeta->findInt32(kKeyGridRows, &gridRows) && (gridRows > 0) &&
+            mMeta->findInt32(kKeyGridCols, &gridCols) && (gridCols > 0)) {
+            mGridWidth = gridWidth;
+            mGridHeight = gridHeight;
+            mGridRows = gridRows;
+            mGridCols = gridCols;
+            mNumTiles = gridRows * gridCols;
+        }
+        if (!mMeta->findInt32(kKeyTrackIsDefault, &mIsPrimary)) {
+            mIsPrimary = false;
+        }
+    }
 }
 
 // Clear all the internal states except the CSD data.
@@ -1723,15 +1867,15 @@
 }
 
 void MPEG4Writer::Track::updateTrackSizeEstimate() {
-
-    uint32_t stcoBoxCount = (mOwner->use32BitFileOffset()
-                            ? mStcoTableEntries->count()
-                            : mCo64TableEntries->count());
-    int64_t stcoBoxSizeBytes = stcoBoxCount * 4;
-    int64_t stszBoxSizeBytes = mSamplesHaveSameSize? 4: (mStszTableEntries->count() * 4);
-
     mEstimatedTrackSizeBytes = mMdatSizeBytes;  // media data size
-    if (!mOwner->isFileStreamable()) {
+
+    if (!isHeic() && !mOwner->isFileStreamable()) {
+        uint32_t stcoBoxCount = (mOwner->use32BitFileOffset()
+                                ? mStcoTableEntries->count()
+                                : mCo64TableEntries->count());
+        int64_t stcoBoxSizeBytes = stcoBoxCount * 4;
+        int64_t stszBoxSizeBytes = mSamplesHaveSameSize? 4: (mStszTableEntries->count() * 4);
+
         // Reserved free space is not large enough to hold
         // all meta data and thus wasted.
         mEstimatedTrackSizeBytes += mStscTableEntries->count() * 12 +  // stsc box size
@@ -1745,10 +1889,9 @@
 
 void MPEG4Writer::Track::addOneStscTableEntry(
         size_t chunkId, size_t sampleId) {
-
-        mStscTableEntries->add(htonl(chunkId));
-        mStscTableEntries->add(htonl(sampleId));
-        mStscTableEntries->add(htonl(1));
+    mStscTableEntries->add(htonl(chunkId));
+    mStscTableEntries->add(htonl(sampleId));
+    mStscTableEntries->add(htonl(1));
 }
 
 void MPEG4Writer::Track::addOneStssTableEntry(size_t sampleId) {
@@ -1794,6 +1937,7 @@
 }
 
 void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
+    CHECK(!mIsHeic);
     if (mOwner->use32BitFileOffset()) {
         uint32_t value = offset;
         mStcoTableEntries->add(htonl(value));
@@ -1802,6 +1946,70 @@
     }
 }
 
+void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size) {
+    CHECK(mIsHeic);
+
+    if (offset > UINT32_MAX || size > UINT32_MAX) {
+        ALOGE("offset or size is out of range: %lld, %lld",
+                (long long) offset, (long long) size);
+        mIsMalformed = true;
+    }
+    if (mIsMalformed) {
+        return;
+    }
+    if (mTileIndex >= mNumTiles) {
+        ALOGW("Ignoring excess tiles!");
+        return;
+    }
+
+    if (mProperties.empty()) {
+        mProperties.push_back(mOwner->addProperty_l({
+            .type = FOURCC('h', 'v', 'c', 'C'),
+            .hvcc = ABuffer::CreateAsCopy(mCodecSpecificData, mCodecSpecificDataSize)
+        }));
+
+        mProperties.push_back(mOwner->addProperty_l({
+            .type = FOURCC('i', 's', 'p', 'e'),
+            .width = (mNumTiles > 1) ? mGridWidth : mWidth,
+            .height = (mNumTiles > 1) ? mGridHeight : mHeight,
+        }));
+    }
+
+    uint16_t itemId = mOwner->addItem_l({
+        .itemType = "hvc1",
+        .isPrimary = (mNumTiles > 1) ? false : (mIsPrimary != 0),
+        .isHidden = (mNumTiles > 1),
+        .offset = (uint32_t)offset,
+        .size = (uint32_t)size,
+        .properties = mProperties,
+    });
+
+    mTileIndex++;
+    if (mNumTiles > 1) {
+        mDimgRefs.push_back(itemId);
+
+        if (mTileIndex == mNumTiles) {
+            mProperties.clear();
+            mProperties.push_back(mOwner->addProperty_l({
+                .type = FOURCC('i', 's', 'p', 'e'),
+                .width = mWidth,
+                .height = mHeight,
+            }));
+            mOwner->addItem_l({
+                .itemType = "grid",
+                .isPrimary = (mIsPrimary != 0),
+                .isHidden = false,
+                .rows = (uint32_t)mGridRows,
+                .cols = (uint32_t)mGridCols,
+                .width = (uint32_t)mWidth,
+                .height = (uint32_t)mHeight,
+                .properties = mProperties,
+                .dimgRefs = mDimgRefs,
+            });
+        }
+    }
+}
+
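A minimal standalone sketch of the item sequence this method ends up building for a hypothetical 2x2 grid: four hidden 'hvc1' tile items followed by one primary 'grid' item that back-references them (simplified stand-in structs, not the writer's real ItemInfo):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct Item {                        // simplified stand-in for ItemInfo
        const char *type;
        bool isPrimary;
        bool isHidden;
        std::vector<uint16_t> dimgRefs;
    };

    int main() {
        const int rows = 2, cols = 2, numTiles = rows * cols;   // hypothetical grid
        std::vector<Item> items;
        std::vector<uint16_t> tileIds;
        for (int tile = 0; tile < numTiles; ++tile) {
            items.push_back({"hvc1", /*isPrimary=*/false, /*isHidden=*/true, {}});
            tileIds.push_back(uint16_t(10000 + items.size() - 1));  // ids start at 10000
        }
        // once the last tile arrives, the grid item is added with the tile refs
        items.push_back({"grid", /*isPrimary=*/true, /*isHidden=*/false, tileIds});
        for (const Item &it : items) {
            std::printf("%s primary=%d hidden=%d refs=%zu\n",
                        it.type, it.isPrimary, it.isHidden, it.dimgRefs.size());
        }
        return 0;
    }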
 void MPEG4Writer::Track::setTimeScale() {
     ALOGV("setTimeScale");
     // Default time scale
@@ -1854,7 +2062,8 @@
     size_t size = 0;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         mMeta->findData(kKeyAVCC, &type, &data, &size);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
+               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         mMeta->findData(kKeyHVCC, &type, &data, &size);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
             || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
@@ -1944,14 +2153,16 @@
         chunk->mTimeStampUs, chunk->mTrack->getTrackType());
 
     int32_t isFirstSample = true;
+    bool usePrefix = chunk->mTrack->usePrefix();
     while (!chunk->mSamples.empty()) {
         List<MediaBuffer *>::iterator it = chunk->mSamples.begin();
 
-        off64_t offset = (chunk->mTrack->isAvc() || chunk->mTrack->isHevc())
-                                ? addMultipleLengthPrefixedSamples_l(*it)
-                                : addSample_l(*it);
+        size_t bytesWritten;
+        off64_t offset = addSample_l(*it, usePrefix, &bytesWritten);
 
-        if (isFirstSample) {
+        if (chunk->mTrack->isHeic()) {
+            chunk->mTrack->addItemOffsetAndSize(offset, bytesWritten);
+        } else if (isFirstSample) {
             chunk->mTrack->addChunkOffset(offset);
             isFirstSample = false;
         }
@@ -2636,7 +2847,7 @@
                             (const uint8_t *)buffer->data()
                                 + buffer->range_offset(),
                             buffer->range_length());
-                } else if (mIsHevc) {
+                } else if (mIsHevc || mIsHeic) {
                     err = makeHEVCCodecSpecificData(
                             (const uint8_t *)buffer->data()
                                 + buffer->range_offset(),
@@ -2661,7 +2872,8 @@
         }
 
         // Per-frame metadata sample's size must be smaller than max allowed.
-        if (!mIsVideo && !mIsAudio && buffer->range_length() >= kMaxMetadataSize) {
+        if (!mIsVideo && !mIsAudio && !mIsHeic &&
+                buffer->range_length() >= kMaxMetadataSize) {
             ALOGW("Buffer size is %zu. Maximum metadata buffer size is %lld for %s track",
                     buffer->range_length(), (long long)kMaxMetadataSize, trackName);
             buffer->release();
@@ -2682,10 +2894,10 @@
         buffer->release();
         buffer = NULL;
 
-        if (mIsAvc || mIsHevc) StripStartcode(copy);
+        if (usePrefix()) StripStartcode(copy);
 
         size_t sampleSize = copy->range_length();
-        if (mIsAvc || mIsHevc) {
+        if (usePrefix()) {
             if (mOwner->useNalLengthFour()) {
                 sampleSize += 4;
             } else {
@@ -2739,223 +2951,230 @@
             mGotStartKeyFrame = true;
         }
 ////////////////////////////////////////////////////////////////////////////////
-        if (mStszTableEntries->count() == 0) {
-            mFirstSampleTimeRealUs = systemTime() / 1000;
-            mStartTimestampUs = timestampUs;
-            mOwner->setStartTimestampUs(mStartTimestampUs);
-            previousPausedDurationUs = mStartTimestampUs;
-        }
 
-        if (mResumed) {
-            int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
-            if (WARN_UNLESS(durExcludingEarlierPausesUs >= 0ll, "for %s track", trackName)) {
-                copy->release();
-                mSource->stop();
-                mIsMalformed = true;
-                break;
-            }
-
-            int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
-            if (WARN_UNLESS(pausedDurationUs >= lastDurationUs, "for %s track", trackName)) {
-                copy->release();
-                mSource->stop();
-                mIsMalformed = true;
-                break;
-            }
-
-            previousPausedDurationUs += pausedDurationUs - lastDurationUs;
-            mResumed = false;
-        }
-        TimestampDebugHelperEntry timestampDebugEntry;
-        timestampUs -= previousPausedDurationUs;
-        timestampDebugEntry.pts = timestampUs;
-        if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
-            copy->release();
-            mSource->stop();
-            mIsMalformed = true;
-            break;
-        }
-
-        if (mIsVideo) {
-            /*
-             * Composition time: timestampUs
-             * Decoding time: decodingTimeUs
-             * Composition time offset = composition time - decoding time
-             */
-            int64_t decodingTimeUs;
-            CHECK(meta_data->findInt64(kKeyDecodingTime, &decodingTimeUs));
-            decodingTimeUs -= previousPausedDurationUs;
-
-            // ensure non-negative, monotonic decoding time
-            if (mLastDecodingTimeUs < 0) {
-                decodingTimeUs = std::max((int64_t)0, decodingTimeUs);
-            } else {
-                // increase decoding time by at least the larger vaule of 1 tick and
-                // 0.1 milliseconds. This needs to take into account the possible
-                // delta adjustment in DurationTicks in below.
-                decodingTimeUs = std::max(mLastDecodingTimeUs +
-                        std::max(100, divUp(1000000, mTimeScale)), decodingTimeUs);
-            }
-
-            mLastDecodingTimeUs = decodingTimeUs;
-            timestampDebugEntry.dts = decodingTimeUs;
-            timestampDebugEntry.frameType = isSync ? "Key frame" : "Non-Key frame";
-            // Insert the timestamp into the mTimestampDebugHelper
-            if (mTimestampDebugHelper.size() >= kTimestampDebugCount) {
-                mTimestampDebugHelper.pop_front();
-            }
-            mTimestampDebugHelper.push_back(timestampDebugEntry);
-
-            cttsOffsetTimeUs =
-                    timestampUs + kMaxCttsOffsetTimeUs - decodingTimeUs;
-            if (WARN_UNLESS(cttsOffsetTimeUs >= 0ll, "for %s track", trackName)) {
-                copy->release();
-                mSource->stop();
-                mIsMalformed = true;
-                break;
-            }
-
-            timestampUs = decodingTimeUs;
-            ALOGV("decoding time: %" PRId64 " and ctts offset time: %" PRId64,
-                timestampUs, cttsOffsetTimeUs);
-
-            // Update ctts box table if necessary
-            currCttsOffsetTimeTicks =
-                    (cttsOffsetTimeUs * mTimeScale + 500000LL) / 1000000LL;
-            if (WARN_UNLESS(currCttsOffsetTimeTicks <= 0x0FFFFFFFFLL, "for %s track", trackName)) {
-                copy->release();
-                mSource->stop();
-                mIsMalformed = true;
-                break;
-            }
-
+        if (!mIsHeic) {
             if (mStszTableEntries->count() == 0) {
-                // Force the first ctts table entry to have one single entry
-                // so that we can do adjustment for the initial track start
-                // time offset easily in writeCttsBox().
-                lastCttsOffsetTimeTicks = currCttsOffsetTimeTicks;
-                addOneCttsTableEntry(1, currCttsOffsetTimeTicks);
-                cttsSampleCount = 0;      // No sample in ctts box is pending
-            } else {
-                if (currCttsOffsetTimeTicks != lastCttsOffsetTimeTicks) {
-                    addOneCttsTableEntry(cttsSampleCount, lastCttsOffsetTimeTicks);
-                    lastCttsOffsetTimeTicks = currCttsOffsetTimeTicks;
-                    cttsSampleCount = 1;  // One sample in ctts box is pending
+                mFirstSampleTimeRealUs = systemTime() / 1000;
+                mStartTimestampUs = timestampUs;
+                mOwner->setStartTimestampUs(mStartTimestampUs);
+                previousPausedDurationUs = mStartTimestampUs;
+            }
+
+            if (mResumed) {
+                int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
+                if (WARN_UNLESS(durExcludingEarlierPausesUs >= 0ll, "for %s track", trackName)) {
+                    copy->release();
+                    mSource->stop();
+                    mIsMalformed = true;
+                    break;
+                }
+
+                int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
+                if (WARN_UNLESS(pausedDurationUs >= lastDurationUs, "for %s track", trackName)) {
+                    copy->release();
+                    mSource->stop();
+                    mIsMalformed = true;
+                    break;
+                }
+
+                previousPausedDurationUs += pausedDurationUs - lastDurationUs;
+                mResumed = false;
+            }
+            TimestampDebugHelperEntry timestampDebugEntry;
+            timestampUs -= previousPausedDurationUs;
+            timestampDebugEntry.pts = timestampUs;
+            if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
+                copy->release();
+                mSource->stop();
+                mIsMalformed = true;
+                break;
+            }
+
+            if (mIsVideo) {
+                /*
+                 * Composition time: timestampUs
+                 * Decoding time: decodingTimeUs
+                 * Composition time offset = composition time - decoding time
+                 */
+                int64_t decodingTimeUs;
+                CHECK(meta_data->findInt64(kKeyDecodingTime, &decodingTimeUs));
+                decodingTimeUs -= previousPausedDurationUs;
+
+                // ensure non-negative, monotonic decoding time
+                if (mLastDecodingTimeUs < 0) {
+                    decodingTimeUs = std::max((int64_t)0, decodingTimeUs);
                 } else {
-                    ++cttsSampleCount;
+                    // increase decoding time by at least the larger value of 1 tick and
+                    // 0.1 milliseconds. This needs to take into account the possible
+                    // delta adjustment of DurationTicks below.
+                    decodingTimeUs = std::max(mLastDecodingTimeUs +
+                            std::max(100, divUp(1000000, mTimeScale)), decodingTimeUs);
                 }
-            }
 
-            // Update ctts time offset range
-            if (mStszTableEntries->count() == 0) {
-                mMinCttsOffsetTicks = currCttsOffsetTimeTicks;
-                mMaxCttsOffsetTicks = currCttsOffsetTimeTicks;
-            } else {
-                if (currCttsOffsetTimeTicks > mMaxCttsOffsetTicks) {
-                    mMaxCttsOffsetTicks = currCttsOffsetTimeTicks;
-                } else if (currCttsOffsetTimeTicks < mMinCttsOffsetTicks) {
+                mLastDecodingTimeUs = decodingTimeUs;
+                timestampDebugEntry.dts = decodingTimeUs;
+                timestampDebugEntry.frameType = isSync ? "Key frame" : "Non-Key frame";
+                // Insert the timestamp into the mTimestampDebugHelper
+                if (mTimestampDebugHelper.size() >= kTimestampDebugCount) {
+                    mTimestampDebugHelper.pop_front();
+                }
+                mTimestampDebugHelper.push_back(timestampDebugEntry);
+
+                cttsOffsetTimeUs =
+                        timestampUs + kMaxCttsOffsetTimeUs - decodingTimeUs;
+                if (WARN_UNLESS(cttsOffsetTimeUs >= 0ll, "for %s track", trackName)) {
+                    copy->release();
+                    mSource->stop();
+                    mIsMalformed = true;
+                    break;
+                }
+
+                timestampUs = decodingTimeUs;
+                ALOGV("decoding time: %" PRId64 " and ctts offset time: %" PRId64,
+                    timestampUs, cttsOffsetTimeUs);
+
+                // Update ctts box table if necessary
+                currCttsOffsetTimeTicks =
+                        (cttsOffsetTimeUs * mTimeScale + 500000LL) / 1000000LL;
+                if (WARN_UNLESS(currCttsOffsetTimeTicks <= 0x0FFFFFFFFLL, "for %s track", trackName)) {
+                    copy->release();
+                    mSource->stop();
+                    mIsMalformed = true;
+                    break;
+                }
+
+                if (mStszTableEntries->count() == 0) {
+                    // Force the first ctts table entry to have one single entry
+                    // so that we can do adjustment for the initial track start
+                    // time offset easily in writeCttsBox().
+                    lastCttsOffsetTimeTicks = currCttsOffsetTimeTicks;
+                    addOneCttsTableEntry(1, currCttsOffsetTimeTicks);
+                    cttsSampleCount = 0;      // No sample in ctts box is pending
+                } else {
+                    if (currCttsOffsetTimeTicks != lastCttsOffsetTimeTicks) {
+                        addOneCttsTableEntry(cttsSampleCount, lastCttsOffsetTimeTicks);
+                        lastCttsOffsetTimeTicks = currCttsOffsetTimeTicks;
+                        cttsSampleCount = 1;  // One sample in ctts box is pending
+                    } else {
+                        ++cttsSampleCount;
+                    }
+                }
+
+                // Update ctts time offset range
+                if (mStszTableEntries->count() == 0) {
                     mMinCttsOffsetTicks = currCttsOffsetTimeTicks;
-                    mMinCttsOffsetTimeUs = cttsOffsetTimeUs;
+                    mMaxCttsOffsetTicks = currCttsOffsetTimeTicks;
+                } else {
+                    if (currCttsOffsetTimeTicks > mMaxCttsOffsetTicks) {
+                        mMaxCttsOffsetTicks = currCttsOffsetTimeTicks;
+                    } else if (currCttsOffsetTimeTicks < mMinCttsOffsetTicks) {
+                        mMinCttsOffsetTicks = currCttsOffsetTimeTicks;
+                        mMinCttsOffsetTimeUs = cttsOffsetTimeUs;
+                    }
                 }
             }
-        }
 
-        if (mOwner->isRealTimeRecording()) {
-            if (mIsAudio) {
-                updateDriftTime(meta_data);
-            }
-        }
-
-        if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
-            copy->release();
-            mSource->stop();
-            mIsMalformed = true;
-            break;
-        }
-
-        ALOGV("%s media time stamp: %" PRId64 " and previous paused duration %" PRId64,
-                trackName, timestampUs, previousPausedDurationUs);
-        if (timestampUs > mTrackDurationUs) {
-            mTrackDurationUs = timestampUs;
-        }
-
-        // We need to use the time scale based ticks, rather than the
-        // timestamp itself to determine whether we have to use a new
-        // stts entry, since we may have rounding errors.
-        // The calculation is intended to reduce the accumulated
-        // rounding errors.
-        currDurationTicks =
-            ((timestampUs * mTimeScale + 500000LL) / 1000000LL -
-                (lastTimestampUs * mTimeScale + 500000LL) / 1000000LL);
-        if (currDurationTicks < 0ll) {
-            ALOGE("do not support out of order frames (timestamp: %lld < last: %lld for %s track",
-                    (long long)timestampUs, (long long)lastTimestampUs, trackName);
-            copy->release();
-            mSource->stop();
-            mIsMalformed = true;
-            break;
-        }
-
-        // if the duration is different for this sample, see if it is close enough to the previous
-        // duration that we can fudge it and use the same value, to avoid filling the stts table
-        // with lots of near-identical entries.
-        // "close enough" here means that the current duration needs to be adjusted by less
-        // than 0.1 milliseconds
-        if (lastDurationTicks && (currDurationTicks != lastDurationTicks)) {
-            int64_t deltaUs = ((lastDurationTicks - currDurationTicks) * 1000000LL
-                    + (mTimeScale / 2)) / mTimeScale;
-            if (deltaUs > -100 && deltaUs < 100) {
-                // use previous ticks, and adjust timestamp as if it was actually that number
-                // of ticks
-                currDurationTicks = lastDurationTicks;
-                timestampUs += deltaUs;
-            }
-        }
-        mStszTableEntries->add(htonl(sampleSize));
-        if (mStszTableEntries->count() > 2) {
-
-            // Force the first sample to have its own stts entry so that
-            // we can adjust its value later to maintain the A/V sync.
-            if (mStszTableEntries->count() == 3 || currDurationTicks != lastDurationTicks) {
-                addOneSttsTableEntry(sampleCount, lastDurationTicks);
-                sampleCount = 1;
-            } else {
-                ++sampleCount;
+            if (mOwner->isRealTimeRecording()) {
+                if (mIsAudio) {
+                    updateDriftTime(meta_data);
+                }
             }
 
-        }
-        if (mSamplesHaveSameSize) {
-            if (mStszTableEntries->count() >= 2 && previousSampleSize != sampleSize) {
-                mSamplesHaveSameSize = false;
+            if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
+                copy->release();
+                mSource->stop();
+                mIsMalformed = true;
+                break;
             }
-            previousSampleSize = sampleSize;
-        }
-        ALOGV("%s timestampUs/lastTimestampUs: %" PRId64 "/%" PRId64,
-                trackName, timestampUs, lastTimestampUs);
-        lastDurationUs = timestampUs - lastTimestampUs;
-        lastDurationTicks = currDurationTicks;
-        lastTimestampUs = timestampUs;
 
-        if (isSync != 0) {
-            addOneStssTableEntry(mStszTableEntries->count());
-        }
-
-        if (mTrackingProgressStatus) {
-            if (mPreviousTrackTimeUs <= 0) {
-                mPreviousTrackTimeUs = mStartTimestampUs;
+            ALOGV("%s media time stamp: %" PRId64 " and previous paused duration %" PRId64,
+                    trackName, timestampUs, previousPausedDurationUs);
+            if (timestampUs > mTrackDurationUs) {
+                mTrackDurationUs = timestampUs;
             }
-            trackProgressStatus(timestampUs);
+
+            // We need to use the time scale based ticks, rather than the
+            // timestamp itself to determine whether we have to use a new
+            // stts entry, since we may have rounding errors.
+            // The calculation is intended to reduce the accumulated
+            // rounding errors.
+            currDurationTicks =
+                ((timestampUs * mTimeScale + 500000LL) / 1000000LL -
+                    (lastTimestampUs * mTimeScale + 500000LL) / 1000000LL);
+            if (currDurationTicks < 0ll) {
+                ALOGE("do not support out of order frames (timestamp: %lld < last: %lld for %s track",
+                        (long long)timestampUs, (long long)lastTimestampUs, trackName);
+                copy->release();
+                mSource->stop();
+                mIsMalformed = true;
+                break;
+            }
+
+            // if the duration is different for this sample, see if it is close enough to the previous
+            // duration that we can fudge it and use the same value, to avoid filling the stts table
+            // with lots of near-identical entries.
+            // "close enough" here means that the current duration needs to be adjusted by less
+            // than 0.1 milliseconds
+            if (lastDurationTicks && (currDurationTicks != lastDurationTicks)) {
+                int64_t deltaUs = ((lastDurationTicks - currDurationTicks) * 1000000LL
+                        + (mTimeScale / 2)) / mTimeScale;
+                if (deltaUs > -100 && deltaUs < 100) {
+                    // use previous ticks, and adjust timestamp as if it was actually that number
+                    // of ticks
+                    currDurationTicks = lastDurationTicks;
+                    timestampUs += deltaUs;
+                }
+            }
+            mStszTableEntries->add(htonl(sampleSize));
+            if (mStszTableEntries->count() > 2) {
+
+                // Force the first sample to have its own stts entry so that
+                // we can adjust its value later to maintain the A/V sync.
+                if (mStszTableEntries->count() == 3 || currDurationTicks != lastDurationTicks) {
+                    addOneSttsTableEntry(sampleCount, lastDurationTicks);
+                    sampleCount = 1;
+                } else {
+                    ++sampleCount;
+                }
+
+            }
+            if (mSamplesHaveSameSize) {
+                if (mStszTableEntries->count() >= 2 && previousSampleSize != sampleSize) {
+                    mSamplesHaveSameSize = false;
+                }
+                previousSampleSize = sampleSize;
+            }
+            ALOGV("%s timestampUs/lastTimestampUs: %" PRId64 "/%" PRId64,
+                    trackName, timestampUs, lastTimestampUs);
+            lastDurationUs = timestampUs - lastTimestampUs;
+            lastDurationTicks = currDurationTicks;
+            lastTimestampUs = timestampUs;
+
+            if (isSync != 0) {
+                addOneStssTableEntry(mStszTableEntries->count());
+            }
+
+            if (mTrackingProgressStatus) {
+                if (mPreviousTrackTimeUs <= 0) {
+                    mPreviousTrackTimeUs = mStartTimestampUs;
+                }
+                trackProgressStatus(timestampUs);
+            }
         }
         if (!hasMultipleTracks) {
-            off64_t offset = (mIsAvc || mIsHevc) ? mOwner->addMultipleLengthPrefixedSamples_l(copy)
-                                 : mOwner->addSample_l(copy);
+            size_t bytesWritten;
+            off64_t offset = mOwner->addSample_l(copy, usePrefix(), &bytesWritten);
 
-            uint32_t count = (mOwner->use32BitFileOffset()
-                        ? mStcoTableEntries->count()
-                        : mCo64TableEntries->count());
+            if (mIsHeic) {
+                addItemOffsetAndSize(offset, bytesWritten);
+            } else {
+                uint32_t count = (mOwner->use32BitFileOffset()
+                            ? mStcoTableEntries->count()
+                            : mCo64TableEntries->count());
 
-            if (count == 0) {
-                addChunkOffset(offset);
+                if (count == 0) {
+                    addChunkOffset(offset);
+                }
             }
             copy->release();
             copy = NULL;
@@ -2963,7 +3182,10 @@
         }
 
         mChunkSamples.push_back(copy);
-        if (interleaveDurationUs == 0) {
+        if (mIsHeic) {
+            bufferChunk(0 /*timestampUs*/);
+            ++nChunks;
+        } else if (interleaveDurationUs == 0) {
             addOneStscTableEntry(++nChunks, 1);
             bufferChunk(timestampUs);
         } else {
@@ -2996,42 +3218,49 @@
 
     mOwner->trackProgressStatus(mTrackId, -1, err);
 
-    // Last chunk
-    if (!hasMultipleTracks) {
-        addOneStscTableEntry(1, mStszTableEntries->count());
-    } else if (!mChunkSamples.empty()) {
-        addOneStscTableEntry(++nChunks, mChunkSamples.size());
-        bufferChunk(timestampUs);
-    }
-
-    // We don't really know how long the last frame lasts, since
-    // there is no frame time after it, just repeat the previous
-    // frame's duration.
-    if (mStszTableEntries->count() == 1) {
-        lastDurationUs = 0;  // A single sample's duration
-        lastDurationTicks = 0;
-    } else {
-        ++sampleCount;  // Count for the last sample
-    }
-
-    if (mStszTableEntries->count() <= 2) {
-        addOneSttsTableEntry(1, lastDurationTicks);
-        if (sampleCount - 1 > 0) {
-            addOneSttsTableEntry(sampleCount - 1, lastDurationTicks);
+    if (mIsHeic) {
+        if (!mChunkSamples.empty()) {
+            bufferChunk(0);
+            ++nChunks;
         }
     } else {
-        addOneSttsTableEntry(sampleCount, lastDurationTicks);
-    }
-
-    // The last ctts box may not have been written yet, and this
-    // is to make sure that we write out the last ctts box.
-    if (currCttsOffsetTimeTicks == lastCttsOffsetTimeTicks) {
-        if (cttsSampleCount > 0) {
-            addOneCttsTableEntry(cttsSampleCount, lastCttsOffsetTimeTicks);
+        // Last chunk
+        if (!hasMultipleTracks) {
+            addOneStscTableEntry(1, mStszTableEntries->count());
+        } else if (!mChunkSamples.empty()) {
+            addOneStscTableEntry(++nChunks, mChunkSamples.size());
+            bufferChunk(timestampUs);
         }
-    }
 
-    mTrackDurationUs += lastDurationUs;
+        // We don't really know how long the last frame lasts, since
+        // there is no frame time after it, just repeat the previous
+        // frame's duration.
+        if (mStszTableEntries->count() == 1) {
+            lastDurationUs = 0;  // A single sample's duration
+            lastDurationTicks = 0;
+        } else {
+            ++sampleCount;  // Count for the last sample
+        }
+
+        if (mStszTableEntries->count() <= 2) {
+            addOneSttsTableEntry(1, lastDurationTicks);
+            if (sampleCount - 1 > 0) {
+                addOneSttsTableEntry(sampleCount - 1, lastDurationTicks);
+            }
+        } else {
+            addOneSttsTableEntry(sampleCount, lastDurationTicks);
+        }
+
+        // The last ctts box may not have been written yet, and this
+        // is to make sure that we write out the last ctts box.
+        if (currCttsOffsetTimeTicks == lastCttsOffsetTimeTicks) {
+            if (cttsSampleCount > 0) {
+                addOneCttsTableEntry(cttsSampleCount, lastCttsOffsetTimeTicks);
+            }
+        }
+
+        mTrackDurationUs += lastDurationUs;
+    }
     mReachedEOS = true;
 
     sendTrackSummary(hasMultipleTracks);
@@ -3053,7 +3282,7 @@
         return true;
     }
 
-    if (mStszTableEntries->count() == 0) {                      // no samples written
+    if (!mIsHeic && mStszTableEntries->count() == 0) {  // no samples written
         ALOGE("The number of recorded samples is 0");
         return true;
     }
@@ -3199,13 +3428,28 @@
     return mEstimatedTrackSizeBytes;
 }
 
+int32_t MPEG4Writer::Track::getMetaSizeIncrease() const {
+    CHECK(mIsHeic);
+    return    20                           // 1. 'ispe' property
+            + (8 + mCodecSpecificDataSize) // 2. 'hvcC' property
+            + (20                          // 3. extra 'ispe'
+            + (8 + 2 + 2 + mNumTiles * 2)  // 4. 'dimg' ref
+            + 12)                          // 5. ImageGrid in 'idat' (worst case)
+            * (mNumTiles > 1)              // -  (3~5: applicable only if grid)
+            + (16                          // 6. increase to 'iloc'
+            + 21                           // 7. increase to 'iinf'
+            + (3 + 2 * 2))                 // 8. increase to 'ipma' (worst case)
+            * (mNumTiles + 1);             // -  (6~8: are per-item)
+}
+
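As a rough worked example with hypothetical inputs: for a 4x4 grid (mNumTiles == 16) and a 512-byte hvcC blob, the expression above evaluates to 20 + (8 + 512) + (20 + (8 + 2 + 2 + 16*2) + 12) + (16 + 21 + 7) * (16 + 1) = 20 + 520 + 76 + 748 = 1364 bytes of estimated file-level meta growth contributed by this track.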
 status_t MPEG4Writer::Track::checkCodecSpecificData() const {
     const char *mime;
     CHECK(mMeta->findCString(kKeyMIMEType, &mime));
     if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
-        !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
+        !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
+        !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
         if (!mCodecSpecificData ||
             mCodecSpecificDataSize <= 0) {
             ALOGE("Missing codec specific data");
@@ -3222,7 +3466,10 @@
 }
 
 const char *MPEG4Writer::Track::getTrackType() const {
-    return mIsAudio ? "Audio" : (mIsVideo ? "Video" : "Metadata");
+    return mIsAudio ? "Audio" :
+           mIsVideo ? "Video" :
+           mIsHeic  ? "Image" :
+                      "Metadata";
 }
 
 void MPEG4Writer::Track::writeTrackHeader(bool use32BitOffset) {
@@ -3792,11 +4039,11 @@
     endBox();
 }
 
-void MPEG4Writer::writeHdlr() {
+void MPEG4Writer::writeHdlr(const char *handlerType) {
     beginBox("hdlr");
     writeInt32(0); // Version, Flags
     writeInt32(0); // Predefined
-    writeFourcc("mdta");
+    writeFourcc(handlerType);
     writeInt32(0); // Reserved[0]
     writeInt32(0); // Reserved[1]
     writeInt32(0); // Reserved[2]
@@ -3876,19 +4123,291 @@
     endBox(); // ilst
 }
 
-void MPEG4Writer::writeMetaBox() {
+void MPEG4Writer::writeMoovLevelMetaBox() {
     size_t count = mMetaKeys->countEntries();
     if (count == 0) {
         return;
     }
 
     beginBox("meta");
-    writeHdlr();
+    writeHdlr("mdta");
     writeKeys();
     writeIlst();
     endBox();
 }
 
+void MPEG4Writer::writeIlocBox() {
+    beginBox("iloc");
+    // Use version 1 to allow construction method 1 that refers to
+    // data in idat box inside meta box.
+    writeInt32(0x01000000); // Version = 1, Flags = 0
+    writeInt16(0x4400);     // offset_size = length_size = 4
+                            // base_offset_size = index_size = 0
+
+    // 16-bit item_count
+    size_t itemCount = mItems.size();
+    if (itemCount > 65535) {
+        ALOGW("Dropping excess items: itemCount %zu", itemCount);
+        itemCount = 65535;
+    }
+    writeInt16((uint16_t)itemCount);
+
+    for (size_t i = 0; i < itemCount; i++) {
+        writeInt16(mItems[i].itemId);
+        bool isGrid = mItems[i].isGrid();
+
+        writeInt16(isGrid ? 1 : 0); // construction_method
+        writeInt16(0); // data_reference_index = 0
+        writeInt16(1); // extent_count = 1
+
+        if (isGrid) {
+            // offset into the 'idat' box
+            writeInt32(mNumGrids++ * 8);
+            writeInt32(8);
+        } else {
+            writeInt32(mItems[i].offset);
+            writeInt32(mItems[i].size);
+        }
+    }
+    endBox();
+}
+
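For a single hypothetical 2x2-grid file, the entries emitted above would be: four tile items with construction_method 0, each carrying its absolute mdat offset and size, plus one grid item with construction_method 1 whose extent is offset 0, length 8 into the 'idat' box that follows (a second grid, if present, would start at idat offset 8, and so on).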
+void MPEG4Writer::writeInfeBox(
+        uint16_t itemId, const char *itemType, uint32_t flags) {
+    beginBox("infe");
+    writeInt32(0x02000000 | flags); // Version = 2, Flags = flags
+    writeInt16(itemId);
+    writeInt16(0);          // item_protection_index = 0
+    writeFourcc(itemType);
+    writeCString("");       // item_name
+    endBox();
+}
+
+void MPEG4Writer::writeIinfBox() {
+    beginBox("iinf");
+    writeInt32(0);          // Version = 0, Flags = 0
+
+    // 16-bit item_count
+    size_t itemCount = mItems.size();
+    if (itemCount > 65535) {
+        ALOGW("Dropping excess items: itemCount %zu", itemCount);
+        itemCount = 65535;
+    }
+
+    writeInt16((uint16_t)itemCount);
+    for (size_t i = 0; i < itemCount; i++) {
+        writeInfeBox(mItems[i].itemId, mItems[i].itemType,
+                mItems[i].isHidden ? 1 : 0);
+    }
+
+    endBox();
+}
+
+void MPEG4Writer::writeIdatBox() {
+    beginBox("idat");
+
+    for (size_t i = 0; i < mItems.size(); i++) {
+        if (mItems[i].isGrid()) {
+            writeInt8(0); // version
+            // flags == 1 means 32-bit width,height
+            int8_t flags = (mItems[i].width > 65535 || mItems[i].height > 65535);
+            writeInt8(flags);
+            writeInt8(mItems[i].rows - 1);
+            writeInt8(mItems[i].cols - 1);
+            if (flags) {
+                writeInt32(mItems[i].width);
+                writeInt32(mItems[i].height);
+            } else {
+                writeInt16((uint16_t)mItems[i].width);
+                writeInt16((uint16_t)mItems[i].height);
+            }
+        }
+    }
+
+    endBox();
+}
+
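A tiny standalone sketch of the ImageGrid payload the loop above emits when the output dimensions fit in 16 bits; the 8-byte result is what the hard-coded extent length in writeIlocBox() assumes (dimensions here are hypothetical):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        const uint32_t rows = 2, cols = 2, width = 4096, height = 3072;  // hypothetical
        std::vector<uint8_t> payload;
        payload.push_back(0);                      // version
        payload.push_back(0);                      // flags = 0: 16-bit dimensions
        payload.push_back(uint8_t(rows - 1));      // rows_minus_one
        payload.push_back(uint8_t(cols - 1));      // columns_minus_one
        payload.push_back(uint8_t(width >> 8));    // output_width, big-endian 16-bit
        payload.push_back(uint8_t(width & 0xff));
        payload.push_back(uint8_t(height >> 8));   // output_height, big-endian 16-bit
        payload.push_back(uint8_t(height & 0xff));
        std::printf("ImageGrid payload: %zu bytes\n", payload.size());  // prints 8
        return 0;
    }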
+void MPEG4Writer::writeIrefBox() {
+    beginBox("iref");
+    writeInt32(0);          // Version = 0, Flags = 0
+    {
+        for (size_t i = 0; i < mItems.size(); i++) {
+            if (!mItems[i].isGrid()) {
+                continue;
+            }
+            beginBox("dimg");
+            writeInt16(mItems[i].itemId);
+            size_t refCount = mItems[i].dimgRefs.size();
+            if (refCount > 65535) {
+                ALOGW("too many entries in dimg");
+                refCount = 65535;
+            }
+            writeInt16((uint16_t)refCount);
+            for (size_t refIndex = 0; refIndex < refCount; refIndex++) {
+                writeInt16(mItems[i].dimgRefs[refIndex]);
+            }
+            endBox();
+        }
+    }
+    endBox();
+}
+
+void MPEG4Writer::writePitmBox() {
+    beginBox("pitm");
+    writeInt32(0);          // Version = 0, Flags = 0
+    writeInt16(mPrimaryItemId);
+    endBox();
+}
+
+void MPEG4Writer::writeIpcoBox() {
+    beginBox("ipco");
+    size_t numProperties = mProperties.size();
+    if (numProperties > 32767) {
+        ALOGW("Dropping excess properties: numProperties %zu", numProperties);
+        numProperties = 32767;
+    }
+    for (size_t propIndex = 0; propIndex < numProperties; propIndex++) {
+        if (mProperties[propIndex].type == FOURCC('h', 'v', 'c', 'C')) {
+            beginBox("hvcC");
+            sp<ABuffer> hvcc = mProperties[propIndex].hvcc;
+            // Patch hvcC's lengthSize field to match the number
+            // of bytes we use to indicate the size of a nal unit.
+            uint8_t *ptr = (uint8_t *)hvcc->data();
+            ptr[21] = (ptr[21] & 0xfc) | (useNalLengthFour() ? 3 : 1);
+            write(hvcc->data(), hvcc->size());
+            endBox();
+        } else if (mProperties[propIndex].type == FOURCC('i', 's', 'p', 'e')) {
+            beginBox("ispe");
+            writeInt32(0); // Version = 0, Flags = 0
+            writeInt32(mProperties[propIndex].width);
+            writeInt32(mProperties[propIndex].height);
+            endBox();
+        } else {
+            ALOGW("Skipping unrecognized property: type 0x%08x",
+                    mProperties[propIndex].type);
+        }
+    }
+    endBox();
+}
+
+void MPEG4Writer::writeIpmaBox() {
+    beginBox("ipma");
+    uint32_t flags = (mProperties.size() > 127) ? 1 : 0;
+    writeInt32(flags); // Version = 0, Flags = flags
+
+    writeInt32(mAssociationEntryCount);
+    for (size_t itemIndex = 0; itemIndex < mItems.size(); itemIndex++) {
+        const Vector<uint16_t> &properties = mItems[itemIndex].properties;
+        if (properties.empty()) {
+            continue;
+        }
+        writeInt16(mItems[itemIndex].itemId);
+
+        size_t entryCount = properties.size();
+        if (entryCount > 255) {
+            ALOGW("Dropping excess associations: entryCount %zu", entryCount);
+            entryCount = 255;
+        }
+        writeInt8((uint8_t)entryCount);
+        for (size_t propIndex = 0; propIndex < entryCount; propIndex++) {
+            if (flags & 1) {
+                writeInt16((1 << 15) | properties[propIndex]);
+            } else {
+                writeInt8((1 << 7) | properties[propIndex]);
+            }
+        }
+    }
+    endBox();
+}
+
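A minimal illustration of the association-entry encoding used above, with a hypothetical 1-based property index (addProperty_l() below returns 1-based indices); the high bit marks the association as essential, in either the 7-bit or the 15-bit form depending on flags:

    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint16_t propertyIndex = 2;  // hypothetical 1-based index into 'ipco'
        const uint8_t  entry7  = uint8_t((1 << 7)  | propertyIndex);  // flags == 0 path
        const uint16_t entry15 = uint16_t((1 << 15) | propertyIndex); // flags == 1 path
        std::printf("7-bit form: 0x%02x, 15-bit form: 0x%04x\n", entry7, entry15);
        return 0;
    }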
+void MPEG4Writer::writeIprpBox() {
+    beginBox("iprp");
+    writeIpcoBox();
+    writeIpmaBox();
+    endBox();
+}
+
+void MPEG4Writer::writeFileLevelMetaBox() {
+    if (mItems.empty()) {
+        ALOGE("no valid item was found");
+        return;
+    }
+
+    // patch up the mPrimaryItemId and count items with prop associations
+    uint16_t firstVisibleItemId = 0;
+    for (size_t index = 0; index < mItems.size(); index++) {
+        if (mItems[index].isPrimary) {
+            mPrimaryItemId = mItems[index].itemId;
+        } else if (!firstVisibleItemId && !mItems[index].isHidden) {
+            firstVisibleItemId = mItems[index].itemId;
+        }
+
+        if (!mItems[index].properties.empty()) {
+            mAssociationEntryCount++;
+        }
+    }
+
+    if (mPrimaryItemId == 0) {
+        if (firstVisibleItemId > 0) {
+            ALOGW("didn't find primary, using first visible item");
+            mPrimaryItemId = firstVisibleItemId;
+        } else {
+            ALOGW("no primary and no visible item, using first item");
+            mPrimaryItemId = mItems[0].itemId;
+        }
+    }
+
+    beginBox("meta");
+    writeInt32(0); // Version = 0, Flags = 0
+    writeHdlr("pict");
+    writeIlocBox();
+    writeIinfBox();
+    writePitmBox();
+    writeIprpBox();
+    if (mNumGrids > 0) {
+        writeIdatBox();
+        writeIrefBox();
+    }
+    endBox();
+}
+
+uint16_t MPEG4Writer::addProperty_l(const ItemProperty &prop) {
+    char typeStr[5];
+    MakeFourCCString(prop.type, typeStr);
+    ALOGV("addProperty_l: %s", typeStr);
+
+    mProperties.push_back(prop);
+
+    // returning 1-based property index
+    return mProperties.size();
+}
+
+uint16_t MPEG4Writer::addItem_l(const ItemInfo &info) {
+    ALOGV("addItem_l: type %s, offset %u, size %u",
+            info.itemType, info.offset, info.size);
+
+    size_t index = mItems.size();
+    mItems.push_back(info);
+
+    // make item ids start at 10000
+    mItems.editItemAt(index).itemId = index + 10000;
+
+#if (LOG_NDEBUG==0)
+    if (!info.properties.empty()) {
+        AString str;
+        for (size_t i = 0; i < info.properties.size(); i++) {
+            if (i > 0) {
+                str.append(", ");
+            }
+            str.append(info.properties[i]);
+        }
+        ALOGV("addItem_l: id %d, properties: %s", mItems[index].itemId, str.c_str());
+    }
+#endif // (LOG_NDEBUG==0)
+
+    return mItems[index].itemId;
+}
+
 /*
  * Geodata is stored according to ISO-6709 standard.
  */
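
For reference, each association entry written by writeIpmaBox() above carries an "essential" marker in the top bit of a 1-based property index, and is one or two bytes wide depending on the box flags (wide indices kick in once there are more than 127 properties). A standalone sketch of that packing; the helper name is illustrative and not part of the patch:

    #include <cstdint>
    #include <vector>

    // Illustrative helper: pack one ipma association entry the way writeIpmaBox()
    // does, with the "essential" bit set in the top bit of the 1-based property
    // index. wideIndices mirrors (mProperties.size() > 127).
    static std::vector<uint8_t> packAssociation(uint16_t propertyIndex1Based, bool wideIndices) {
        std::vector<uint8_t> out;
        if (wideIndices) {
            // flags & 1: 15-bit property indices, written big-endian like writeInt16().
            uint16_t v = (uint16_t)((1u << 15) | propertyIndex1Based);
            out.push_back((uint8_t)(v >> 8));
            out.push_back((uint8_t)(v & 0xff));
        } else {
            // 7-bit property indices, one byte per association like writeInt8().
            out.push_back((uint8_t)((1u << 7) | (propertyIndex1Based & 0x7f)));
        }
        return out;
    }
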
diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp
index d680e0c..74eb1ff 100644
--- a/media/libstagefright/MediaAdapter.cpp
+++ b/media/libstagefright/MediaAdapter.cpp
@@ -45,17 +45,24 @@
 }
 
 status_t MediaAdapter::stop() {
-    Mutex::Autolock autoLock(mAdapterLock);
-    if (mStarted) {
-        mStarted = false;
-        // If stop() happens immediately after a pushBuffer(), we should
-        // clean up the mCurrentMediaBuffer
-        if (mCurrentMediaBuffer != NULL) {
-            mCurrentMediaBuffer->release();
+    MediaBuffer *currentBuffer = NULL;
+    {
+        Mutex::Autolock autoLock(mAdapterLock);
+        if (mStarted) {
+            mStarted = false;
+            // If stop() happens immediately after a pushBuffer(), we should
+            // clean up mCurrentMediaBuffer, but it must be released without
+            // holding the lock, since signalBufferReturned() will acquire it.
+            currentBuffer = mCurrentMediaBuffer;
             mCurrentMediaBuffer = NULL;
+
+            // While read() is still waiting, we should signal it to finish.
+            mBufferReadCond.signal();
         }
-        // While read() is still waiting, we should signal it to finish.
-        mBufferReadCond.signal();
+    }
+    if (currentBuffer != NULL) {
+        currentBuffer->release();
+        currentBuffer = NULL;
     }
     return OK;
 }
@@ -97,7 +104,6 @@
 
     *buffer = mCurrentMediaBuffer;
     mCurrentMediaBuffer = NULL;
-    (*buffer)->setObserver(this);
 
     return OK;
 }
@@ -114,6 +120,7 @@
         return INVALID_OPERATION;
     }
     mCurrentMediaBuffer = buffer;
+    mCurrentMediaBuffer->setObserver(this);
     mBufferReadCond.signal();
 
     ALOGV("wait for the buffer returned @ pushBuffer! %p", buffer);
diff --git a/media/libstagefright/MediaClock.cpp b/media/libstagefright/MediaClock.cpp
index 3aa0061..15843a2 100644
--- a/media/libstagefright/MediaClock.cpp
+++ b/media/libstagefright/MediaClock.cpp
@@ -17,11 +17,12 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "MediaClock"
 #include <utils/Log.h>
+#include <map>
 
 #include <media/stagefright/MediaClock.h>
 
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
 
 namespace android {
 
@@ -29,15 +30,52 @@
 // If larger than this threshold, it's treated as discontinuity.
 static const int64_t kAnchorFluctuationAllowedUs = 10000ll;
 
+MediaClock::Timer::Timer(const sp<AMessage> &notify, int64_t mediaTimeUs, int64_t adjustRealUs)
+    : mNotify(notify),
+      mMediaTimeUs(mediaTimeUs),
+      mAdjustRealUs(adjustRealUs) {
+}
+
 MediaClock::MediaClock()
     : mAnchorTimeMediaUs(-1),
       mAnchorTimeRealUs(-1),
       mMaxTimeMediaUs(INT64_MAX),
       mStartingTimeMediaUs(-1),
-      mPlaybackRate(1.0) {
+      mPlaybackRate(1.0),
+      mGeneration(0) {
+    mLooper = new ALooper;
+    mLooper->setName("MediaClock");
+    mLooper->start(false /* runOnCallingThread */,
+                   false /* canCallJava */,
+                   ANDROID_PRIORITY_AUDIO);
+}
+
+void MediaClock::init() {
+    mLooper->registerHandler(this);
 }
 
 MediaClock::~MediaClock() {
+    reset();
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->stop();
+    }
+}
+
+void MediaClock::reset() {
+    Mutex::Autolock autoLock(mLock);
+    auto it = mTimers.begin();
+    while (it != mTimers.end()) {
+        it->mNotify->setInt32("reason", TIMER_REASON_RESET);
+        it->mNotify->post();
+        it = mTimers.erase(it);
+    }
+    mAnchorTimeMediaUs = -1;
+    mAnchorTimeRealUs = -1;
+    mMaxTimeMediaUs = INT64_MAX;
+    mStartingTimeMediaUs = -1;
+    mPlaybackRate = 1.0;
+    ++mGeneration;
 }
 
 void MediaClock::setStartingTimeMedia(int64_t startingTimeMediaUs) {
@@ -82,6 +120,9 @@
     }
     mAnchorTimeRealUs = nowUs;
     mAnchorTimeMediaUs = nowMediaUs;
+
+    ++mGeneration;
+    processTimers_l();
 }
 
 void MediaClock::updateMaxTimeMedia(int64_t maxTimeMediaUs) {
@@ -105,6 +146,11 @@
     }
     mAnchorTimeRealUs = nowUs;
     mPlaybackRate = rate;
+
+    if (rate > 0.0) {
+        ++mGeneration;
+        processTimers_l();
+    }
 }
 
 float MediaClock::getPlaybackRate() const {
@@ -165,4 +211,106 @@
     return OK;
 }
 
+void MediaClock::addTimer(const sp<AMessage> &notify, int64_t mediaTimeUs,
+                          int64_t adjustRealUs) {
+    Mutex::Autolock autoLock(mLock);
+
+    bool updateTimer = (mPlaybackRate != 0.0);
+    if (updateTimer) {
+        auto it = mTimers.begin();
+        while (it != mTimers.end()) {
+            if (((it->mAdjustRealUs - (double)adjustRealUs) * (double)mPlaybackRate
+                + (it->mMediaTimeUs - mediaTimeUs)) <= 0) {
+                updateTimer = false;
+                break;
+            }
+            ++it;
+        }
+    }
+
+    mTimers.emplace_back(notify, mediaTimeUs, adjustRealUs);
+
+    if (updateTimer) {
+        ++mGeneration;
+        processTimers_l();
+    }
+}
+
+void MediaClock::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatTimeIsUp:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            Mutex::Autolock autoLock(mLock);
+            if (generation != mGeneration) {
+                break;
+            }
+            processTimers_l();
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void MediaClock::processTimers_l() {
+    int64_t nowMediaTimeUs;
+    status_t status = getMediaTime_l(
+            ALooper::GetNowUs(), &nowMediaTimeUs, false /* allowPastMaxTime */);
+
+    if (status != OK) {
+        return;
+    }
+
+    int64_t nextLapseRealUs = INT64_MAX;
+    std::multimap<int64_t, Timer> notifyList;
+    auto it = mTimers.begin();
+    while (it != mTimers.end()) {
+        double diff = it->mAdjustRealUs * (double)mPlaybackRate
+            + it->mMediaTimeUs - nowMediaTimeUs;
+        int64_t diffMediaUs;
+        if (diff > (double)INT64_MAX) {
+            diffMediaUs = INT64_MAX;
+        } else if (diff < (double)INT64_MIN) {
+            diffMediaUs = INT64_MIN;
+        } else {
+            diffMediaUs = diff;
+        }
+
+        if (diffMediaUs <= 0) {
+            notifyList.emplace(diffMediaUs, *it);
+            it = mTimers.erase(it);
+        } else {
+            if (mPlaybackRate != 0.0
+                && (double)diffMediaUs < INT64_MAX * (double)mPlaybackRate) {
+                int64_t targetRealUs = diffMediaUs / (double)mPlaybackRate;
+                if (targetRealUs < nextLapseRealUs) {
+                    nextLapseRealUs = targetRealUs;
+                }
+            }
+            ++it;
+        }
+    }
+
+    auto itNotify = notifyList.begin();
+    while (itNotify != notifyList.end()) {
+        itNotify->second.mNotify->setInt32("reason", TIMER_REASON_REACHED);
+        itNotify->second.mNotify->post();
+        itNotify = notifyList.erase(itNotify);
+    }
+
+    if (mTimers.empty() || mPlaybackRate == 0.0 || mAnchorTimeMediaUs < 0
+        || nextLapseRealUs == INT64_MAX) {
+        return;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatTimeIsUp, this);
+    msg->setInt32("generation", mGeneration);
+    msg->post(nextLapseRealUs);
+}
+
 }  // namespace android
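
A rough usage sketch of the new timer API: a caller registers an AMessage with addTimer(), and MediaClock posts it back with a "reason" field once the requested media time is reached, or right away with a reset reason if the clock is reset first. The client class below is hypothetical; only init(), addTimer() and the "reason"/TIMER_REASON_* values come from the code above, and the exact enum spelling lives in MediaClock.h:

    #include <media/stagefright/MediaClock.h>
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/AMessage.h>

    // Hypothetical client; it must itself be registered on an ALooper before use.
    struct ClockClient : public android::AHandler {
        enum { kWhatClockTimer = 'clkT' };

        android::sp<android::MediaClock> mClock;

        ClockClient() : mClock(new android::MediaClock) {
            mClock->init();  // starts delivering timers on MediaClock's own looper
        }

        void armTimer() {
            android::sp<android::AMessage> notify =
                    new android::AMessage(kWhatClockTimer, this);
            // Fire when media time reaches ~5 s; adjustRealUs shifts the deadline in real time.
            mClock->addTimer(notify, 5000000ll /* mediaTimeUs */, 0 /* adjustRealUs */);
        }

        void onMessageReceived(const android::sp<android::AMessage> &msg) override {
            int32_t reason;
            CHECK(msg->findInt32("reason", &reason));
            // reason is the "reached" value when the media time elapsed, or the
            // "reset" value if MediaClock::reset() discarded the timer first.
            (void)reason;
        }
    };
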
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 759e42d..7cfa4ce 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -18,7 +18,6 @@
 #define LOG_TAG "MediaCodec"
 #include <inttypes.h>
 
-#include "include/avc_utils.h"
 #include "include/SecureBuffer.h"
 #include "include/SharedMemoryBuffer.h"
 #include "include/SoftwareRenderer.h"
@@ -29,6 +28,7 @@
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
+#include <cutils/properties.h>
 #include <gui/BufferQueue.h>
 #include <gui/Surface.h>
 #include <media/ICrypto.h>
@@ -41,9 +41,11 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/ACodec.h>
 #include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/CCodec.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaDefs.h>
@@ -549,8 +551,11 @@
 
 //static
 sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, bool nameIsType) {
-    // at this time only ACodec specifies a mime type.
-    if (nameIsType || name.startsWithIgnoreCase("omx.")) {
+    static bool ccodecEnabled = property_get_bool("debug.stagefright.ccodec", false);
+    if (ccodecEnabled && !nameIsType && name.startsWithIgnoreCase("c2.")) {
+        return new CCodec;
+    } else if (nameIsType || name.startsWithIgnoreCase("omx.")) {
+        // at this time only ACodec specifies a mime type.
         return new ACodec;
     } else if (name.startsWithIgnoreCase("android.filter.")) {
         return new MediaFilter;
@@ -1440,7 +1445,7 @@
                         {
                             if (actionCode == ACTION_CODE_FATAL) {
                                 mAnalyticsItem->setInt32(kCodecError, err);
-                                mAnalyticsItem->setInt32(kCodecErrorState, mState);
+                                mAnalyticsItem->setCString(kCodecErrorState, stateString(mState).c_str());
                                 flushAnalyticsItem();
                                 initAnalyticsItem();
                             }
@@ -1453,7 +1458,7 @@
                         {
                             if (actionCode == ACTION_CODE_FATAL) {
                                 mAnalyticsItem->setInt32(kCodecError, err);
-                                mAnalyticsItem->setInt32(kCodecErrorState, mState);
+                                mAnalyticsItem->setCString(kCodecErrorState, stateString(mState).c_str());
                                 flushAnalyticsItem();
                                 initAnalyticsItem();
                             }
@@ -1494,7 +1499,7 @@
                         {
                             if (actionCode == ACTION_CODE_FATAL) {
                                 mAnalyticsItem->setInt32(kCodecError, err);
-                                mAnalyticsItem->setInt32(kCodecErrorState, mState);
+                                mAnalyticsItem->setCString(kCodecErrorState, stateString(mState).c_str());
                                 flushAnalyticsItem();
                                 initAnalyticsItem();
 
@@ -1527,7 +1532,7 @@
                                 break;
                             default:
                                 mAnalyticsItem->setInt32(kCodecError, err);
-                                mAnalyticsItem->setInt32(kCodecErrorState, mState);
+                                mAnalyticsItem->setCString(kCodecErrorState, stateString(mState).c_str());
                                 flushAnalyticsItem();
                                 initAnalyticsItem();
                                 setState(UNINITIALIZED);
@@ -1849,7 +1854,6 @@
                                 }
                             }
                         }
-
                         if (mFlags & kFlagIsAsync) {
                             onOutputFormatChanged();
                         } else {
@@ -3224,4 +3228,28 @@
     }
 }
 
+std::string MediaCodec::stateString(State state) {
+    const char *rval = NULL;
+    char rawbuffer[16]; // room for "%d"
+
+    switch (state) {
+        case UNINITIALIZED: rval = "UNINITIALIZED"; break;
+        case INITIALIZING: rval = "INITIALIZING"; break;
+        case INITIALIZED: rval = "INITIALIZED"; break;
+        case CONFIGURING: rval = "CONFIGURING"; break;
+        case CONFIGURED: rval = "CONFIGURED"; break;
+        case STARTING: rval = "STARTING"; break;
+        case STARTED: rval = "STARTED"; break;
+        case FLUSHING: rval = "FLUSHING"; break;
+        case FLUSHED: rval = "FLUSHED"; break;
+        case STOPPING: rval = "STOPPING"; break;
+        case RELEASING: rval = "RELEASING"; break;
+        default:
+            snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
+            rval = rawbuffer;
+            break;
+    }
+    return rval;
+}
+
 }  // namespace android
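
GetCodecBase() above now routes component names starting with "c2." to CCodec, gated behind the debug.stagefright.ccodec system property (off by default). A sketch of the same dispatch in isolation; the helper and its return values are illustrative, not MediaCodec code:

    #include <cutils/properties.h>
    #include <strings.h>

    // Illustrative: mirror the backend choice made in MediaCodec::GetCodecBase().
    static const char *pickCodecBackend(const char *componentName) {
        static bool ccodecEnabled = property_get_bool("debug.stagefright.ccodec", false);
        if (ccodecEnabled && strncasecmp(componentName, "c2.", 3) == 0) {
            return "CCodec";    // Codec 2.0 components, opt-in via the property
        }
        if (strncasecmp(componentName, "omx.", 4) == 0) {
            return "ACodec";    // OMX components keep the existing path
        }
        return "other";         // filters, etc.
    }

The property can be flipped at runtime on a debuggable build, e.g. adb shell setprop debug.stagefright.ccodec 1, since property_get_bool treats "1" as true.
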
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 4652594..54265a4 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -24,23 +24,21 @@
 
 #include <media/IMediaCodecList.h>
 #include <media/IMediaPlayerService.h>
-#include <media/IMediaCodecService.h>
 #include <media/MediaCodecInfo.h>
-#include <media/MediaDefs.h>
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OmxInfoBuilder.h>
 #include <media/stagefright/omx/OMXUtils.h>
-#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+#include <xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h>
 
 #include <sys/stat.h>
 #include <utils/threads.h>
 
 #include <cutils/properties.h>
-#include <expat.h>
 
 #include <algorithm>
 
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index d808e5b..cf800b2 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -24,6 +24,7 @@
 #include <gui/Surface.h>
 #include <media/ICrypto.h>
 #include <media/MediaCodecBuffer.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
@@ -33,7 +34,6 @@
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaCodecSource.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
 
@@ -643,7 +643,9 @@
 
     if (mStopping && reachedEOS) {
         ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
-        mPuller->stopSource();
+        if (mPuller != NULL) {
+            mPuller->stopSource();
+        }
         ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
         // posting reply to everyone that's waiting
         List<sp<AReplyToken>>::iterator it;
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
deleted file mode 100644
index c91c82b..0000000
--- a/media/libstagefright/MediaExtractor.cpp
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaExtractor"
-#include <utils/Log.h>
-#include <inttypes.h>
-#include <pwd.h>
-
-#include "include/AMRExtractor.h"
-#include "include/MP3Extractor.h"
-#include "include/MPEG4Extractor.h"
-#include "include/WAVExtractor.h"
-#include "include/OggExtractor.h"
-#include "include/MPEG2PSExtractor.h"
-#include "include/MPEG2TSExtractor.h"
-#include "include/FLACExtractor.h"
-#include "include/AACExtractor.h"
-#include "include/MidiExtractor.h"
-
-#include "matroska/MatroskaExtractor.h"
-
-#include <binder/IServiceManager.h>
-#include <binder/MemoryDealer.h>
-
-#include <media/MediaAnalyticsItem.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MetaData.h>
-#include <media/IMediaExtractorService.h>
-#include <cutils/properties.h>
-#include <utils/String8.h>
-#include <private/android_filesystem_config.h>
-
-// still doing some on/off toggling here.
-#define MEDIA_LOG       1
-
-
-namespace android {
-
-// key for media statistics
-static const char *kKeyExtractor = "extractor";
-// attrs for media statistics
-static const char *kExtractorMime = "android.media.mediaextractor.mime";
-static const char *kExtractorTracks = "android.media.mediaextractor.ntrk";
-static const char *kExtractorFormat = "android.media.mediaextractor.fmt";
-
-MediaExtractor::MediaExtractor() {
-    if (!LOG_NDEBUG) {
-        uid_t uid = getuid();
-        struct passwd *pw = getpwuid(uid);
-        ALOGI("extractor created in uid: %d (%s)", getuid(), pw->pw_name);
-    }
-
-    mAnalyticsItem = NULL;
-    if (MEDIA_LOG) {
-        mAnalyticsItem = new MediaAnalyticsItem(kKeyExtractor);
-        (void) mAnalyticsItem->generateSessionID();
-    }
-}
-
-MediaExtractor::~MediaExtractor() {
-
-    // log the current record, provided it has some information worth recording
-    if (MEDIA_LOG) {
-        if (mAnalyticsItem != NULL) {
-            if (mAnalyticsItem->count() > 0) {
-                mAnalyticsItem->setFinalized(true);
-                mAnalyticsItem->selfrecord();
-            }
-        }
-    }
-    if (mAnalyticsItem != NULL) {
-        delete mAnalyticsItem;
-        mAnalyticsItem = NULL;
-    }
-}
-
-sp<MetaData> MediaExtractor::getMetaData() {
-    return new MetaData;
-}
-
-status_t MediaExtractor::getMetrics(Parcel *reply) {
-
-    if (mAnalyticsItem == NULL || reply == NULL) {
-        return UNKNOWN_ERROR;
-    }
-
-    populateMetrics();
-    mAnalyticsItem->writeToParcel(reply);
-
-    return OK;
-}
-
-void MediaExtractor::populateMetrics() {
-    ALOGV("MediaExtractor::populateMetrics");
-    // normally overridden in subclasses
-}
-
-uint32_t MediaExtractor::flags() const {
-    return CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_PAUSE | CAN_SEEK;
-}
-
-// static
-sp<IMediaExtractor> MediaExtractor::Create(
-        const sp<DataSource> &source, const char *mime) {
-    ALOGV("MediaExtractor::Create %s", mime);
-
-    if (!property_get_bool("media.stagefright.extractremote", true)) {
-        // local extractor
-        ALOGW("creating media extractor in calling process");
-        return CreateFromService(source, mime);
-    } else {
-        // remote extractor
-        ALOGV("get service manager");
-        sp<IBinder> binder = defaultServiceManager()->getService(String16("media.extractor"));
-
-        if (binder != 0) {
-            sp<IMediaExtractorService> mediaExService(interface_cast<IMediaExtractorService>(binder));
-            sp<IMediaExtractor> ex = mediaExService->makeExtractor(source->asIDataSource(), mime);
-            return ex;
-        } else {
-            ALOGE("extractor service not running");
-            return NULL;
-        }
-    }
-    return NULL;
-}
-
-sp<MediaExtractor> MediaExtractor::CreateFromService(
-        const sp<DataSource> &source, const char *mime) {
-
-    ALOGV("MediaExtractor::CreateFromService %s", mime);
-    RegisterDefaultSniffers();
-
-    // initialize source decryption if needed
-    source->DrmInitialization(nullptr /* mime */);
-
-    sp<AMessage> meta;
-
-    String8 tmp;
-    if (mime == NULL) {
-        float confidence;
-        if (!sniff(source, &tmp, &confidence, &meta)) {
-            ALOGW("FAILED to autodetect media content.");
-
-            return NULL;
-        }
-
-        mime = tmp.string();
-        ALOGV("Autodetected media content as '%s' with confidence %.2f",
-             mime, confidence);
-    }
-
-    MediaExtractor *ret = NULL;
-    if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG4)
-            || !strcasecmp(mime, "audio/mp4")) {
-        ret = new MPEG4Extractor(source);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
-        ret = new MP3Extractor(source, meta);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)
-            || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
-        ret = new AMRExtractor(source);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
-        ret = new FLACExtractor(source);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WAV)) {
-        ret = new WAVExtractor(source);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_OGG)) {
-        ret = new OggExtractor(source);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MATROSKA)) {
-        ret = new MatroskaExtractor(source);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
-        ret = new MPEG2TSExtractor(source);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC_ADTS)) {
-        ret = new AACExtractor(source, meta);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2PS)) {
-        ret = new MPEG2PSExtractor(source);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MIDI)) {
-        ret = new MidiExtractor(source);
-    }
-
-    if (ret != NULL) {
-       // track the container format (mpeg, aac, wvm, etc)
-       if (MEDIA_LOG) {
-          if (ret->mAnalyticsItem != NULL) {
-              size_t ntracks = ret->countTracks();
-              ret->mAnalyticsItem->setCString(kExtractorFormat,  ret->name());
-              // tracks (size_t)
-              ret->mAnalyticsItem->setInt32(kExtractorTracks,  ntracks);
-              // metadata
-              sp<MetaData> pMetaData = ret->getMetaData();
-              if (pMetaData != NULL) {
-                String8 xx = pMetaData->toString();
-                // 'titl' -- but this verges into PII
-                // 'mime'
-                const char *mime = NULL;
-                if (pMetaData->findCString(kKeyMIMEType, &mime)) {
-                    ret->mAnalyticsItem->setCString(kExtractorMime,  mime);
-                }
-                // what else is interesting and not already available?
-              }
-	  }
-       }
-    }
-
-    return ret;
-}
-
-Mutex MediaExtractor::gSnifferMutex;
-List<MediaExtractor::SnifferFunc> MediaExtractor::gSniffers;
-bool MediaExtractor::gSniffersRegistered = false;
-
-// static
-bool MediaExtractor::sniff(
-        const sp<DataSource> &source, String8 *mimeType, float *confidence, sp<AMessage> *meta) {
-    *mimeType = "";
-    *confidence = 0.0f;
-    meta->clear();
-
-    {
-        Mutex::Autolock autoLock(gSnifferMutex);
-        if (!gSniffersRegistered) {
-            return false;
-        }
-    }
-
-    for (List<SnifferFunc>::iterator it = gSniffers.begin();
-         it != gSniffers.end(); ++it) {
-        String8 newMimeType;
-        float newConfidence;
-        sp<AMessage> newMeta;
-        if ((*it)(source, &newMimeType, &newConfidence, &newMeta)) {
-            if (newConfidence > *confidence) {
-                *mimeType = newMimeType;
-                *confidence = newConfidence;
-                *meta = newMeta;
-            }
-        }
-    }
-
-    return *confidence > 0.0;
-}
-
-// static
-void MediaExtractor::RegisterSniffer_l(SnifferFunc func) {
-    for (List<SnifferFunc>::iterator it = gSniffers.begin();
-         it != gSniffers.end(); ++it) {
-        if (*it == func) {
-            return;
-        }
-    }
-
-    gSniffers.push_back(func);
-}
-
-// static
-void MediaExtractor::RegisterDefaultSniffers() {
-    Mutex::Autolock autoLock(gSnifferMutex);
-    if (gSniffersRegistered) {
-        return;
-    }
-
-    RegisterSniffer_l(SniffMPEG4);
-    RegisterSniffer_l(SniffMatroska);
-    RegisterSniffer_l(SniffOgg);
-    RegisterSniffer_l(SniffWAV);
-    RegisterSniffer_l(SniffFLAC);
-    RegisterSniffer_l(SniffAMR);
-    RegisterSniffer_l(SniffMPEG2TS);
-    RegisterSniffer_l(SniffMP3);
-    RegisterSniffer_l(SniffAAC);
-    RegisterSniffer_l(SniffMPEG2PS);
-    RegisterSniffer_l(SniffMidi);
-
-    gSniffersRegistered = true;
-}
-
-
-}  // namespace android
diff --git a/media/libstagefright/MediaExtractorFactory.cpp b/media/libstagefright/MediaExtractorFactory.cpp
new file mode 100644
index 0000000..8a90e93
--- /dev/null
+++ b/media/libstagefright/MediaExtractorFactory.cpp
@@ -0,0 +1,264 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaExtractor"
+#include <utils/Log.h>
+
+#include <binder/IServiceManager.h>
+#include <media/DataSource.h>
+#include <media/MediaAnalyticsItem.h>
+#include <media/MediaExtractor.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/InterfaceUtils.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MetaData.h>
+#include <media/IMediaExtractor.h>
+#include <media/IMediaExtractorService.h>
+#include <cutils/properties.h>
+#include <utils/String8.h>
+
+#include <dirent.h>
+#include <dlfcn.h>
+
+namespace android {
+
+// static
+sp<IMediaExtractor> MediaExtractorFactory::Create(
+        const sp<DataSource> &source, const char *mime) {
+    ALOGV("MediaExtractorFactory::%s %s", __func__, mime);
+
+    if (!property_get_bool("media.stagefright.extractremote", true)) {
+        // local extractor
+        ALOGW("creating media extractor in calling process");
+        sp<MediaExtractor> extractor = CreateFromService(source, mime);
+        return CreateIMediaExtractorFromMediaExtractor(extractor);
+    } else {
+        // remote extractor
+        ALOGV("get service manager");
+        sp<IBinder> binder = defaultServiceManager()->getService(String16("media.extractor"));
+
+        if (binder != 0) {
+            sp<IMediaExtractorService> mediaExService(interface_cast<IMediaExtractorService>(binder));
+            sp<IMediaExtractor> ex = mediaExService->makeExtractor(
+                    CreateIDataSourceFromDataSource(source), mime);
+            return ex;
+        } else {
+            ALOGE("extractor service not running");
+            return NULL;
+        }
+    }
+    return NULL;
+}
+
+// static
+sp<IMediaExtractor> MediaExtractorFactory::CreateFromFd(
+        int fd, int64_t offset, int64_t length, const char *mime, sp<DataSource> *out) {
+    ALOGV("MediaExtractorFactory::%s %s", __func__, mime);
+
+    if (property_get_bool("media.stagefright.extractremote", true)) {
+        // remote extractor
+        ALOGV("get service manager");
+        sp<IBinder> binder = defaultServiceManager()->getService(String16("media.extractor"));
+
+        if (binder != 0) {
+            sp<IMediaExtractorService> mediaExService(
+                    interface_cast<IMediaExtractorService>(binder));
+            if (!FileSource::requiresDrm(fd, offset, length, nullptr /* mime */)) {
+                ALOGD("FileSource remote");
+                sp<IDataSource> remoteSource =
+                    mediaExService->makeIDataSource(fd, offset, length);
+                ALOGV("IDataSource(FileSource): %p %d %lld %lld",
+                        remoteSource.get(), fd, (long long)offset, (long long)length);
+                if (remoteSource.get() != nullptr) {
+                    // replace the caller's local source with remote source.
+                    *out = CreateDataSourceFromIDataSource(remoteSource);
+                    return mediaExService->makeExtractor(remoteSource, mime);
+                } else {
+                    ALOGW("extractor service cannot make file source."
+                            " falling back to local file source.");
+                }
+            }
+            // Falls back.
+        } else {
+            ALOGE("extractor service not running");
+            return nullptr;
+        }
+    }
+    *out = new FileSource(fd, offset, length);
+    return Create(*out, mime);
+}
+
+sp<MediaExtractor> MediaExtractorFactory::CreateFromService(
+        const sp<DataSource> &source, const char *mime) {
+
+    ALOGV("MediaExtractorFactory::%s %s", __func__, mime);
+    RegisterDefaultSniffers();
+
+    // initialize source decryption if needed
+    source->DrmInitialization(nullptr /* mime */);
+
+    sp<AMessage> meta;
+
+    MediaExtractor::CreatorFunc creator = NULL;
+    String8 tmp;
+    float confidence;
+    creator = sniff(source, &tmp, &confidence, &meta);
+    if (!creator) {
+        ALOGV("FAILED to autodetect media content.");
+        return NULL;
+    }
+
+    mime = tmp.string();
+    ALOGV("Autodetected media content as '%s' with confidence %.2f",
+         mime, confidence);
+
+    MediaExtractor *ret = creator(source, meta);
+    return ret;
+}
+
+Mutex MediaExtractorFactory::gSnifferMutex;
+List<MediaExtractor::ExtractorDef> MediaExtractorFactory::gSniffers;
+bool MediaExtractorFactory::gSniffersRegistered = false;
+
+// static
+MediaExtractor::CreatorFunc MediaExtractorFactory::sniff(
+        const sp<DataSource> &source, String8 *mimeType, float *confidence, sp<AMessage> *meta) {
+    *mimeType = "";
+    *confidence = 0.0f;
+    meta->clear();
+
+    {
+        Mutex::Autolock autoLock(gSnifferMutex);
+        if (!gSniffersRegistered) {
+            return NULL;
+        }
+    }
+
+    MediaExtractor::CreatorFunc curCreator = NULL;
+    MediaExtractor::CreatorFunc bestCreator = NULL;
+    for (List<MediaExtractor::ExtractorDef>::iterator it = gSniffers.begin();
+         it != gSniffers.end(); ++it) {
+        String8 newMimeType;
+        float newConfidence;
+        sp<AMessage> newMeta;
+        if ((curCreator = (*it).sniff(source, &newMimeType, &newConfidence, &newMeta))) {
+            if (newConfidence > *confidence) {
+                *mimeType = newMimeType;
+                *confidence = newConfidence;
+                *meta = newMeta;
+                bestCreator = curCreator;
+            }
+        }
+    }
+
+    return bestCreator;
+}
+
+// static
+void MediaExtractorFactory::RegisterSniffer_l(const MediaExtractor::ExtractorDef &def) {
+    // sanity check: struct version, uuid, name
+    if (def.def_version == 0 || def.def_version > MediaExtractor::EXTRACTORDEF_VERSION) {
+        ALOGE("don't understand extractor format %u, ignoring.", def.def_version);
+        return;
+    }
+    if (memcmp(&def.extractor_uuid, "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", 16) == 0) {
+        ALOGE("invalid UUID, ignoring");
+        return;
+    }
+    if (def.extractor_name == NULL || strlen(def.extractor_name) == 0) {
+        ALOGE("extractors should have a name, ignoring");
+        return;
+    }
+
+    for (List<MediaExtractor::ExtractorDef>::iterator it = gSniffers.begin();
+            it != gSniffers.end(); ++it) {
+        if (memcmp(&((*it).extractor_uuid), &def.extractor_uuid, 16) == 0) {
+            // there's already an extractor with the same uuid
+            if ((*it).extractor_version < def.extractor_version) {
+                // this one is newer, replace the old one
+                ALOGW("replacing extractor '%s' version %u with version %u",
+                        def.extractor_name,
+                        (*it).extractor_version,
+                        def.extractor_version);
+                gSniffers.erase(it);
+                break;
+            } else {
+                ALOGW("ignoring extractor '%s' version %u in favor of version %u",
+                        def.extractor_name,
+                        def.extractor_version,
+                        (*it).extractor_version);
+                return;
+            }
+        }
+    }
+    ALOGV("registering extractor for %s", def.extractor_name);
+    gSniffers.push_back(def);
+}
+
+// static
+void MediaExtractorFactory::RegisterDefaultSniffers() {
+    Mutex::Autolock autoLock(gSnifferMutex);
+    if (gSniffersRegistered) {
+        return;
+    }
+
+    auto registerExtractors = [](const char *libDirPath) -> void {
+        DIR *libDir = opendir(libDirPath);
+        if (libDir) {
+            struct dirent* libEntry;
+            while ((libEntry = readdir(libDir))) {
+                String8 libPath = String8(libDirPath) + libEntry->d_name;
+                void *libHandle = dlopen(libPath.string(), RTLD_NOW | RTLD_LOCAL);
+                if (libHandle) {
+                    MediaExtractor::GetExtractorDef getsniffer =
+                            (MediaExtractor::GetExtractorDef) dlsym(libHandle, "GETEXTRACTORDEF");
+                    if (getsniffer) {
+                        ALOGV("registering sniffer for %s", libPath.string());
+                        RegisterSniffer_l(getsniffer());
+                    } else {
+                        ALOGW("%s does not contain sniffer", libPath.string());
+                        dlclose(libHandle);
+                    }
+                } else {
+                    ALOGW("couldn't dlopen(%s)", libPath.string());
+                }
+            }
+
+            closedir(libDir);
+        } else {
+            ALOGE("couldn't opendir(%s)", libDirPath);
+        }
+    };
+
+    registerExtractors("/system/lib"
+#ifdef __LP64__
+            "64"
+#endif
+            "/extractors/");
+
+    registerExtractors("/vendor/lib"
+#ifdef __LP64__
+            "64"
+#endif
+            "/extractors/");
+
+    gSniffersRegistered = true;
+}
+
+
+}  // namespace android
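
RegisterDefaultSniffers() above now discovers extractors by dlopen()-ing every library under /system/lib[64]/extractors/ and /vendor/lib[64]/extractors/ and resolving a C symbol named GETEXTRACTORDEF. A hedged sketch of what such a plugin entry point could look like; the field names (def_version, extractor_uuid, extractor_version, extractor_name, sniff) and the sniffer signature are inferred from the validation code above, and the exact struct layout is defined in MediaExtractor.h, so treat this only as an outline:

    #include <cstring>

    #include <media/DataSource.h>
    #include <media/MediaExtractor.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <utils/String8.h>

    using namespace android;

    // Hypothetical sniffer: a real one inspects the source and, on a match,
    // returns a CreatorFunc plus a confidence value.
    static MediaExtractor::CreatorFunc ExampleSniff(
            const sp<DataSource> &, String8 *, float *, sp<AMessage> *) {
        return NULL;  // no match
    }

    extern "C" {
    // Symbol resolved by MediaExtractorFactory::RegisterDefaultSniffers().
    MediaExtractor::ExtractorDef GETEXTRACTORDEF() {
        MediaExtractor::ExtractorDef def = {};
        def.def_version = MediaExtractor::EXTRACTORDEF_VERSION;
        def.extractor_version = 1;
        def.extractor_name = "Example Extractor";
        def.sniff = ExampleSniff;
        // The UUID must not be all zeroes; a real plugin uses a fixed, unique value.
        memset(&def.extractor_uuid, 0xAB, sizeof(def.extractor_uuid));
        return def;
    }
    }
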
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index c7b8888..62daac8 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -23,6 +23,8 @@
 
 #include <media/stagefright/MediaMuxer.h>
 
+#include <media/mediarecorder.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -31,17 +33,22 @@
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/Utils.h>
 
 namespace android {
 
+static bool isMp4Format(MediaMuxer::OutputFormat format) {
+    return format == MediaMuxer::OUTPUT_FORMAT_MPEG_4 ||
+           format == MediaMuxer::OUTPUT_FORMAT_THREE_GPP ||
+           format == MediaMuxer::OUTPUT_FORMAT_HEIF;
+}
+
 MediaMuxer::MediaMuxer(int fd, OutputFormat format)
     : mFormat(format),
       mState(UNINITIALIZED) {
-    if (format == OUTPUT_FORMAT_MPEG_4 || format == OUTPUT_FORMAT_THREE_GPP) {
+    if (isMp4Format(format)) {
         mWriter = new MPEG4Writer(fd);
     } else if (format == OUTPUT_FORMAT_WEBM) {
         mWriter = new WebmWriter(fd);
@@ -49,6 +56,10 @@
 
     if (mWriter != NULL) {
         mFileMeta = new MetaData;
+        if (format == OUTPUT_FORMAT_HEIF) {
+            // Note that the key uses recorder file types.
+            mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_HEIF);
+        }
         mState = INITIALIZED;
     }
 }
@@ -108,8 +119,8 @@
         ALOGE("setLocation() must be called before start().");
         return INVALID_OPERATION;
     }
-    if (mFormat != OUTPUT_FORMAT_MPEG_4 && mFormat != OUTPUT_FORMAT_THREE_GPP) {
-        ALOGE("setLocation() is only supported for .mp4 pr .3gp output.");
+    if (!isMp4Format(mFormat)) {
+        ALOGE("setLocation() is only supported for .mp4, .3gp or .heic output.");
         return INVALID_OPERATION;
     }
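
With the change above, OUTPUT_FORMAT_HEIF is routed to the same MPEG4Writer path as .mp4/.3gp, with kKeyFileType letting the writer switch to HEIF output. A minimal, hedged usage sketch; the file path and error handling are illustrative, and track setup follows the usual muxer flow:

    #include <fcntl.h>

    #include <media/stagefright/MediaMuxer.h>

    using namespace android;

    static sp<MediaMuxer> makeHeifMuxer(const char *path) {
        int fd = open(path, O_CREAT | O_WRONLY | O_TRUNC, 0644);
        if (fd < 0) {
            return NULL;
        }
        sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_HEIF);
        // addTrack() / start() / writeSampleData() / stop() then proceed as for MP4;
        // setLocation() is now accepted for HEIF output as well.
        return muxer;
    }
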
 
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
index 9278381..ba14e5d 100644
--- a/media/libstagefright/MediaSync.cpp
+++ b/media/libstagefright/MediaSync.cpp
@@ -61,6 +61,7 @@
         mNextBufferItemMediaUs(-1),
         mPlaybackRate(0.0) {
     mMediaClock = new MediaClock;
+    mMediaClock->init();
 
     // initialize settings
     mPlaybackSettings = AUDIO_PLAYBACK_RATE_DEFAULT;
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index afd6ffb..18f4b12 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -681,10 +681,6 @@
     return mSource->DrmInitialization(mime);
 }
 
-void NuCachedSource2::getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client) {
-    mSource->getDrmInfo(handle, client);
-}
-
 String8 NuCachedSource2::getUri() {
     return mSource->getUri();
 }
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 640cb82..17c9648 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -23,28 +23,40 @@
 #include "include/ESDS.h"
 #include "include/NuCachedSource2.h"
 
+#include <media/DataSource.h>
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/DataSourceFactory.h>
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
 
 namespace android {
 
+NuMediaExtractor::Sample::Sample()
+    : mBuffer(NULL),
+      mSampleTimeUs(-1ll) {
+}
+
+NuMediaExtractor::Sample::Sample(MediaBuffer *buffer, int64_t timeUs)
+    : mBuffer(buffer),
+      mSampleTimeUs(timeUs) {
+}
+
 NuMediaExtractor::NuMediaExtractor()
     : mTotalBitrate(-1ll),
       mDurationUs(-1ll) {
 }
 
 NuMediaExtractor::~NuMediaExtractor() {
-    releaseTrackSamples();
+    releaseAllTrackSamples();
 
     for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
         TrackInfo *info = &mSelectedTracks.editItemAt(i);
@@ -60,7 +72,7 @@
 }
 
 status_t NuMediaExtractor::setDataSource(
-        const sp<IMediaHTTPService> &httpService,
+        const sp<MediaHTTPService> &httpService,
         const char *path,
         const KeyedVector<String8, String8> *headers) {
     Mutex::Autolock autoLock(mLock);
@@ -70,13 +82,13 @@
     }
 
     sp<DataSource> dataSource =
-        DataSource::CreateFromURI(httpService, path, headers);
+        DataSourceFactory::CreateFromURI(httpService, path, headers);
 
     if (dataSource == NULL) {
         return -ENOENT;
     }
 
-    mImpl = MediaExtractor::Create(dataSource);
+    mImpl = MediaExtractorFactory::Create(dataSource);
 
     if (mImpl == NULL) {
         return ERROR_UNSUPPORTED;
@@ -112,7 +124,7 @@
         return err;
     }
 
-    mImpl = MediaExtractor::Create(fileSource);
+    mImpl = MediaExtractorFactory::Create(fileSource);
 
     if (mImpl == NULL) {
         return ERROR_UNSUPPORTED;
@@ -142,7 +154,7 @@
         return err;
     }
 
-    mImpl = MediaExtractor::Create(source);
+    mImpl = MediaExtractorFactory::Create(source);
 
     if (mImpl == NULL) {
         return ERROR_UNSUPPORTED;
@@ -286,7 +298,8 @@
     return OK;
 }
 
-status_t NuMediaExtractor::selectTrack(size_t index) {
+status_t NuMediaExtractor::selectTrack(size_t index,
+        int64_t startTimeUs, MediaSource::ReadOptions::SeekMode mode) {
     Mutex::Autolock autoLock(mLock);
 
     if (mImpl == NULL) {
@@ -309,31 +322,56 @@
     sp<IMediaSource> source = mImpl->getTrack(index);
 
     if (source == nullptr) {
+        ALOGE("track %zu is empty", index);
         return ERROR_MALFORMED;
     }
 
     status_t ret = source->start();
     if (ret != OK) {
+        ALOGE("track %zu failed to start", index);
         return ret;
     }
 
+    sp<MetaData> meta = source->getFormat();
+    if (meta == NULL) {
+        ALOGE("track %zu has no meta data", index);
+        return ERROR_MALFORMED;
+    }
+
+    const char *mime;
+    if (!meta->findCString(kKeyMIMEType, &mime)) {
+        ALOGE("track %zu has no mime type in meta data", index);
+        return ERROR_MALFORMED;
+    }
+    ALOGV("selectTrack, track[%zu]: %s", index, mime);
+
     mSelectedTracks.push();
     TrackInfo *info = &mSelectedTracks.editItemAt(mSelectedTracks.size() - 1);
 
     info->mSource = source;
     info->mTrackIndex = index;
+    if (!strncasecmp(mime, "audio/", 6)) {
+        info->mTrackType = MEDIA_TRACK_TYPE_AUDIO;
+        info->mMaxFetchCount = 64;
+    } else if (!strncasecmp(mime, "video/", 6)) {
+        info->mTrackType = MEDIA_TRACK_TYPE_VIDEO;
+        info->mMaxFetchCount = 8;
+    } else {
+        info->mTrackType = MEDIA_TRACK_TYPE_UNKNOWN;
+        info->mMaxFetchCount = 1;
+    }
     info->mFinalResult = OK;
-    info->mSample = NULL;
-    info->mSampleTimeUs = -1ll;
+    releaseTrackSamples(info);
     info->mTrackFlags = 0;
 
-    const char *mime;
-    CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime));
-
     if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
         info->mTrackFlags |= kIsVorbis;
     }
 
+    if (startTimeUs >= 0) {
+        fetchTrackSamples(info, startTimeUs, mode);
+    }
+
     return OK;
 }
 
@@ -364,12 +402,7 @@
 
     TrackInfo *info = &mSelectedTracks.editItemAt(i);
 
-    if (info->mSample != NULL) {
-        info->mSample->release();
-        info->mSample = NULL;
-
-        info->mSampleTimeUs = -1ll;
-    }
+    releaseTrackSamples(info);
 
     CHECK_EQ((status_t)OK, info->mSource->stop());
 
@@ -378,79 +411,136 @@
     return OK;
 }
 
-void NuMediaExtractor::releaseTrackSamples() {
-    for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
-        TrackInfo *info = &mSelectedTracks.editItemAt(i);
+void NuMediaExtractor::releaseOneSample(TrackInfo *info) {
+    if (info == NULL || info->mSamples.empty()) {
+        return;
+    }
 
-        if (info->mSample != NULL) {
-            info->mSample->release();
-            info->mSample = NULL;
+    auto it = info->mSamples.begin();
+    if (it->mBuffer != NULL) {
+        it->mBuffer->release();
+    }
+    info->mSamples.erase(it);
+}
 
-            info->mSampleTimeUs = -1ll;
+void NuMediaExtractor::releaseTrackSamples(TrackInfo *info) {
+    if (info == NULL) {
+        return;
+    }
+
+    auto it = info->mSamples.begin();
+    while (it != info->mSamples.end()) {
+        if (it->mBuffer != NULL) {
+            it->mBuffer->release();
         }
+        it = info->mSamples.erase(it);
     }
 }
 
-ssize_t NuMediaExtractor::fetchTrackSamples(
+void NuMediaExtractor::releaseAllTrackSamples() {
+    for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
+        releaseTrackSamples(&mSelectedTracks.editItemAt(i));
+    }
+}
+
+ssize_t NuMediaExtractor::fetchAllTrackSamples(
         int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode) {
     TrackInfo *minInfo = NULL;
     ssize_t minIndex = -1;
 
     for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
         TrackInfo *info = &mSelectedTracks.editItemAt(i);
+        fetchTrackSamples(info, seekTimeUs, mode);
 
-        if (seekTimeUs >= 0ll) {
-            info->mFinalResult = OK;
-
-            if (info->mSample != NULL) {
-                info->mSample->release();
-                info->mSample = NULL;
-                info->mSampleTimeUs = -1ll;
-            }
-        } else if (info->mFinalResult != OK) {
+        if (info->mSamples.empty()) {
             continue;
         }
 
-        if (info->mSample == NULL) {
-            MediaSource::ReadOptions options;
-            if (seekTimeUs >= 0ll) {
-                options.setSeekTo(seekTimeUs, mode);
-            }
-            status_t err = info->mSource->read(&info->mSample, &options);
-
-            if (err != OK) {
-                CHECK(info->mSample == NULL);
-
-                info->mFinalResult = err;
-
-                if (info->mFinalResult != ERROR_END_OF_STREAM) {
-                    ALOGW("read on track %zu failed with error %d",
-                          info->mTrackIndex, err);
-                }
-
-                info->mSampleTimeUs = -1ll;
-                continue;
-            } else {
-                CHECK(info->mSample != NULL);
-                CHECK(info->mSample->meta_data()->findInt64(
-                            kKeyTime, &info->mSampleTimeUs));
-            }
-        }
-
-        if (minInfo == NULL  || info->mSampleTimeUs < minInfo->mSampleTimeUs) {
+        if (minInfo == NULL) {
             minInfo = info;
             minIndex = i;
+        } else {
+            auto it = info->mSamples.begin();
+            auto itMin = minInfo->mSamples.begin();
+            if (it->mSampleTimeUs < itMin->mSampleTimeUs) {
+                minInfo = info;
+                minIndex = i;
+            }
         }
     }
 
     return minIndex;
 }
 
+void NuMediaExtractor::fetchTrackSamples(TrackInfo *info,
+        int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode) {
+    if (info == NULL) {
+        return;
+    }
+
+    MediaSource::ReadOptions options;
+    if (seekTimeUs >= 0ll) {
+        options.setSeekTo(seekTimeUs, mode);
+        info->mFinalResult = OK;
+        releaseTrackSamples(info);
+    } else if (info->mFinalResult != OK || !info->mSamples.empty()) {
+        return;
+    }
+
+    status_t err = OK;
+    Vector<MediaBuffer *> mediaBuffers;
+    if (info->mSource->supportReadMultiple()) {
+        options.setNonBlocking();
+        err = info->mSource->readMultiple(&mediaBuffers, info->mMaxFetchCount, &options);
+    } else {
+        MediaBuffer *mbuf = NULL;
+        err = info->mSource->read(&mbuf, &options);
+        if (err == OK && mbuf != NULL) {
+            mediaBuffers.push_back(mbuf);
+        }
+    }
+
+    info->mFinalResult = err;
+    if (err != OK && err != ERROR_END_OF_STREAM) {
+        ALOGW("read on track %zu failed with error %d", info->mTrackIndex, err);
+        size_t count = mediaBuffers.size();
+        for (size_t id = 0; id < count; ++id) {
+            MediaBuffer *mbuf = mediaBuffers[id];
+            if (mbuf != NULL) {
+                mbuf->release();
+            }
+        }
+        return;
+    }
+
+    size_t count = mediaBuffers.size();
+    bool releaseRemaining = false;
+    for (size_t id = 0; id < count; ++id) {
+        int64_t timeUs;
+        MediaBuffer *mbuf = mediaBuffers[id];
+        if (mbuf == NULL) {
+            continue;
+        }
+        if (releaseRemaining) {
+            mbuf->release();
+            continue;
+        }
+        if (mbuf->meta_data()->findInt64(kKeyTime, &timeUs)) {
+            info->mSamples.emplace_back(mbuf, timeUs);
+        } else {
+            mbuf->meta_data()->dumpToLog();
+            info->mFinalResult = ERROR_MALFORMED;
+            mbuf->release();
+            releaseRemaining = true;
+        }
+    }
+}
+
 status_t NuMediaExtractor::seekTo(
         int64_t timeUs, MediaSource::ReadOptions::SeekMode mode) {
     Mutex::Autolock autoLock(mLock);
 
-    ssize_t minIndex = fetchTrackSamples(timeUs, mode);
+    ssize_t minIndex = fetchAllTrackSamples(timeUs, mode);
 
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
@@ -462,7 +552,7 @@
 status_t NuMediaExtractor::advance() {
     Mutex::Autolock autoLock(mLock);
 
-    ssize_t minIndex = fetchTrackSamples();
+    ssize_t minIndex = fetchAllTrackSamples();
 
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
@@ -470,28 +560,26 @@
 
     TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
 
-    info->mSample->release();
-    info->mSample = NULL;
-    info->mSampleTimeUs = -1ll;
+    releaseOneSample(info);
 
     return OK;
 }
 
-status_t NuMediaExtractor::appendVorbisNumPageSamples(TrackInfo *info, const sp<ABuffer> &buffer) {
+status_t NuMediaExtractor::appendVorbisNumPageSamples(MediaBuffer *mbuf, const sp<ABuffer> &buffer) {
     int32_t numPageSamples;
-    if (!info->mSample->meta_data()->findInt32(
+    if (!mbuf->meta_data()->findInt32(
             kKeyValidSamples, &numPageSamples)) {
         numPageSamples = -1;
     }
 
-    memcpy((uint8_t *)buffer->data() + info->mSample->range_length(),
+    memcpy((uint8_t *)buffer->data() + mbuf->range_length(),
            &numPageSamples,
            sizeof(numPageSamples));
 
     uint32_t type;
     const void *data;
     size_t size, size2;
-    if (info->mSample->meta_data()->findData(kKeyEncryptedSizes, &type, &data, &size)) {
+    if (mbuf->meta_data()->findData(kKeyEncryptedSizes, &type, &data, &size)) {
         // Signal numPageSamples (a plain int32_t) is appended at the end,
         // i.e. sizeof(numPageSamples) plain bytes + 0 encrypted bytes
         if (SIZE_MAX - size < sizeof(int32_t)) {
@@ -509,9 +597,9 @@
         int32_t zero = 0;
         memcpy(adata, data, size);
         memcpy(adata + size, &zero, sizeof(zero));
-        info->mSample->meta_data()->setData(kKeyEncryptedSizes, type, adata, newSize);
+        mbuf->meta_data()->setData(kKeyEncryptedSizes, type, adata, newSize);
 
-        if (info->mSample->meta_data()->findData(kKeyPlainSizes, &type, &data, &size2)) {
+        if (mbuf->meta_data()->findData(kKeyPlainSizes, &type, &data, &size2)) {
             if (size2 != size) {
                 return ERROR_MALFORMED;
             }
@@ -524,7 +612,7 @@
         // append sizeof(numPageSamples) to plain sizes.
         int32_t int32Size = sizeof(numPageSamples);
         memcpy(adata + size, &int32Size, sizeof(int32Size));
-        info->mSample->meta_data()->setData(kKeyPlainSizes, type, adata, newSize);
+        mbuf->meta_data()->setData(kKeyPlainSizes, type, adata, newSize);
     }
 
     return OK;
@@ -533,7 +621,7 @@
 status_t NuMediaExtractor::readSampleData(const sp<ABuffer> &buffer) {
     Mutex::Autolock autoLock(mLock);
 
-    ssize_t minIndex = fetchTrackSamples();
+    ssize_t minIndex = fetchAllTrackSamples();
 
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
@@ -541,7 +629,8 @@
 
     TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
 
-    size_t sampleSize = info->mSample->range_length();
+    auto it = info->mSamples.begin();
+    size_t sampleSize = it->mBuffer->range_length();
 
     if (info->mTrackFlags & kIsVorbis) {
         // Each sample's data is suffixed by the number of page samples
@@ -554,14 +643,14 @@
     }
 
     const uint8_t *src =
-        (const uint8_t *)info->mSample->data()
-            + info->mSample->range_offset();
+        (const uint8_t *)it->mBuffer->data()
+            + it->mBuffer->range_offset();
 
-    memcpy((uint8_t *)buffer->data(), src, info->mSample->range_length());
+    memcpy((uint8_t *)buffer->data(), src, it->mBuffer->range_length());
 
     status_t err = OK;
     if (info->mTrackFlags & kIsVorbis) {
-        err = appendVorbisNumPageSamples(info, buffer);
+        err = appendVorbisNumPageSamples(it->mBuffer, buffer);
     }
 
     if (err == OK) {
@@ -574,7 +663,7 @@
 status_t NuMediaExtractor::getSampleTrackIndex(size_t *trackIndex) {
     Mutex::Autolock autoLock(mLock);
 
-    ssize_t minIndex = fetchTrackSamples();
+    ssize_t minIndex = fetchAllTrackSamples();
 
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
@@ -589,14 +678,14 @@
 status_t NuMediaExtractor::getSampleTime(int64_t *sampleTimeUs) {
     Mutex::Autolock autoLock(mLock);
 
-    ssize_t minIndex = fetchTrackSamples();
+    ssize_t minIndex = fetchAllTrackSamples();
 
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
     }
 
     TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
-    *sampleTimeUs = info->mSampleTimeUs;
+    *sampleTimeUs = info->mSamples.begin()->mSampleTimeUs;
 
     return OK;
 }
@@ -606,14 +695,14 @@
 
     *sampleMeta = NULL;
 
-    ssize_t minIndex = fetchTrackSamples();
+    ssize_t minIndex = fetchAllTrackSamples();
 
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
     }
 
     TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
-    *sampleMeta = info->mSample->meta_data();
+    *sampleMeta = info->mSamples.begin()->mBuffer->meta_data();
 
     return OK;
 }
@@ -624,7 +713,7 @@
 }
 
 bool NuMediaExtractor::getTotalBitrate(int64_t *bitrate) const {
-    if (mTotalBitrate >= 0) {
+    if (mTotalBitrate > 0) {
         *bitrate = mTotalBitrate;
         return true;
     }
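
NuMediaExtractor now queues multiple Samples per track (batched through readMultiple() when the source supports it) and selectTrack() can seek the track at selection time. The consumer-side loop keeps its old shape; a hedged sketch with placeholder track index and buffer size:

    #include <media/stagefright/NuMediaExtractor.h>
    #include <media/stagefright/foundation/ABuffer.h>

    using namespace android;

    static void drainTrack(const sp<NuMediaExtractor> &extractor, size_t trackIndex) {
        // New in this change: select and seek the track in one call.
        extractor->selectTrack(trackIndex, 0 /* startTimeUs */,
                MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);

        sp<ABuffer> buffer = new ABuffer(1 << 20);  // scratch buffer; size is a placeholder
        while (extractor->readSampleData(buffer) == OK) {
            int64_t timeUs = -1;
            extractor->getSampleTime(&timeUs);
            // ... consume buffer->data() / buffer->size() at timeUs ...
            extractor->advance();
        }
    }
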
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 5f50e46..9375de1 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -25,7 +25,6 @@
 #include <cutils/properties.h>
 
 #include <binder/IServiceManager.h>
-#include <media/IMediaCodecService.h>
 #include <media/stagefright/OMXClient.h>
 
 #include <media/IOMX.h>
@@ -38,70 +37,25 @@
 }
 
 status_t OMXClient::connect() {
-    return connect("default", nullptr);
+    return connect("default");
 }
 
-status_t OMXClient::connect(bool* trebleFlag) {
-    if (property_get_bool("persist.media.treble_omx", true)) {
-        if (trebleFlag != nullptr) {
-            *trebleFlag = true;
-        }
-        return connectTreble();
-    }
-    if (trebleFlag != nullptr) {
-        *trebleFlag = false;
-    }
-    return connectLegacy();
-}
-
-status_t OMXClient::connect(const char* name, bool* trebleFlag) {
-    if (property_get_bool("persist.media.treble_omx", true)) {
-        if (trebleFlag != nullptr) {
-            *trebleFlag = true;
-        }
-        return connectTreble(name);
-    }
-    if (trebleFlag != nullptr) {
-        *trebleFlag = false;
-    }
-    return connectLegacy();
-}
-
-status_t OMXClient::connectLegacy() {
-    sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> codecbinder = sm->getService(String16("media.codec"));
-    sp<IMediaCodecService> codecservice = interface_cast<IMediaCodecService>(codecbinder);
-
-    if (codecservice.get() == NULL) {
-        ALOGE("Cannot obtain IMediaCodecService");
-        return NO_INIT;
-    }
-
-    mOMX = codecservice->getOMX();
-    if (mOMX.get() == NULL) {
-        ALOGE("Cannot obtain mediacodec IOMX");
-        return NO_INIT;
-    }
-
-    return OK;
-}
-
-status_t OMXClient::connectTreble(const char* name) {
+status_t OMXClient::connect(const char* name) {
     using namespace ::android::hardware::media::omx::V1_0;
     if (name == nullptr) {
         name = "default";
     }
     sp<IOmx> tOmx = IOmx::getService(name);
     if (tOmx.get() == nullptr) {
-        ALOGE("Cannot obtain Treble IOmx.");
+        ALOGE("Cannot obtain IOmx service.");
         return NO_INIT;
     }
     if (!tOmx->isRemote()) {
-        ALOGE("Treble IOmx is in passthrough mode.");
+        ALOGE("IOmx service running in passthrough mode.");
         return NO_INIT;
     }
     mOMX = new utils::LWOmx(tOmx);
-    ALOGI("Treble IOmx obtained");
+    ALOGI("IOmx service obtained");
     return OK;
 }
 
@@ -109,4 +63,8 @@
     mOMX.clear();
 }
 
+sp<IOMX> OMXClient::interface() {
+    return mOMX;
+}
+
 }  // namespace android
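
With the legacy (non-Treble) code path removed, OMXClient::connect() always goes through the HIDL IOmx service and the treble-flag plumbing disappears from callers, which reduce to the sketch below (error handling is illustrative):

    #include <media/stagefright/OMXClient.h>

    using namespace android;

    static sp<IOMX> obtainOmx() {
        OMXClient client;
        if (client.connect() != OK) {   // binds to the "default" IOmx HAL instance
            return NULL;
        }
        return client.interface();      // accessor added by this change
    }
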
diff --git a/media/libstagefright/OmxInfoBuilder.cpp b/media/libstagefright/OmxInfoBuilder.cpp
index 8717a79..a6ebadd 100644
--- a/media/libstagefright/OmxInfoBuilder.cpp
+++ b/media/libstagefright/OmxInfoBuilder.cpp
@@ -24,8 +24,7 @@
 #include <utils/Log.h>
 #include <cutils/properties.h>
 
-#include <binder/IServiceManager.h>
-#include <media/IMediaCodecService.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 #include <media/stagefright/OmxInfoBuilder.h>
 #include <media/stagefright/ACodec.h>
 
@@ -34,10 +33,9 @@
 #include <android/hardware/media/omx/1.0/IOmxNode.h>
 #include <media/stagefright/omx/OMXUtils.h>
 
-#include <media/IOMXStore.h>
 #include <media/IOMX.h>
-#include <media/MediaDefs.h>
 #include <media/omx/1.0/WOmx.h>
+#include <media/stagefright/omx/1.0/OmxStore.h>
 
 #include <media/openmax/OMX_Index.h>
 #include <media/openmax/OMX_IndexExt.h>
@@ -48,10 +46,18 @@
 
 namespace android {
 
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using namespace ::android::hardware::media::omx::V1_0;
+
 namespace /* unnamed */ {
 
+bool hasPrefix(const hidl_string& s, const char* prefix) {
+    return strncmp(s.c_str(), prefix, strlen(prefix)) == 0;
+}
+
 status_t queryCapabilities(
-        const IOMXStore::NodeInfo& node, const char* mime, bool isEncoder,
+        const IOmxStore::NodeInfo& node, const char* mime, bool isEncoder,
         MediaCodecInfo::CapabilitiesWriter* caps) {
     sp<ACodec> codec = new ACodec();
     status_t err = codec->queryCapabilities(
@@ -62,14 +68,13 @@
     for (const auto& attribute : node.attributes) {
         // All features have an int32 value except
         // "feature-bitrate-modes", which has a string value.
-        if ((attribute.key.compare(0, 8, "feature-") == 0) &&
-                (attribute.key.compare(8, 15, "bitrate-modes")
-                 != 0)) {
-            // If this attribute.key is a feature that is not a bitrate
-            // control, add an int32 value.
+        if (hasPrefix(attribute.key, "feature-") &&
+                !hasPrefix(attribute.key, "feature-bitrate-modes")) {
+            // If this attribute.key is a feature other than bitrate modes,
+            // add an int32 value.
             caps->addDetail(
                     attribute.key.c_str(),
-                    attribute.value == "1" ? 1 : 0);
+                    hasPrefix(attribute.value, "1") ? 1 : 0);
         } else {
             // Non-feature attributes
             caps->addDetail(
@@ -85,138 +90,70 @@
 }
 
 status_t OmxInfoBuilder::buildMediaCodecList(MediaCodecListWriter* writer) {
-    bool treble;
-    sp<IOMX> omx;
-    std::vector<IOMXStore::RoleInfo> roles;
+    // Obtain IOmxStore
+    sp<IOmxStore> omxStore = IOmxStore::getService();
+    if (omxStore == nullptr) {
+        ALOGE("Cannot find an IOmxStore service.");
+        return NO_INIT;
+    }
 
-    treble = property_get_bool("persist.media.treble_omx", true);
-    if (treble) {
-        using namespace ::android::hardware::media::omx::V1_0;
-        using ::android::hardware::hidl_vec;
-        using ::android::hardware::hidl_string;
-
-        // Obtain IOmxStore
-        sp<IOmxStore> omxStore = IOmxStore::getService();
+    // List service attributes (global settings)
+    Status status;
+    hidl_vec<IOmxStore::RoleInfo> roles;
+    auto transStatus = omxStore->listRoles(
+            [&roles] (
+            const hidl_vec<IOmxStore::RoleInfo>& inRoleList) {
+                roles = inRoleList;
+            });
+    if (!transStatus.isOk()) {
+        ALOGE("Failed to obtain codec roles from IOmxStore.");
+        return NO_INIT;
+    } else if (roles.size() == 0) {
+        ALOGW("IOmxStore has an empty implementation. "
+                "Creating a local default instance...");
+        omxStore = new implementation::OmxStore();
         if (omxStore == nullptr) {
-            ALOGE("Cannot connect to an IOmxStore instance.");
+            ALOGE("Cannot create a local default instance.");
             return NO_INIT;
         }
-
-        // List service attributes (global settings)
-        Status status;
-        hidl_vec<IOmxStore::ServiceAttribute> serviceAttributes;
-        auto transStatus = omxStore->listServiceAttributes(
-                [&status, &serviceAttributes]
-                (Status inStatus, const hidl_vec<IOmxStore::ServiceAttribute>&
-                        inAttributes) {
-                    status = inStatus;
-                    serviceAttributes = inAttributes;
-                });
-        if (!transStatus.isOk()) {
-            ALOGE("Fail to obtain global settings from IOmxStore.");
-            return NO_INIT;
-        }
-        if (status != Status::OK) {
-            ALOGE("IOmxStore reports parsing error.");
-            return NO_INIT;
-        }
-        for (const auto& p : serviceAttributes) {
-            writer->addGlobalSetting(
-                    p.key.c_str(), p.value.c_str());
-        }
-
-        // List roles and convert to IOMXStore's format
+        ALOGI("IOmxStore local default instance created.");
         transStatus = omxStore->listRoles(
-                [&roles]
-                (const hidl_vec<IOmxStore::RoleInfo>& inRoleList) {
-                    roles.reserve(inRoleList.size());
-                    for (const auto& inRole : inRoleList) {
-                        IOMXStore::RoleInfo role;
-                        role.role = inRole.role;
-                        role.type = inRole.type;
-                        role.isEncoder = inRole.isEncoder;
-                        role.preferPlatformNodes = inRole.preferPlatformNodes;
-                        std::vector<IOMXStore::NodeInfo>& nodes =
-                                role.nodes;
-                        nodes.reserve(inRole.nodes.size());
-                        for (const auto& inNode : inRole.nodes) {
-                            IOMXStore::NodeInfo node;
-                            node.name = inNode.name;
-                            node.owner = inNode.owner;
-                            std::vector<IOMXStore::Attribute>& attributes =
-                                    node.attributes;
-                            attributes.reserve(inNode.attributes.size());
-                            for (const auto& inAttr : inNode.attributes) {
-                                IOMXStore::Attribute attr;
-                                attr.key = inAttr.key;
-                                attr.value = inAttr.value;
-                                attributes.push_back(std::move(attr));
-                            }
-                            nodes.push_back(std::move(node));
-                        }
-                        roles.push_back(std::move(role));
-                    }
+                [&roles] (
+                const hidl_vec<IOmxStore::RoleInfo>& inRoleList) {
+                    roles = inRoleList;
                 });
         if (!transStatus.isOk()) {
-            ALOGE("Fail to obtain codec roles from IOmxStore.");
-            return NO_INIT;
-        }
-    } else {
-        // Obtain IOMXStore
-        sp<IServiceManager> sm = defaultServiceManager();
-        if (sm == nullptr) {
-            ALOGE("Cannot obtain the default service manager.");
-            return NO_INIT;
-        }
-        sp<IBinder> codecBinder = sm->getService(String16("media.codec"));
-        if (codecBinder == nullptr) {
-            ALOGE("Cannot obtain the media codec service.");
-            return NO_INIT;
-        }
-        sp<IMediaCodecService> codecService =
-                interface_cast<IMediaCodecService>(codecBinder);
-        if (codecService == nullptr) {
-            ALOGE("Wrong type of media codec service obtained.");
-            return NO_INIT;
-        }
-        omx = codecService->getOMX();
-        if (omx == nullptr) {
-            ALOGE("Cannot connect to an IOMX instance.");
-        }
-        sp<IOMXStore> omxStore = codecService->getOMXStore();
-        if (omxStore == nullptr) {
-            ALOGE("Cannot connect to an IOMXStore instance.");
-            return NO_INIT;
-        }
-
-        // List service attributes (global settings)
-        std::vector<IOMXStore::Attribute> serviceAttributes;
-        status_t status = omxStore->listServiceAttributes(&serviceAttributes);
-        if (status != OK) {
-            ALOGE("Fail to obtain global settings from IOMXStore.");
-            return NO_INIT;
-        }
-        for (const auto& p : serviceAttributes) {
-            writer->addGlobalSetting(
-                    p.key.c_str(), p.value.c_str());
-        }
-
-        // List roles
-        status = omxStore->listRoles(&roles);
-        if (status != OK) {
-            ALOGE("Fail to obtain codec roles from IOMXStore.");
+            ALOGE("Failed to obtain codec roles from local IOmxStore.");
             return NO_INIT;
         }
     }
 
+    hidl_vec<IOmxStore::ServiceAttribute> serviceAttributes;
+    transStatus = omxStore->listServiceAttributes(
+            [&status, &serviceAttributes] (
+            Status inStatus,
+            const hidl_vec<IOmxStore::ServiceAttribute>& inAttributes) {
+                status = inStatus;
+                serviceAttributes = inAttributes;
+            });
+    if (!transStatus.isOk()) {
+        ALOGE("Failed to obtain global settings from IOmxStore.");
+        return NO_INIT;
+    }
+    if (status != Status::OK) {
+        ALOGE("IOmxStore reports parsing error.");
+        return NO_INIT;
+    }
+    for (const auto& p : serviceAttributes) {
+        writer->addGlobalSetting(
+                p.key.c_str(), p.value.c_str());
+    }
+
     // Convert roles to lists of codecs
 
-    // codec name -> index into swCodecs
-    std::map<std::string, std::unique_ptr<MediaCodecInfoWriter> >
-            swCodecName2Info;
-    // codec name -> index into hwCodecs
-    std::map<std::string, std::unique_ptr<MediaCodecInfoWriter> >
-            hwCodecName2Info;
+    // codec name -> index into swCodecs/hwCodecs
+    std::map<hidl_string, std::unique_ptr<MediaCodecInfoWriter>>
+            swCodecName2Info, hwCodecName2Info;
     // owner name -> MediaCodecInfo
     // This map will be used to obtain the correct IOmx service(s) needed for
     // creating IOmxNode instances and querying capabilities.
@@ -230,10 +167,10 @@
         // If preferPlatformNodes is true, hardware nodes must be added after
         // platform (software) nodes. hwCodecs is used to hold hardware nodes
         // that need to be added after software nodes for the same role.
-        std::vector<const IOMXStore::NodeInfo*> hwCodecs;
+        std::vector<const IOmxStore::NodeInfo*> hwCodecs;
         for (const auto& node : role.nodes) {
             const auto& nodeName = node.name;
-            bool isSoftware = nodeName.compare(0, 10, "OMX.google") == 0;
+            bool isSoftware = hasPrefix(nodeName, "OMX.google");
             MediaCodecInfoWriter* info;
             if (isSoftware) {
                 auto c2i = swCodecName2Info.find(nodeName);
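
The listRoles() call above follows the usual HIDL pattern: copy the callback
payload out of the lambda, then check the transport status separately from any
payload-level status. A condensed sketch of that pattern, assuming an IOmxStore
proxy named 'store' (illustrative only):

    hidl_vec<IOmxStore::RoleInfo> roles;
    auto ret = store->listRoles([&roles](const hidl_vec<IOmxStore::RoleInfo>& list) {
        roles = list;               // copy the payload out of the HIDL callback
    });
    if (!ret.isOk()) {              // transport failure, distinct from payload errors
        return NO_INIT;
    }
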
diff --git a/media/libstagefright/RemoteMediaExtractor.cpp b/media/libstagefright/RemoteMediaExtractor.cpp
new file mode 100644
index 0000000..2a16e16
--- /dev/null
+++ b/media/libstagefright/RemoteMediaExtractor.cpp
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RemoteMediaExtractor"
+#include <utils/Log.h>
+
+#include <media/stagefright/InterfaceUtils.h>
+#include <media/MediaAnalyticsItem.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/RemoteMediaExtractor.h>
+
+// Compile-time switch for media analytics logging; still being toggled on/off.
+#define MEDIA_LOG       1
+
+namespace android {
+
+// key for media statistics
+static const char *kKeyExtractor = "extractor";
+
+// attrs for media statistics
+static const char *kExtractorMime = "android.media.mediaextractor.mime";
+static const char *kExtractorTracks = "android.media.mediaextractor.ntrk";
+static const char *kExtractorFormat = "android.media.mediaextractor.fmt";
+
+RemoteMediaExtractor::RemoteMediaExtractor(const sp<MediaExtractor> &extractor)
+    :mExtractor(extractor) {
+
+    mAnalyticsItem = nullptr;
+    if (MEDIA_LOG) {
+        mAnalyticsItem = new MediaAnalyticsItem(kKeyExtractor);
+        (void) mAnalyticsItem->generateSessionID();
+
+        // track the container format (mpeg, aac, wvm, etc)
+        size_t ntracks = extractor->countTracks();
+        mAnalyticsItem->setCString(kExtractorFormat, extractor->name());
+        // tracks (size_t)
+        mAnalyticsItem->setInt32(kExtractorTracks, ntracks);
+        // metadata
+        sp<MetaData> pMetaData = extractor->getMetaData();
+        if (pMetaData != nullptr) {
+            String8 xx = pMetaData->toString();
+            // 'titl' -- but this verges into PII
+            // 'mime'
+            const char *mime = nullptr;
+            if (pMetaData->findCString(kKeyMIMEType, &mime)) {
+                mAnalyticsItem->setCString(kExtractorMime,  mime);
+            }
+            // what else is interesting and not already available?
+        }
+    }
+}
+
+RemoteMediaExtractor::~RemoteMediaExtractor() {
+    // log the current record, provided it has some information worth recording
+    if (MEDIA_LOG) {
+        if (mAnalyticsItem != nullptr) {
+            if (mAnalyticsItem->count() > 0) {
+                mAnalyticsItem->setFinalized(true);
+                mAnalyticsItem->selfrecord();
+            }
+        }
+    }
+    if (mAnalyticsItem != nullptr) {
+        delete mAnalyticsItem;
+        mAnalyticsItem = nullptr;
+    }
+}
+
+size_t RemoteMediaExtractor::countTracks() {
+    return mExtractor->countTracks();
+}
+
+sp<IMediaSource> RemoteMediaExtractor::getTrack(size_t index) {
+    sp<MediaSource> source = mExtractor->getTrack(index);
+    return (source.get() == nullptr) ? nullptr : CreateIMediaSourceFromMediaSource(source);
+}
+
+sp<MetaData> RemoteMediaExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+    return mExtractor->getTrackMetaData(index, flags);
+}
+
+sp<MetaData> RemoteMediaExtractor::getMetaData() {
+    return mExtractor->getMetaData();
+}
+
+status_t RemoteMediaExtractor::getMetrics(Parcel *reply) {
+    if (mAnalyticsItem == nullptr || reply == nullptr) {
+        return UNKNOWN_ERROR;
+    }
+
+    mAnalyticsItem->writeToParcel(reply);
+    return OK;
+}
+
+uint32_t RemoteMediaExtractor::flags() const {
+    return mExtractor->flags();
+}
+
+char* RemoteMediaExtractor::getDrmTrackInfo(size_t trackID, int * len) {
+    return mExtractor->getDrmTrackInfo(trackID, len);
+}
+
+void RemoteMediaExtractor::setUID(uid_t uid) {
+    return mExtractor->setUID(uid);
+}
+
+status_t RemoteMediaExtractor::setMediaCas(const HInterfaceToken &casToken) {
+    return mExtractor->setMediaCas(casToken);
+}
+
+const char * RemoteMediaExtractor::name() {
+    return mExtractor->name();
+}
+
+void RemoteMediaExtractor::release() {
+    return mExtractor->release();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// static
+sp<IMediaExtractor> RemoteMediaExtractor::wrap(const sp<MediaExtractor> &extractor) {
+    if (extractor.get() == nullptr) {
+        return nullptr;
+    }
+    return new RemoteMediaExtractor(extractor);
+}
+
+}  // namespace android
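
Illustrative usage (not part of the patch): a minimal sketch of wrapping a local
extractor for binder clients, assuming a valid sp<MediaExtractor> named
'extractor'.

    sp<IMediaExtractor> remote = RemoteMediaExtractor::wrap(extractor);
    if (remote != nullptr) {
        size_t n = remote->countTracks();             // forwarded to the wrapped extractor
        ALOGV("remote extractor has %zu tracks", n);
        sp<IMediaSource> track = remote->getTrack(0); // returned as a binder-friendly source
    }
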
diff --git a/media/libstagefright/RemoteMediaSource.cpp b/media/libstagefright/RemoteMediaSource.cpp
new file mode 100644
index 0000000..866d163
--- /dev/null
+++ b/media/libstagefright/RemoteMediaSource.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/RemoteMediaSource.h>
+#include <media/IMediaSource.h>
+
+namespace android {
+
+RemoteMediaSource::RemoteMediaSource(const sp<MediaSource> &source)
+    :mSource(source) {}
+
+RemoteMediaSource::~RemoteMediaSource() {}
+
+status_t RemoteMediaSource::start(MetaData *params) {
+    return mSource->start(params);
+}
+
+status_t RemoteMediaSource::stop() {
+    return mSource->stop();
+}
+
+sp<MetaData> RemoteMediaSource::getFormat() {
+    return mSource->getFormat();
+}
+
+status_t RemoteMediaSource::read(MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+    return mSource->read(buffer, reinterpret_cast<const MediaSource::ReadOptions*>(options));
+}
+
+status_t RemoteMediaSource::pause() {
+    return mSource->pause();
+}
+
+status_t RemoteMediaSource::setStopTimeUs(int64_t stopTimeUs) {
+    return mSource->setStopTimeUs(stopTimeUs);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// static
+sp<IMediaSource> RemoteMediaSource::wrap(const sp<MediaSource> &source) {
+    if (source.get() == nullptr) {
+        return nullptr;
+    }
+    return new RemoteMediaSource(source);
+}
+
+}  // namespace android
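
Illustrative usage (not part of the patch): the matching sketch for
RemoteMediaSource, assuming a valid sp<MediaSource> named 'source' and the usual
IMediaSource start()/stop() contract.

    sp<IMediaSource> remoteSource = RemoteMediaSource::wrap(source);
    if (remoteSource != nullptr && remoteSource->start(nullptr /* params */) == OK) {
        sp<MetaData> format = remoteSource->getFormat();  // forwarded to the wrapped source
        ALOGV("remote source format: %s", format->toString().c_str());
        remoteSource->stop();
    }
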
diff --git a/media/libstagefright/SimpleDecodingSource.cpp b/media/libstagefright/SimpleDecodingSource.cpp
index 90b8603..9b2fb4f 100644
--- a/media/libstagefright/SimpleDecodingSource.cpp
+++ b/media/libstagefright/SimpleDecodingSource.cpp
@@ -14,6 +14,10 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimpleDecodingSource"
+#include <utils/Log.h>
+
 #include <gui/Surface.h>
 
 #include <media/ICrypto.h>
@@ -36,14 +40,14 @@
 
 //static
 sp<SimpleDecodingSource> SimpleDecodingSource::Create(
-        const sp<IMediaSource> &source, uint32_t flags) {
+        const sp<MediaSource> &source, uint32_t flags) {
     return SimpleDecodingSource::Create(source, flags, nullptr, nullptr);
 }
 
 //static
 sp<SimpleDecodingSource> SimpleDecodingSource::Create(
-        const sp<IMediaSource> &source, uint32_t flags, const sp<ANativeWindow> &nativeWindow,
-        const char *desiredCodec) {
+        const sp<MediaSource> &source, uint32_t flags, const sp<ANativeWindow> &nativeWindow,
+        const char *desiredCodec, bool skipMediaCodecList) {
     sp<Surface> surface = static_cast<Surface*>(nativeWindow.get());
     const char *mime = NULL;
     sp<MetaData> meta = source->getFormat();
@@ -63,6 +67,33 @@
     looper->start();
 
     sp<MediaCodec> codec;
+    auto configure = [=](const sp<MediaCodec> &codec, const AString &componentName)
+            -> sp<SimpleDecodingSource> {
+        if (codec != NULL) {
+            ALOGI("Successfully allocated codec '%s'", componentName.c_str());
+
+            status_t err = codec->configure(format, surface, NULL /* crypto */, 0 /* flags */);
+            sp<AMessage> outFormat;
+            if (err == OK) {
+                err = codec->getOutputFormat(&outFormat);
+            }
+            if (err == OK) {
+                return new SimpleDecodingSource(codec, source, looper,
+                        surface != NULL,
+                        strcmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS) == 0,
+                        outFormat);
+            }
+
+            ALOGD("Failed to configure codec '%s'", componentName.c_str());
+            codec->release();
+        }
+        return NULL;
+    };
+
+    if (skipMediaCodecList) {
+        codec = MediaCodec::CreateByComponentName(looper, desiredCodec);
+        return configure(codec, desiredCodec);
+    }
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
         const AString &componentName = matchingCodecs[i];
@@ -73,22 +104,10 @@
         ALOGV("Attempting to allocate codec '%s'", componentName.c_str());
 
         codec = MediaCodec::CreateByComponentName(looper, componentName);
-        if (codec != NULL) {
-            ALOGI("Successfully allocated codec '%s'", componentName.c_str());
-
-            status_t err = codec->configure(format, surface, NULL /* crypto */, 0 /* flags */);
-            if (err == OK) {
-                err = codec->getOutputFormat(&format);
-            }
-            if (err == OK) {
-                return new SimpleDecodingSource(codec, source, looper,
-                        surface != NULL,
-                        strcmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS) == 0,
-                        format);
-            }
-
-            ALOGD("Failed to configure codec '%s'", componentName.c_str());
-            codec->release();
+        sp<SimpleDecodingSource> res = configure(codec, componentName);
+        if (res != NULL) {
+            return res;
+        } else {
             codec = NULL;
         }
     }
@@ -99,7 +118,7 @@
 }
 
 SimpleDecodingSource::SimpleDecodingSource(
-        const sp<MediaCodec> &codec, const sp<IMediaSource> &source, const sp<ALooper> &looper,
+        const sp<MediaCodec> &codec, const sp<MediaSource> &source, const sp<ALooper> &looper,
         bool usingSurface, bool isVorbis, const sp<AMessage> &format)
     : mCodec(codec),
       mSource(source),
@@ -212,7 +231,7 @@
     status_t res;
 
     // flush codec on seek
-    IMediaSource::ReadOptions::SeekMode mode;
+    MediaSource::ReadOptions::SeekMode mode;
     if (options != NULL && options->getSeekTo(&out_pts, &mode)) {
         me->mQueuedInputEOS = false;
         me->mGotOutputEOS = false;
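
Illustrative usage (not part of the patch): a sketch of the new
skipMediaCodecList path, where a caller that already knows which component it
wants bypasses MediaCodecList; the component name below is only an example, and
'track' is assumed to be a valid sp<MediaSource>.

    sp<SimpleDecodingSource> decoded = SimpleDecodingSource::Create(
            track, 0 /* flags */, nullptr /* nativeWindow */,
            "OMX.google.h264.decoder", true /* skipMediaCodecList */);
    if (decoded != NULL && decoded->start(nullptr /* params */) == OK) {
        // read decoded buffers from 'decoded' as from any other MediaSource
    }
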
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 103da95..e2db0f5 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -20,37 +20,24 @@
 #include <inttypes.h>
 
 #include <utils/Log.h>
-#include <gui/Surface.h>
 
-#include "include/avc_utils.h"
+#include "include/FrameDecoder.h"
 #include "include/StagefrightMetadataRetriever.h"
 
-#include <media/ICrypto.h>
 #include <media/IMediaHTTPService.h>
-#include <media/MediaCodecBuffer.h>
-
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/ColorConverter.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/DataSourceFactory.h>
 #include <media/stagefright/FileSource.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
-
 #include <media/CharacterEncodingDetector.h>
 
 namespace android {
 
-static const int64_t kBufferTimeOutUs = 30000ll; // 30 msec
-static const size_t kRetryCount = 20; // must be >0
-
 StagefrightMetadataRetriever::StagefrightMetadataRetriever()
     : mParsedMetaData(false),
       mAlbumArt(NULL) {
@@ -78,14 +65,14 @@
     ALOGV("setDataSource(%s)", uri);
 
     clearMetadata();
-    mSource = DataSource::CreateFromURI(httpService, uri, headers);
+    mSource = DataSourceFactory::CreateFromURI(httpService, uri, headers);
 
     if (mSource == NULL) {
         ALOGE("Unable to create data source for '%s'.", uri);
         return UNKNOWN_ERROR;
     }
 
-    mExtractor = MediaExtractor::Create(mSource);
+    mExtractor = MediaExtractorFactory::Create(mSource);
 
     if (mExtractor == NULL) {
         ALOGE("Unable to instantiate an extractor for '%s'.", uri);
@@ -115,7 +102,7 @@
         return err;
     }
 
-    mExtractor = MediaExtractor::Create(mSource);
+    mExtractor = MediaExtractorFactory::Create(mSource);
 
     if (mExtractor == NULL) {
         mSource.clear();
@@ -132,7 +119,7 @@
 
     clearMetadata();
     mSource = source;
-    mExtractor = MediaExtractor::Create(mSource, mime);
+    mExtractor = MediaExtractorFactory::Create(mSource, mime);
 
     if (mExtractor == NULL) {
         ALOGE("Failed to instantiate a MediaExtractor.");
@@ -143,469 +130,124 @@
     return OK;
 }
 
-static VideoFrame *allocVideoFrame(
-        const sp<MetaData> &trackMeta, int32_t width, int32_t height, int32_t bpp, bool metaOnly) {
-    int32_t rotationAngle;
-    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
-        rotationAngle = 0;  // By default, no rotation
-    }
+VideoFrame* StagefrightMetadataRetriever::getImageAtIndex(
+        int index, int colorFormat, bool metaOnly) {
 
-    uint32_t type;
-    const void *iccData;
-    size_t iccSize;
-    if (!trackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)){
-        iccData = NULL;
-        iccSize = 0;
-    }
+    ALOGV("getImageAtIndex: index: %d colorFormat: %d, metaOnly: %d",
+            index, colorFormat, metaOnly);
 
-    int32_t sarWidth, sarHeight;
-    int32_t displayWidth, displayHeight;
-    if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
-            && trackMeta->findInt32(kKeySARHeight, &sarHeight)
-            && sarHeight != 0) {
-        displayWidth = (width * sarWidth) / sarHeight;
-        displayHeight = height;
-    } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
-                && trackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
-                && displayWidth > 0 && displayHeight > 0
-                && width > 0 && height > 0) {
-        ALOGV("found display size %dx%d", displayWidth, displayHeight);
-    } else {
-        displayWidth = width;
-        displayHeight = height;
-    }
-
-    return new VideoFrame(width, height, displayWidth, displayHeight,
-            rotationAngle, bpp, !metaOnly, iccData, iccSize);
-}
-
-static bool getDstColorFormat(android_pixel_format_t colorFormat,
-        OMX_COLOR_FORMATTYPE *omxColorFormat, int32_t *bpp) {
-    switch (colorFormat) {
-        case HAL_PIXEL_FORMAT_RGB_565:
-        {
-            *omxColorFormat = OMX_COLOR_Format16bitRGB565;
-            *bpp = 2;
-            return true;
-        }
-        case HAL_PIXEL_FORMAT_RGBA_8888:
-        {
-            *omxColorFormat = OMX_COLOR_Format32BitRGBA8888;
-            *bpp = 4;
-            return true;
-        }
-        case HAL_PIXEL_FORMAT_BGRA_8888:
-        {
-            *omxColorFormat = OMX_COLOR_Format32bitBGRA8888;
-            *bpp = 4;
-            return true;
-        }
-        default:
-        {
-            ALOGE("Unsupported color format: %d", colorFormat);
-            break;
-        }
-    }
-    return false;
-}
-
-static VideoFrame *extractVideoFrame(
-        const AString &componentName,
-        const sp<MetaData> &trackMeta,
-        const sp<IMediaSource> &source,
-        int64_t frameTimeUs,
-        int seekMode,
-        int colorFormat,
-        bool metaOnly) {
-    sp<MetaData> format = source->getFormat();
-
-    MediaSource::ReadOptions::SeekMode mode =
-            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
-    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
-        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
-        ALOGE("Unknown seek mode: %d", seekMode);
+    if (mExtractor.get() == NULL) {
+        ALOGE("no extractor.");
         return NULL;
     }
 
-    int32_t dstBpp;
-    OMX_COLOR_FORMATTYPE dstFormat;
-    if (!getDstColorFormat(
-            (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
-        return NULL;
-    }
+    size_t n = mExtractor->countTracks();
+    size_t i;
+    int imageCount = 0;
 
-    if (metaOnly) {
-        int32_t width, height;
-        CHECK(trackMeta->findInt32(kKeyWidth, &width));
-        CHECK(trackMeta->findInt32(kKeyHeight, &height));
-        return allocVideoFrame(trackMeta, width, height, dstBpp, true);
-    }
+    for (i = 0; i < n; ++i) {
+        sp<MetaData> meta = mExtractor->getTrackMetaData(i);
+        ALOGV("getting track %zu of %zu, meta=%s", i, n, meta->toString().c_str());
 
-    MediaSource::ReadOptions options;
-    sp<MetaData> overrideMeta;
-    if (frameTimeUs < 0) {
-        uint32_t type;
-        const void *data;
-        size_t size;
-        int64_t thumbNailTime;
-        int32_t thumbnailWidth, thumbnailHeight;
+        const char *mime;
+        CHECK(meta->findCString(kKeyMIMEType, &mime));
 
-        // if we have a stand-alone thumbnail, set up the override meta,
-        // and set seekTo time to -1.
-        if (trackMeta->findInt32(kKeyThumbnailWidth, &thumbnailWidth)
-         && trackMeta->findInt32(kKeyThumbnailHeight, &thumbnailHeight)
-         && trackMeta->findData(kKeyThumbnailHVCC, &type, &data, &size)){
-            overrideMeta = new MetaData(*trackMeta);
-            overrideMeta->remove(kKeyDisplayWidth);
-            overrideMeta->remove(kKeyDisplayHeight);
-            overrideMeta->setInt32(kKeyWidth, thumbnailWidth);
-            overrideMeta->setInt32(kKeyHeight, thumbnailHeight);
-            overrideMeta->setData(kKeyHVCC, type, data, size);
-            thumbNailTime = -1ll;
-            ALOGV("thumbnail: %dx%d", thumbnailWidth, thumbnailHeight);
-        } else if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
-                || thumbNailTime < 0) {
-            thumbNailTime = 0;
-        }
-
-        options.setSeekTo(thumbNailTime, mode);
-    } else {
-        options.setSeekTo(frameTimeUs, mode);
-    }
-
-    int32_t gridRows = 1, gridCols = 1;
-    if (overrideMeta == NULL) {
-        // check if we're dealing with a tiled heif
-        int32_t gridWidth, gridHeight;
-        if (trackMeta->findInt32(kKeyGridWidth, &gridWidth) && gridWidth > 0
-         && trackMeta->findInt32(kKeyGridHeight, &gridHeight) && gridHeight > 0) {
-            int32_t width, height, displayWidth, displayHeight;
-            CHECK(trackMeta->findInt32(kKeyWidth, &width));
-            CHECK(trackMeta->findInt32(kKeyHeight, &height));
-            CHECK(trackMeta->findInt32(kKeyDisplayWidth, &displayWidth));
-            CHECK(trackMeta->findInt32(kKeyDisplayHeight, &displayHeight));
-
-            if (width >= displayWidth && height >= displayHeight
-                    && (width % gridWidth == 0) && (height % gridHeight == 0)) {
-                ALOGV("grid config: %dx%d, display %dx%d, grid %dx%d",
-                        width, height, displayWidth, displayHeight, gridWidth, gridHeight);
-
-                overrideMeta = new MetaData(*trackMeta);
-                overrideMeta->remove(kKeyDisplayWidth);
-                overrideMeta->remove(kKeyDisplayHeight);
-                overrideMeta->setInt32(kKeyWidth, gridWidth);
-                overrideMeta->setInt32(kKeyHeight, gridHeight);
-                gridCols = width / gridWidth;
-                gridRows = height / gridHeight;
-            } else {
-                ALOGE("Bad grid config: %dx%d, display %dx%d, grid %dx%d",
-                        width, height, displayWidth, displayHeight, gridWidth, gridHeight);
+        if (!strncasecmp(mime, "image/", 6)) {
+            int32_t isPrimary;
+            if ((index < 0 && meta->findInt32(
+                    kKeyTrackIsDefault, &isPrimary) && isPrimary)
+                    || (index == imageCount++)) {
+                break;
             }
         }
-        if (overrideMeta == NULL) {
-            overrideMeta = trackMeta;
-        }
     }
-    int32_t numTiles = gridRows * gridCols;
 
-    sp<AMessage> videoFormat;
-    if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
-        ALOGE("b/23680780");
-        ALOGW("Failed to convert meta data to message");
+    if (i == n) {
+        ALOGE("image track not found.");
         return NULL;
     }
 
-    // TODO: Use Flexible color instead
-    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+    sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
 
-    // For the thumbnail extraction case, try to allocate single buffer in both
-    // input and output ports, if seeking to a sync frame. NOTE: This request may
-    // fail if component requires more than that for decoding.
-    bool isSeekingClosest = (seekMode == MediaSource::ReadOptions::SEEK_CLOSEST);
-    bool decodeSingleFrame = !isSeekingClosest && (numTiles == 1);
-    if (decodeSingleFrame) {
-        videoFormat->setInt32("android._num-input-buffers", 1);
-        videoFormat->setInt32("android._num-output-buffers", 1);
-    }
+    sp<IMediaSource> source = mExtractor->getTrack(i);
 
-    status_t err;
-    sp<ALooper> looper = new ALooper;
-    looper->start();
-    sp<MediaCodec> decoder = MediaCodec::CreateByComponentName(
-            looper, componentName, &err);
-
-    if (decoder.get() == NULL || err != OK) {
-        ALOGW("Failed to instantiate decoder [%s]", componentName.c_str());
+    if (source.get() == NULL) {
+        ALOGE("unable to instantiate image track.");
         return NULL;
     }
 
-    err = decoder->configure(videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
-    if (err != OK) {
-        ALOGW("configure returned error %d (%s)", err, asString(err));
-        decoder->release();
-        return NULL;
-    }
-
-    err = decoder->start();
-    if (err != OK) {
-        ALOGW("start returned error %d (%s)", err, asString(err));
-        decoder->release();
-        return NULL;
-    }
-
-    err = source->start();
-    if (err != OK) {
-        ALOGW("source failed to start: %d (%s)", err, asString(err));
-        decoder->release();
-        return NULL;
-    }
-
-    Vector<sp<MediaCodecBuffer> > inputBuffers;
-    err = decoder->getInputBuffers(&inputBuffers);
-    if (err != OK) {
-        ALOGW("failed to get input buffers: %d (%s)", err, asString(err));
-        decoder->release();
-        source->stop();
-        return NULL;
-    }
-
-    Vector<sp<MediaCodecBuffer> > outputBuffers;
-    err = decoder->getOutputBuffers(&outputBuffers);
-    if (err != OK) {
-        ALOGW("failed to get output buffers: %d (%s)", err, asString(err));
-        decoder->release();
-        source->stop();
-        return NULL;
-    }
-
-    sp<AMessage> outputFormat = NULL;
-    bool haveMoreInputs = true;
-    size_t index, offset, size;
-    int64_t timeUs;
-    size_t retriesLeft = kRetryCount;
-    bool done = false;
     const char *mime;
-    bool success = format->findCString(kKeyMIMEType, &mime);
-    if (!success) {
-        ALOGE("Could not find mime type");
-        return NULL;
+    CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
+    ALOGV("extracting from %s track", mime);
+    if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
+        mime = MEDIA_MIMETYPE_VIDEO_HEVC;
+        trackMeta = new MetaData(*trackMeta);
+        trackMeta->setCString(kKeyMIMEType, mime);
     }
 
-    bool isAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
-            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+    Vector<AString> matchingCodecs;
+    MediaCodecList::findMatchingCodecs(
+            mime,
+            false, /* encoder */
+            MediaCodecList::kPreferSoftwareCodecs,
+            &matchingCodecs);
 
-    bool firstSample = true;
-    int64_t targetTimeUs = -1ll;
+    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
+        const AString &componentName = matchingCodecs[i];
+        ImageDecoder decoder(componentName, trackMeta, source);
+        VideoFrame* frame = decoder.extractFrame(
+                0 /*frameTimeUs*/, 0 /*seekMode*/, colorFormat, metaOnly);
 
-    VideoFrame *frame = NULL;
-    int32_t tilesDecoded = 0;
-
-    do {
-        size_t inputIndex = -1;
-        int64_t ptsUs = 0ll;
-        uint32_t flags = 0;
-        sp<MediaCodecBuffer> codecBuffer = NULL;
-
-        while (haveMoreInputs) {
-            err = decoder->dequeueInputBuffer(&inputIndex, kBufferTimeOutUs);
-            if (err != OK) {
-                ALOGW("Timed out waiting for input");
-                if (retriesLeft) {
-                    err = OK;
-                }
-                break;
-            }
-            codecBuffer = inputBuffers[inputIndex];
-
-            MediaBuffer *mediaBuffer = NULL;
-
-            err = source->read(&mediaBuffer, &options);
-            options.clearSeekTo();
-            if (err != OK) {
-                ALOGW("Input Error or EOS");
-                haveMoreInputs = false;
-                if (err == ERROR_END_OF_STREAM) {
-                    err = OK;
-                }
-                break;
-            }
-            if (firstSample && isSeekingClosest) {
-                mediaBuffer->meta_data()->findInt64(kKeyTargetTime, &targetTimeUs);
-                ALOGV("Seeking closest: targetTimeUs=%lld", (long long)targetTimeUs);
-            }
-            firstSample = false;
-
-            if (mediaBuffer->range_length() > codecBuffer->capacity()) {
-                ALOGE("buffer size (%zu) too large for codec input size (%zu)",
-                        mediaBuffer->range_length(), codecBuffer->capacity());
-                haveMoreInputs = false;
-                err = BAD_VALUE;
-            } else {
-                codecBuffer->setRange(0, mediaBuffer->range_length());
-
-                CHECK(mediaBuffer->meta_data()->findInt64(kKeyTime, &ptsUs));
-                memcpy(codecBuffer->data(),
-                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
-                        mediaBuffer->range_length());
-            }
-
-            mediaBuffer->release();
-            break;
+        if (frame != NULL) {
+            return frame;
         }
-
-        if (haveMoreInputs && inputIndex < inputBuffers.size()) {
-            if (isAvcOrHevc && IsIDR(codecBuffer) && decodeSingleFrame) {
-                // Only need to decode one IDR frame, unless we're seeking with CLOSEST
-                // option, in which case we need to actually decode to targetTimeUs.
-                haveMoreInputs = false;
-                flags |= MediaCodec::BUFFER_FLAG_EOS;
-            }
-
-            ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
-                    codecBuffer->size(), ptsUs, flags);
-            err = decoder->queueInputBuffer(
-                    inputIndex,
-                    codecBuffer->offset(),
-                    codecBuffer->size(),
-                    ptsUs,
-                    flags);
-
-            // we don't expect an output from codec config buffer
-            if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
-                continue;
-            }
-        }
-
-        while (err == OK) {
-            // wait for a decoded buffer
-            err = decoder->dequeueOutputBuffer(
-                    &index,
-                    &offset,
-                    &size,
-                    &timeUs,
-                    &flags,
-                    kBufferTimeOutUs);
-
-            if (err == INFO_FORMAT_CHANGED) {
-                ALOGV("Received format change");
-                err = decoder->getOutputFormat(&outputFormat);
-            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
-                ALOGV("Output buffers changed");
-                err = decoder->getOutputBuffers(&outputBuffers);
-            } else {
-                if (err == -EAGAIN /* INFO_TRY_AGAIN_LATER */ && --retriesLeft > 0) {
-                    ALOGV("Timed-out waiting for output.. retries left = %zu", retriesLeft);
-                    err = OK;
-                } else if (err == OK) {
-                    // If we're seeking with CLOSEST option and obtained a valid targetTimeUs
-                    // from the extractor, decode to the specified frame. Otherwise we're done.
-                    ALOGV("Received an output buffer, timeUs=%lld", (long long)timeUs);
-                    sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);
-
-                    int32_t width, height;
-                    CHECK(outputFormat != NULL);
-                    CHECK(outputFormat->findInt32("width", &width));
-                    CHECK(outputFormat->findInt32("height", &height));
-
-                    int32_t crop_left, crop_top, crop_right, crop_bottom;
-                    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
-                        crop_left = crop_top = 0;
-                        crop_right = width - 1;
-                        crop_bottom = height - 1;
-                    }
-
-                    if (frame == NULL) {
-                        frame = allocVideoFrame(
-                                trackMeta,
-                                (crop_right - crop_left + 1) * gridCols,
-                                (crop_bottom - crop_top + 1) * gridRows,
-                                dstBpp,
-                                false /*metaOnly*/);
-                    }
-
-                    int32_t srcFormat;
-                    CHECK(outputFormat->findInt32("color-format", &srcFormat));
-
-                    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat);
-
-                    int32_t dstLeft, dstTop, dstRight, dstBottom;
-                    if (numTiles == 1) {
-                        dstLeft = crop_left;
-                        dstTop = crop_top;
-                        dstRight = crop_right;
-                        dstBottom = crop_bottom;
-                    } else {
-                        dstLeft = tilesDecoded % gridCols * width;
-                        dstTop = tilesDecoded / gridCols * height;
-                        dstRight = dstLeft + width - 1;
-                        dstBottom = dstTop + height - 1;
-                    }
-
-                    if (converter.isValid()) {
-                        err = converter.convert(
-                                (const uint8_t *)videoFrameBuffer->data(),
-                                width, height,
-                                crop_left, crop_top, crop_right, crop_bottom,
-                                frame->mData,
-                                frame->mWidth,
-                                frame->mHeight,
-                                dstLeft, dstTop, dstRight, dstBottom);
-                    } else {
-                        ALOGE("Unable to convert from format 0x%08x to 0x%08x",
-                                srcFormat, dstFormat);
-
-                        err = ERROR_UNSUPPORTED;
-                    }
-
-                    done = (targetTimeUs < 0ll) || (timeUs >= targetTimeUs);
-                    if (numTiles > 1) {
-                        tilesDecoded++;
-                        done &= (tilesDecoded >= numTiles);
-                    }
-                    err = decoder->releaseOutputBuffer(index);
-                } else {
-                    ALOGW("Received error %d (%s) instead of output", err, asString(err));
-                    done = true;
-                }
-                break;
-            }
-        }
-    } while (err == OK && !done);
-
-    source->stop();
-    decoder->release();
-
-    if (err != OK) {
-        ALOGE("failed to get video frame (err %d)", err);
-        delete frame;
-        frame = NULL;
+        ALOGV("%s failed to extract thumbnail, trying next decoder.", componentName.c_str());
     }
 
-    return frame;
+    return NULL;
 }
 
-VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
+VideoFrame* StagefrightMetadataRetriever::getFrameAtTime(
         int64_t timeUs, int option, int colorFormat, bool metaOnly) {
-
     ALOGV("getFrameAtTime: %" PRId64 " us option: %d colorFormat: %d, metaOnly: %d",
             timeUs, option, colorFormat, metaOnly);
 
+    VideoFrame *frame;
+    status_t err = getFrameInternal(
+            timeUs, 1, option, colorFormat, metaOnly, &frame, NULL /*outFrames*/);
+    return (err == OK) ? frame : NULL;
+}
+
+status_t StagefrightMetadataRetriever::getFrameAtIndex(
+        std::vector<VideoFrame*>* frames,
+        int frameIndex, int numFrames, int colorFormat, bool metaOnly) {
+    ALOGV("getFrameAtIndex: frameIndex %d, numFrames %d, colorFormat: %d, metaOnly: %d",
+            frameIndex, numFrames, colorFormat, metaOnly);
+
+    return getFrameInternal(
+            frameIndex, numFrames, MediaSource::ReadOptions::SEEK_FRAME_INDEX,
+            colorFormat, metaOnly, NULL /*outFrame*/, frames);
+}
+
+status_t StagefrightMetadataRetriever::getFrameInternal(
+        int64_t timeUs, int numFrames, int option, int colorFormat, bool metaOnly,
+        VideoFrame **outFrame, std::vector<VideoFrame*>* outFrames) {
     if (mExtractor.get() == NULL) {
-        ALOGV("no extractor.");
-        return NULL;
+        ALOGE("no extractor.");
+        return NO_INIT;
     }
 
     sp<MetaData> fileMeta = mExtractor->getMetaData();
 
     if (fileMeta == NULL) {
-        ALOGV("extractor doesn't publish metadata, failed to initialize?");
-        return NULL;
+        ALOGE("extractor doesn't publish metadata, failed to initialize?");
+        return NO_INIT;
     }
 
     int32_t drm = 0;
     if (fileMeta->findInt32(kKeyIsDRM, &drm) && drm != 0) {
         ALOGE("frame grab not allowed.");
-        return NULL;
+        return ERROR_DRM_UNKNOWN;
     }
 
     size_t n = mExtractor->countTracks();
@@ -622,8 +264,8 @@
     }
 
     if (i == n) {
-        ALOGV("no video track found.");
-        return NULL;
+        ALOGE("no video track found.");
+        return INVALID_OPERATION;
     }
 
     sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
@@ -633,7 +275,7 @@
 
     if (source.get() == NULL) {
         ALOGV("unable to instantiate video track.");
-        return NULL;
+        return UNKNOWN_ERROR;
     }
 
     const void *data;
@@ -656,16 +298,25 @@
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
         const AString &componentName = matchingCodecs[i];
-        VideoFrame *frame = extractVideoFrame(
-                componentName, trackMeta, source, timeUs, option, colorFormat, metaOnly);
-
-        if (frame != NULL) {
-            return frame;
+        VideoFrameDecoder decoder(componentName, trackMeta, source);
+        if (outFrame != NULL) {
+            *outFrame = decoder.extractFrame(
+                    timeUs, option, colorFormat, metaOnly);
+            if (*outFrame != NULL) {
+                return OK;
+            }
+        } else if (outFrames != NULL) {
+            status_t err = decoder.extractFrames(
+                    timeUs, numFrames, option, colorFormat, outFrames);
+            if (err == OK) {
+                return OK;
+            }
         }
-        ALOGV("%s failed to extract thumbnail, trying next decoder.", componentName.c_str());
+        ALOGV("%s failed to extract frame, trying next decoder.", componentName.c_str());
     }
 
-    return NULL;
+    ALOGE("all codecs failed to extract frame.");
+    return UNKNOWN_ERROR;
 }
 
 MediaAlbumArt *StagefrightMetadataRetriever::extractAlbumArt() {
@@ -797,8 +448,14 @@
     bool hasVideo = false;
     int32_t videoWidth = -1;
     int32_t videoHeight = -1;
+    int32_t videoFrameCount = 0;
     int32_t audioBitrate = -1;
     int32_t rotationAngle = -1;
+    int32_t imageCount = 0;
+    int32_t imagePrimary = 0;
+    int32_t imageWidth = -1;
+    int32_t imageHeight = -1;
+    int32_t imageRotation = -1;
 
     // The overall duration is the duration of the longest track.
     int64_t maxDurationUs = 0;
@@ -829,6 +486,21 @@
                 if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
                     rotationAngle = 0;
                 }
+                if (!trackMeta->findInt32(kKeyFrameCount, &videoFrameCount)) {
+                    videoFrameCount = 0;
+                }
+            } else if (!strncasecmp("image/", mime, 6)) {
+                int32_t isPrimary;
+                if (trackMeta->findInt32(
+                        kKeyTrackIsDefault, &isPrimary) && isPrimary) {
+                    imagePrimary = imageCount;
+                    CHECK(trackMeta->findInt32(kKeyWidth, &imageWidth));
+                    CHECK(trackMeta->findInt32(kKeyHeight, &imageHeight));
+                    if (!trackMeta->findInt32(kKeyRotation, &imageRotation)) {
+                        imageRotation = 0;
+                    }
+                }
+                imageCount++;
             } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
                 const char *lang;
                 if (trackMeta->findCString(kKeyMediaLanguage, &lang)) {
@@ -867,6 +539,30 @@
 
         sprintf(tmp, "%d", rotationAngle);
         mMetaData.add(METADATA_KEY_VIDEO_ROTATION, String8(tmp));
+
+        if (videoFrameCount > 0) {
+            sprintf(tmp, "%d", videoFrameCount);
+            mMetaData.add(METADATA_KEY_VIDEO_FRAME_COUNT, String8(tmp));
+        }
+    }
+
+    if (imageCount > 0) {
+        mMetaData.add(METADATA_KEY_HAS_IMAGE, String8("yes"));
+
+        sprintf(tmp, "%d", imageCount);
+        mMetaData.add(METADATA_KEY_IMAGE_COUNT, String8(tmp));
+
+        sprintf(tmp, "%d", imagePrimary);
+        mMetaData.add(METADATA_KEY_IMAGE_PRIMARY, String8(tmp));
+
+        sprintf(tmp, "%d", imageWidth);
+        mMetaData.add(METADATA_KEY_IMAGE_WIDTH, String8(tmp));
+
+        sprintf(tmp, "%d", imageHeight);
+        mMetaData.add(METADATA_KEY_IMAGE_HEIGHT, String8(tmp));
+
+        sprintf(tmp, "%d", imageRotation);
+        mMetaData.add(METADATA_KEY_IMAGE_ROTATION, String8(tmp));
     }
 
     if (numTracks == 1 && hasAudio && audioBitrate >= 0) {
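
Illustrative usage (not part of the patch): a caller-side sketch of the new
getFrameAtIndex() batch path, assuming a retriever whose data source is already
set; RGB_565 is only an example color format.

    std::vector<VideoFrame*> frames;
    status_t err = retriever->getFrameAtIndex(
            &frames, 0 /* frameIndex */, 4 /* numFrames */,
            HAL_PIXEL_FORMAT_RGB_565, false /* metaOnly */);
    if (err == OK) {
        // one VideoFrame* per decoded frame, starting at frameIndex
    }
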
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 3ef8f2a..53699ef 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -27,7 +27,6 @@
 #include "include/ESDS.h"
 #include "include/HevcUtils.h"
 
-#include <arpa/inet.h>
 #include <cutils/properties.h>
 #include <media/openmax/OMX_Audio.h>
 #include <media/openmax/OMX_Video.h>
@@ -37,6 +36,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALookup.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/AudioSystem.h>
@@ -47,39 +47,6 @@
 
 namespace android {
 
-uint16_t U16_AT(const uint8_t *ptr) {
-    return ptr[0] << 8 | ptr[1];
-}
-
-uint32_t U32_AT(const uint8_t *ptr) {
-    return ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3];
-}
-
-uint64_t U64_AT(const uint8_t *ptr) {
-    return ((uint64_t)U32_AT(ptr)) << 32 | U32_AT(ptr + 4);
-}
-
-uint16_t U16LE_AT(const uint8_t *ptr) {
-    return ptr[0] | (ptr[1] << 8);
-}
-
-uint32_t U32LE_AT(const uint8_t *ptr) {
-    return ptr[3] << 24 | ptr[2] << 16 | ptr[1] << 8 | ptr[0];
-}
-
-uint64_t U64LE_AT(const uint8_t *ptr) {
-    return ((uint64_t)U32LE_AT(ptr + 4)) << 32 | U32LE_AT(ptr);
-}
-
-// XXX warning: these won't work on big-endian host.
-uint64_t ntoh64(uint64_t x) {
-    return ((uint64_t)ntohl(x & 0xffffffff) << 32) | ntohl(x >> 32);
-}
-
-uint64_t hton64(uint64_t x) {
-    return ((uint64_t)htonl(x & 0xffffffff) << 32) | htonl(x >> 32);
-}
-
 static status_t copyNALUToABuffer(sp<ABuffer> *buffer, const uint8_t *ptr, size_t length) {
     if (((*buffer)->size() + 4 + length) > ((*buffer)->capacity() - (*buffer)->offset())) {
         sp<ABuffer> tmpBuffer = new (std::nothrow) ABuffer((*buffer)->size() + 4 + length + 1024);
@@ -672,7 +639,8 @@
         msg->setString("language", lang);
     }
 
-    if (!strncasecmp("video/", mime, 6)) {
+    if (!strncasecmp("video/", mime, 6) ||
+            !strncasecmp("image/", mime, 6)) {
         int32_t width, height;
         if (!meta->findInt32(kKeyWidth, &width)
                 || !meta->findInt32(kKeyHeight, &height)) {
@@ -696,6 +664,23 @@
             msg->setInt32("sar-height", sarHeight);
         }
 
+        if (!strncasecmp("image/", mime, 6)) {
+            int32_t gridWidth, gridHeight, gridRows, gridCols;
+            if (meta->findInt32(kKeyGridWidth, &gridWidth)
+                    && meta->findInt32(kKeyHeight, &gridHeight)
+                    && meta->findInt32(kKeyGridRows, &gridRows)
+                    && meta->findInt32(kKeyGridCols, &gridCols)) {
+                msg->setInt32("grid-width", gridWidth);
+                msg->setInt32("grid-height", gridHeight);
+                msg->setInt32("grid-rows", gridRows);
+                msg->setInt32("grid-cols", gridCols);
+            }
+            int32_t isPrimary;
+            if (meta->findInt32(kKeyTrackIsDefault, &isPrimary) && isPrimary) {
+                msg->setInt32("is-default", 1);
+            }
+        }
+
         int32_t colorFormat;
         if (meta->findInt32(kKeyColorFormat, &colorFormat)) {
             msg->setInt32("color-format", colorFormat);
@@ -1327,7 +1312,7 @@
         meta->setCString(kKeyMediaLanguage, lang.c_str());
     }
 
-    if (mime.startsWith("video/")) {
+    if (mime.startsWith("video/") || mime.startsWith("image/")) {
         int32_t width;
         int32_t height;
         if (msg->findInt32("width", &width) && msg->findInt32("height", &height)) {
@@ -1351,6 +1336,26 @@
             meta->setInt32(kKeyDisplayHeight, displayHeight);
         }
 
+        if (mime.startsWith("image/")){
+            int32_t isPrimary;
+            if (msg->findInt32("is-default", &isPrimary) && isPrimary) {
+                meta->setInt32(kKeyTrackIsDefault, 1);
+            }
+            int32_t gridWidth, gridHeight, gridRows, gridCols;
+            if (msg->findInt32("grid-width", &gridWidth)) {
+                meta->setInt32(kKeyGridWidth, gridWidth);
+            }
+            if (msg->findInt32("grid-height", &gridHeight)) {
+                meta->setInt32(kKeyGridHeight, gridHeight);
+            }
+            if (msg->findInt32("grid-rows", &gridRows)) {
+                meta->setInt32(kKeyGridRows, gridRows);
+            }
+            if (msg->findInt32("grid-cols", &gridCols)) {
+                meta->setInt32(kKeyGridCols, gridCols);
+            }
+        }
+
         int32_t colorFormat;
         if (msg->findInt32("color-format", &colorFormat)) {
             meta->setInt32(kKeyColorFormat, colorFormat);
@@ -1467,7 +1472,8 @@
             // for transporting the CSD to muxers.
             reassembleESDS(csd0, esds.data());
             meta->setData(kKeyESDS, kKeyESDS, esds.data(), esds.size());
-        } else if (mime == MEDIA_MIMETYPE_VIDEO_HEVC) {
+        } else if (mime == MEDIA_MIMETYPE_VIDEO_HEVC ||
+                   mime == MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) {
             std::vector<uint8_t> hvcc(csd0size + 1024);
             size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
             meta->setData(kKeyHVCC, kKeyHVCC, hvcc.data(), outsize);
@@ -1809,41 +1815,17 @@
 }
 
 void writeToAMessage(const sp<AMessage> &msg, const BufferingSettings &buffering) {
-    msg->setInt32("init-mode", buffering.mInitialBufferingMode);
-    msg->setInt32("rebuffer-mode", buffering.mRebufferingMode);
-    msg->setInt32("init-ms", buffering.mInitialWatermarkMs);
-    msg->setInt32("init-kb", buffering.mInitialWatermarkKB);
-    msg->setInt32("rebuffer-low-ms", buffering.mRebufferingWatermarkLowMs);
-    msg->setInt32("rebuffer-high-ms", buffering.mRebufferingWatermarkHighMs);
-    msg->setInt32("rebuffer-low-kb", buffering.mRebufferingWatermarkLowKB);
-    msg->setInt32("rebuffer-high-kb", buffering.mRebufferingWatermarkHighKB);
+    msg->setInt32("init-ms", buffering.mInitialMarkMs);
+    msg->setInt32("resume-playback-ms", buffering.mResumePlaybackMarkMs);
 }
 
 void readFromAMessage(const sp<AMessage> &msg, BufferingSettings *buffering /* nonnull */) {
     int32_t value;
-    if (msg->findInt32("init-mode", &value)) {
-        buffering->mInitialBufferingMode = (BufferingMode)value;
-    }
-    if (msg->findInt32("rebuffer-mode", &value)) {
-        buffering->mRebufferingMode = (BufferingMode)value;
-    }
     if (msg->findInt32("init-ms", &value)) {
-        buffering->mInitialWatermarkMs = value;
+        buffering->mInitialMarkMs = value;
     }
-    if (msg->findInt32("init-kb", &value)) {
-        buffering->mInitialWatermarkKB = value;
-    }
-    if (msg->findInt32("rebuffer-low-ms", &value)) {
-        buffering->mRebufferingWatermarkLowMs = value;
-    }
-    if (msg->findInt32("rebuffer-high-ms", &value)) {
-        buffering->mRebufferingWatermarkHighMs = value;
-    }
-    if (msg->findInt32("rebuffer-low-kb", &value)) {
-        buffering->mRebufferingWatermarkLowKB = value;
-    }
-    if (msg->findInt32("rebuffer-high-kb", &value)) {
-        buffering->mRebufferingWatermarkHighKB = value;
+    if (msg->findInt32("resume-playback-ms", &value)) {
+        buffering->mResumePlaybackMarkMs = value;
     }
 }
 
@@ -1879,13 +1861,5 @@
     return result;
 }
 
-void MakeFourCCString(uint32_t x, char *s) {
-    s[0] = x >> 24;
-    s[1] = (x >> 16) & 0xff;
-    s[2] = (x >> 8) & 0xff;
-    s[3] = x & 0xff;
-    s[4] = '\0';
-}
-
 }  // namespace android
 
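For illustration only (not part of the patch): with the trimmed-down BufferingSettings above, a round trip through AMessage now touches just the two marks. A minimal sketch follows; the include paths and the header declaring writeToAMessage/readFromAMessage are assumptions, while the field names and message keys come straight from the hunk above.

    #include <media/stagefright/foundation/AMessage.h>
    #include <media/BufferingSettings.h>   // assumed location of the struct

    using namespace android;

    static void roundTripBufferingSettings() {
        BufferingSettings in;
        in.mInitialMarkMs = 2000;          // roughly: data to buffer before playback starts
        in.mResumePlaybackMarkMs = 5000;   // roughly: data to buffer before resuming after a rebuffer

        sp<AMessage> msg = new AMessage;
        writeToAMessage(msg, in);          // stores "init-ms" and "resume-playback-ms"

        BufferingSettings out;
        readFromAMessage(msg, &out);       // 'out' now mirrors 'in'
    }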
diff --git a/media/libstagefright/codec2/Android.bp b/media/libstagefright/codec2/Android.bp
new file mode 100644
index 0000000..ee5c3eb
--- /dev/null
+++ b/media/libstagefright/codec2/Android.bp
@@ -0,0 +1,78 @@
+cc_library_shared {
+    name: "libstagefright_codec2",
+
+    tags: [
+        "optional",
+    ],
+
+    srcs: ["C2.cpp"],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/codec2/include",
+        "frameworks/native/include/media/hardware",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: false, // true,
+        diag: {
+            cfi: false, // true,
+        },
+    },
+
+    ldflags: ["-Wl,-Bsymbolic"],
+}
+
+cc_library_shared {
+    name: "libstagefright_simple_c2component",
+
+    tags: [
+        "optional",
+    ],
+
+    srcs: [
+        "SimpleC2Component.cpp",
+        "SimpleC2Interface.cpp",
+    ],
+
+    include_dirs: [
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libstagefright_codec2",
+        "libstagefright_codec2_vndk",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+
+    ldflags: ["-Wl,-Bsymbolic"],
+}
+
+subdirs = [
+    "tests",
+    "vndk",
+]
diff --git a/media/libstagefright/codec2/Android.mk b/media/libstagefright/codec2/Android.mk
index ef06ed7..459608f 100644
--- a/media/libstagefright/codec2/Android.mk
+++ b/media/libstagefright/codec2/Android.mk
@@ -1,21 +1,42 @@
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
+# =============================================================================
+# DOCUMENTATION GENERATION
+# =============================================================================
+C2_ROOT := $(call my-dir)
 
-LOCAL_SRC_FILES:= \
-        C2.cpp    \
+C2_DOCS_ROOT := $(OUT_DIR)/target/common/docs/codec2
 
-LOCAL_C_INCLUDES += \
-        $(TOP)/frameworks/av/media/libstagefright/codec2/include \
-        $(TOP)/frameworks/native/include/media/hardware \
+C2_OUT_TEMP := $(PRODUCT_OUT)/gen/ETC/Codec2-docs_intermediates
 
-LOCAL_MODULE:= libstagefright_codec2
-LOCAL_CFLAGS += -Werror -Wall
-LOCAL_CLANG := true
-LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow cfi
-LOCAL_SANITIZE_DIAG := cfi
+C2_DOXY := $(or $(shell command -v doxygen),\
+		$(shell command -v /Applications/Doxygen.app/Contents/Resources/doxygen))
 
-include $(BUILD_SHARED_LIBRARY)
+check-doxygen:
+ifndef C2_DOXY
+	$(error 'doxygen is not available')
+endif
 
-################################################################################
+$(C2_OUT_TEMP)/doxy-api.config: $(C2_ROOT)/docs/doxygen.config
+	# only document include directory, no internal sections
+	sed 's/\(^INPUT *=.*\)/\1include\//; \
+	s/\(^INTERNAL_DOCS *= *\).*/\1NO/; \
+	s/\(^ENABLED_SECTIONS *=.*\)INTERNAL\(.*\).*/\1\2/; \
+	s:\(^OUTPUT_DIRECTORY *= \)out:\1'$(OUT_DIR)':;' \
+		$(C2_ROOT)/docs/doxygen.config > $@
 
-include $(call all-makefiles-under,$(LOCAL_PATH))
+$(C2_OUT_TEMP)/doxy-internal.config: $(C2_ROOT)/docs/doxygen.config
+	sed 's:\(^OUTPUT_DIRECTORY *= \)out\(.*\)api:\1'$(OUT_DIR)'\2internal:;' \
+		$(C2_ROOT)/docs/doxygen.config > $@
+
+docs-api: $(C2_OUT_TEMP)/doxy-api.config check-doxygen
+	echo API docs are building in $(C2_DOCS_ROOT)/api
+	rm -rf $(C2_DOCS_ROOT)/api
+	mkdir -p $(C2_DOCS_ROOT)/api
+	$(C2_DOXY) $(C2_OUT_TEMP)/doxy-api.config
+
+docs-internal: $(C2_OUT_TEMP)/doxy-internal.config check-doxygen
+	echo Internal docs are building in $(C2_DOCS_ROOT)/internal
+	rm -rf $(C2_DOCS_ROOT)/internal
+	mkdir -p $(C2_DOCS_ROOT)/internal
+	$(C2_DOXY) $(C2_OUT_TEMP)/doxy-internal.config
+
+docs-all: docs-api docs-internal
\ No newline at end of file
diff --git a/media/libstagefright/codec2/SimpleC2Component.cpp b/media/libstagefright/codec2/SimpleC2Component.cpp
new file mode 100644
index 0000000..4d75a31
--- /dev/null
+++ b/media/libstagefright/codec2/SimpleC2Component.cpp
@@ -0,0 +1,410 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "SimpleC2Component"
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <inttypes.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Component.h>
+
+namespace android {
+
+std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
+    std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
+    mQueue.pop_front();
+    return work;
+}
+
+void SimpleC2Component::WorkQueue::push_back(std::unique_ptr<C2Work> work) {
+    mQueue.push_back({ std::move(work), NO_DRAIN });
+}
+
+bool SimpleC2Component::WorkQueue::empty() const {
+    return mQueue.empty();
+}
+
+void SimpleC2Component::WorkQueue::clear() {
+    mQueue.clear();
+}
+
+uint32_t SimpleC2Component::WorkQueue::drainMode() const {
+    return mQueue.front().drainMode;
+}
+
+void SimpleC2Component::WorkQueue::markDrain(uint32_t drainMode) {
+    mQueue.push_back({ nullptr, drainMode });
+}
+
+SimpleC2Component::SimpleC2Component(
+        const std::shared_ptr<C2ComponentInterface> &intf)
+    : mIntf(intf) {
+}
+
+c2_status_t SimpleC2Component::setListener_vb(
+        const std::shared_ptr<C2Component::Listener> &listener, c2_blocking_t mayBlock) {
+    Mutexed<ExecState>::Locked state(mExecState);
+    if (state->mState == RUNNING) {
+        if (listener) {
+            return C2_BAD_STATE;
+        } else if (!mayBlock) {
+            return C2_BLOCKING;
+        }
+    }
+    state->mListener = listener;
+    // TODO: wait for listener change to have taken place before returning
+    // (e.g. if there is an ongoing listener callback)
+    return C2_OK;
+}
+
+c2_status_t SimpleC2Component::queue_nb(std::list<std::unique_ptr<C2Work>> * const items) {
+    {
+        Mutexed<ExecState>::Locked state(mExecState);
+        if (state->mState != RUNNING) {
+            return C2_BAD_STATE;
+        }
+    }
+    {
+        Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+        while (!items->empty()) {
+            queue->push_back(std::move(items->front()));
+            items->pop_front();
+        }
+        queue->mCondition.broadcast();
+    }
+    return C2_OK;
+}
+
+c2_status_t SimpleC2Component::announce_nb(const std::vector<C2WorkOutline> &items) {
+    (void) items;
+    return C2_OMITTED;
+}
+
+c2_status_t SimpleC2Component::flush_sm(
+        flush_mode_t flushThrough, std::list<std::unique_ptr<C2Work>>* const flushedWork) {
+    (void) flushThrough;
+    {
+        Mutexed<ExecState>::Locked state(mExecState);
+        if (state->mState != RUNNING) {
+            return C2_BAD_STATE;
+        }
+    }
+    {
+        Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+        queue->incGeneration();
+        while (!queue->empty()) {
+            std::unique_ptr<C2Work> work = queue->pop_front();
+            if (work) {
+                flushedWork->push_back(std::move(work));
+            }
+        }
+    }
+    {
+        Mutexed<PendingWork>::Locked pending(mPendingWork);
+        while (!pending->empty()) {
+            flushedWork->push_back(std::move(pending->begin()->second));
+            pending->erase(pending->begin());
+        }
+    }
+
+    return onFlush_sm();
+}
+
+c2_status_t SimpleC2Component::drain_nb(drain_mode_t drainMode) {
+    if (drainMode == DRAIN_CHAIN) {
+        return C2_OMITTED;
+    }
+    {
+        Mutexed<ExecState>::Locked state(mExecState);
+        if (state->mState != RUNNING) {
+            return C2_BAD_STATE;
+        }
+    }
+    {
+        Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+        queue->markDrain(drainMode);
+        queue->mCondition.broadcast();
+    }
+
+    return C2_OK;
+}
+
+c2_status_t SimpleC2Component::start() {
+    Mutexed<ExecState>::Locked state(mExecState);
+    if (state->mState == RUNNING) {
+        return C2_BAD_STATE;
+    }
+    bool needsInit = (state->mState == UNINITIALIZED);
+    if (needsInit) {
+        state.unlock();
+        c2_status_t err = onInit();
+        if (err != C2_OK) {
+            return err;
+        }
+        state.lock();
+    }
+    if (!state->mThread.joinable()) {
+        mExitRequested = false;
+        state->mThread = std::thread(
+                [](std::weak_ptr<SimpleC2Component> wp) {
+                    while (true) {
+                        std::shared_ptr<SimpleC2Component> thiz = wp.lock();
+                        if (!thiz) {
+                            return;
+                        }
+                        if (thiz->exitRequested()) {
+                            return;
+                        }
+                        thiz->processQueue();
+                    }
+                },
+                shared_from_this());
+    }
+    state->mState = RUNNING;
+    return C2_OK;
+}
+
+c2_status_t SimpleC2Component::stop() {
+    {
+        Mutexed<ExecState>::Locked state(mExecState);
+        if (state->mState != RUNNING) {
+            return C2_BAD_STATE;
+        }
+        state->mState = STOPPED;
+    }
+    {
+        Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+        queue->clear();
+    }
+    {
+        Mutexed<PendingWork>::Locked pending(mPendingWork);
+        pending->clear();
+    }
+    c2_status_t err = onStop();
+    if (err != C2_OK) {
+        return err;
+    }
+    return C2_OK;
+}
+
+c2_status_t SimpleC2Component::reset() {
+    {
+        Mutexed<ExecState>::Locked state(mExecState);
+        state->mState = UNINITIALIZED;
+    }
+    {
+        Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+        queue->clear();
+    }
+    {
+        Mutexed<PendingWork>::Locked pending(mPendingWork);
+        pending->clear();
+    }
+    onReset();
+    return C2_OK;
+}
+
+c2_status_t SimpleC2Component::release() {
+    std::thread releasing;
+    {
+        Mutexed<ExecState>::Locked state(mExecState);
+        releasing = std::move(state->mThread);
+    }
+    mExitRequested = true;
+    releasing.join();
+    onRelease();
+    return C2_OK;
+}
+
+std::shared_ptr<C2ComponentInterface> SimpleC2Component::intf() {
+    return mIntf;
+}
+
+namespace {
+
+std::vector<std::unique_ptr<C2Work>> vec(std::unique_ptr<C2Work> &work) {
+    std::vector<std::unique_ptr<C2Work>> ret;
+    ret.push_back(std::move(work));
+    return ret;
+}
+
+}  // namespace
+
+void SimpleC2Component::finish(
+        uint64_t frameIndex, std::function<void(const std::unique_ptr<C2Work> &)> fillWork) {
+    std::unique_ptr<C2Work> work;
+    {
+        Mutexed<PendingWork>::Locked pending(mPendingWork);
+        if (pending->count(frameIndex) == 0) {
+            ALOGW("unknown frame index: %" PRIu64, frameIndex);
+            return;
+        }
+        work = std::move(pending->at(frameIndex));
+        pending->erase(frameIndex);
+    }
+    if (work) {
+        fillWork(work);
+        Mutexed<ExecState>::Locked state(mExecState);
+        state->mListener->onWorkDone_nb(shared_from_this(), vec(work));
+        ALOGV("returning pending work");
+    }
+}
+
+void SimpleC2Component::processQueue() {
+    std::unique_ptr<C2Work> work;
+    uint64_t generation;
+    int32_t drainMode;
+    {
+        Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+        nsecs_t deadline = systemTime() + ms2ns(250);
+        while (queue->empty()) {
+            nsecs_t now = systemTime();
+            if (now >= deadline) {
+                return;
+            }
+            status_t err = queue.waitForConditionRelative(queue->mCondition, deadline - now);
+            if (err == TIMED_OUT) {
+                return;
+            }
+        }
+
+        generation = queue->generation();
+        drainMode = queue->drainMode();
+        work = queue->pop_front();
+    }
+
+    if (!mOutputBlockPool) {
+        c2_status_t err = [this] {
+            // TODO: don't use query_vb
+            C2StreamFormatConfig::output outputFormat(0u);
+            c2_status_t err = intf()->query_vb(
+                    { &outputFormat },
+                    {},
+                    C2_DONT_BLOCK,
+                    nullptr);
+            if (err != C2_OK) {
+                return err;
+            }
+            err = GetCodec2BlockPool(
+                    (outputFormat.value == C2FormatVideo)
+                    ? C2BlockPool::BASIC_GRAPHIC
+                    : C2BlockPool::BASIC_LINEAR,
+                    shared_from_this(),
+                    &mOutputBlockPool);
+            if (err != C2_OK) {
+                return err;
+            }
+            return C2_OK;
+        }();
+        if (err != C2_OK) {
+            Mutexed<ExecState>::Locked state(mExecState);
+            state->mListener->onError_nb(shared_from_this(), err);
+            return;
+        }
+    }
+
+    if (!work) {
+        c2_status_t err = drain(drainMode, mOutputBlockPool);
+        if (err != C2_OK) {
+            Mutexed<ExecState>::Locked state(mExecState);
+            state->mListener->onError_nb(shared_from_this(), err);
+        }
+        return;
+    }
+
+    process(work, mOutputBlockPool);
+    {
+        Mutexed<WorkQueue>::Locked queue(mWorkQueue);
+        if (queue->generation() != generation) {
+            ALOGW("work from old generation: was %" PRIu64 " now %" PRIu64, generation, queue->generation());
+            work->result = C2_NOT_FOUND;
+            queue.unlock();
+            {
+                Mutexed<ExecState>::Locked state(mExecState);
+                state->mListener->onWorkDone_nb(shared_from_this(), vec(work));
+            }
+            queue.lock();
+            return;
+        }
+    }
+    if (work->worklets_processed != 0u) {
+        Mutexed<ExecState>::Locked state(mExecState);
+        ALOGV("returning this work");
+        state->mListener->onWorkDone_nb(shared_from_this(), vec(work));
+    } else {
+        ALOGV("queue pending work");
+        std::unique_ptr<C2Work> unexpected;
+        {
+            Mutexed<PendingWork>::Locked pending(mPendingWork);
+            uint64_t frameIndex = work->input.ordinal.frame_index;
+            if (pending->count(frameIndex) != 0) {
+                unexpected = std::move(pending->at(frameIndex));
+                pending->erase(frameIndex);
+            }
+            (void) pending->insert({ frameIndex, std::move(work) });
+        }
+        if (unexpected) {
+            unexpected->result = C2_CORRUPTED;
+            Mutexed<ExecState>::Locked state(mExecState);
+            state->mListener->onWorkDone_nb(shared_from_this(), vec(unexpected));
+        }
+    }
+}
+
+namespace {
+
+class GraphicBuffer : public C2Buffer {
+public:
+    GraphicBuffer(
+            const std::shared_ptr<C2GraphicBlock> &block,
+            const C2Rect &crop)
+        : C2Buffer({ block->share(crop, ::android::C2Fence()) }) {}
+};
+
+
+class LinearBuffer : public C2Buffer {
+public:
+    LinearBuffer(
+            const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size)
+        : C2Buffer({ block->share(offset, size, ::android::C2Fence()) }) {}
+};
+
+}  // namespace
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
+        const std::shared_ptr<C2LinearBlock> &block) {
+    return createLinearBuffer(block, block->offset(), block->size());
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
+        const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
+    return std::make_shared<LinearBuffer>(block, offset, size);
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
+        const std::shared_ptr<C2GraphicBlock> &block) {
+    return createGraphicBuffer(block, C2Rect(0, 0, block->width(), block->height()));
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
+        const std::shared_ptr<C2GraphicBlock> &block,
+        const C2Rect &crop) {
+    return std::make_shared<GraphicBuffer>(block, crop);
+}
+
+} // namespace android
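For orientation (a sketch, not part of the patch): a concrete codec hooks into the helper above by overriding the callbacks that start()/stop()/reset()/release() and processQueue() invoke. The pure-virtual signatures live in SimpleC2Component.h, which is not in this change, so the overrides below are inferred from the call sites; MyC2Component is a hypothetical name.

    class MyC2Component : public SimpleC2Component {
    public:
        explicit MyC2Component(const std::shared_ptr<C2ComponentInterface> &intf)
            : SimpleC2Component(intf) {}

    protected:
        c2_status_t onInit() override { return C2_OK; }
        c2_status_t onStop() override { return C2_OK; }
        void onReset() override {}
        void onRelease() override {}
        c2_status_t onFlush_sm() override { return C2_OK; }

        // processQueue() hands one popped work item to process(). Setting
        // worklets_processed makes the work complete synchronously; leaving it
        // at 0 parks the work in mPendingWork until finish() is called with the
        // same frame index.
        void process(const std::unique_ptr<C2Work> &work,
                     const std::shared_ptr<C2BlockPool> &pool) override {
            (void)pool;
            work->result = C2_OK;
            work->worklets_processed = 1u;
        }

        c2_status_t drain(uint32_t drainMode,
                          const std::shared_ptr<C2BlockPool> &pool) override {
            (void)drainMode; (void)pool;
            return C2_OK;
        }
    };

For the asynchronous path, process() would instead leave worklets_processed at 0, remember work->input.ordinal.frame_index, and later call finish(frameIndex, fillWork); the parked C2Work is then filled in by fillWork and reported from finish().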
diff --git a/media/libstagefright/codec2/SimpleC2Interface.cpp b/media/libstagefright/codec2/SimpleC2Interface.cpp
new file mode 100644
index 0000000..f9cab26
--- /dev/null
+++ b/media/libstagefright/codec2/SimpleC2Interface.cpp
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimpleC2Interface"
+#include <utils/Log.h>
+
+#include <SimpleC2Interface.h>
+
+namespace android {
+
+c2_status_t SimpleC2Interface::query_vb(
+        const std::vector<C2Param* const> &stackParams,
+        const std::vector<C2Param::Index> &heapParamIndices,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+    (void)mayBlock;
+
+    for (C2Param* const param : stackParams) {
+        if (param->coreIndex() != C2StreamFormatConfig::CORE_INDEX
+                || !param->forStream()
+                || param->stream() != 0u) {
+            param->invalidate();
+            continue;
+        }
+        if (param->forInput()) {
+            param->updateFrom(mInputFormat);
+        } else {
+            param->updateFrom(mOutputFormat);
+        }
+    }
+    if (heapParams) {
+        heapParams->clear();
+        for (const auto &index : heapParamIndices) {
+            if (index.coreIndex() != C2StreamFormatConfig::CORE_INDEX
+                    || !index.forStream()
+                    || index.stream() != 0u) {
+                heapParams->push_back(nullptr);
+                continue;
+            }
+            if (index.forInput()) {
+                heapParams->push_back(C2Param::Copy(mInputFormat));
+            } else {
+                heapParams->push_back(C2Param::Copy(mOutputFormat));
+            }
+        }
+    }
+
+    return C2_OK;
+}
+
+} // namespace android
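A small caller-side sketch of the query path above (not part of the patch); it mirrors the stack-parameter query already performed in SimpleC2Component::processQueue(). The ::input variant and the value/C2FormatVideo comparison are assumptions based on the forInput() branch and the processQueue() code.

    // 'intf' is a std::shared_ptr<C2ComponentInterface>, e.g. the one returned by intf().
    C2StreamFormatConfig::input  inputFormat(0u);
    C2StreamFormatConfig::output outputFormat(0u);
    c2_status_t err = intf->query_vb(
            { &inputFormat, &outputFormat },  // stack params, updated in place
            {},                               // no heap param indices requested
            C2_DONT_BLOCK,
            nullptr);
    if (err == C2_OK && outputFormat.value == C2FormatVideo) {
        // the component outputs raw video frames
    }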
diff --git a/media/libstagefright/codec2/docs/doxyfilter.sh b/media/libstagefright/codec2/docs/doxyfilter.sh
new file mode 100755
index 0000000..d813153
--- /dev/null
+++ b/media/libstagefright/codec2/docs/doxyfilter.sh
@@ -0,0 +1,101 @@
+#!/usr/bin/env python3
+import re, sys
+
+global in_comment, current, indent, hold
+in_comment, current, indent, hold = False, None, '', []
+
+class ChangeCStyleCommentsToDoxy:
+    def dump_hold(self):
+        global hold
+        for h in hold:
+            print(h, end='')
+        hold[:] = []
+
+    def doxy_hold():
+        global current, hold
+        if current == '//':
+            for h in hold:
+                print(re.sub(r'^( *//(?!/))', r'\1/', h), end='')
+        else:
+            first = True
+            for h in hold:
+                if first:
+                    h = re.sub(r'^( */[*](?![*]))', r'\1*', h)
+                    first = False
+                print(h, end='')
+        hold[:] = []
+
+    def process_comment(self, t, ind, line):
+        global current, indent, hold
+        if t != current or ind not in (indent, indent + ' '):
+            self.dump_hold()
+            current, indent = t, ind
+        hold.append(line)
+
+    def process_line(self, ind, line):
+        global current, indent
+        if ind in (indent, ''):
+            self.doxy_hold()
+        else:
+            self.dump_hold()
+        current, indent = None, None
+        print(line, end='')
+
+    def process(self, input, path):
+        global in_comment
+        for line in input:
+            ind = re.match(r'^( *)', line).group(1)
+            if in_comment:
+                # TODO: this is not quite right, but good enough
+                m = re.match(r'^ *[*]/', line)
+                if m:
+                    self.process_comment('/*', ind, line)
+                    in_comment = False
+                else:
+                    self.process_comment('/*', ind, line)
+                continue
+            m = re.match(r'^ *//', line)
+            if m:
+                # one-line comment
+                self.process_comment('//', ind, line)
+                continue
+            m = re.match(r'^ */[*]', line)
+            if m:
+                # multi-line comment
+                self.process_comment('/*', ind, line)
+                # TODO: this is not quite right, but good enough
+                in_comment = not re.match(r'^ *[*]/', line)
+                continue
+            self.process_line(ind, line)
+
+class AutoGroup:
+    def process(self, input, path):
+        if '/codec2/include/' in path:
+            group = 'API Codec2 API'
+        elif False:
+            return
+        elif '/codec2/vndk/' in path:
+            group = 'VNDK Platform provided glue'
+        elif '/codec2/tests/' in path:
+            group = 'Tests Unit tests'
+        else:
+            group = 'Random Misc. sandbox'
+
+        print('#undef __APPLE__')
+
+        for line in input:
+            if re.match(r'^namespace android {', line):
+                print(line, end='')
+                print()
+                print(r'/// \addtogroup {}'.format(group))
+                print(r'/// @{')
+                continue
+            elif re.match(r'^} +// +namespace', line):
+                print(r'/// @}')
+                print()
+            print(line, end='')
+
+P = AutoGroup()
+for path in sys.argv[1:]:
+    with open(path, 'rt') as input:
+        P.process(input, path)
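For illustration, the AutoGroup filter turns a header under codec2/include/ into roughly the following on stdout (sketch; only the injected lines differ from the input):

    #undef __APPLE__
    // ... original file contents ...
    namespace android {

    /// \addtogroup API Codec2 API
    /// @{

    // ... original declarations ...

    /// @}

    }  // namespace android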
diff --git a/media/libstagefright/codec2/docs/doxygen.config b/media/libstagefright/codec2/docs/doxygen.config
new file mode 100644
index 0000000..11a921f
--- /dev/null
+++ b/media/libstagefright/codec2/docs/doxygen.config
@@ -0,0 +1,2446 @@
+# Doxyfile 1.8.11
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project.
+#
+# All text after a double hash (##) is considered a comment and is placed in
+# front of the TAG it is preceding.
+#
+# All text after a single hash (#) is considered a comment and will be ignored.
+# The format is:
+# TAG = value [value, ...]
+# For lists, items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (\" \").
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all text
+# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
+# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
+# for the list of possible encodings.
+# The default value is: UTF-8.
+
+DOXYFILE_ENCODING      = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
+
+PROJECT_NAME           = Codec2
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
+
+PROJECT_NUMBER         = 
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give viewer a
+# quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF          = 
+
+# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
+# in the documentation. The maximum height of the logo should not exceed 55
+# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
+# the logo to the output directory.
+
+PROJECT_LOGO           = 
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
+# into which the generated documentation will be written. If a relative path is
+# entered, it will be relative to the location where doxygen was started. If
+# left blank the current directory will be used.
+
+OUTPUT_DIRECTORY       = out/target/common/docs/codec2/api
+
+# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
+# directories (in 2 levels) under the output directory of each output format and
+# will distribute the generated files over these directories. Enabling this
+# option can be useful when feeding doxygen a huge amount of source files, where
+# putting all generated files in the same directory would otherwise cause
+# performance problems for the file system.
+# The default value is: NO.
+
+CREATE_SUBDIRS         = NO
+
+# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
+# characters to appear in the names of generated files. If set to NO, non-ASCII
+# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
+# U+3044.
+# The default value is: NO.
+
+ALLOW_UNICODE_NAMES    = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
+# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
+# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
+# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
+# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
+# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
+# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
+# Ukrainian and Vietnamese.
+# The default value is: English.
+
+OUTPUT_LANGUAGE        = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
+# descriptions after the members that are listed in the file and class
+# documentation (similar to Javadoc). Set to NO to disable this.
+# The default value is: YES.
+
+BRIEF_MEMBER_DESC      = YES
+
+# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
+# description of a member or function before the detailed description
+#
+# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+# The default value is: YES.
+
+REPEAT_BRIEF           = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator that is
+# used to form the text in various listings. Each string in this list, if found
+# as the leading text of the brief description, will be stripped from the text
+# and the result, after processing the whole list, is used as the annotated
+# text. Otherwise, the brief description is used as-is. If left blank, the
+# following values are used ($name is automatically replaced with the name of
+# the entity):The $name class, The $name widget, The $name file, is, provides,
+# specifies, contains, represents, a, an and the.
+
+ABBREVIATE_BRIEF       = "The $name class" \
+                         "The $name widget" \
+                         "The $name file" \
+                         is \
+                         provides \
+                         specifies \
+                         contains \
+                         represents \
+                         a \
+                         an \
+                         the
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# doxygen will generate a detailed section even if there is only a brief
+# description.
+# The default value is: NO.
+
+ALWAYS_DETAILED_SEC    = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+# The default value is: NO.
+
+INLINE_INHERITED_MEMB  = YES
+
+# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
+# before files name in the file list and in the header files. If set to NO the
+# shortest path that makes the file name unique will be used
+# The default value is: YES.
+
+FULL_PATH_NAMES        = YES
+
+# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
+# Stripping is only done if one of the specified strings matches the left-hand
+# part of the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the path to
+# strip.
+#
+# Note that you can specify absolute paths here, but also relative paths, which
+# will be relative from the directory where doxygen is started.
+# This tag requires that the tag FULL_PATH_NAMES is set to YES.
+
+STRIP_FROM_PATH        = frameworks/av/media/libstagefright/codec2
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
+# path mentioned in the documentation of a class, which tells the reader which
+# header file to include in order to use a class. If left blank only the name of
+# the header file containing the class definition is used. Otherwise one should
+# specify the list of include paths that are normally passed to the compiler
+# using the -I flag.
+
+STRIP_FROM_INC_PATH    = 
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful is your file systems doesn't
+# support long names like on DOS, Mac, or CD-ROM.
+# The default value is: NO.
+
+SHORT_NAMES            = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
+
+JAVADOC_AUTOBRIEF      = YES
+
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
+
+QT_AUTOBRIEF           = YES
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that rational rose comments are
+# not recognized any more.
+# The default value is: NO.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
+
+INHERIT_DOCS           = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
+# page for each member. If set to NO, the documentation of a member will be part
+# of the file/class/namespace that contains it.
+# The default value is: NO.
+
+SEPARATE_MEMBER_PAGES  = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
+# uses this value to replace tabs by spaces in code fragments.
+# Minimum value: 1, maximum value: 16, default value: 4.
+
+TAB_SIZE               = 4
+
+# This tag can be used to specify a number of aliases that act as commands in
+# the documentation. An alias has the form:
+# name=value
+# For example adding
+# "sideeffect=@par Side Effects:\n"
+# will allow you to put the command \sideeffect (or @sideeffect) in the
+# documentation, which will result in a user-defined paragraph with heading
+# "Side Effects:". You can put \n's in the value part of an alias to insert
+# newlines.
+
+ALIASES                = 
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding "class=itcl::class"
+# will allow you to use the command class in the itcl::class meaning.
+
+TCL_SUBST              = 
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
+# only. Doxygen will then generate output that is more tailored for C. For
+# instance, some of the names that are used will be different. The list of all
+# members will be omitted, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_FOR_C  = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
+# Python sources only. Doxygen will then generate output that is more tailored
+# for that language. For instance, namespaces will be presented as packages,
+# qualified scopes will look different, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_JAVA   = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
+
+OPTIMIZE_FOR_FORTRAN   = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL   = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
+# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
+# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
+# Fortran. In the latter case the parser tries to guess whether the code is fixed
+# or free formatted code, this is the default for Fortran type files), VHDL. For
+# instance to make doxygen treat .inc files as Fortran files (default is PHP),
+# and .f files as C (default is Fortran), use: inc=Fortran f=C.
+#
+# Note: For files without extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen.
+
+EXTENSION_MAPPING      = 
+
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibilities issues.
+# The default value is: YES.
+
+MARKDOWN_SUPPORT       = YES
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word or
+# globally by setting AUTOLINK_SUPPORT to NO.
+# The default value is: YES.
+
+AUTOLINK_SUPPORT       = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
+
+BUILTIN_STL_SUPPORT    = YES
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+# The default value is: NO.
+
+CPP_CLI_SUPPORT        = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
+
+SIP_SUPPORT            = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will make
+# doxygen to replace the get and set methods by a property in the documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
+
+IDL_PROPERTY_SUPPORT   = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+# The default value is: NO.
+
+DISTRIBUTE_GROUP_DOC   = NO
+
+# If one adds a struct or class to a group and this option is enabled, then also
+# any nested class or struct is added to the same group. By default this option
+# is disabled and one has to add nested compounds explicitly via \ingroup.
+# The default value is: NO.
+
+GROUP_NESTED_COMPOUNDS = NO
+
+# Set the SUBGROUPING tag to YES to allow class member groups of the same type
+# (for instance a group of public functions) to be put as a subgroup of that
+# type (e.g. under the Public Functions section). Set it to NO to prevent
+# subgrouping. Alternatively, this can be done per class using the
+# \nosubgrouping command.
+# The default value is: YES.
+
+SUBGROUPING            = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
+# are shown inside the group in which they are included (e.g. using \ingroup)
+# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
+# and RTF).
+#
+# Note that this feature does not work in combination with
+# SEPARATE_MEMBER_PAGES.
+# The default value is: NO.
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
+# with only public data fields or simple typedef fields will be shown inline in
+# the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO, structs, classes, and unions are shown on a separate page (for HTML and
+# Man pages) or section (for LaTeX and RTF).
+# The default value is: NO.
+
+INLINE_SIMPLE_STRUCTS  = NO
+
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically be
+# useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
+
+TYPEDEF_HIDES_STRUCT   = YES
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
+
+LOOKUP_CACHE_SIZE      = 0
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
+
+EXTRACT_ALL            = YES
+
+# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIVATE        = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
+# scope will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PACKAGE        = NO
+
+# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
+
+EXTRACT_STATIC         = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO,
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
+
+EXTRACT_LOCAL_CLASSES  = YES
+
+# This flag is only useful for Objective-C code. If set to YES, local methods,
+# which are defined in the implementation section but not in the interface are
+# included in the documentation. If set to NO, only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS  = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default anonymous namespace
+# are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES   = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS     = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO, these classes will be included in the various overviews. This option
+# has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES     = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# (class|struct|union) declarations. If set to NO, these declarations will be
+# included in the documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS  = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO, these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS      = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS          = YES
+
+# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
+# names in lower-case letters. If set to YES, upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES       = NO
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES, the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES       = YES
+
+# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
+# append additional text to a page's title, such as Class Reference. If set to
+# YES the compound reference will be hidden.
+# The default value is: NO.
+
+HIDE_COMPOUND_REFERENCE= NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES     = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC  = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+
+FORCE_LOCAL_INCLUDES   = NO
+
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
+
+INLINE_INFO            = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order.
+# The default value is: YES.
+
+SORT_MEMBER_DOCS       = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
+
+SORT_BRIEF_DOCS        = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
+
+SORT_GROUP_NAMES       = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
+
+SORT_BY_SCOPE_NAME     = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
+
+STRICT_PROTO_MATCHING  = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
+# list. This list is created by putting \todo commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TODOLIST      = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
+# list. This list is created by putting \test commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TESTLIST      = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
+
+GENERATE_BUGLIST       = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
+
+ENABLED_SECTIONS       = INTERNAL
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
+
+MAX_INITIALIZER_LINES  = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES, the
+# list will mention the files that were used to generate the documentation.
+# The default value is: YES.
+
+SHOW_USED_FILES        = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
+
+SHOW_FILES             = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
+
+SHOW_NAMESPACES        = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command command input-file, where command is the value of the
+# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided
+# by doxygen. Whatever the program writes to standard output is used as the file
+# version. For an example see the documentation.
+
+FILE_VERSION_FILTER    = 
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE            = 
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. See also \cite for info how to create references.
+
+CITE_BIB_FILES         = 
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET                  = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS               = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED   = YES
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR      = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO, doxygen will only warn about wrong or incomplete
+# parameter documentation, but not about the absence of documentation.
+# The default value is: NO.
+
+WARN_NO_PARAMDOC       = NO
+
+# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
+# a warning is encountered.
+# The default value is: NO.
+
+WARN_AS_ERROR          = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# The default value is: $file:$line: $text.
+
+WARN_FORMAT            = "$file:$line: $text"
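+
+# Purely as an example (the default format above is kept), an IDE-friendly
+# variant using the same $file, $line and $text tags could look like:
+# WARN_FORMAT = "$file($line): $text"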
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr).
+
+WARN_LOGFILE           = 
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
+# Note: If this tag is empty the current directory is searched.
+
+INPUT                  = frameworks/av/media/libstagefright/codec2/
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see: http://www.gnu.org/software/libiconv) for the list of
+# possible encodings.
+# The default value is: UTF-8.
+
+INPUT_ENCODING         = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# read by doxygen.
+#
+# If left blank the following patterns are tested: *.c, *.cc, *.cxx, *.cpp,
+# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
+# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
+# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f, *.for, *.tcl,
+# *.vhd, *.vhdl, *.ucf, *.qsf, *.as and *.js.
+
+FILE_PATTERNS          = C2*.c \
+                         C2*.cpp \
+                         C2*.h
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
+
+RECURSIVE              = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE                = 
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+# The default value is: NO.
+
+EXCLUDE_SYMLINKS       = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS       = ._*
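+
+# (The ._* pattern above most likely filters out macOS "._" AppleDouble
+# metadata files that can appear alongside sources when the tree is checked
+# out on a Mac volume.)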
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS        = 
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH           = 
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS       = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE      = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH             = 
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER           = frameworks/av/media/libstagefright/codec2/docs/doxyfilter.sh
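+
+# With the filter above, doxygen effectively runs the following for each input
+# file and reads the filtered source from its standard output (illustration of
+# the documented <filter> <input-file> invocation):
+# frameworks/av/media/libstagefright/codec2/docs/doxyfilter.sh <input-file>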
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+FILTER_PATTERNS        = 
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES    = YES
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS = 
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project hosted on, for
+# instance, GitHub and want to reuse the introduction page also for the doxygen
+# output.
+
+USE_MDFILE_AS_MAINPAGE = 
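+
+# Example only; no markdown main page is configured for this project:
+# USE_MDFILE_AS_MAINPAGE = README.md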
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER         = YES
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES         = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS    = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# function all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = YES
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+
+REFERENCES_RELATION    = YES
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS        = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen's built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see http://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+USE_HTAGS              = NO
+
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
+
+VERBATIM_HEADERS       = YES
+
+# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the
+# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the
+# cost of reduced performance. This can be particularly helpful with template
+# rich C++ code for which doxygen's built-in parser lacks the necessary type
+# information.
+# Note: The availability of this option depends on whether or not doxygen was
+# generated with the -Duse-libclang=ON option for CMake.
+# The default value is: NO.
+
+CLANG_ASSISTED_PARSING = YES
+
+# If clang assisted parsing is enabled you can provide the compiler with command
+# line options that you would normally use when invoking the compiler. Note that
+# the include paths will already be set by doxygen for the files and directories
+# specified with INPUT and INCLUDE_PATH.
+# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES.
+
+CLANG_OPTIONS          = -std=c++11
+
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
+
+ALPHABETICAL_INDEX     = YES
+
+# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
+# which the alphabetical index list will be split.
+# Minimum value: 1, maximum value: 20, default value: 5.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+COLS_IN_ALPHA_INDEX    = 5
+
+# In case all classes in a project start with a common prefix, all classes will
+# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
+# can be used to specify a prefix (or a list of prefixes) that should be ignored
+# while generating the index headers.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+IGNORE_PREFIX          = 
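+
+# Since the documented classes here all match the C2* file patterns above, a
+# possible (but not enabled) setting would be:
+# IGNORE_PREFIX = C2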
+
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
+# The default value is: YES.
+
+GENERATE_HTML          = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_OUTPUT            = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FILE_EXTENSION    = .html
+
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# To get valid HTML, the header file must include any scripts and style sheets
+# that doxygen needs, which depend on the configuration options used (e.g. the
+# setting GENERATE_TREEVIEW). It is highly recommended to start with a default
+# header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER            = 
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER            = 
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET        = 
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# cascading style sheets that are included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list). For an example see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET  = 
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES       = 
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the style sheet and background images according to
+# this color. Hue is specified as an angle on a colorwheel, see
+# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# is purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE    = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use grayscales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_SAT    = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_GAMMA  = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting this
+# to YES can help to show when doxygen was last run and thus if the
+# documentation is up to date.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_TIMESTAMP         = NO
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_SECTIONS  = YES
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries to 1 will produce a fully collapsed tree by default. 0 is a special
+# value representing an infinite number of entries and will result in a fully
+# expanded tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see: http://developer.apple.com/tools/xcode/), introduced with
+# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
+# Makefile in the HTML output directory. Running make will produce the docset in
+# that directory and running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_DOCSET        = NO
+
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDNAME        = "Doxygen generated docs"
+
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_BUNDLE_ID       = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_NAME  = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# Windows.
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP      = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE               = 
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty,
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with the full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION           = 
+
+# The GENERATE_CHI flag controls if a separate .chi index file is generated
+# (YES) or that it should be included in the master .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI           = NO
+
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING     = 
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated
+# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
+# enables the Previous and Next buttons.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC             = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND             = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP           = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE               = 
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE          = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
+# Help Project output. For more information please see Qt Help Project / Virtual
+# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER     = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME   = 
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS  = 
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS  = 
+
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION           = 
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated, together with the HTML files, they form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files needs
+# to be copied into the plugins directory of eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP   = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID         = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX          = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW      = YES
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE   = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH         = 250
+
+# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW    = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE       = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly for IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files in
+# the HTML output directory before the changes have effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_TRANSPARENT    = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# http://www.mathjax.org) which uses client side Javascript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output. When
+# enabled you may also need to install MathJax separately and configure the path
+# to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX            = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT         = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from http://www.mathjax.org before deployment.
+# The default value is: http://cdn.mathjax.org/mathjax/latest.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS     = 
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE       = 
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow; in that
+# case enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
+# key> to jump into the search results window, the results can be navigated
+# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
+# the search. The filter options can be selected when the cursor is inside the
+# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
+# to select a filter and <Enter> or <escape> to activate or cancel the filter
+# option.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SEARCHENGINE           = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript. There
+# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
+# setting. When disabled, doxygen will generate a PHP script for searching and
+# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
+# and searching needs to be provided by external tools. See the section
+# "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SERVER_BASED_SEARCH    = NO
+
+# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
+# search results.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/).
+#
+# See the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH        = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/). See the section "External Indexing and
+# Searching" for details.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHENGINE_URL       = 
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+# The default file is: searchdata.xml.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHDATA_FILE        = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH_ID     = 
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
+# to a relative location where the documentation can be found. The format is:
+# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTRA_SEARCH_MAPPINGS  = 
+
+#---------------------------------------------------------------------------
+# Configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
+# The default value is: YES.
+
+GENERATE_LATEX         = NO
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_OUTPUT           = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked.
+#
+# Note that when enabling USE_PDFLATEX this option is only used for generating
+# bitmaps for formulas in the HTML output, but not in the Makefile that is
+# written to the output directory.
+# The default file is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_CMD_NAME         = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
+# index for LaTeX.
+# The default file is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+MAKEINDEX_CMD_NAME     = makeindex
+
+# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+COMPACT_LATEX          = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used by the
+# printer.
+# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
+# 14 inches) and executive (7.25 x 10.5 inches).
+# The default value is: a4.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PAPER_TYPE             = a4
+
+# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
+# that should be included in the LaTeX output. The package can be specified just
+# by its name or with the correct syntax as to be used with the LaTeX
+# \usepackage command. To get the times font for instance you can specify:
+# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}
+# To use the option intlimits with the amsmath package you can specify:
+# EXTRA_PACKAGES=[intlimits]{amsmath}
+# If left blank no extra packages will be included.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+EXTRA_PACKAGES         = 
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
+# generated LaTeX document. The header should contain everything until the first
+# chapter. If it is left blank doxygen will generate a standard header. See
+# section "Doxygen usage" for information on how to let doxygen write the
+# default header to a separate file.
+#
+# Note: Only use a user-defined header if you know what you are doing! The
+# following commands have a special meaning inside the header: $title,
+# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
+# $projectbrief, $projectlogo. Doxygen will replace $title with the empty
+# string, for the replacement values of the other commands the user is referred
+# to HTML_HEADER.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HEADER           = 
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
+# generated LaTeX document. The footer should contain everything after the last
+# chapter. If it is left blank doxygen will generate a standard footer. See
+# LATEX_HEADER for more information on how to generate a default footer and what
+# special commands can be used inside the footer.
+#
+# Note: Only use a user-defined footer if you know what you are doing!
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_FOOTER           = 
+
+# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# LaTeX style sheets that are included after the standard style sheets created
+# by doxygen. Using this option one can overrule certain style aspects. Doxygen
+# will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list).
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_STYLESHEET = 
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the LATEX_OUTPUT output
+# directory. Note that the files will be copied as-is; there are no commands or
+# markers available.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_FILES      = 
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
+# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
+# contain links (just like the HTML output) instead of page references. This
+# makes the output suitable for online browsing using a PDF viewer.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PDF_HYPERLINKS         = YES
+
+# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
+# the PDF file directly from the LaTeX files. Set this option to YES, to get a
+# higher quality PDF documentation.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+USE_PDFLATEX           = YES
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep running
+# if errors occur, instead of asking the user for help. This option is also used
+# when generating formulas in HTML.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BATCHMODE        = NO
+
+# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
+# index chapters (such as File Index, Compound Index, etc.) in the output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HIDE_INDICES     = NO
+
+# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
+# code with syntax highlighting in the LaTeX output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_SOURCE_CODE      = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. See
+# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# The default value is: plain.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BIB_STYLE        = plain
+
+# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
+# page will contain the date and time when the page was generated. Setting this
+# to NO can help when comparing the output of multiple runs.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_TIMESTAMP        = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
+# RTF output is optimized for Word 97 and may not look too pretty with other RTF
+# readers/editors.
+# The default value is: NO.
+
+GENERATE_RTF           = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: rtf.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_OUTPUT             = rtf
+
+# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+COMPACT_RTF            = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
+# contain hyperlink fields. The RTF file will contain links (just like the HTML
+# output) instead of page references. This makes the output suitable for online
+# browsing using Word or some other Word compatible readers that support those
+# fields.
+#
+# Note: WordPad (write) and others do not support links.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_HYPERLINKS         = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's config
+# file, i.e. a series of assignments. You only have to provide replacements,
+# missing definitions are set to their default value.
+#
+# See also section "Doxygen usage" for information on how to generate the
+# default style sheet that doxygen normally uses.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_STYLESHEET_FILE    = 
+
+# Set optional variables used in the generation of an RTF document. Syntax is
+# similar to doxygen's config file. A template extensions file can be generated
+# using doxygen -e rtf extensionFile.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_EXTENSIONS_FILE    = 
+
+# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code
+# with syntax highlighting in the RTF output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_SOURCE_CODE        = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
+# classes and files.
+# The default value is: NO.
+
+GENERATE_MAN           = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it. A directory man3 will be created inside the directory specified by
+# MAN_OUTPUT.
+# The default directory is: man.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_OUTPUT             = man
+
+# The MAN_EXTENSION tag determines the extension that is added to the generated
+# man pages. In case the manual section does not start with a number, the number
+# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
+# optional.
+# The default value is: .3.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_EXTENSION          = .3
+
+# The MAN_SUBDIR tag determines the name of the directory created within
+# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
+# MAN_EXTENSION with the initial . removed.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_SUBDIR             = 
+
+# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
+# will generate one additional man file for each entity documented in the real
+# man page(s). These additional files only source the real man page, but without
+# them the man command would be unable to find the correct page.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_LINKS              = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
+# captures the structure of the code including all documentation.
+# The default value is: NO.
+
+GENERATE_XML           = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: xml.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_OUTPUT             = xml
+
+# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
+# listings (including syntax highlighting and cross-referencing information) to
+# the XML output. Note that enabling this will significantly increase the size
+# of the XML output.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_PROGRAMLISTING     = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
+# that can be used to generate PDF.
+# The default value is: NO.
+
+GENERATE_DOCBOOK       = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it.
+# The default directory is: docbook.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_OUTPUT         = docbook
+
+# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the
+# program listings (including syntax highlighting and cross-referencing
+# information) to the DOCBOOK output. Note that enabling this will significantly
+# increase the size of the DOCBOOK output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_PROGRAMLISTING = NO
+
+#---------------------------------------------------------------------------
+# Configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
+# AutoGen Definitions (see http://autogen.sf.net) file that captures the
+# structure of the code including all documentation. Note that this feature is
+# still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_AUTOGEN_DEF   = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
+# file that captures the structure of the code including all documentation.
+#
+# Note that this feature is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_PERLMOD       = NO
+
+# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
+# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
+# output from the Perl module output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_LATEX          = NO
+
+# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
+# formatted so it can be parsed by a human reader. This is useful if you want to
+# understand what is going on. On the other hand, if this tag is set to NO, the
+# size of the Perl module output will be much smaller and Perl will parse it
+# just the same.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_PRETTY         = YES
+
+# The names of the make variables in the generated doxyrules.make file are
+# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
+# so different doxyrules.make files included by the same Makefile don't
+# overwrite each other's variables.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_MAKEVAR_PREFIX = 
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
+# C-preprocessor directives found in the sources and include files.
+# The default value is: YES.
+
+ENABLE_PREPROCESSING   = YES
+
+# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
+# in the source code. If set to NO, only conditional compilation will be
+# performed. Macro expansion can be done in a controlled way by setting
+# EXPAND_ONLY_PREDEF to YES.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+MACRO_EXPANSION        = YES
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
+# the macro expansion is limited to the macros specified with the PREDEFINED and
+# EXPAND_AS_DEFINED tags.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_ONLY_PREDEF     = YES
+
+# If the SEARCH_INCLUDES tag is set to YES, the include files in the
+# INCLUDE_PATH will be searched if a #include is found.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SEARCH_INCLUDES        = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
+
+INCLUDE_PATH           = /Volumes/A/aosp/prebuilts/clang/darwin-x86/host/3.6/lib/clang/3.6/include \
+                         /Volumes/A/aosp/external/libcxx/include \
+                         /Volumes/A/aosp/bionic/libc/include \
+                         /Volumes/A/aosp/bionic/libc/kernel/uapi \
+                         /Volumes/A/aosp/bionic/libc/kernel/uapi/asm-arm64 \
+                         /Volumes/A/aosp/external/gtest
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will be
+# used.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+INCLUDE_FILE_PATTERNS  = 
+
+# The PREDEFINED tag can be used to specify one or more macro names that are
+# defined before the preprocessor is started (similar to the -D option of e.g.
+# gcc). The argument of the tag is a list of macros of the form: name or
+# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
+# is assumed. To prevent a macro definition from being undefined via #undef or
+# recursively expanded use the := operator instead of the = operator.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+PREDEFINED             = __APPLE__= \
+                         __ANDROID__=1 \
+                         ANDROID:=1 \
+                         __unused=
+
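(Illustrative note, not part of the generated config or of this patch: the effect of the
predefinitions above can be seen on a hypothetical header fragment. Because __unused is
predefined to expand to nothing and __ANDROID__ to 1, doxygen's preprocessor parses the
declaration cleanly and keeps the Android-only section visible in the documentation.)

    // Hypothetical fragment, shown only to illustrate the PREDEFINED settings above.
    #ifdef __ANDROID__                    // kept in the docs: __ANDROID__ is predefined to 1
    void logDropped(int count __unused);  // parses cleanly: __unused expands to nothing
    #endif
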
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
+# tag can be used to specify a list of macro names that should be expanded. The
+# macro definition that is found in the sources will be used. Use the PREDEFINED
+# tag if you want to use a different macro definition that overrules the
+# definition found in the source code.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_AS_DEFINED      = DEFINE_FLEXIBLE_METHODS \
+                         DEFINE_CAST_OPERATORS
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
+# remove all references to function-like macros that are alone on a line, have
+# an all uppercase name, and do not end with a semicolon. Such function macros
+# are typically used for boiler-plate code, and will confuse the parser if not
+# removed.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SKIP_FUNCTION_MACROS   = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES tag can be used to specify one or more tag files. For each tag
+# file the location of the external documentation should be added. The format of
+# a tag file without this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where loc1 and loc2 can be relative or absolute paths or URLs. See the
+# section "Linking to external documentation" for more information about the use
+# of tag files.
+# Note: Each tag file must have a unique name (where the name does NOT include
+# the path). If a tag file is not located in the directory in which doxygen is
+# run, you must also specify the path to the tagfile here.
+
+TAGFILES               = 
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
+# tag file that is based on the input files it reads. See section "Linking to
+# external documentation" for more information about the usage of tag files.
+
+GENERATE_TAGFILE       = 
+
+# If the ALLEXTERNALS tag is set to YES, all external class will be listed in
+# the class index. If set to NO, only the inherited external classes will be
+# listed.
+# The default value is: NO.
+
+ALLEXTERNALS           = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will be
+# listed.
+# The default value is: YES.
+
+EXTERNAL_GROUPS        = YES
+
+# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
+# the related pages index. If set to NO, only the current project's pages will
+# be listed.
+# The default value is: YES.
+
+EXTERNAL_PAGES         = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of 'which perl').
+# The default file (with absolute path) is: /usr/bin/perl.
+
+PERL_PATH              = /usr/bin/perl
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
+# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
+# NO turns the diagrams off. Note that this option also works with HAVE_DOT
+# disabled, but it is recommended to install and use dot, since it yields more
+# powerful graphs.
+# The default value is: YES.
+
+CLASS_DIAGRAMS         = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see:
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH            = 
+
+# You can include diagrams made with dia in doxygen documentation. Doxygen will
+# then run dia to produce the diagram and insert it in the documentation. The
+# DIA_PATH tag allows you to specify the directory where the dia binary resides.
+# If left empty dia is assumed to be found in the default search path.
+
+DIA_PATH               = 
+
+# If set to YES the inheritance and collaboration graphs will hide inheritance
+# and usage relations if the target is undocumented or is not a class.
+# The default value is: YES.
+
+HIDE_UNDOC_RELATIONS   = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz (see:
+# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# Bell Labs. The other options in this section have no effect if this option is
+# set to NO
+# The default value is: NO.
+
+HAVE_DOT               = NO
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
+# to run in parallel. When set to 0 doxygen will base this on the number of
+# processors available in the system. You can set it explicitly to a value
+# larger than 0 to get control over the balance between CPU load and processing
+# speed.
+# Minimum value: 0, maximum value: 32, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_NUM_THREADS        = 0
+
+# When you want a differently looking font in the dot files that doxygen
+# generates you can specify the font name using DOT_FONTNAME. You need to make
+# sure dot is able to find the font, which can be done by putting it in a
+# standard location or by setting the DOTFONTPATH environment variable or by
+# setting DOT_FONTPATH to the directory containing the font.
+# The default value is: Helvetica.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTNAME           = Helvetica
+
+# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
+# dot graphs.
+# Minimum value: 4, maximum value: 24, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTSIZE           = 10
+
+# By default doxygen will tell dot to use the default font as specified with
+# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
+# the path where dot can find it using this tag.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTPATH           = 
+
+# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
+# each documented class showing the direct and indirect inheritance relations.
+# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CLASS_GRAPH            = YES
+
+# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
+# graph for each documented class showing the direct and indirect implementation
+# dependencies (inheritance, containment, and class references variables) of the
+# class with other documented classes.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+COLLABORATION_GRAPH    = YES
+
+# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
+# groups, showing the direct groups dependencies.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GROUP_GRAPHS           = YES
+
+# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LOOK               = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
+# class node. If there are many fields or methods and many nodes the graph may
+# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
+# number of items for each type to make the size more manageable. Set this to 0
+# for no limit. Note that the threshold may be exceeded by 50% before the limit
+# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
+# but if the number exceeds 15, the total amount of fields shown is limited to
+# 10.
+# Minimum value: 0, maximum value: 100, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LIMIT_NUM_FIELDS   = 10
+
+# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
+# collaboration graphs will show the relations between templates and their
+# instances.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+TEMPLATE_RELATIONS     = NO
+
+# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
+# YES then doxygen will generate a graph for each documented file showing the
+# direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDE_GRAPH          = YES
+
+# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
+# set to YES then doxygen will generate a graph for each documented file showing
+# the direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDED_BY_GRAPH      = YES
+
+# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command. Disabling a call graph can be
+# accomplished by means of the command \hidecallgraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALL_GRAPH             = NO
+
+# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable caller graphs for selected
+# functions only using the \callergraph command. Disabling a caller graph can be
+# accomplished by means of the command \hidecallergraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALLER_GRAPH           = NO
+
+# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a graphical
+# hierarchy of all classes instead of a textual one.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GRAPHICAL_HIERARCHY    = YES
+
+# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
+# dependencies a directory has on other directories in a graphical way. The
+# dependency relations are determined by the #include relations between the
+# files in the directories.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DIRECTORY_GRAPH        = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. For an explanation of the image formats see the section
+# output formats in the documentation of the dot tool (Graphviz (see:
+# http://www.graphviz.org/)).
+# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
+# to make the SVG files visible in IE 9+ (other browsers do not have this
+# requirement).
+# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo,
+# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and
+# png:gdiplus:gdiplus.
+# The default value is: png.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_IMAGE_FORMAT       = png
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+#
+# Note that this requires a modern browser other than Internet Explorer. Tested
+# and working are Firefox, Chrome, Safari, and Opera.
+# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
+# the SVG files visible. Older versions of IE do not have SVG support.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INTERACTIVE_SVG        = NO
+
+# The DOT_PATH tag can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_PATH               = 
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the \dotfile
+# command).
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOTFILE_DIRS           = 
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the \mscfile
+# command).
+
+MSCFILE_DIRS           = 
+
+# The DIAFILE_DIRS tag can be used to specify one or more directories that
+# contain dia files that are included in the documentation (see the \diafile
+# command).
+
+DIAFILE_DIRS           = 
+
+# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
+# path where java can find the plantuml.jar file. If left blank, it is assumed
+# PlantUML is not used or called during a preprocessing step. Doxygen will
+# generate a warning when it encounters a \startuml command in this case and
+# will not generate output for the diagram.
+
+PLANTUML_JAR_PATH      = 
+
+# When using plantuml, the specified paths are searched for files specified by
+# the !include statement in a plantuml block.
+
+PLANTUML_INCLUDE_PATH  = 
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
+# that will be shown in the graph. If the number of nodes in a graph becomes
+# larger than this value, doxygen will truncate the graph, which is visualized
+# by representing a node as a red box. Note that if the number of direct
+# children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES, the graph will not be shown at all. Also note that
+# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# Minimum value: 0, maximum value: 10000, default value: 50.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_GRAPH_MAX_NODES    = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
+# generated by dot. A depth value of 3 means that only nodes reachable from the
+# root by following a path via at most 3 edges will be shown. Nodes that lie
+# further from the root node will be omitted. Note that setting this option to 1
+# or 2 may greatly reduce the computation time needed for large code bases. Also
+# note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+# Minimum value: 0, maximum value: 1000, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+MAX_DOT_GRAPH_DEPTH    = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not seem
+# to support this out of the box.
+#
+# Warning: Depending on the platform used, enabling this option may lead to
+# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
+# read).
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_TRANSPARENT        = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10) support
+# this, this feature is disabled by default.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_MULTI_TARGETS      = NO
+
+# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
+# explaining the meaning of the various boxes and arrows in the dot generated
+# graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GENERATE_LEGEND        = YES
+
+# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
+# files that are used to generate the various graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_CLEANUP            = YES
diff --git a/media/libstagefright/codec2/include/C2.h b/media/libstagefright/codec2/include/C2.h
index 7d00a03..fd99cce 100644
--- a/media/libstagefright/codec2/include/C2.h
+++ b/media/libstagefright/codec2/include/C2.h
@@ -66,8 +66,8 @@
  * mitigate binary breaks by adhering to the following conventions:
  *
  * - at most one vtable with placeholder virtual methods
- * - all optional/placeholder virtual methods returning a status_t, with C2_NOT_IMPLEMENTED not
- *   requiring any update to input/output arguments.
+ * - all optional/placeholder virtual methods returning a c2_status_t, with C2_OMITTED not requiring
+ *   any update to input/output arguments.
  * - limiting symbol export of inline methods
  * - use of pimpl (or shared-pimpl)
  *
@@ -98,49 +98,82 @@
  * C2String: basic string implementation
  */
 typedef std::string C2String;
+
+/**
+ * C2StringLiteral: basic string literal implementation.
+ * \note these are never owned by any object, and can only refer to C string literals.
+ */
 typedef const char *C2StringLiteral;
 
 /**
- * C2Error: status codes used.
+ * c2_status_t: status codes used.
  */
-typedef int32_t C2Error;
-enum {
+enum c2_status_t : int32_t {
+
+/*
+ * Use android status constants if available. Otherwise, define the android status constants as
+ * additional enum values using POSIX errno constants.
+ */
 #ifndef __ANDROID__
-    OK                  = 0,
+    ALREADY_EXISTS      = -EEXIST,
     BAD_VALUE           = -EINVAL,
     BAD_INDEX           = -EOVERFLOW,
-    UNKNOWN_TRANSACTION = -EBADMSG,
-    ALREADY_EXISTS      = -EEXIST,
-    NAME_NOT_FOUND      = -ENOENT,
+    FAILED_TRANSACTION  = -ENOTSUP,
     INVALID_OPERATION   = -ENOSYS,
+    NAME_NOT_FOUND      = -ENOENT,
     NO_MEMORY           = -ENOMEM,
+    NO_INIT             = -ENODEV,
+    OK                  = 0,
     PERMISSION_DENIED   = -EPERM,
     TIMED_OUT           = -ETIMEDOUT,
-    UNKNOWN_ERROR       = -EINVAL,
+    UNKNOWN_ERROR       = -EFAULT,
+    UNKNOWN_TRANSACTION = -EBADMSG,
+    WOULD_BLOCK         = -EWOULDBLOCK,
 #endif
 
-    C2_OK               = OK,                   ///< operation completed successfully
+    C2_OK        = OK,                   ///< operation completed successfully
 
     // bad input
-    C2_BAD_VALUE        = BAD_VALUE,            ///< argument has invalid value (user error)
-    C2_BAD_INDEX        = BAD_INDEX,            ///< argument uses invalid index (user error)
-    C2_UNSUPPORTED      = UNKNOWN_TRANSACTION,  ///< argument/index is value but not supported \todo is this really BAD_INDEX/VALUE?
+    C2_BAD_VALUE = BAD_VALUE,            ///< argument has invalid value (user error)
+    C2_BAD_INDEX = BAD_INDEX,            ///< argument uses invalid index (user error)
+    C2_CANNOT_DO = FAILED_TRANSACTION,   ///< argument/index is valid but not possible
 
     // bad sequencing of events
-    C2_DUPLICATE        = ALREADY_EXISTS,       ///< object already exists
-    C2_NOT_FOUND        = NAME_NOT_FOUND,       ///< object not found
-    C2_BAD_STATE        = INVALID_OPERATION,    ///< operation is not permitted in the current state
+    C2_DUPLICATE = ALREADY_EXISTS,       ///< object already exists
+    C2_NOT_FOUND = NAME_NOT_FOUND,       ///< object not found
+    C2_BAD_STATE = INVALID_OPERATION,    ///< operation is not permitted in the current state
+    C2_BLOCKING  = WOULD_BLOCK,          ///< operation would block but blocking is not permitted
 
     // bad environment
-    C2_NO_MEMORY        = NO_MEMORY,            ///< not enough memory to complete operation
-    C2_NO_PERMISSION    = PERMISSION_DENIED,    ///< missing permission to complete operation
-    C2_TIMED_OUT        = TIMED_OUT,            ///< operation did not complete within timeout
+    C2_NO_MEMORY = NO_MEMORY,            ///< not enough memory to complete operation
+    C2_REFUSED   = PERMISSION_DENIED,    ///< missing permission to complete operation
+
+    C2_TIMED_OUT = TIMED_OUT,            ///< operation did not complete within timeout
 
     // bad versioning
-    C2_NOT_IMPLEMENTED  = UNKNOWN_TRANSACTION,  ///< operation is not implemented (optional only) \todo for now reuse error code
+    C2_OMITTED   = UNKNOWN_TRANSACTION,  ///< operation is not implemented/supported (optional only)
 
     // unknown fatal
-    C2_CORRUPTED        = UNKNOWN_ERROR,        ///< some unexpected error prevented the operation
+    C2_CORRUPTED = UNKNOWN_ERROR,        ///< some unexpected error prevented the operation
+    C2_NO_INIT   = NO_INIT,              ///< status has not been initialized
+};
+
+/**
+ * Type that describes the desired blocking behavior for variably blocking calls. Blocking in
+ * this API has a slightly broadened meaning: operations that merely update variables
+ * protected by mutexes are still considered "non-blocking" (always used in quotes).
+ */
+enum c2_blocking_t : int32_t {
+    /**
+     * The operation SHALL be "non-blocking". This means that it shall not perform any file
+     * operations, or call/wait on other processes. It may use a protected region as long as the
+     * mutex is never used to protect code that otherwise "may block".
+     */
+    C2_DONT_BLOCK = false,
+    /**
+     * The operation MAY be temporarily blocking.
+     */
+    C2_MAY_BLOCK = true,
 };
 
 /// @}
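The renamed status and blocking types above are used together on variably blocking calls. A
minimal caller-side sketch (illustrative only; config_nb() is a hypothetical stand-in for any
such call, not an API introduced by this change):

    // Hedged sketch: branch on c2_status_t and escalate the blocking behavior if needed.
    c2_status_t config_nb(c2_blocking_t mayBlock);    // hypothetical variably-blocking call

    void tryConfigure() {
        c2_status_t res = config_nb(C2_DONT_BLOCK);    // request "non-blocking" behavior
        if (res == C2_BLOCKING) {
            res = config_nb(C2_MAY_BLOCK);             // retry, allowing temporary blocking
        }
        if (res != C2_OK && res != C2_OMITTED) {
            // C2_OMITTED marks an optional method that is not implemented; anything else is an error
        }
    }
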
@@ -158,16 +191,25 @@
 #define C2_INTERNAL __attribute__((internal_linkage))
 
 #define DEFINE_OTHER_COMPARISON_OPERATORS(type) \
-    inline bool operator!=(const type &other) { return !(*this == other); } \
-    inline bool operator<=(const type &other) { return (*this == other) || (*this < other); } \
-    inline bool operator>=(const type &other) { return !(*this < other); } \
-    inline bool operator>(const type &other) { return !(*this < other) && !(*this == other); }
+    inline bool operator!=(const type &other) const { return !(*this == other); } \
+    inline bool operator<=(const type &other) const { return (*this == other) || (*this < other); } \
+    inline bool operator>=(const type &other) const { return !(*this < other); } \
+    inline bool operator>(const type &other) const { return !(*this < other) && !(*this == other); }
 
 #define DEFINE_FIELD_BASED_COMPARISON_OPERATORS(type, field) \
     inline bool operator<(const type &other) const { return field < other.field; } \
     inline bool operator==(const type &other) const { return field == other.field; } \
     DEFINE_OTHER_COMPARISON_OPERATORS(type)
 
+#define DEFINE_FIELD_AND_MASK_BASED_COMPARISON_OPERATORS(type, field, mask) \
+    inline bool operator<(const type &other) const { \
+        return (field & mask) < (other.field & (mask)); \
+    } \
+    inline bool operator==(const type &other) const { \
+        return (field & mask) == (other.field & (mask)); \
+    } \
+    DEFINE_OTHER_COMPARISON_OPERATORS(type)
+
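A hedged sketch of how these macros are meant to be used; the struct name below is hypothetical
and only demonstrates deriving the full comparison set from a single ordering field:

    // Illustrative only: a type ordered by one field via the macros above.
    struct C2ExampleOrdinal {      // hypothetical type, not introduced by this change
        uint64_t value;
        DEFINE_FIELD_BASED_COMPARISON_OPERATORS(C2ExampleOrdinal, value)
    };
    // operator< and operator== come from the field; !=, <=, >= and > are then filled in
    // by DEFINE_OTHER_COMPARISON_OPERATORS, all as const members after this change.
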
 /// \cond INTERNAL
 
 /// \defgroup utils_internal
@@ -180,7 +222,7 @@
 struct c2_types<T> {
     typedef typename std::decay<T>::type wide_type;
     typedef wide_type narrow_type;
-    typedef wide_type mintype;
+    typedef wide_type min_type; // type for min(T...)
 };
 
 /** specialization for two types */
@@ -196,7 +238,7 @@
     typedef typename std::decay<
             typename std::conditional<sizeof(T) < sizeof(U), T, U>::type>::type narrow_type;
     typedef typename std::conditional<
-            std::is_signed<T>::value, wide_type, narrow_type>::type mintype;
+            std::is_signed<T>::value, wide_type, narrow_type>::type min_type;
 };
 
 /// @}
@@ -216,7 +258,7 @@
     /** Narrowest type of the template parameter types. */
     typedef typename c2_types<typename c2_types<T, U>::narrow_type, V...>::narrow_type narrow_type;
     /** Type that accommodates the minimum value for any input for the template parameter types. */
-    typedef typename c2_types<typename c2_types<T, U>::mintype, V...>::mintype mintype;
+    typedef typename c2_types<typename c2_types<T, U>::min_type, V...>::min_type min_type;
 };
 
 /**
@@ -249,11 +291,11 @@
  *  \ingroup utils_internal
  * specialization for two values */
 template<typename T, typename U>
-inline constexpr typename c2_types<T, U>::mintype c2_min(const T a, const U b) {
+inline constexpr typename c2_types<T, U>::min_type c2_min(const T a, const U b) {
     typedef typename c2_types<T, U>::wide_type wide_type;
     return ({
         wide_type a_(a), b_(b);
-        static_cast<typename c2_types<T, U>::mintype>(a_ < b_ ? a_ : b_);
+        static_cast<typename c2_types<T, U>::min_type>(a_ < b_ ? a_ : b_);
     });
 }
 
@@ -269,12 +311,12 @@
  * @return the smallest of the input arguments.
  */
 template<typename T, typename U, typename... V>
-constexpr typename c2_types<T, U, V...>::mintype c2_min(const T a, const U b, const V ... c) {
-    typedef typename c2_types<U, V...>::mintype rest_type;
+constexpr typename c2_types<T, U, V...>::min_type c2_min(const T a, const U b, const V ... c) {
+    typedef typename c2_types<U, V...>::min_type rest_type;
     typedef typename c2_types<T, rest_type>::wide_type wide_type;
     return ({
         wide_type a_(a), b_(c2_min(b, c...));
-        static_cast<typename c2_types<T, rest_type>::mintype>(a_ < b_ ? a_ : b_);
+        static_cast<typename c2_types<T, rest_type>::min_type>(a_ < b_ ? a_ : b_);
     });
 }
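As a worked illustration of the min_type machinery (not part of the change), mixing signed and
unsigned arguments is intended to keep the result wide enough for negative values, while purely
unsigned inputs narrow to the smaller type:

    // Hedged sketch of the documented intent of c2_min / c2_types<...>::min_type.
    void c2MinExample() {
        int32_t  a = -1;
        uint32_t b = 2u;
        auto m = c2_min(a, b);                        // signed input: widened comparison, m == -1
        auto n = c2_min(uint8_t(3), uint64_t(500));   // all unsigned: result narrows to uint8_t, n == 3
        (void)m; (void)n;
    }
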
 
diff --git a/media/libstagefright/codec2/include/C2Buffer.h b/media/libstagefright/codec2/include/C2Buffer.h
index 9f6b487..df9362c 100644
--- a/media/libstagefright/codec2/include/C2Buffer.h
+++ b/media/libstagefright/codec2/include/C2Buffer.h
@@ -23,8 +23,6 @@
 #include <list>
 #include <memory>
 
-typedef int C2Fence;
-
 #ifdef __ANDROID__
 
 // #include <system/window.h>
@@ -88,10 +86,10 @@
      * \retval C2_TIMED_OUT     the fence has not been signaled within the timeout
      * \retval C2_BAD_STATE     the fence has been abandoned without being signaled (it will never
      *                          be signaled)
-     * \retval C2_NO_PERMISSION no permission to wait for the fence (unexpected - system)
+     * \retval C2_REFUSED       no permission to wait for the fence (unexpected - system)
      * \retval C2_CORRUPTED     some unknown error prevented waiting for the fence (unexpected)
      */
-    C2Error wait(nsecs_t timeoutNs);
+    c2_status_t wait(nsecs_t timeoutNs);
 
     /**
      * Used to check if this fence is valid (if there is a chance for it to be signaled.)
@@ -154,11 +152,11 @@
      *
      * \retval C2_OK            the fence(s) were successfully signaled
      * \retval C2_BAD_STATE     the fence(s) have already been abandoned or merged (caller error)
-     * \retval C2_ALREADY_EXISTS the fence(s) have already been signaled (caller error)
-     * \retval C2_NO_PERMISSION no permission to signal the fence (unexpected - system)
+     * \retval C2_DUPLICATE     the fence(s) have already been signaled (caller error)
+     * \retval C2_REFUSED       no permission to signal the fence (unexpected - system)
      * \retval C2_CORRUPTED     some unknown error prevented signaling the fence(s) (unexpected)
      */
-    C2Error fire();
+    c2_status_t fire();
 
     /**
      * Trigger this event from the merging of the supplied fences. This means that it will be
@@ -167,12 +165,12 @@
      *
      * \retval C2_OK            the merging was successfully done
      * \retval C2_NO_MEMORY     not enough memory to perform the merging
-     * \retval C2_ALREADY_EXISTS    the fence have already been merged (caller error)
+     * \retval C2_DUPLICATE     the fence has already been merged (caller error)
      * \retval C2_BAD_STATE     the fence have already been signaled or abandoned (caller error)
-     * \retval C2_NO_PERMISSION no permission to merge the fence (unexpected - system)
+     * \retval C2_REFUSED       no permission to merge the fence (unexpected - system)
      * \retval C2_CORRUPTED     some unknown error prevented merging the fence(s) (unexpected)
      */
-    C2Error merge(std::vector<C2Fence> fences);
+    c2_status_t merge(std::vector<C2Fence> fences);
 
     /**
      * Abandons the event and any associated fence(s).
@@ -182,11 +180,11 @@
      *
      * \retval C2_OK            the fence(s) were successfully signaled
      * \retval C2_BAD_STATE     the fence(s) have already been signaled or merged (caller error)
-     * \retval C2_ALREADY_EXISTS    the fence(s) have already been abandoned (caller error)
-     * \retval C2_NO_PERMISSION no permission to abandon the fence (unexpected - system)
+     * \retval C2_DUPLICATE     the fence(s) have already been abandoned (caller error)
+     * \retval C2_REFUSED       no permission to abandon the fence (unexpected - system)
      * \retval C2_CORRUPTED     some unknown error prevented signaling the fence(s) (unexpected)
      */
-    C2Error abandon();
+    c2_status_t abandon();
 
 private:
     class Impl;
@@ -197,15 +195,15 @@
 /// @{
 
 /**
- * Interface for objects that encapsulate an updatable error value.
+ * Interface for objects that encapsulate an updatable status value.
  */
-struct _C2InnateError {
-    inline C2Error error() const { return mError; }
+struct _C2InnateStatus {
+    inline c2_status_t status() const { return mStatus; }
 
 protected:
-    _C2InnateError(C2Error error) : mError(error) { }
+    _C2InnateStatus(c2_status_t status) : mStatus(status) { }
 
-    C2Error mError; // this error is updatable by the object
+    c2_status_t mStatus; // this status is updatable by the object
 };
 
 /// @}
@@ -223,13 +221,17 @@
      *
      * \return acquired object potentially invalidated if waiting for the fence failed.
      */
-    T get();
+    T get() {
+        // TODO:
+        // wait();
+        return mT;
+    }
 
 protected:
-    C2Acquirable(C2Error error, C2Fence fence, T t) : C2Fence(fence), mInitialError(error), mT(t) { }
+    C2Acquirable(c2_status_t error, C2Fence fence, T t) : C2Fence(fence), mInitialError(error), mT(t) { }
 
 private:
-    C2Error mInitialError;
+    c2_status_t mInitialError;
     T mT; // TODO: move instead of copy
 };
 
@@ -268,7 +270,7 @@
         : mCapacity(parent == nullptr ? 0 : parent->capacity()) { }
 
 private:
-    const uint32_t mCapacity;
+    uint32_t mCapacity;
 /// @}
 };
 
@@ -429,7 +431,7 @@
     /**
      * \return pointer to the start of the block or nullptr on error.
      */
-    const uint8_t *data();
+    const uint8_t *data() const;
 
     /**
      * Returns a portion of this view.
@@ -445,7 +447,11 @@
     /**
      * \return error during the creation/mapping of this view.
      */
-    C2Error error();
+    c2_status_t error() const;
+
+protected:
+    C2ReadView(const _C2LinearCapacityAspect *parent, const uint8_t *data);
+    explicit C2ReadView(c2_status_t error);
 
 private:
     class Impl;
@@ -474,7 +480,11 @@
     /**
      * \return error during the creation/mapping of this view.
      */
-    C2Error error();
+    c2_status_t error() const;
+
+protected:
+    C2WriteView(const _C2LinearRangeAspect *parent, uint8_t *base);
+    explicit C2WriteView(c2_status_t error);
 
 private:
     class Impl;
@@ -516,7 +526,13 @@
      */
     C2Fence fence() const { return mFence; }
 
+protected:
+    C2ConstLinearBlock(std::shared_ptr<C2LinearAllocation> alloc);
+    C2ConstLinearBlock(std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size);
+
 private:
+    class Impl;
+    std::shared_ptr<Impl> mImpl;
     C2Fence mFence;
 };
 
@@ -544,6 +560,14 @@
      *    The block shall be modified only until firing the event for the fence.
      */
     C2ConstLinearBlock share(size_t offset, size_t size, C2Fence fence);
+
+protected:
+    C2LinearBlock(std::shared_ptr<C2LinearAllocation> alloc);
+    C2LinearBlock(std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size);
+
+private:
+    class Impl;
+    std::shared_ptr<Impl> mImpl;
 };
 
 /// @}
@@ -605,7 +629,7 @@
      * \retval C2_TIMED_OUT     the reservation timed out \todo when?
      * \retval C2_CORRUPTED     some unknown error prevented reserving space. (unexpected)
      */
-    C2Error reserve(size_t size, C2Fence *fence /* nullable */);
+    c2_status_t reserve(size_t size, C2Fence *fence /* nullable */);
 
     /**
      * Abandons a portion of this segment. This will move to the beginning of this segment.
@@ -618,7 +642,7 @@
      * \retval C2_TIMED_OUT     the operation timed out (unexpected)
      * \retval C2_CORRUPTED     some unknown error prevented abandoning the data (unexpected)
      */
-    C2Error abandon(size_t size);
+    c2_status_t abandon(size_t size);
 
     /**
      * Share a portion as block(s) with consumers (these are moved to the used section).
@@ -635,7 +659,7 @@
      * \retval C2_TIMED_OUT     the operation timed out (unexpected)
      * \retval C2_CORRUPTED     some unknown error prevented sharing the data (unexpected)
      */
-    C2Error share(size_t size, C2Fence fence, std::list<C2ConstLinearBlock> &blocks);
+    c2_status_t share(size_t size, C2Fence fence, std::list<C2ConstLinearBlock> &blocks);
 
     /**
      * Returns the beginning offset of this segment from the start of this circular block.
@@ -669,7 +693,7 @@
     /**
      * \return error during the creation/mapping of this view.
      */
-    C2Error error();
+    c2_status_t error() const;
 };
 
 /**
@@ -690,7 +714,7 @@
      * \param size    number of bytes to commit to the next segment
      * \param fence   fence used for the commit (the fence must signal before the data is committed)
      */
-    C2Error commit(size_t size, C2Fence fence);
+    c2_status_t commit(size_t size, C2Fence fence);
 
     /**
      * Maps this block into memory and returns a write view for it.
@@ -712,20 +736,20 @@
 /// \name Planar capacity interface
 /// @{
 public:
-    inline uint32_t width() const { return mWidth; }
-    inline uint32_t height() const { return mHeight; }
+    inline uint32_t width() const { return _mWidth; }
+    inline uint32_t height() const { return _mHeight; }
 
 protected:
     inline _C2PlanarCapacityAspect(uint32_t width, uint32_t height)
-      : mWidth(width), mHeight(height) { }
+      : _mWidth(width), _mHeight(height) { }
 
     inline _C2PlanarCapacityAspect(const _C2PlanarCapacityAspect *parent)
-        : mWidth(parent == nullptr ? 0 : parent->width()),
-          mHeight(parent == nullptr ? 0 : parent->height()) { }
+        : _mWidth(parent == nullptr ? 0 : parent->width()),
+          _mHeight(parent == nullptr ? 0 : parent->height()) { }
 
 private:
-    const uint32_t mWidth;
-    const uint32_t mHeight;
+    const uint32_t _mWidth;
+    const uint32_t _mHeight;
 /// @}
 };
 
@@ -736,25 +760,25 @@
  */
 struct C2Rect {
 // public:
-    uint32_t mLeft;
-    uint32_t mTop;
-    uint32_t mWidth;
-    uint32_t mHeight;
+    uint32_t left;
+    uint32_t top;
+    uint32_t width;
+    uint32_t height;
 
-    inline C2Rect(uint32_t width, uint32_t height)
-        : C2Rect(width, height, 0, 0) { }
+    constexpr inline C2Rect(uint32_t width_, uint32_t height_)
+        : C2Rect(width_, height_, 0, 0) { }
 
-    inline C2Rect(uint32_t width, uint32_t height, uint32_t left, uint32_t top)
-        : mLeft(left), mTop(top), mWidth(width), mHeight(height) { }
+    constexpr inline C2Rect(uint32_t width_, uint32_t height_, uint32_t left_, uint32_t top_)
+        : left(left_), top(top_), width(width_), height(height_) { }
 
     // utility methods
 
     inline bool isEmpty() const {
-        return mWidth == 0 || mHeight == 0;
+        return width == 0 || height == 0;
     }
 
     inline bool isValid() const {
-        return mLeft <= ~mWidth && mTop <= ~mHeight;
+        return left <= ~width && top <= ~height;
     }
 
     inline operator bool() const {
@@ -771,9 +795,9 @@
         } else if (other.isEmpty()) {
             return true;
         } else {
-            return mLeft <= other.mLeft && mTop <= other.mTop
-                    && mLeft + mWidth >= other.mLeft + other.mWidth
-                    && mTop + mHeight >= other.mTop + other.mHeight;
+            return left <= other.left && top <= other.top
+                    && left + width >= other.left + other.width
+                    && top + height >= other.top + other.height;
         }
     }
 
@@ -783,8 +807,8 @@
         } else if (isEmpty()) {
             return other.isEmpty();
         } else {
-            return mLeft == other.mLeft && mTop == other.mTop
-                    && mWidth == other.mWidth && mHeight == other.mHeight;
+            return left == other.left && top == other.top
+                    && width == other.width && height == other.height;
         }
     }
 
@@ -810,78 +834,104 @@
 };
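A small worked example (illustrative only) of the renamed public fields and the geometric
helpers on C2Rect:

    // Hedged sketch: construct rectangles and test containment/equality.
    void c2RectExample() {
        constexpr C2Rect frame(1920u, 1080u);             // width, height; left/top default to 0
        constexpr C2Rect window(640u, 480u, 100u, 50u);   // width, height, left, top
        bool inside = frame.contains(window);             // true: window lies within frame
        bool same   = (frame == window);                  // false: dimensions differ
        (void)inside; (void)same;
    }
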
 
 /**
- * C2PlaneInfo: information on the layout of flexible planes.
+ * C2PlaneInfo: information on the layout of a single flexible plane.
  *
  * Public fields without getters/setters.
  */
 struct C2PlaneInfo {
-// public:
-    enum Channel : uint32_t {
-        Y,
-        R,
-        G,
-        B,
-        A,
-        Cr,
-        Cb,
-    } mChannel;
+//public:
+    enum channel_t : uint32_t {
+        CHANNEL_Y,  ///< luma
+        CHANNEL_R,  ///< red
+        CHANNEL_G,  ///< green
+        CHANNEL_B,  ///< blue
+        CHANNEL_A,  ///< alpha
+        CHANNEL_CR, ///< Cr
+        CHANNEL_CB, ///< Cb
+    } channel;
 
-    int32_t mColInc;               // column increment in bytes. may be negative
-    int32_t mRowInc;               // row increment in bytes. may be negative
-    uint32_t mHorizSubsampling;    // subsampling compared to width
-    uint32_t mVertSubsampling;     // subsampling compared to height
+    int32_t colInc;       ///< column increment in bytes. may be negative
+    int32_t rowInc;       ///< row increment in bytes. may be negative
+    uint32_t colSampling; ///< subsampling compared to width (must be a power of 2)
+    uint32_t rowSampling; ///< subsampling compared to height (must be a power of 2)
 
-    uint32_t mBitDepth;
-    uint32_t mAllocatedDepth;
+    uint32_t allocatedDepth; ///< size of each sample (must be a multiple of 8)
+    uint32_t bitDepth;       ///< significant bits per sample
+    /**
+     * the right shift of the significant bits in the sample. E.g. if a 10-bit significant
+     * value is laid out in a 16-bit allocation aligned to LSB (values 0-1023), rightShift
+     * would be 0 as the 16-bit value read from the sample does not need to be right shifted
+     * and can be used as is (after applying a 10-bit mask of 0x3FF).
+     *
+     * +--------+--------+
+     * |      VV|VVVVVVVV|
+     * +--------+--------+
+     *  15     8 7      0
+     *
+     * If the value is laid out aligned to MSB, rightShift would be 6, as the value read
+     * from the allocated sample must be right-shifted by 6 to get the actual sample value.
+     *
+     * +--------+--------+
+     * |VVVVVVVV|VV      |
+     * +--------+--------+
+     *  15     8 7      0
+     */
+    uint32_t rightShift;
+
+    enum endianness_t : uint32_t {
+        NATIVE,
+        LITTLE_END, // LITTLE_ENDIAN is a reserved macro
+        BIG_END,    // BIG_ENDIAN is a reserved macro
+    } endianness; ///< endianness of the samples
 
     inline ssize_t minOffset(uint32_t width, uint32_t height) {
         ssize_t offs = 0;
-        if (width > 0 && mColInc < 0) {
-            offs += mColInc * (ssize_t)(width - 1);
+        if (width > 0 && colInc < 0) {
+            offs += colInc * (ssize_t)(width - 1);
         }
-        if (height > 0 && mRowInc < 0) {
-            offs += mRowInc * (ssize_t)(height - 1);
+        if (height > 0 && rowInc < 0) {
+            offs += rowInc * (ssize_t)(height - 1);
         }
         return offs;
     }
 
     inline ssize_t maxOffset(uint32_t width, uint32_t height, uint32_t allocatedDepth) {
         ssize_t offs = (allocatedDepth + 7) >> 3;
-        if (width > 0 && mColInc > 0) {
-            offs += mColInc * (ssize_t)(width - 1);
+        if (width > 0 && colInc > 0) {
+            offs += colInc * (ssize_t)(width - 1);
         }
-        if (height > 0 && mRowInc > 0) {
-            offs += mRowInc * (ssize_t)(height - 1);
+        if (height > 0 && rowInc > 0) {
+            offs += rowInc * (ssize_t)(height - 1);
         }
         return offs;
     }
 };
 
-struct C2PlaneLayout {
-public:
-    enum Type : uint32_t {
-        MEDIA_IMAGE_TYPE_UNKNOWN = 0,
-        MEDIA_IMAGE_TYPE_YUV = 0x100,
-        MEDIA_IMAGE_TYPE_YUVA,
-        MEDIA_IMAGE_TYPE_RGB,
-        MEDIA_IMAGE_TYPE_RGBA,
+struct C2PlanarLayout {
+//public:
+    enum type_t : uint32_t {
+        TYPE_UNKNOWN = 0,
+        TYPE_YUV = 0x100,
+        TYPE_YUVA,
+        TYPE_RGB,
+        TYPE_RGBA,
     };
 
-    Type mType;
-    uint32_t mNumPlanes;               // number of planes
+    type_t type;
+    uint32_t numPlanes;               // number of planes
 
-    enum PlaneIndex : uint32_t {
-        Y = 0,
-        U = 1,
-        V = 2,
-        R = 0,
-        G = 1,
-        B = 2,
-        A = 3,
+    enum plane_index_t : uint32_t {
+        PLANE_Y = 0,
+        PLANE_U = 1,
+        PLANE_V = 2,
+        PLANE_R = 0,
+        PLANE_G = 1,
+        PLANE_B = 2,
+        PLANE_A = 3,
         MAX_NUM_PLANES = 4,
     };
 
-    C2PlaneInfo mPlanes[MAX_NUM_PLANES];
+    C2PlaneInfo planes[MAX_NUM_PLANES];
 };
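As an illustration of the rightShift/bitDepth description above (a hedged sketch, not part of
the patch), recovering a 10-bit sample stored MSB-aligned in a 16-bit little-endian allocation:

    // Assumes a plane with allocatedDepth = 16, bitDepth = 10, rightShift = 6, endianness = LITTLE_END.
    uint16_t readSample10MsbAligned(const uint8_t *sample) {
        uint16_t raw = static_cast<uint16_t>(sample[0]) |
                       (static_cast<uint16_t>(sample[1]) << 8);   // assemble the 16-bit word
        return static_cast<uint16_t>((raw >> 6) & 0x3FF);         // shift by rightShift, mask to bitDepth
    }
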
 
 /**
@@ -895,29 +945,51 @@
 public:
     // crop can be an empty rect, does not have to line up with subsampling
     // NOTE: we do not support floating-point crop
-    inline const C2Rect crop() { return mCrop; }
+    inline const C2Rect crop() const { return mCrop; }
 
     /**
      *  Sets crop to crop intersected with [(0,0) .. (width, height)]
      */
-    inline void setCrop_be(const C2Rect &crop);
+    inline void setCrop_be(const C2Rect &crop) {
+        mCrop.left = std::min(width(), crop.left);
+        mCrop.top = std::min(height(), crop.top);
+        // It's guaranteed that mCrop.left <= width() && mCrop.top <= height()
+        mCrop.width = std::min(width() - mCrop.left, crop.width);
+        mCrop.height = std::min(height() - mCrop.top, crop.height);
+    }
 
     /**
      * If crop is within the dimensions of this object, it sets crop to it.
      *
      * \return true iff crop is within the dimensions of this object
      */
-    inline bool setCrop(const C2Rect &crop);
+    inline bool setCrop(const C2Rect &crop) {
+        if (width() < crop.width || height() < crop.height
+                || width() - crop.width < crop.left || height() - crop.height < crop.top) {
+            return false;
+        }
+        mCrop = crop;
+        return true;
+    }
+
+protected:
+    inline _C2PlanarSection(const _C2PlanarCapacityAspect *parent)
+        : _C2PlanarCapacityAspect(parent), mCrop(width(), height()) {}
 
 private:
     C2Rect mCrop;
 /// @}
 };
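The expected behavior of the two crop setters above, as a hedged worked example for a planar
section whose capacity is 1920x1080:

    // setCrop() rejects rectangles that do not fit; setCrop_be() clips them instead.
    //   setCrop(C2Rect(1280, 720, 100, 100))    -> true;  crop becomes 1280x720 at (100,100)
    //   setCrop(C2Rect(1920, 1080, 10, 10))     -> false; crop is left unchanged
    //   setCrop_be(C2Rect(1920, 1080, 10, 10))  -> crop clipped to 1910x1070 at (10,10)
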
 
+class C2GraphicAllocation;
+
 class C2Block2D : public _C2PlanarSection {
 public:
     const C2Handle *handle() const;
 
+protected:
+    C2Block2D(const std::shared_ptr<C2GraphicAllocation> &alloc);
+
 private:
     class Impl;
     std::shared_ptr<Impl> mImpl;
@@ -935,14 +1007,25 @@
 class C2GraphicView : public _C2PlanarSection {
 public:
     /**
-     * \return pointer to the start of the block or nullptr on error.
+     * \return array of pointers to the start of the planes or nullptr on error.
+     * Regardless of crop rect, they always point to the top-left corner of
+     * each plane.  Access outside of the crop rect results in an undefined
+     * behavior.
      */
-    const uint8_t *data() const;
+    const uint8_t *const *data() const;
 
     /**
-     * \return pointer to the start of the block or nullptr on error.
+     * \return array of pointers to the start of the planes or nullptr on error.
+     * Regardless of crop rect, they always point to the top-left corner of
+     * each plane. Access outside of the crop rect results in undefined
+     * behavior.
      */
-    uint8_t *data();
+    uint8_t *const *data();
+
+    /**
+     * \return layout of the graphic block to interpret the returned data.
+     */
+    const C2PlanarLayout layout() const;
 
     /**
      * Returns a section of this view.
@@ -957,7 +1040,14 @@
     /**
      * \return error during the creation/mapping of this view.
      */
-    C2Error error() const;
+    c2_status_t error() const;
+
+protected:
+    C2GraphicView(
+            const _C2PlanarCapacityAspect *parent,
+            uint8_t *const *data,
+            const C2PlanarLayout& layout);
+    explicit C2GraphicView(c2_status_t error);
 
 private:
     class Impl;
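An illustrative (non-normative) sketch of walking the planes returned by data() and layout();
view stands for any successfully mapped C2GraphicView:

    // Hedged sketch: enumerate the planes of a mapped graphic view.
    void describePlanes(const C2GraphicView &view) {
        if (view.error() != C2_OK) return;               // mapping failed
        const C2PlanarLayout layout = view.layout();
        const uint8_t *const *planes = view.data();      // one top-left base pointer per plane
        for (uint32_t i = 0; planes != nullptr && i < layout.numPlanes; ++i) {
            const C2PlaneInfo &info = layout.planes[i];
            // A sample at (x, y) of the full frame would live at
            //   planes[i] + (y / info.rowSampling) * info.rowInc + (x / info.colSampling) * info.colInc,
            // read as info.allocatedDepth bits and right-shifted by info.rightShift.
            (void)info;
        }
    }
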
@@ -996,7 +1086,12 @@
      */
     C2Fence fence() const { return mFence; }
 
+protected:
+    C2ConstGraphicBlock(const std::shared_ptr<C2GraphicAllocation> &alloc, C2Fence fence);
+
 private:
+    class Impl;
+    std::shared_ptr<Impl> mImpl;
     C2Fence mFence;
 };
 
@@ -1024,6 +1119,13 @@
      *    The block shall be modified only until firing the event for the fence.
      */
     C2ConstGraphicBlock share(const C2Rect &crop, C2Fence fence);
+
+protected:
+    explicit C2GraphicBlock(const std::shared_ptr<C2GraphicAllocation> &alloc);
+
+private:
+    class Impl;
+    std::shared_ptr<Impl> mImpl;
 };
 
 /// @}
@@ -1089,7 +1191,8 @@
 
 protected:
     // no public constructor
-    // C2BufferData(const std::shared_ptr<const Impl> &impl) : mImpl(impl) {}
+    explicit C2BufferData(const std::list<C2ConstLinearBlock> &blocks);
+    explicit C2BufferData(const std::list<C2ConstGraphicBlock> &blocks);
 };
 
 /**
@@ -1145,7 +1248,7 @@
      * \retval C2_NO_MEMORY not enough memory to register for this callback
      * \retval C2_CORRUPTED an unknown error prevented the registration (unexpected)
      */
-    C2Error registerOnDestroyNotify(OnDestroyNotify *onDestroyNotify, void *arg = nullptr);
+    c2_status_t registerOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg = nullptr);
 
     /**
      * Unregisters a previously registered pre-destroy notification.
@@ -1157,7 +1260,7 @@
      * \retval C2_NOT_FOUND the notification was not found
      * \retval C2_CORRUPTED an unknown error prevented the registration (unexpected)
      */
-    C2Error unregisterOnDestroyNotify(OnDestroyNotify *onDestroyNotify, void *arg = nullptr);
+    c2_status_t unregisterOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg = nullptr);
 
     ///@}
 
@@ -1183,7 +1286,7 @@
      * \retval C2_NO_MEMORY not enough memory to attach the metadata (this return value is not
      *                      used if the same kind of metadata is already attached to the buffer).
      */
-    C2Error setInfo(const std::shared_ptr<C2Info> &info);
+    c2_status_t setInfo(const std::shared_ptr<C2Info> &info);
 
     /**
      * Checks if there is a certain type of metadata attached to this buffer.
@@ -1193,14 +1296,17 @@
      * \return true iff there is a metadata with the parameter type attached to this buffer.
      */
     bool hasInfo(C2Param::Type index) const;
-    std::shared_ptr<C2Info> removeInfo(C2Param::Type index) const;
+    std::shared_ptr<C2Info> removeInfo(C2Param::Type index);
     ///@}
 
 protected:
     // no public constructor
-    inline C2Buffer() = default;
+    explicit C2Buffer(const std::list<C2ConstLinearBlock> &blocks);
+    explicit C2Buffer(const std::list<C2ConstGraphicBlock> &blocks);
 
 private:
+    class Impl;
+    std::shared_ptr<Impl> mImpl;
 //    Type _mType;
 };
 
@@ -1245,25 +1351,26 @@
 // public:
     // TODO: match these to gralloc1.h
     enum Consumer : uint64_t {
-        kSoftwareRead        = GRALLOC_USAGE_SW_READ_OFTEN,
-        kRenderScriptRead    = GRALLOC_USAGE_RENDERSCRIPT,
-        kTextureRead         = GRALLOC_USAGE_HW_TEXTURE,
-        kHardwareComposer    = GRALLOC_USAGE_HW_COMPOSER,
-        kHardwareEncoder     = GRALLOC_USAGE_HW_VIDEO_ENCODER,
-        kProtectedRead       = GRALLOC_USAGE_PROTECTED,
+        // \todo do we need to distinguish often from rarely?
+        CPU_READ          = GRALLOC_USAGE_SW_READ_OFTEN,
+        RENDERSCRIPT_READ = GRALLOC_USAGE_RENDERSCRIPT,
+        HW_TEXTURE_READ   = GRALLOC_USAGE_HW_TEXTURE,
+        HW_COMPOSER_READ  = GRALLOC_USAGE_HW_COMPOSER,
+        HW_CODEC_READ     = GRALLOC_USAGE_HW_VIDEO_ENCODER,
+        READ_PROTECTED    = GRALLOC_USAGE_PROTECTED,
     };
 
     enum Producer : uint64_t {
-        kSoftwareWrite       = GRALLOC_USAGE_SW_WRITE_OFTEN,
-        kRenderScriptWrite   = GRALLOC_USAGE_RENDERSCRIPT,
-        kTextureWrite        = GRALLOC_USAGE_HW_RENDER,
-        kCompositionTarget   = GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_RENDER,
-        kHardwareDecoder     = GRALLOC_USAGE_HW_VIDEO_ENCODER,
-        kProtectedWrite      = GRALLOC_USAGE_PROTECTED,
+        CPU_WRITE          = GRALLOC_USAGE_SW_WRITE_OFTEN,
+        RENDERSCRIPT_WRITE = GRALLOC_USAGE_RENDERSCRIPT,
+        HW_TEXTURE_WRITE   = GRALLOC_USAGE_HW_RENDER,
+        HW_COMPOSER_WRITE  = GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_RENDER,
+        HW_CODEC_WRITE     = GRALLOC_USAGE_HW_VIDEO_ENCODER,
+        WRITE_PROTECTED    = GRALLOC_USAGE_PROTECTED,
     };
 
-    uint64_t mConsumer; // e.g. input
-    uint64_t mProducer; // e.g. output
+    uint64_t consumer; // e.g. input
+    uint64_t producer; // e.g. output
 };
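A hedged sketch (illustrative only) of composing the renamed usage flags for a buffer written
by software and read by a hardware video encoder:

    // Illustrative only: build a C2MemoryUsage with the new flag names.
    void exampleUsage() {
        C2MemoryUsage usage{};
        usage.producer = C2MemoryUsage::CPU_WRITE;       // filled by software
        usage.consumer = C2MemoryUsage::HW_CODEC_READ;   // consumed by a hardware video encoder
        (void)usage;
    }
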
 
 /**
@@ -1295,14 +1402,15 @@
      * \todo Do we need to support sync operation as we could just wait for the fence?
      *
      * \retval C2_OK        the operation was successful
-     * \retval C2_NO_PERMISSION no permission to map the portion
+     * \retval C2_REFUSED   no permission to map the portion
      * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_DUPLICATE if the allocation is already mapped.
      * \retval C2_NO_MEMORY not enough memory to complete the operation
      * \retval C2_BAD_VALUE the parameters (offset/size) are invalid or outside the allocation, or
      *                      the usage flags are invalid (caller error)
      * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
      */
-    virtual C2Error map(
+    virtual c2_status_t map(
             size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd /* nullable */,
             void **addr /* nonnull */) = 0;
 
@@ -1320,12 +1428,13 @@
      *
      * \retval C2_OK        the operation was successful
      * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_NOT_FOUND if the allocation was not mapped previously.
      * \retval C2_BAD_VALUE the parameters (addr/size) do not correspond to previously mapped
      *                      regions (caller error)
      * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
-     * \retval C2_NO_PERMISSION no permission to unmap the portion (unexpected - system)
+     * \retval C2_REFUSED   no permission to unmap the portion (unexpected - system)
      */
-    virtual C2Error unmap(void *addr, size_t size, int *fenceFd /* nullable */) = 0;
+    virtual c2_status_t unmap(void *addr, size_t size, int *fenceFd /* nullable */) = 0;
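A hedged caller-side sketch (not part of this change) of the 1D map/unmap contract described
above; alloc stands for any C2LinearAllocation implementation:

    // Illustrative only: map a region, write into it, then unmap it, checking c2_status_t throughout.
    c2_status_t fillWithZeros(C2LinearAllocation &alloc, size_t offset, size_t size) {
        C2MemoryUsage usage{};
        usage.producer = C2MemoryUsage::CPU_WRITE;
        void *addr = nullptr;
        c2_status_t res = alloc.map(offset, size, usage, nullptr /* fenceFd */, &addr);
        if (res != C2_OK) {
            return res;                                   // e.g. C2_REFUSED, C2_NO_MEMORY, C2_BAD_VALUE
        }
        uint8_t *bytes = static_cast<uint8_t *>(addr);
        for (size_t i = 0; i < size; ++i) bytes[i] = 0;   // write through the mapped address
        return alloc.unmap(addr, size, nullptr /* fenceFd */);
    }
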
 
     /**
      * Returns true if this is a valid allocation.
@@ -1379,8 +1488,8 @@
      * \todo Do we need to support sync operation as we could just wait for the fence?
      *
      * \retval C2_OK        the operation was successful
-     * \retval C2_NO_PERMISSION no permission to map the section
-     * \retval C2_ALREADY_EXISTS there is already a mapped region (caller error)
+     * \retval C2_REFUSED   no permission to map the section
+     * \retval C2_DUPLICATE there is already a mapped region (caller error)
      * \retval C2_TIMED_OUT the operation timed out
      * \retval C2_NO_MEMORY not enough memory to complete the operation
      * \retval C2_BAD_VALUE the parameters (rect) are invalid or outside the allocation, or the
@@ -1388,10 +1497,10 @@
      * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
 
      */
-    virtual C2Error map(
+    virtual c2_status_t map(
             C2Rect rect, C2MemoryUsage usage, int *fenceFd,
             // TODO: return <addr, size> buffers with plane sizes
-            C2PlaneLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) = 0;
+            C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) = 0;
 
     /**
      * Unmaps the last mapped rectangular section.
@@ -1406,9 +1515,9 @@
      * \retval C2_TIMED_OUT the operation timed out
      * \retval C2_NOT_FOUND there is no mapped region (caller error)
      * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
-     * \retval C2_NO_PERMISSION no permission to unmap the section (unexpected - system)
+     * \retval C2_REFUSED   no permission to unmap the section (unexpected - system)
      */
-    virtual C2Error unmap(C2Fence *fenceFd /* nullable */) = 0;
+    virtual c2_status_t unmap(C2Fence *fenceFd /* nullable */) = 0;
 
     /**
      * Returns true if this is a valid allocation.
@@ -1425,10 +1534,11 @@
     /**
      * Returns true if this is the same allocation as |other|.
      */
-    virtual bool equals(const std::shared_ptr<const C2GraphicAllocation> &other) = 0;
+    virtual bool equals(const std::shared_ptr<const C2GraphicAllocation> &other) const = 0;
 
 protected:
-    virtual ~C2GraphicAllocation();
+    using _C2PlanarCapacityAspect::_C2PlanarCapacityAspect;
+    virtual ~C2GraphicAllocation() = default;
 };
 
 /**
@@ -1444,32 +1554,90 @@
 class C2Allocator {
 public:
     /**
+     * Allocator ID type.
+     */
+    typedef uint32_t id_t;
+
+    /**
+     * Allocation types. This is a bitmask and is used in C2Allocator::Info
+     * to list the supported allocation types of an allocator.
+     */
+    enum type_t : uint32_t {
+        LINEAR  = 1 << 0, ///< 1D (linear) allocations
+        GRAPHIC = 1 << 1, ///< 2D (graphic) allocations
+    };
+
+    /**
+     * Information about an allocator.
+     *
+     * Allocators don't have a query API so all queryable information is stored here.
+     */
+    struct Traits {
+        C2String name;              ///< allocator name
+        id_t id;                    ///< allocator ID
+        type_t supportedTypes;      ///< supported allocation types
+        C2MemoryUsage minimumUsage; ///< usage that is minimally required for allocations
+        C2MemoryUsage maximumUsage; ///< usage that is maximally allowed for allocations
+    };
+
+    /**
+     * Returns the unique name of this allocator.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \return the name of this allocator.
+     * \retval an empty string if there was not enough memory to allocate the actual name.
+     */
+    virtual C2String getName() const = 0;
+
+    /**
+     * Returns a unique ID for this allocator. This ID is used to get this allocator from the
+     * allocator store, and to identify this allocator across all processes.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \return a unique ID for this allocator.
+     */
+    virtual id_t getId() const = 0;
+
+    /**
+     * Returns the allocator traits.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * Allocators don't have a full-fledged query API, only this method.
+     *
+     * \return allocator information
+     */
+    virtual std::shared_ptr<const Traits> getTraits() const = 0;
+
+    /**
      * Allocates a 1D allocation of given |capacity| and |usage|. If successful, the allocation is
      * stored in |allocation|. Otherwise, |allocation| is set to 'nullptr'.
      *
-     * \param capacity        the size of requested allocation (the allocation could be slightly
+     * \param capacity      the size of requested allocation (the allocation could be slightly
      *                      larger, e.g. to account for any system-required alignment)
-     * \param usage           the memory usage info for the requested allocation. \note that the
+     * \param usage         the memory usage info for the requested allocation. \note that the
      *                      returned allocation may be later used/mapped with different usage.
      *                      The allocator should layout the buffer to be optimized for this usage,
      *                      but must support any usage. One exception: protected buffers can
      *                      only be used in a protected scenario.
-     * \param allocation      pointer to where the allocation shall be stored on success. nullptr
+     * \param allocation    pointer to where the allocation shall be stored on success. nullptr
      *                      will be stored here on failure
      *
      * \retval C2_OK        the allocation was successful
      * \retval C2_NO_MEMORY not enough memory to complete the allocation
      * \retval C2_TIMED_OUT the allocation timed out
-     * \retval C2_NO_PERMISSION     no permission to complete the allocation
+     * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
-     * \retval C2_UNSUPPORTED       this allocator does not support 1D allocations
+     * \retval C2_OMITTED   this allocator does not support 1D allocations
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
      */
-    virtual C2Error allocateLinearBuffer(
+    virtual c2_status_t newLinearAllocation(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
             std::shared_ptr<C2LinearAllocation> *allocation /* nonnull */) {
         *allocation = nullptr;
-        return C2_UNSUPPORTED;
+        return C2_OMITTED;
     }
 
     /**
@@ -1478,55 +1646,55 @@
      *
      * \param handle      the handle for the existing allocation
      * \param allocation  pointer to where the allocation shall be stored on success. nullptr
-     *                  will be stored here on failure
+     *                    will be stored here on failure
      *
      * \retval C2_OK        the allocation was recreated successfully
      * \retval C2_NO_MEMORY not enough memory to recreate the allocation
      * \retval C2_TIMED_OUT the recreation timed out (unexpected)
-     * \retval C2_NO_PERMISSION     no permission to recreate the allocation
+     * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
-     * \retval C2_UNSUPPORTED       this allocator does not support 1D allocations
+     * \retval C2_OMITTED   this allocator does not support 1D allocations
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
      */
-    virtual C2Error recreateLinearBuffer(
+    virtual c2_status_t priorLinearAllocation(
             const C2Handle *handle __unused,
             std::shared_ptr<C2LinearAllocation> *allocation /* nonnull */) {
         *allocation = nullptr;
-        return C2_UNSUPPORTED;
+        return C2_OMITTED;
     }
 
     /**
      * Allocates a 2D allocation of given |width|, |height|, |format| and |usage|. If successful,
      * the allocation is stored in |allocation|. Otherwise, |allocation| is set to 'nullptr'.
      *
-     * \param width           the width of requested allocation (the allocation could be slightly
+     * \param width         the width of requested allocation (the allocation could be slightly
      *                      larger, e.g. to account for any system-required alignment)
-     * \param height          the height of requested allocation (the allocation could be slightly
+     * \param height        the height of requested allocation (the allocation could be slightly
      *                      larger, e.g. to account for any system-required alignment)
-     * \param format          the pixel format of requested allocation. This could be a vendor
+     * \param format        the pixel format of requested allocation. This could be a vendor
      *                      specific format.
-     * \param usage           the memory usage info for the requested allocation. \note that the
+     * \param usage         the memory usage info for the requested allocation. \note that the
      *                      returned allocation may be later used/mapped with different usage.
      *                      The allocator should layout the buffer to be optimized for this usage,
      *                      but must support any usage. One exception: protected buffers can
      *                      only be used in a protected scenario.
-     * \param allocation      pointer to where the allocation shall be stored on success. nullptr
+     * \param allocation    pointer to where the allocation shall be stored on success. nullptr
      *                      will be stored here on failure
      *
      * \retval C2_OK        the allocation was successful
      * \retval C2_NO_MEMORY not enough memory to complete the allocation
      * \retval C2_TIMED_OUT the allocation timed out
-     * \retval C2_NO_PERMISSION     no permission to complete the allocation
+     * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller error)
-     * \retval C2_UNSUPPORTED       this allocator does not support 2D allocations
+     * \retval C2_OMITTED   this allocator does not support 2D allocations
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
      */
-    virtual C2Error allocateGraphicBuffer(
+    virtual c2_status_t newGraphicAllocation(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
             C2MemoryUsage usage __unused,
             std::shared_ptr<C2GraphicAllocation> *allocation /* nonnull */) {
         *allocation = nullptr;
-        return C2_UNSUPPORTED;
+        return C2_OMITTED;
     }
 
     /**
@@ -1535,21 +1703,21 @@
      *
      * \param handle      the handle for the existing allocation
      * \param allocation  pointer to where the allocation shall be stored on success. nullptr
-     *                  will be stored here on failure
+     *                    will be stored here on failure
      *
      * \retval C2_OK        the allocation was recreated successfully
      * \retval C2_NO_MEMORY not enough memory to recreate the allocation
      * \retval C2_TIMED_OUT the recreation timed out (unexpected)
-     * \retval C2_NO_PERMISSION     no permission to recreate the allocation
+     * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
-     * \retval C2_UNSUPPORTED       this allocator does not support 2D allocations
+     * \retval C2_OMITTED   this allocator does not support 2D allocations
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during recreation (unexpected)
      */
-    virtual C2Error recreateGraphicBuffer(
+    virtual c2_status_t priorGraphicAllocation(
             const C2Handle *handle __unused,
             std::shared_ptr<C2GraphicAllocation> *allocation /* nonnull */) {
         *allocation = nullptr;
-        return C2_UNSUPPORTED;
+        return C2_OMITTED;
     }
 
 protected:
@@ -1559,110 +1727,143 @@
 };
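To illustrate how a client might combine the new traits query with the renamed allocation entry points, here is a small sketch (names hypothetical, not part of the change); it treats C2_OMITTED as "this allocator does not do 1D" rather than as a hard error.

c2_status_t tryLinearAllocation(const std::shared_ptr<C2Allocator> &allocator,
                                uint32_t capacity, C2MemoryUsage usage,
                                std::shared_ptr<C2LinearAllocation> *out /* nonnull */) {
    std::shared_ptr<const C2Allocator::Traits> traits = allocator->getTraits();
    if (!(traits->supportedTypes & C2Allocator::LINEAR)) {
        *out = nullptr;
        return C2_OMITTED;  // allocator only supports 2D (graphic) allocations
    }
    // newLinearAllocation() replaces allocateLinearBuffer() and reports c2_status_t.
    return allocator->newLinearAllocation(capacity, usage, out);
}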
 
 /**
- *  Block allocators are used by components to allocate memory for output buffers. They can
- *  support either linear (1D), circular (1D) or graphic (2D) allocations.
+ *  Block pools are used by components to obtain output buffers in an efficient way. They can
+ *  support either linear (1D), circular (1D) or graphic (2D) blocks.
+ *
+ *  Block pools decouple the recycling of memory/allocations from the components. They are meant to
+ *  be an opaque service (there are no public APIs other than obtaining blocks) provided by the
+ *  platform. Block pools are also meant to decouple allocations from memory used by buffers. This
+ *  is accomplished by allowing pools to allot multiple memory 'blocks' on a single allocation. As
+ *  their name suggests, block pools maintain a pool of memory blocks. When a component asks for
+ *  a memory block, pools will try to return a free memory block already in the pool. If no such
+ *  block exists, they will allocate memory using the backing allocator and allot a block on that
+ *  allocation. When blocks are no longer used in the system, they are recycled back to the block
+ *  pool and are available as free blocks.
  *
  *  Never constructed on stack.
- *
- *  Block allocators are provided by the framework.
  */
-class C2BlockAllocator {
+class C2BlockPool {
 public:
     /**
-     * Allocates a linear writeable block of given |capacity| and |usage|. If successful, the
+     * Block pool ID type.
+     */
+    typedef uint64_t local_id_t;
+
+    enum : local_id_t {
+        BASIC_LINEAR = 0,  ///< ID of basic (unoptimized) block pool for fetching 1D blocks
+        BASIC_GRAPHIC = 1, ///< ID of basic (unoptimized) block pool for fetching 2D blocks
+        PLATFORM_START = 0x10,
+    };
+
+    /**
+     * Returns the ID for this block pool. This ID is used to get this block pool from the platform.
+     * It is only valid in the current process.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \return a local ID for this block pool.
+     */
+    virtual local_id_t getLocalId() const = 0;
+
+    /**
+     * Returns the ID of the backing allocator of this block pool.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \return the ID of the backing allocator of this block pool.
+     */
+    virtual C2Allocator::id_t getAllocatorId() const = 0;
+
+    /**
+     * Obtains a linear writeable block of given |capacity| and |usage|. If successful, the
      * block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
      *
-     * \param capacity        the size of requested block.
-     * \param usage           the memory usage info for the requested allocation. \note that the
-     *                      returned allocation may be later used/mapped with different usage.
-     *                      The allocator shall lay out the buffer to be optimized for this usage,
-     *                      but must support any usage. One exception: protected buffers can
-     *                      only be used in a protected scenario.
-     * \param block      pointer to where the allocated block shall be stored on success. nullptr
-     *                      will be stored here on failure
+     * \param capacity the size of requested block.
+     * \param usage    the memory usage info for the requested block. Returned blocks will be
+     *                 optimized for this usage, but may be used with any usage. One exception:
+     *                 protected blocks/buffers can only be used in a protected scenario.
+     * \param block    pointer to where the obtained block shall be stored on success. nullptr will
+     *                 be stored here on failure
      *
-     * \retval C2_OK        the allocation was successful
-     * \retval C2_NO_MEMORY not enough memory to complete the allocation
-     * \retval C2_TIMED_OUT the allocation timed out
-     * \retval C2_NO_PERMISSION     no permission to complete the allocation
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_REFUSED   no permission to complete any required allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
-     * \retval C2_UNSUPPORTED       this allocator does not support linear allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_OMITTED   this pool does not support linear blocks
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation (unexpected)
      */
-    virtual C2Error allocateLinearBlock(
+    virtual c2_status_t fetchLinearBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
             std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
         *block = nullptr;
-        return C2_UNSUPPORTED;
+        return C2_OMITTED;
     }
 
     /**
-     * Allocates a circular writeable block of given |capacity| and |usage|. If successful, the
+     * Obtains a circular writeable block of given |capacity| and |usage|. If successful, the
      * block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
      *
-     * \param capacity        the size of requested circular block. (the allocation could be slightly
-     *                      larger, e.g. to account for any system-required alignment)
-     * \param usage           the memory usage info for the requested allocation. \note that the
-     *                      returned allocation may be later used/mapped with different usage.
-     *                      The allocator shall lay out the buffer to be optimized for this usage,
-     *                      but must support any usage. One exception: protected buffers can
-     *                      only be used in a protected scenario.
-     * \param block      pointer to where the allocated block shall be stored on success. nullptr
-     *                      will be stored here on failure
+     * \param capacity the size of requested circular block. (note: the size of the obtained
+     *                 block could be slightly larger, e.g. to accommodate any system-required
+     *                 alignment)
+     * \param usage    the memory usage info for the requested block. Returned blocks will be
+     *                 optimized for this usage, but may be used with any usage. One exception:
+     *                 protected blocks/buffers can only be used in a protected scenario.
+     * \param block    pointer to where the obtained block shall be stored on success. nullptr
+     *                 will be stored here on failure
      *
-     * \retval C2_OK            the allocation was successful
-     * \retval C2_NO_MEMORY     not enough memory to complete the allocation
-     * \retval C2_TIMED_OUT     the allocation timed out
-     * \retval C2_NO_PERMISSION     no permission to complete the allocation
-     * \retval C2_BAD_VALUE     capacity or usage are not supported (invalid) (caller error)
-     * \retval C2_UNSUPPORTED   this allocator does not support circular allocations
-     * \retval C2_CORRUPTED     some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+     * \retval C2_OMITTED   this pool does not support circular blocks
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation (unexpected)
      */
-    virtual C2Error allocateCircularBlock(
+    virtual c2_status_t fetchCircularBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
             std::shared_ptr<C2CircularBlock> *block /* nonnull */) {
         *block = nullptr;
-        return C2_UNSUPPORTED;
+        return C2_OMITTED;
     }
 
     /**
-     * Allocates a 2D graphic block of given |width|, |height|, |format| and |usage|. If successful,
-     * the allocation is stored in |block|. Otherwise, |block| is set to 'nullptr'.
+     * Obtains a 2D graphic block of given |width|, |height|, |format| and |usage|. If successful,
+     * the block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
      *
-     * \param width           the width of requested allocation (the allocation could be slightly
-     *                      larger, e.g. to account for any system-required alignment)
-     * \param height          the height of requested allocation (the allocation could be slightly
-     *                      larger, e.g. to account for any system-required alignment)
-     * \param format          the pixel format of requested allocation. This could be a vendor
-     *                      specific format.
-     * \param usage           the memory usage info for the requested allocation. \note that the
-     *                      returned allocation may be later used/mapped with different usage.
-     *                      The allocator should layout the buffer to be optimized for this usage,
-     *                      but must support any usage. One exception: protected buffers can
-     *                      only be used in a protected scenario.
-     * \param block      pointer to where the allocation shall be stored on success. nullptr
-     *                      will be stored here on failure
+     * \param width  the width of requested block (the obtained block could be slightly larger, e.g.
+     *               to accommodate any system-required alignment)
+     * \param height the height of requested block (the obtained block could be slightly larger,
+     *               e.g. to accommodate any system-required alignment)
+     * \param format the pixel format of requested block. This could be a vendor specific format.
+     * \param usage  the memory usage info for the requested block. Returned blocks will be
+     *               optimized for this usage, but may be used with any usage. One exception:
+     *               protected blocks/buffers can only be used in a protected scenario.
+     * \param block  pointer to where the obtained block shall be stored on success. nullptr
+     *               will be stored here on failure
      *
-     * \retval C2_OK            the allocation was successful
-     * \retval C2_NO_MEMORY     not enough memory to complete the allocation
-     * \retval C2_TIMED_OUT     the allocation timed out
-     * \retval C2_NO_PERMISSION     no permission to complete the allocation
-     * \retval C2_BAD_VALUE     width, height, format or usage are not supported (invalid) (caller error)
-     * \retval C2_UNSUPPORTED   this allocator does not support 2D allocations
-     * \retval C2_CORRUPTED     some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
+     *                      error)
+     * \retval C2_OMITTED   this pool does not support 2D blocks
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation (unexpected)
      */
-    virtual C2Error allocateGraphicBlock(
+    virtual c2_status_t fetchGraphicBlock(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
             C2MemoryUsage usage __unused,
             std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
         *block = nullptr;
-        return C2_UNSUPPORTED;
+        return C2_OMITTED;
     }
 
 protected:
-    C2BlockAllocator() = default;
+    C2BlockPool() = default;
 
-    virtual ~C2BlockAllocator() = default;
+    virtual ~C2BlockPool() = default;
 };
 
 /// @}
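As a rough sketch of the block-pool flow described above (illustrative only; the helper name is made up), a component would fetch output blocks from a pool it obtained elsewhere, e.g. the BASIC_GRAPHIC platform pool, and simply drop its references when done so the blocks can be recycled:

c2_status_t getOutputBlock(const std::shared_ptr<C2BlockPool> &pool,
                           uint32_t width, uint32_t height, uint32_t format,
                           C2MemoryUsage usage,
                           std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
    c2_status_t res = pool->fetchGraphicBlock(width, height, format, usage, block);
    if (res == C2_OMITTED) {
        // This pool only hands out 1D blocks (e.g. a pool backing BASIC_LINEAR).
    }
    return res;  // releasing *block later returns the memory to the pool for reuse
}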
diff --git a/media/libstagefright/codec2/include/C2Component.h b/media/libstagefright/codec2/include/C2Component.h
index 1ee9302..38d545e 100644
--- a/media/libstagefright/codec2/include/C2Component.h
+++ b/media/libstagefright/codec2/include/C2Component.h
@@ -36,23 +36,29 @@
 
 class C2Component;
 
-class C2ComponentListener {
-public:
-    virtual void onWorkDone(std::weak_ptr<C2Component> component,
-                            std::vector<std::unique_ptr<C2Work>> workItems) = 0;
+struct C2FieldSupportedValuesQuery {
+    enum type_t : uint32_t {
+        POSSIBLE, ///< query all possible values regardless of other settings
+        CURRENT,  ///< query currently possible values given dependent settings
+    };
 
-    virtual void onTripped(std::weak_ptr<C2Component> component,
-                           std::vector<std::shared_ptr<C2SettingResult>> settingResult) = 0;
+    const C2ParamField field;
+    const type_t type;
+    c2_status_t status;
+    C2FieldSupportedValues values;
 
-    virtual void onError(std::weak_ptr<C2Component> component,
-                         uint32_t errorCode) = 0;
+    C2FieldSupportedValuesQuery(const C2ParamField &field_, type_t type_)
+        : field(field_), type(type_), status(C2_NO_INIT) { }
 
-    // virtual void onTunnelReleased(<from>, <to>) = 0;
+    static C2FieldSupportedValuesQuery
+    Current(const C2ParamField &field_) {
+        return C2FieldSupportedValuesQuery(field_, CURRENT);
+    }
 
-    // virtual void onComponentReleased(<id>) = 0;
-
-protected:
-    virtual ~C2ComponentListener();
+    static C2FieldSupportedValuesQuery
+    Possible(const C2ParamField &field_) {
+        return C2FieldSupportedValuesQuery(field_, POSSIBLE);
+    }
 };
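A brief sketch of how the new query struct is meant to be filled in and handed to querySupportedValues_vb() (declared further below); the helper name is hypothetical and construction of the C2ParamField is left to the caller, since that type lives in another header.

c2_status_t queryFieldValues(const std::shared_ptr<C2ComponentInterface> &intf,
                             const C2ParamField &field) {
    std::vector<C2FieldSupportedValuesQuery> queries;
    queries.reserve(2);
    queries.emplace_back(field, C2FieldSupportedValuesQuery::CURRENT);   // given current config
    queries.emplace_back(field, C2FieldSupportedValuesQuery::POSSIBLE);  // regardless of config
    c2_status_t res = intf->querySupportedValues_vb(queries, C2_MAY_BLOCK);
    // Each entry carries its own result: inspect queries[i].status and queries[i].values.
    return res;
}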
 
 /**
@@ -72,8 +78,8 @@
      * This is a unique name for this component or component interface 'class'; however, multiple
      * instances of this component SHALL have the same name.
      *
-     * This method MUST be supported in any state. This call does not change the state nor the
-     * internal states of the component.
+     * When attached to a component, this method MUST be supported in any component state.
+     * This call does not change the state nor the internal configuration of the component.
      *
      * This method MUST be "non-blocking" and return within 1ms.
      *
@@ -86,127 +92,127 @@
      * Returns a unique ID for this component or interface object.
      * This ID is used as work targets, unique work IDs, and when configuring tunneling.
      *
-     * This method MUST be supported in any state. This call does not change the state nor the
-     * internal states of the component.
+     * When attached to a component, this method MUST be supported in any component state.
+     * This call does not change the state nor the internal configuration of the component.
      *
      * This method MUST be "non-blocking" and return within 1ms.
      *
      * \return a unique node ID for this component or component interface instance.
      */
-    virtual node_id getId() const = 0;
+    virtual c2_node_id_t getId() const = 0;
 
     /**
      * Queries a set of parameters from the component or interface object.
      * Querying is performed at best effort: the component SHALL query all supported parameters and
-     * skip unsupported ones, or heap allocated parameters that could not be allocated. Any errors
-     * are communicated in the return value. Additionally, preallocated (e.g. stack) parameters that
-     * could not be queried are invalidated. Parameters to be allocated on the heap are omitted from
-     * the result.
+     * skip unsupported ones, heap allocated parameters that could not be allocated or parameters
+     * that could not be queried without blocking. Any errors are communicated in the return value.
+     * Additionally, preallocated (e.g. stack) parameters that could not be queried are invalidated.
+     * Invalid or blocking parameters to be allocated on the heap are omitted from the result.
      *
      * \note Parameter values do not depend on the order of query.
      *
      * \todo This method cannot be used to query info-buffers. Is that a problem?
      *
-     * This method MUST be supported in any state. This call does not change the state nor the
-     * internal states of the component.
+     * When attached to a component, this method MUST be supported in any component state except
+     * released.
+     * This call does not change the state nor the internal configuration of the component.
      *
-     * This method MUST be "non-blocking" and return within 1ms.
+     * This method has a variable blocking behavior based on state.
+     * In the stopped state this method MUST be "non-blocking" and return within 1ms.
+     * In the running states this method may be momentarily blocking, but MUST return within 5ms.
      *
-     * \param[in,out] stackParams   a list of params queried. These are initialized specific to each
-     *                      setting; e.g. size and index are set and rest of the members are
-     *                      cleared.
-     *                      \note Flexible settings that are of incorrect size will be invalidated.
-     * \param[in] heapParamIndices a vector of param indices for params to be queried and returned on the
-     *                      heap. These parameters will be returned in heapParams. Unsupported param
-     *                      indices will be ignored.
-     * \param[out] heapParams    a list of params where to which the supported heap parameters will be
-     *                      appended in the order they appear in heapParamIndices.
+     * \param[in,out] stackParams  a list of params queried. These are initialized specific to each
+     *                             setting; e.g. size and index are set and rest of the members are
+     *                             cleared.
+     *                             \note Flexible settings that are of incorrect size will be
+     *                             invalidated.
+     * \param[in] heapParamIndices a vector of param indices for params to be queried and returned
+     *                             on the heap. These parameters will be returned in heapParams.
+     *                             Unsupported param indices will be ignored.
+     * \param[in] mayBlock         if true (C2_MAY_BLOCK), implementation may momentarily block.
+     *                             Otherwise (C2_DONT_BLOCK), it must be "non-blocking".
+     * \param[out] heapParams      a list of params where to which the supported heap parameters
+     *                             will be appended in the order they appear in heapParamIndices.
      *
      * \retval C2_OK        all parameters could be queried
      * \retval C2_BAD_INDEX all supported parameters could be queried, but some parameters were not
      *                      supported
+     * \retval C2_BAD_STATE when called in the released component state (user error)
+     *                      (this error code is only allowed for interfaces connected to components)
      * \retval C2_NO_MEMORY could not allocate memory for a supported parameter
+     * \retval C2_BLOCKING  the operation must block to complete but mayBlock is false
+     *                      (this error code is only allowed for interfaces connected to components)
+     * \retval C2_TIMED_OUT could not query the parameters within the time limit (unexpected)
+     *                      (this error code is only allowed for interfaces connected to components
+     *                      in the running state)
      * \retval C2_CORRUPTED some unknown error prevented the querying of the parameters
      *                      (unexpected)
+     *                      (this error code is only allowed for interfaces connected to components)
      */
-    virtual status_t query_nb(
+    virtual c2_status_t query_vb(
         const std::vector<C2Param* const> &stackParams,
         const std::vector<C2Param::Index> &heapParamIndices,
+        c2_blocking_t mayBlock,
         std::vector<std::unique_ptr<C2Param>>* const heapParams) const = 0;
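For illustration (not part of the patch), a sketch of a heap-only query using the new blocking flag; the wrapper name is hypothetical, and stack parameters are left out to keep it independent of concrete C2Param structs defined in other headers.

c2_status_t queryOnHeap(const std::shared_ptr<C2ComponentInterface> &intf,
                        const std::vector<C2Param::Index> &indices,
                        c2_blocking_t mayBlock,
                        std::vector<std::unique_ptr<C2Param>> *heapParams /* nonnull */) {
    const std::vector<C2Param* const> stackParams;  // empty: only heap-allocated results wanted
    c2_status_t res = intf->query_vb(stackParams, indices, mayBlock, heapParams);
    // C2_BAD_INDEX: everything supported was returned but some indices were unknown.
    // C2_BLOCKING:  mayBlock was C2_DONT_BLOCK and the query would have had to block.
    return res;
}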
 
     /**
      * Sets a set of parameters for the component or interface object.
-     * Tuning is performed at best effort: the component SHALL update all supported configuration at
-     * best effort (unless configured otherwise) and skip unsupported ones. Any errors are
-     * communicated in the return value and in |failures|.
+     *
+     * Tuning is performed at best effort: the component SHALL process the configuration updates in
+     * the order they appear in |params|. If any parameter update fails, the component shall
+     * communicate the failure in the return value and in |failures|, and still process the
+     * remaining parameters. Unsupported parameters are skipped, though they are communicated in
+     * the return value. Most parameters are updated at best effort - such that even if the client
+     * specifies an unsupported value for a field, the closest supported value is used. On the
+     * other hand, strict parameters only accept specific values for their fields, and if the client
+     * specifies an unsupported value, the parameter setting shall fail for that field.
+     * If the client tries to change the value of a field that requires momentary blocking without
+     * setting |mayBlock| to C2_MAY_BLOCK, that parameter shall also be skipped and a specific
+     * return value shall be used. Final values for all parameters set are propagated back to the
+     * caller in |params|.
      *
      * \note Parameter tuning DOES depend on the order of the tuning parameters. E.g. some parameter
-     * update may allow some subsequent parameter update.
+     * update may allow some subsequent values for further parameter updates.
      *
-     * This method MUST be supported in any state.
+     * When attached to a component, this method MUST be supported in any component state except
+     * released.
      *
-     * This method MUST be "non-blocking" and return within 1ms.
+     * This method has a variable blocking behavior based on state.
+     * In the stopped state this method MUST be "non-blocking" and return within 1ms.
+     * In the running states this method may be momentarily blocking, but MUST return within 5ms.
      *
-     * \param[in,out] params          a list of parameter updates. These will be updated to the actual
-     *                      parameter values after the updates (this is because tuning is performed
-     *                      at best effort).
-     *                      \todo params that could not be updated are not marked here, so are
-     *                      confusing - are they "existing" values or intended to be configured
-     *                      values?
-     * \param[out] failures        a list of parameter failures
+     * \param[in,out] params a list of parameter updates. These will be updated to the actual
+     *                       parameter values after the updates (this is because tuning is performed
+     *                       at best effort).
+     *                       \todo params that could not be updated are not marked here, so are
+     *                       confusing - are they "existing" values or intended to be configured
+     *                       values?
+     * \param[in] mayBlock   if true (C2_MAY_BLOCK), implementation may momentarily block.
+     *                       Otherwise (C2_DONT_BLOCK), it must be "non-blocking".
+     * \param[out] failures  a list of parameter failures and optional guidance
      *
      * \retval C2_OK        all parameters could be updated successfully
      * \retval C2_BAD_INDEX all supported parameters could be updated successfully, but some
      *                      parameters were not supported
      * \retval C2_BAD_VALUE some supported parameters could not be updated successfully because
      *                      they contained unsupported values. These are returned in |failures|.
+     * \retval C2_BAD_STATE when called in the released component state (user error)
+     *                      (this error code is only allowed for interfaces connected to components)
      * \retval C2_NO_MEMORY some supported parameters could not be updated successfully because
      *                      they contained unsupported values, but could not allocate a failure
      *                      object for them.
+     * \retval C2_TIMED_OUT could not set the parameters within the time limit (unexpected)
+     *                      (this error code is only allowed for interfaces connected to components
+     *                      in the running state)
+     * \retval C2_BLOCKING  the operation must block to complete but mayBlock is false
+     *                      (this error code is only allowed for interfaces connected to components)
      * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
      *                      (unexpected)
+     *                      (this error code is only allowed for interfaces connected to components)
      */
-    virtual status_t config_nb(
+    virtual c2_status_t config_vb(
             const std::vector<C2Param* const> &params,
-            std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
-
-    /**
-     * Atomically sets a set of parameters for the component or interface object.
-     *
-     * \note This method is used mainly for reserving resources for a component.
-     *
-     * The component SHALL update all supported configuration at
-     * best effort(TBD) (unless configured otherwise) and skip unsupported ones. Any errors are
-     * communicated in the return value and in |failures|.
-     *
-     * \note Parameter tuning DOES depend on the order of the tuning parameters. E.g. some parameter
-     * update may allow some subsequent parameter update.
-     *
-     * This method MUST be supported in any state.
-     *
-     * This method may be momentarily blocking, but MUST return within 5ms.
-     *
-     * \param params[in,out]          a list of parameter updates. These will be updated to the actual
-     *                      parameter values after the updates (this is because tuning is performed
-     *                      at best effort).
-     *                      \todo params that could not be updated are not marked here, so are
-     *                      confusing - are they "existing" values or intended to be configured
-     *                      values?
-     * \param failures[out]        a list of parameter failures
-     *
-     * \retval C2_OK        all parameters could be updated successfully
-     * \retval C2_BAD_INDEX all supported parameters could be updated successfully, but some
-     *                      parameters were not supported
-     * \retval C2_BAD_VALUE some supported parameters could not be updated successfully because
-     *                      they contained unsupported values. These are returned in |failures|.
-     * \retval C2_NO_MEMORY some supported parameters could not be updated successfully because
-     *                      they contained unsupported values, but could not allocate a failure
-     *                      object for them.
-     * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
-     *                      (unexpected)
-     */
-    virtual status_t commit_sm(
-            const std::vector<C2Param* const> &params,
+            c2_blocking_t mayBlock,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
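A corresponding configuration sketch (illustrative; the function name is made up): it applies a caller-built batch with C2_MAY_BLOCK and leaves per-parameter diagnostics in the failures list, while the accepted or adjusted values are written back into the params themselves.

c2_status_t applyConfig(const std::shared_ptr<C2ComponentInterface> &intf,
                        const std::vector<C2Param* const> &params) {
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t res = intf->config_vb(params, C2_MAY_BLOCK, &failures);
    if (res == C2_BAD_VALUE) {
        // Some strict parameters rejected their values; details are in |failures|.
    }
    return res;
}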
 
     // TUNNELING
@@ -218,19 +224,24 @@
      * If the component is successfully created, subsequent work items queued may include a
      * tunneled path between these components.
      *
-     * This method MUST be supported in any state.
+     * When attached to a component, this method MUST be supported in any component state except
+     * released.
      *
      * This method may be momentarily blocking, but MUST return within 5ms.
      *
      * \retval C2_OK        the tunnel was successfully created
      * \retval C2_BAD_INDEX the target component does not exist
-     * \retval C2_ALREADY_EXIST the tunnel already exists
-     * \retval C2_UNSUPPORTED  the tunnel is not supported
+     * \retval C2_DUPLICATE the tunnel already exists
+     * \retval C2_OMITTED   tunneling is not supported by this component
+     * \retval C2_CANNOT_DO the specific tunnel is not supported
+     * \retval C2_BAD_STATE when called in the released component state (user error)
+     *                      (this error code is only allowed for interfaces connected to components)
      *
      * \retval C2_TIMED_OUT could not create the tunnel within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented the creation of the tunnel (unexpected)
+     *                      (this error code is only allowed for interfaces connected to components)
      */
-    virtual status_t createTunnel_sm(node_id targetComponent) = 0;
+    virtual c2_status_t createTunnel_sm(c2_node_id_t targetComponent) = 0;
 
     /**
      * Releases a tunnel from this component to the target component.
@@ -239,97 +250,213 @@
      * After releasing a tunnel, subsequent work items queued MUST NOT include a tunneled
      * path between these components.
      *
-     * This method MUST be supported in any state.
+     * When attached to a component, this method MUST be supported in any component state except
+     * released.
      *
      * This method may be momentarily blocking, but MUST return within 5ms.
      *
      * \retval C2_OK        the tunnel was marked for release successfully
      * \retval C2_BAD_INDEX the target component does not exist
      * \retval C2_NOT_FOUND the tunnel does not exist
+     * \retval C2_OMITTED   tunneling is not supported by this component
+     * \retval C2_BAD_STATE when called in the released component state (user error)
+     *                      (this error code is only allowed for interfaces connected to components)
      *
      * \retval C2_TIMED_OUT could not mark the tunnel for release within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented the release of the tunnel (unexpected)
+     *                      (this error code is only allowed for interfaces connected to components)
      */
-    virtual status_t releaseTunnel_sm(node_id targetComponent) = 0;
-
+    virtual c2_status_t releaseTunnel_sm(c2_node_id_t targetComponent) = 0;
 
     // REFLECTION MECHANISM (USED FOR EXTENSION)
     // =============================================================================================
 
     /**
-     * Returns the parameter reflector.
-     *
-     * This is used to describe parameter fields.
-     *
-     * \return a shared parameter reflector object.
-     */
-    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const = 0;
-
-    /**
      * Returns the set of supported parameters.
      *
+     * When attached to a component, this method MUST be supported in any component state except
+     * released.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
      * \param[out] params a vector of supported parameters will be appended to this vector.
      *
      * \retval C2_OK        the operation completed successfully.
+     * \retval C2_BAD_STATE when called in the released component state (user error)
+     *                      (this error code is only allowed for interfaces connected to components)
      * \retval C2_NO_MEMORY not enough memory to complete this method.
      */
-    virtual status_t getSupportedParams(
+    virtual c2_status_t querySupportedParams_nb(
             std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const = 0;
 
     /**
+     * Retrieves the supported values for the queried fields.
      *
-     * \todo should this take a list considering that setting some fields may further limit other
-     * fields in the same list?
+     * Client SHALL set the parameter-field specifier and the type of supported values query (e.g.
+     * currently supported values, or potential supported values) in fields.
+     * Upon return the component SHALL fill in the supported values for the fields listed as well
+     * as a status for each field. Component shall process all fields queried even if some queries
+     * fail.
+     *
+     * When attached to a component, this method MUST be supported in any component state except
+     * released.
+     *
+     * This method has a variable blocking behavior based on state.
+     * In the stopped state this method MUST be "non-blocking" and return within 1ms.
+     * In the running states this method may be momentarily blocking, but MUST return within 5ms.
+     *
+     * \param[in,out] fields a vector of field descriptor structures.
+     * \param[in] mayBlock   if true (C2_MAY_BLOCK), implementation may momentarily block.
+     *                       Otherwise (C2_DONT_BLOCK), it must be "non-blocking".
+     *
+     * \retval C2_OK        the operation completed successfully.
+     * \retval C2_BAD_STATE when called in the released component state (user error)
+     *                      (this error code is only allowed for interfaces connected to components)
+     * \retval C2_BAD_INDEX at least one field was not recognized as a component field
+     * \retval C2_TIMED_OUT could not query supported values within the time limit (unexpected)
+     *                      (this error code is only allowed for interfaces connected to components
+     *                      in the running state)
+     * \retval C2_BLOCKING  the operation must block to complete but mayBlock is false
+     *                      (this error code is only allowed for interfaces connected to components)
+     * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
+     *                      (this error code is only allowed for interfaces connected to components)
      */
-    virtual status_t getSupportedValues(
-            const std::vector<const C2ParamField> fields,
-            std::vector<C2FieldSupportedValues>* const values) const = 0;
+    virtual c2_status_t querySupportedValues_vb(
+            std::vector<C2FieldSupportedValuesQuery> &fields, c2_blocking_t mayBlock) const = 0;
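And a one-call sketch of parameter discovery through the renamed reflection entry point (illustrative only); descriptor contents are not examined here because C2ParamDescriptor is defined elsewhere.

c2_status_t listSupportedParams(const std::shared_ptr<C2ComponentInterface> &intf,
                                std::vector<std::shared_ptr<C2ParamDescriptor>> *out /* nonnull */) {
    out->clear();
    // querySupportedParams_nb() appends descriptors and must return within 1ms.
    return intf->querySupportedParams_nb(out);
}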
 
     virtual ~C2ComponentInterface() = default;
 };
 
 class C2Component {
 public:
+    class Listener {
+    public:
+        virtual void onWorkDone_nb(std::weak_ptr<C2Component> component,
+                                std::vector<std::unique_ptr<C2Work>> workItems) = 0;
+
+        virtual void onTripped_nb(std::weak_ptr<C2Component> component,
+                               std::vector<std::shared_ptr<C2SettingResult>> settingResult) = 0;
+
+        virtual void onError_nb(std::weak_ptr<C2Component> component,
+                             uint32_t errorCode) = 0;
+
+        // virtual void onTunnelReleased(<from>, <to>) = 0;
+
+        // virtual void onComponentReleased(<id>) = 0;
+
+        virtual ~Listener() = default;
+    };
+
+    /**
+     * Sets the listener for this component
+     *
+     * This method MUST be supported in all states except released.
+     * The listener can only be set to non-null value in stopped state (that does not include
+     * tripped or error). It can be set to nullptr in both stopped and running states.
+     * Components only use the listener in running state.
+     *
+     * If listener is nullptr, the component SHALL guarantee that no more listener callbacks are
+     * done to the original listener once this method returns. (Any pending listener callbacks will
+     * need to be completed during this call - hence this call may be temporarily blocking.)
+     *
+     * This method has a variable blocking behavior based on state.
+     * In the stopped state this method MUST be "non-blocking" and return within 1ms.
+     * In the running states this method may be momentarily blocking, but MUST return within 5ms.
+     *
+     * Component SHALL handle listener notifications from the same thread (the thread used is
+     * at the component's discretion.)
+     *
+     * \note This could also be accomplished by passing a weak_ptr to a component-specific listener
+     * here and requiring the client to always promote the weak_ptr before any callback. This would
+     * put the burden on the client to clear the listener - wait for its deletion - at which point
+     * it is guaranteed that no more listener callbacks will occur.
+     *
+     * \param[in] listener the component listener object
+     * \param[in] mayBlock if true (C2_MAY_BLOCK), implementation may momentarily block.
+     *                     Otherwise (C2_DONT_BLOCK), it must be "non-blocking".
+     *
+     * \retval C2_BAD_STATE attempting to change the listener in the running state to a non-null
+     *                      value (user error), or called in the released state
+     * \retval C2_BLOCKING  the operation must block to complete but mayBlock is false
+     * \retval C2_OK        listener was updated successfully.
+     */
+    virtual c2_status_t setListener_vb(
+            const std::shared_ptr<Listener> &listener, c2_blocking_t mayBlock) = 0;
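To make the listener contract concrete, here is a minimal sketch of a client-side listener and of attaching it while the component is stopped (class and function names are invented for the example; real clients would forward finished work to their own machinery).

class SketchListener : public C2Component::Listener {
public:
    void onWorkDone_nb(std::weak_ptr<C2Component> component,
                       std::vector<std::unique_ptr<C2Work>> workItems) override {
        (void)component; (void)workItems;      // consume finished work here
    }
    void onTripped_nb(std::weak_ptr<C2Component> component,
                      std::vector<std::shared_ptr<C2SettingResult>> settingResult) override {
        (void)component; (void)settingResult;  // adjust configuration to resolve the trip
    }
    void onError_nb(std::weak_ptr<C2Component> component, uint32_t errorCode) override {
        (void)component; (void)errorCode;      // fatal component error
    }
};

c2_status_t attachListener(const std::shared_ptr<C2Component> &comp) {
    // Non-null listeners are only accepted in the stopped state, where the call is non-blocking.
    return comp->setListener_vb(std::make_shared<SketchListener>(), C2_DONT_BLOCK);
}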
+
+    enum domain_t : uint32_t;
+    enum kind_t : uint32_t;
+    typedef uint32_t rank_t;
+
+    /**
+     * Information about a component.
+     */
+    struct Traits {
+    // public:
+        C2String name; ///< name of the component
+        domain_t domain; ///< component domain (e.g. audio or video)
+        kind_t kind; ///< component kind (e.g. encoder, decoder or filter)
+        rank_t rank; ///< rank used to determine component ordering (the lower the sooner)
+        C2StringLiteral mediaType; ///< media type supported by the component
+
+        /**
+         * name alias(es) for backward compatibility.
+         * \note Multiple components can have the same alias as long as their media-type differs.
+         */
+        std::vector<C2StringLiteral> aliases; ///< name aliases for backward compatibility
+    };
+
     // METHODS AVAILABLE WHEN RUNNING
     // =============================================================================================
 
     /**
      * Queues up work for the component.
      *
-     * This method MUST be supported in running (including tripped) states.
+     * This method MUST be supported in running (including tripped and error) states.
      *
-     * This method MUST be "non-blocking" and return within 1ms
+     * This method MUST be "non-blocking" and return within 1 ms
      *
      * It is acceptable for this method to return OK and return an error value using the
      * onWorkDone() callback.
      *
      * \retval C2_OK        the work was successfully queued
      * \retval C2_BAD_INDEX some component(s) in the work do(es) not exist
-     * \retval C2_UNSUPPORTED  the components are not tunneled
+     * \retval C2_CANNOT_DO the components are not tunneled
+     * \retval C2_BAD_STATE when called in the stopped or released state (user error)
      *
      * \retval C2_NO_MEMORY not enough memory to queue the work
      * \retval C2_CORRUPTED some unknown error prevented queuing the work (unexpected)
      */
-    virtual status_t queue_nb(std::list<std::unique_ptr<C2Work>>* const items) = 0;
+    virtual c2_status_t queue_nb(std::list<std::unique_ptr<C2Work>>* const items) = 0;
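A queueing sketch (names hypothetical): construction of the C2Work itself is outside this header, so a filled item is taken as input and simply handed over.

c2_status_t queueOne(const std::shared_ptr<C2Component> &comp,
                     std::unique_ptr<C2Work> work) {
    std::list<std::unique_ptr<C2Work>> items;
    items.push_back(std::move(work));
    // Must return within 1 ms; C2_BAD_STATE means the component is not running (user error).
    return comp->queue_nb(&items);
}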
 
     /**
      * Announces a work to be queued later for the component. This reserves a slot for the queue
      * to ensure correct work ordering even if the work is queued later.
      *
-     * This method MUST be supported in running (including tripped) states.
+     * This method MUST be supported in running (including tripped and error) states.
      *
      * This method MUST be "non-blocking" and return within 1 ms
      *
      * \retval C2_OK        the work announcement has been successfully recorded
      * \retval C2_BAD_INDEX some component(s) in the work outline do(es) not exist
-     * \retval C2_UNSUPPORTED  the componentes are not tunneled
+     * \retval C2_CANNOT_DO the components are not tunneled
+     * \retval C2_BAD_STATE when called in the stopped or released state (user error)
      *
      * \retval C2_NO_MEMORY not enough memory to record the work announcement
      * \retval C2_CORRUPTED some unknown error prevented recording the announcement (unexpected)
      *
      * \todo Can this be rolled into queue_nb?
+     * \todo Expose next work item for each component to detect stalls
      */
-    virtual status_t announce_nb(const std::vector<C2WorkOutline> &items) = 0;
+    virtual c2_status_t announce_nb(const std::vector<C2WorkOutline> &items) = 0;
+
+    enum flush_mode_t : uint32_t {
+        /// flush work from this component only
+        FLUSH_COMPONENT,
+
+        /// flush work from this component and all components connected downstream from it via
+        /// tunneling
+        FLUSH_CHAIN = (1 << 16),
+    };
 
     /**
      * Discards and abandons any pending work for the component, and optionally any component
@@ -344,7 +471,7 @@
      * \todo we could simply take a list of numbers and flush those... this is bad for decoders
      *       also, what would happen to fine grade references?
      *
-     * This method MUST be supported in running (including tripped) states.
+     * This method MUST be supported in running (including tripped and error) states.
      *
      * This method may be momentarily blocking, but must return within 5ms.
      *
@@ -353,27 +480,46 @@
      *
      * Work that could not be abandoned or discarded immediately SHALL be marked to be
      * discarded at the earliest opportunity, and SHALL be returned via the onWorkDone() callback.
+     * This shall be completed within 500ms.
      *
-     * \param flushThrough    flush work from this component and all components connected downstream
-     *                      from it via tunneling.
+     * \param mode flush mode
      *
-     * \retval C2_OK        the work announcement has been successfully recorded
+     * \retval C2_OK        the component has been successfully flushed
+     * \retval C2_BAD_STATE when called in the stopped or released state (user error)
      * \retval C2_TIMED_OUT the flush could not be completed within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented flushing from completion (unexpected)
      */
-    virtual status_t flush_sm(bool flushThrough, std::list<std::unique_ptr<C2Work>>* const flushedWork) = 0;
+    virtual c2_status_t flush_sm(flush_mode_t mode, std::list<std::unique_ptr<C2Work>>* const flushedWork) = 0;
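A flush sketch under the new mode enum (illustrative only): only this component is flushed; FLUSH_CHAIN would additionally cover tunneled downstream components, and any work not returned here still arrives later through onWorkDone_nb().

c2_status_t flushLocal(const std::shared_ptr<C2Component> &comp,
                       std::list<std::unique_ptr<C2Work>> *flushedWork /* nonnull */) {
    return comp->flush_sm(C2Component::FLUSH_COMPONENT, flushedWork);
}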
+
+    enum drain_mode_t : uint32_t {
+        /// drain component only and add an "end-of-stream" marker. Component shall process all
+        /// queued work and complete the current stream. If new input is received, it shall start
+        /// a new stream. \todo define what a stream is.
+        DRAIN_COMPONENT_WITH_EOS,
+        /// drain component without setting "end-of-stream" marker. Component shall process all
+        /// queued work but shall expect more work items for the same stream.
+        DRAIN_COMPONENT_NO_EOS = (1 << 0),
+
+        /// marks the last work item with a persistent "end-of-stream" marker that will drain
+        /// downstream components
+        /// \todo this may confuse work-ordering downstream
+        DRAIN_CHAIN = (1 << 16),
+
+        /**
+         * \todo define this; we could place EOS to all upstream components, just this component, or
+         *       all upstream and downstream component.
+         * \todo should EOS carry over to downstream components?
+         */
+    };
 
     /**
-     * Drains the component, and optionally downstream components
+     * Drains the component, and optionally downstream components. This is a signalling method;
+     * as such it does not wait for any work completion.
      *
-     * \todo define this; we could place EOS to all upstream components, just this component, or
-     *       all upstream and downstream component.
-     * \todo should EOS carry over to downstream components?
-     *
-     * Marks last work item as "end-of-stream", so component is notified not to wait for further
-     * work before it processes work already queued. This method is called to set the end-of-stream
-     * flag after work has been queued. Client can continue to queue further work immediately after
-     * this method returns.
+     * Marks last work item as "drain-till-here", so component is notified not to wait for further
+     * work before it processes work already queued. This method can also be used to set the
+     * end-of-stream flag after work has been queued. Client can continue to queue further work
+     * immediately after this method returns.
      *
      * This method MUST be supported in running (including tripped) states.
      *
@@ -381,16 +527,16 @@
      *
      * Work that is completed SHALL be returned via the onWorkDone() callback.
      *
-     * \param drainThrough    marks the last work item with a persistent "end-of-stream" marker that
-     *                      will drain downstream components.
+     * \param mode drain mode
      *
-     * \todo this may confuse work-ordering downstream; could be an mode enum
-     *
-     * \retval C2_OK        the work announcement has been successfully recorded
+     * \retval C2_OK        the drain request has been successfully recorded
+     * \retval C2_BAD_STATE when called in the stopped or released state (user error)
+     * \retval C2_BAD_VALUE the drain mode is not supported by the component
+     *                      \todo define supported modes discovery
      * \retval C2_TIMED_OUT the flush could not be completed within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented flushing from completion (unexpected)
      */
-    virtual status_t drain_nb(bool drainThrough) = 0;
+    virtual c2_status_t drain_nb(drain_mode_t mode) = 0;
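A drain sketch (hypothetical helper): the call only signals, so the drained work still completes through onWorkDone_nb(); C2_BAD_VALUE is treated here as "mode not supported", with a fallback to a plain drain, per the retval list above.

c2_status_t signalEndOfStream(const std::shared_ptr<C2Component> &comp) {
    c2_status_t res = comp->drain_nb(C2Component::DRAIN_COMPONENT_WITH_EOS);
    if (res == C2_BAD_VALUE) {
        res = comp->drain_nb(C2Component::DRAIN_COMPONENT_NO_EOS);  // fallback: drain without EOS
    }
    return res;
}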
 
     // STATE CHANGE METHODS
     // =============================================================================================
@@ -398,20 +544,31 @@
     /**
      * Starts the component.
      *
-     * This method MUST be supported in stopped state.
+     * This method MUST be supported in stopped state, as well as during the tripped state.
+     *
+     * If the return value is C2_OK, the component shall be in the running state.
+     * If the return value is C2_BAD_STATE or C2_DUPLICATE, no state change is expected as a
+     * response to this call.
+     * Otherwise, the component shall be in the stopped state.
+     *
+     * \note If a component is in the tripped state and start() is called while the component
+     * configuration still results in a trip, start shall succeed and a new onTripped callback
+     * should be used to communicate the configuration conflict that results in the new trip.
      *
      * \todo This method MUST return within 500ms. Seems this should be able to return quickly, as
      * there are no immediate guarantees. Though there are guarantees for responsiveness immediately
      * after start returns.
      *
-     * \todo Could we just start a ComponentInterface to get a Component?
-     *
-     * \retval C2_OK        the work announcement has been successfully recorded
+     * \retval C2_OK        the component has started (or resumed) successfully
+     * \retval C2_DUPLICATE when called during another start call from another thread
+     * \retval C2_BAD_STATE when called in any state other than the stopped state or tripped state,
+     *                      including when called during another state change call from another
+     *                      thread (user error)
      * \retval C2_NO_MEMORY not enough memory to start the component
      * \retval C2_TIMED_OUT the component could not be started within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented starting the component (unexpected)
      */
-    virtual status_t start() = 0;
+    virtual c2_status_t start() = 0;
 
     /**
      * Stops the component.
@@ -420,51 +577,71 @@
      *
      * This method MUST return within 500ms.
      *
-     * Upon this call, all pending work SHALL be abandoned.
+     * Upon this call, all pending work SHALL be abandoned and all buffer references SHALL be
+     * released.
+     * If the return value is C2_BAD_STATE or C2_DUPLICATE, no state change is expected as a
+     * response to this call.
+     * For all other return values, the component shall be in the stopped state.
      *
      * \todo should this return completed work, since client will just free it? Perhaps just to
      * verify accounting.
      *
      * This does not alter any settings and tunings that may have resulted in a tripped state.
      * (Is this material given the definition? Perhaps in case we want to start again.)
+     *
+     * \retval C2_OK        the component has stopped successfully
+     * \retval C2_DUPLICATE when called during another stop call from another thread
+     * \retval C2_BAD_STATE when called in any state other than the running state, including when
+     *                      called during another state change call from another thread (user error)
+     * \retval C2_TIMED_OUT the component could not be stopped within the time limit (unexpected)
+     * \retval C2_CORRUPTED some unknown error prevented stopping the component (unexpected)
      */
-    virtual status_t stop() = 0;
+    virtual c2_status_t stop() = 0;
 
     /**
      * Resets the component.
      *
-     * This method MUST be supported in running (including tripped) state.
+     * This method MUST be supported in all (including tripped) states other than released.
      *
-     * This method MUST be supported during any other call (\todo or just blocking ones?)
+     * This method MUST be supported during any other blocking call.
      *
      * This method MUST return within 500ms.
      *
-     * After this call returns all work is/must be abandoned, all references should be released.
+     * After this call returns all work SHALL be abandoned, all buffer references SHALL be released.
+     * If the return value is C2_BAD_STATE or C2_DUPLICATE, no state change is expected as a
+     * response to this call.
+     * For all other return values, the component shall be in the stopped state.
      *
      * \todo should this return completed work, since client will just free it? Also, if it unblocks
      * a stop, where should completed work be returned?
      *
      * This brings settings back to their default - "guaranteeing" no tripped space.
      *
-     * \todo reclaim support - it seems that since ownership is passed, this will allow reclaiming stuff.
+     * \todo reclaim support - it seems that since ownership is passed, this will allow reclaiming
+     * stuff.
+     *
+     * \retval C2_OK        the component has been reset
+     * \retval C2_DUPLICATE when called during another reset call from another thread
+     * \retval C2_BAD_STATE when called in the released state
+     * \retval C2_TIMED_OUT the component could not be reset within the time limit (unexpected)
+     * \retval C2_CORRUPTED some unknown error prevented resetting the component (unexpected)
      */
-    virtual void reset() = 0;
+    virtual c2_status_t reset() = 0;
 
     /**
      * Releases the component.
      *
-     * This method MUST be supported in any state. (\todo Or shall we force reset() first to bring
-     * to a known state?)
+     * This method MUST be supported in stopped state.
      *
-     * This method MUST return withing 500ms.
+     * This method MUST return within 500ms. Upon return all references shall be abandoned.
      *
-     * \todo should this return completed work, since client will just free it? Also, if it unblocks
-     * a stop, where should completed work be returned?
-     *
-     * TODO: does it matter if this call has a short time limit? Yes, as upon return all references
-     * shall be abandoned.
+     * \retval C2_OK        the component has been released
+     * \retval C2_DUPLICATE the component is already released
+     * \retval C2_BAD_STATE the component is running
+     * \retval C2_TIMED_OUT the component could not be released within the time limit (unexpected)
+     * \retval C2_CORRUPTED some unknown error prevented releasing the component (unexpected)
      */
-    virtual void release() = 0;
+    virtual c2_status_t release() = 0;
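
The state-change methods above are meant to be used in sequence; a hedged lifecycle sketch,
assuming comp is a std::shared_ptr<C2Component>:

~~~~~~~~~~~~~ (.cpp)
// Sketch only: comp is an assumed std::shared_ptr<C2Component>.
if (comp->start() == C2_OK) {   // stopped (or tripped) -> running
    // ... queue and drain work while running ...
    (void)comp->stop();         // running -> stopped; pending work is abandoned
}
(void)comp->reset();            // settings return to their defaults; state becomes stopped
(void)comp->release();          // stopped -> released; all references are abandoned
~~~~~~~~~~~~~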
 
     /**
      * Returns the interface for this component.
@@ -473,7 +650,6 @@
      */
     virtual std::shared_ptr<C2ComponentInterface> intf() = 0;
 
-protected:
     virtual ~C2Component() = default;
 };
 
@@ -489,6 +665,8 @@
     /**
      * \return a vector of supported parameter indices parsed by this info parser.
      *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
      * \todo sticky vs. non-sticky params? this may be communicated by param-reflector.
      */
     virtual const std::vector<C2Param::Index> getParsedParams() const = 0;
@@ -502,31 +680,54 @@
      * \retval C2_TIMED_OUT could not reset the parser within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented the resetting of the parser (unexpected)
      */
-    virtual status_t reset() { return C2_OK; }
+    virtual c2_status_t reset() { return C2_OK; }
 
-    virtual status_t parseFrame(C2BufferPack &frame);
+    virtual c2_status_t parseFrame(C2BufferPack &frame);
 
     virtual ~C2FrameInfoParser() = default;
 };
 
-struct C2ComponentInfo {
-    // TBD
-
-};
-
 class C2AllocatorStore {
 public:
-    // TBD
+    typedef C2Allocator::id_t id_t;
 
-    enum Type {
-        LINEAR,     ///< basic linear allocator type
-        GRALLOC,    ///< basic gralloc allocator type
+    enum : C2Allocator::id_t {
+        DEFAULT_LINEAR,     ///< basic linear allocator type
+        DEFAULT_GRAPHIC,    ///< basic graphic allocator type
+        PLATFORM_START = 0x10,
+        VENDOR_START   = 0x100,
     };
 
     /**
-     * Creates an allocator.
+     * Returns the unique name of this allocator store.
      *
-     * \param type      the type of allocator to create
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \return the name of this allocator store.
+     * \retval an empty string if there was not enough memory to allocate the actual name.
+     */
+    virtual C2String getName() const = 0;
+
+    /**
+     * Returns the set of allocators supported by this allocator store.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \retval vector of allocator information (as shared pointers)
+     * \retval an empty vector if there was not enough memory to allocate the whole vector.
+     */
+    virtual std::vector<std::shared_ptr<const C2Allocator::Traits>> listAllocators_nb() const = 0;
+
+    /**
+     * Retrieves/creates a shared allocator object.
+     *
+     * This method MUST return within 5ms.
+     *
+     * The allocator is created on first use, and the same allocator is returned on subsequent
+     * concurrent uses in the same process. The allocator is freed when it is no longer referenced.
+     *
+     * \param id      the ID of the allocator to create. This is defined by the store, but
+     *                the ID of the default linear and graphic allocators is formalized.
      * \param allocator shared pointer where the created allocator is stored. Cleared on failure
      *                  and updated on success.
      *
@@ -537,12 +738,28 @@
      * \retval C2_NOT_FOUND no such allocator
      * \retval C2_NO_MEMORY not enough memory to create the allocator
      */
-    virtual status_t createAllocator(Type type, std::shared_ptr<C2Allocator>* const allocator) = 0;
+    virtual c2_status_t fetchAllocator(id_t id, std::shared_ptr<C2Allocator>* const allocator) = 0;
 
     virtual ~C2AllocatorStore() = default;
 };
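
A minimal sketch of fetching the formalized default linear allocator, assuming store is a
std::shared_ptr<C2AllocatorStore>:

~~~~~~~~~~~~~ (.cpp)
// Sketch only: store is an assumed std::shared_ptr<C2AllocatorStore>.
std::shared_ptr<C2Allocator> allocator;
c2_status_t res = store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
if (res == C2_OK) {
    // subsequent fetches with the same ID in this process return the same allocator
}
~~~~~~~~~~~~~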
 
 class C2ComponentStore {
+public:
+    /**
+     * Returns the name of this component store.
+     * This is a unique name for this component store 'class'; however, multiple instances
+     * of this store SHALL have the same name.
+     *
+     * This method MUST be supported in any state. This call does not change the state nor the
+     * internal states of the component.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \return the name of this component store.
+     * \retval an empty string if there was not enough memory to allocate the actual name.
+     */
+    virtual C2String getName() const = 0;
+
     /**
      * Creates a component.
      *
@@ -559,7 +776,8 @@
      * \retval C2_NOT_FOUND no such component
      * \retval C2_NO_MEMORY not enough memory to create the component
      */
-    virtual status_t createComponent(C2String name, std::shared_ptr<C2Component>* const component);
+    virtual c2_status_t createComponent(
+            C2String name, std::shared_ptr<C2Component>* const component) = 0;
 
     /**
      * Creates a component interface.
@@ -580,26 +798,27 @@
      *
      * \todo Do we need an interface, or could this just be a component that is never started?
      */
-    virtual status_t createInterface(C2String name, std::shared_ptr<C2ComponentInterface>* const interface);
+    virtual c2_status_t createInterface(
+            C2String name, std::shared_ptr<C2ComponentInterface>* const interface) = 0;
 
     /**
      * Returns the list of components supported by this component store.
      *
-     * This method SHALL return within 1ms.
+     * This method MUST return within 500ms.
      *
      * \retval vector of component information.
      */
-    virtual std::vector<std::unique_ptr<const C2ComponentInfo>> getComponents();
+    virtual std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() = 0;
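
A hedged sketch of enumerating and instantiating components through the store (store is an
assumed std::shared_ptr<C2ComponentStore>; the component name below is made up):

~~~~~~~~~~~~~ (.cpp)
// Sketch only: store is an assumed std::shared_ptr<C2ComponentStore>.
std::vector<std::shared_ptr<const C2Component::Traits>> traits = store->listComponents();
std::shared_ptr<C2Component> component;
c2_status_t res = store->createComponent("c2.example.avc.decoder", &component);
if (res == C2_NOT_FOUND) {
    // no component with that name in this store
}
~~~~~~~~~~~~~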
 
     // -------------------------------------- UTILITY METHODS --------------------------------------
 
     // on-demand buffer layout conversion (swizzling)
-    virtual status_t copyBuffer(std::shared_ptr<C2GraphicBuffer> src, std::shared_ptr<C2GraphicBuffer> dst);
+    //
+    virtual c2_status_t copyBuffer(
+            std::shared_ptr<C2GraphicBuffer> src, std::shared_ptr<C2GraphicBuffer> dst) = 0;
 
-    // status_t selectPreferredColor(formats<A>, formats<B>);
-
-    // GLOBAL SETTINGS
-    // system-wide stride & slice-height (???)
+    // -------------------------------------- CONFIGURATION API -----------------------------------
+    // e.g. for global settings (system-wide stride, etc.)
 
     /**
      * Queries a set of system-wide parameters.
@@ -611,16 +830,16 @@
      *
      * \note Parameter values do not depend on the order of query.
      *
-     * This method MUST be "non-blocking" and return within 1ms.
+     * This method may be momentarily blocking, but MUST return within 5ms.
      *
-     * \param stackParams     a list of params queried. These are initialized specific to each
+     * \param stackParams   a list of params queried. These are initialized specific to each
      *                      setting; e.g. size and index are set and rest of the members are
      *                      cleared.
      *                      NOTE: Flexible settings that are of incorrect size will be invalidated.
      * \param heapParamIndices a vector of param indices for params to be queried and returned on the
      *                      heap. These parameters will be returned in heapParams. Unsupported param
      *                      indices will be ignored.
-     * \param heapParams      a list of params where to which the supported heap parameters will be
+     * \param heapParams    a list of params to which the supported heap parameters will be
      *                      appended in the order they appear in heapParamIndices.
      *
      * \retval C2_OK        all parameters could be queried
@@ -630,10 +849,10 @@
      * \retval C2_CORRUPTED some unknown error prevented the querying of the parameters
      *                      (unexpected)
      */
-    virtual status_t query_nb(
+    virtual c2_status_t query_sm(
         const std::vector<C2Param* const> &stackParams,
         const std::vector<C2Param::Index> &heapParamIndices,
-        std::vector<std::unique_ptr<C2Param>>* const heapParams) = 0;
+        std::vector<std::unique_ptr<C2Param>>* const heapParams) const = 0;
 
     /**
      * Sets a set of system-wide parameters.
@@ -648,15 +867,15 @@
      * \note Parameter tuning DOES depend on the order of the tuning parameters. E.g. some parameter
      * update may allow some subsequent parameter update.
      *
-     * This method MUST be "non-blocking" and return within 1ms.
+     * This method may be momentarily blocking, but MUST return within 5ms.
      *
-     * \param params          a list of parameter updates. These will be updated to the actual
+     * \param params        a list of parameter updates. These will be updated to the actual
      *                      parameter values after the updates (this is because tuning is performed
      *                      at best effort).
      *                      \todo params that could not be updated are not marked here, so are
      *                      confusing - are they "existing" values or intended to be configured
      *                      values?
-     * \param failures        a list of parameter failures
+     * \param failures      a list of parameter failures
      *
      * \retval C2_OK        all parameters could be updated successfully
      * \retval C2_BAD_INDEX all supported parameters could be updated successfully, but some
@@ -669,9 +888,56 @@
      * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
      *                      (unexpected)
      */
-    virtual status_t config_nb(
+    virtual c2_status_t config_sm(
             const std::vector<C2Param* const> &params,
-            std::list<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+
+    // REFLECTION MECHANISM (USED FOR EXTENSION)
+    // =============================================================================================
+
+    /**
+     * Returns the parameter reflector.
+     *
+     * This is used to describe parameter fields. This is shared for all components created by
+     * this component store.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \return a shared parameter reflector object.
+     */
+    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const = 0;
+
+    /**
+     * Returns the set of supported parameters.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \param[out] params a vector of supported parameters will be appended to this vector.
+     *
+     * \retval C2_OK        the operation completed successfully.
+     * \retval C2_NO_MEMORY not enough memory to complete this method.
+     */
+    virtual c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const = 0;
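
A minimal sketch of the discovery call above, assuming store is a
std::shared_ptr<C2ComponentStore>:

~~~~~~~~~~~~~ (.cpp)
// Sketch only: store is an assumed std::shared_ptr<C2ComponentStore>.
std::vector<std::shared_ptr<C2ParamDescriptor>> supported;
if (store->querySupportedParams_nb(&supported) == C2_OK) {
    // each descriptor identifies one parameter type supported by the store
}
~~~~~~~~~~~~~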
+
+    /**
+     * Retrieves the supported values for the queried fields.
+     *
+     * Client SHALL set the parameter-field specifier and the type of supported values query (e.g.
+     * currently supported values, or potential supported values) in fields.
+     * Upon return the store SHALL fill in the supported values for the fields listed as well
+     * as a status for each field. Store shall process all fields queried even if some queries
+     * fail.
+     *
+     * This method may be momentarily blocking, but MUST return within 5ms.
+     *
+     * \param[in,out] fields a vector of field descriptor structures.
+     *
+     * \retval C2_OK        the operation completed successfully.
+     * \retval C2_BAD_INDEX at least one field was not recognized as a component store field
+     */
+    virtual c2_status_t querySupportedValues_sm(
+            std::vector<C2FieldSupportedValuesQuery> &fields) const = 0;
 
     virtual ~C2ComponentStore() = default;
 };
diff --git a/media/libstagefright/codec2/include/C2Config.h b/media/libstagefright/codec2/include/C2Config.h
index 30e9193..83cb72c 100644
--- a/media/libstagefright/codec2/include/C2Config.h
+++ b/media/libstagefright/codec2/include/C2Config.h
@@ -41,7 +41,7 @@
 enum name : type { __VA_ARGS__ }; \
 DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(name, type, names, __VA_ARGS__)
 
-enum C2ParamIndexKind : uint32_t {
+enum C2ParamIndexKind : C2Param::type_index_t {
     /// domain
     kParamIndexDomain,
 
@@ -61,17 +61,22 @@
     kParamIndexMime,
     kParamIndexStreamCount,
     kParamIndexFormat,
+    kParamIndexBlockPools,
+
+    kParamIndexMaxVideoSizeHint,
+    kParamIndexVideoSizeTuning,
+
+    kParamIndexCsd,
 
     // video info
 
     kParamIndexStructStart = 0x1,
     kParamIndexVideoSize,
-    kParamIndexMaxVideoSizeHint,
 
     kParamIndexParamStart = 0x800,
 };
 
-C2ENUM(C2DomainKind, int32_t,
+C2ENUM(C2DomainKind, uint32_t,
     C2DomainVideo,
     C2DomainAudio,
     C2DomainOther = C2DomainAudio + 1
@@ -124,6 +129,10 @@
 
 typedef C2StreamParam<C2Tuning, C2Uint32Value, kParamIndexFormat> C2StreamFormatConfig;
 
+typedef C2PortParam<C2Tuning, C2Uint64Array, kParamIndexBlockPools> C2PortBlockPoolsTuning;
+
+typedef C2StreamParam<C2Info, C2BlobValue, kParamIndexCsd> C2StreamCsdInfo;
+
 /*
    Component description fields:
 
@@ -227,22 +236,22 @@
 //   - critical parameters? (interlaced? profile? level?)
 
 struct C2VideoSizeStruct {
-    int32_t mWidth;     ///< video width
-    int32_t mHeight;    ///< video height
+    int32_t width;     ///< video width
+    int32_t height;    ///< video height
 
-    DEFINE_AND_DESCRIBE_C2STRUCT(VideoSize)
-    C2FIELD(mWidth, "width")
-    C2FIELD(mHeight, "height")
+    DEFINE_AND_DESCRIBE_BASE_C2STRUCT(VideoSize)
+    C2FIELD(width, "width")
+    C2FIELD(height, "height")
 };
 
 // video size for video decoder [OUT]
-typedef C2StreamParam<C2Info, C2VideoSizeStruct> C2VideoSizeStreamInfo;
+typedef C2StreamParam<C2Info, C2VideoSizeStruct, kParamIndexVideoSize> C2VideoSizeStreamInfo;
 
 // max video size for video decoder [IN]
 typedef C2PortParam<C2Setting, C2VideoSizeStruct, kParamIndexMaxVideoSizeHint> C2MaxVideoSizeHintPortSetting;
 
 // video encoder size [IN]
-typedef C2StreamParam<C2Tuning, C2VideoSizeStruct> C2VideoSizeStreamTuning;
+typedef C2StreamParam<C2Tuning, C2VideoSizeStruct, kParamIndexVideoSizeTuning> C2VideoSizeStreamTuning;
 
 /// @}
 
diff --git a/media/libstagefright/codec2/include/C2Param.h b/media/libstagefright/codec2/include/C2Param.h
index fd43061..2a8c1b2 100644
--- a/media/libstagefright/codec2/include/C2Param.h
+++ b/media/libstagefright/codec2/include/C2Param.h
@@ -52,6 +52,9 @@
  *   - must be POD struct, e.g. no vtable (no virtual destructor)
  *   - must have the same size in 64-bit and 32-bit mode (no size_t)
  *   - as such, no pointer members
+ *   - some common member field names are reserved, as they are defined as methods for all
+ *     parameters: size, type, kind, index and stream
  *
  * Behavior:
  * - Params can be global (not related to input or output), related to input or output,
@@ -71,7 +74,7 @@
  *     an error for the specific setting, but should continue to apply other settings.
  *     TODO: this currently may result in unintended results.
  *
- * **NOTE:** unlike OMX, params are not versioned. Instead, a new struct with new base index
+ * **NOTE:** unlike OMX, params are not versioned. Instead, a new struct with new param index
  * SHALL be added as new versions are required.
  *
  * The proper subtype (Setting, Info or Param) is incorporated into the class type. Define structs
@@ -94,17 +97,21 @@
 struct C2Param {
     // param index encompasses the following:
     //
-    // - type (setting, tuning, info, struct)
-    // - vendor extension flag
-    // - flexible parameter flag
-    // - direction (global, input, output)
-    // - stream flag
-    // - stream ID (usually 0)
+    // - kind (setting, tuning, info, struct)
+    // - scope
+    //   - direction (global, input, output)
+    //   - stream flag
+    //   - stream ID (usually 0)
+    // - and the parameter's type (core index)
+    //   - flexible parameter flag
+    //   - vendor extension flag
+    //   - type index (this includes the vendor extension flag)
     //
     // layout:
     //
+    //        kind : <------- scope -------> : <----- core index ----->
     //      +------+-----+---+------+--------+----|------+--------------+
-    //      | kind | dir | - |stream|streamID|flex|vendor|  base index  |
+    //      | kind | dir | - |stream|streamID|flex|vendor|  type index  |
     //      +------+-----+---+------+--------+----+------+--------------+
     //  bit: 31..30 29.28       25   24 .. 17  16    15   14    ..     0
     //
@@ -112,7 +119,7 @@
     /**
      * C2Param kinds, usable as bitmaps.
      */
-    enum Kind : uint32_t {
+    enum kind_t : uint32_t {
         NONE    = 0,
         STRUCT  = (1 << 0),
         INFO    = (1 << 1),
@@ -121,110 +128,141 @@
     };
 
     /**
-     * base index (including the vendor extension bit) is a global index for
-     * C2 parameter structs. (e.g. the same indices cannot be reused for different
-     * structs for different components).
+     * The parameter type index specifies the underlying parameter type of a parameter as
+     * an integer value.
+     *
+     * Parameter types are divided into two groups: platform types and vendor types.
+     *
+     * Platform types are defined by the platform and are common for all implementations.
+     *
+     * Vendor types are defined by each vendor, so they may differ between implementations.
+     * It is recommended that vendor types be the same for all implementations by a specific
+     * vendor.
      */
-    struct BaseIndex {
+    typedef uint32_t type_index_t;
+    enum : uint32_t {
+            TYPE_INDEX_VENDOR_START = 0x00008000, ///< vendor indices SHALL start after this
+    };
+
+    /**
+     * Core index is the underlying parameter type for a parameter. It is used to describe the
+     * layout of the parameter structure regardless of the component or parameter kind/scope.
+     *
+     * It is used to identify and distinguish global parameters, and also parameters on a given
+     * port or stream. They must be unique for the set of global parameters, as well as for the
+     * set of parameters on each port or each stream, but the same core index can be used for
+     * parameters on different streams or ports, as well as for global parameters and port/stream
+     * parameters.
+     *
+     * Multiple parameter types can share the same layout.
+     *
+     * \note The layout for all parameters with the same core index across all components must
+     * be identical.
+     */
+    struct CoreIndex {
+    //public:
+        enum : uint32_t {
+            IS_FLEX_FLAG = 0x00010000,
+        };
+
     protected:
         enum : uint32_t {
-            kTypeMask      = 0xC0000000,
-            kTypeStruct    = 0x00000000,
-            kTypeTuning    = 0x40000000,
-            kTypeSetting   = 0x80000000,
-            kTypeInfo      = 0xC0000000,
+            KIND_MASK      = 0xC0000000,
+            KIND_STRUCT    = 0x00000000,
+            KIND_TUNING    = 0x40000000,
+            KIND_SETTING   = 0x80000000,
+            KIND_INFO      = 0xC0000000,
 
-            kDirMask       = 0x30000000,
-            kDirGlobal     = 0x20000000,
-            kDirUndefined  = 0x30000000, // MUST have all bits set
-            kDirInput      = 0x00000000,
-            kDirOutput     = 0x10000000,
+            DIR_MASK       = 0x30000000,
+            DIR_GLOBAL     = 0x20000000,
+            DIR_UNDEFINED  = DIR_MASK, // MUST have all bits set
+            DIR_INPUT      = 0x00000000,
+            DIR_OUTPUT     = 0x10000000,
 
-            kStreamFlag    = 0x02000000,
-            kStreamIdMask  = 0x01FE0000,
-            kStreamIdShift = 17,
-            kStreamIdMax   = kStreamIdMask >> kStreamIdShift,
-            kStreamMask    = kStreamFlag | kStreamIdMask,
+            IS_STREAM_FLAG  = 0x02000000,
+            STREAM_ID_MASK  = 0x01FE0000,
+            STREAM_ID_SHIFT = 17,
+            MAX_STREAM_ID   = STREAM_ID_MASK >> STREAM_ID_SHIFT,
+            STREAM_MASK     = IS_STREAM_FLAG | STREAM_ID_MASK,
 
-            kFlexibleFlag  = 0x00010000,
-            kVendorFlag    = 0x00008000,
-            kParamMask     = 0x0000FFFF,
-            kBaseMask      = kParamMask | kFlexibleFlag,
+            IS_VENDOR_FLAG  = 0x00008000,
+            TYPE_INDEX_MASK = 0x0000FFFF,
+            CORE_MASK       = TYPE_INDEX_MASK | IS_FLEX_FLAG,
         };
 
     public:
-        enum : uint32_t {
-            kVendorStart = kVendorFlag, ///< vendor structs SHALL start after this
-            _kFlexibleFlag = kFlexibleFlag, // TODO: this is only needed for testing
-        };
-
         /// constructor/conversion from uint32_t
-        inline BaseIndex(uint32_t index) : mIndex(index) { }
+        inline CoreIndex(uint32_t index) : mIndex(index) { }
 
         // no conversion from uint64_t
-        inline BaseIndex(uint64_t index) = delete;
+        inline CoreIndex(uint64_t index) = delete;
 
         /// returns true iff this is a vendor extension parameter
-        inline bool isVendor() const { return mIndex & kVendorFlag; }
+        inline bool isVendor() const { return mIndex & IS_VENDOR_FLAG; }
 
         /// returns true iff this is a flexible parameter (with variable size)
-        inline bool isFlexible() const { return mIndex & kFlexibleFlag; }
+        inline bool isFlexible() const { return mIndex & IS_FLEX_FLAG; }
 
-        /// returns the base type: the index for the underlying struct
-        inline unsigned int baseIndex() const { return mIndex & kBaseMask; }
+        /// returns the core index
+        /// This is the combination of the parameter type index and the flexible flag.
+        inline uint32_t coreIndex() const { return mIndex & CORE_MASK; }
 
-        /// returns the param index for the underlying struct
-        inline unsigned int paramIndex() const { return mIndex & kParamMask; }
+        /// returns the parameter type index
+        inline type_index_t typeIndex() const { return mIndex & TYPE_INDEX_MASK; }
 
-        DEFINE_FIELD_BASED_COMPARISON_OPERATORS(BaseIndex, mIndex)
+        DEFINE_FIELD_AND_MASK_BASED_COMPARISON_OPERATORS(CoreIndex, mIndex, CORE_MASK)
 
     protected:
         uint32_t mIndex;
     };
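
A small sketch of the CoreIndex accessors on a made-up raw index value (0x00018123 is
illustrative only; it has both the vendor and flexible bits set):

~~~~~~~~~~~~~ (.cpp)
// Sketch only: 0x00018123 is a made-up raw index with IS_FLEX_FLAG and IS_VENDOR_FLAG set.
C2Param::CoreIndex index(0x00018123u);
bool vendor   = index.isVendor();     // true: bit 15 (IS_VENDOR_FLAG) is set
bool flex     = index.isFlexible();   // true: bit 16 (IS_FLEX_FLAG) is set
uint32_t core = index.coreIndex();    // 0x18123: type index plus the flexible flag
uint32_t type = index.typeIndex();    // 0x8123: the bare type index
~~~~~~~~~~~~~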
 
     /**
-     * type encompasses the parameter kind (tuning, setting, info), whether the
-     * parameter is global, input or output, and whether it is for a stream.
+     * Type encompasses the parameter's kind (tuning, setting, info), its scope (whether the
+     * parameter is global, input or output, and whether it is for a stream) and its core
+     * index (which also determines its layout).
      */
-    struct Type : public BaseIndex {
+    struct Type : public CoreIndex {
+    //public:
         /// returns true iff this is a global parameter (not for input nor output)
-        inline bool isGlobal() const { return (mIndex & kDirMask) == kDirGlobal; }
+        inline bool isGlobal() const { return (mIndex & DIR_MASK) == DIR_GLOBAL; }
         /// returns true iff this is an input or input stream parameter
-        inline bool forInput() const { return (mIndex & kDirMask) == kDirInput; }
+        inline bool forInput() const { return (mIndex & DIR_MASK) == DIR_INPUT; }
         /// returns true iff this is an output or output stream parameter
-        inline bool forOutput() const { return (mIndex & kDirMask) == kDirOutput; }
+        inline bool forOutput() const { return (mIndex & DIR_MASK) == DIR_OUTPUT; }
 
         /// returns true iff this is a stream parameter
-        inline bool forStream() const { return mIndex & kStreamFlag; }
+        inline bool forStream() const { return mIndex & IS_STREAM_FLAG; }
         /// returns true iff this is a port (input or output) parameter
         inline bool forPort() const   { return !forStream() && !isGlobal(); }
 
         /// returns the parameter type: the parameter index without the stream ID
-        inline uint32_t type() const { return mIndex & (~kStreamIdMask); }
+        inline uint32_t type() const { return mIndex & (~STREAM_ID_MASK); }
 
-        /// return the kind of this param
-        inline Kind kind() const {
-            switch (mIndex & kTypeMask) {
-                case kTypeStruct: return STRUCT;
-                case kTypeInfo: return INFO;
-                case kTypeSetting: return SETTING;
-                case kTypeTuning: return TUNING;
+        /// return the kind (struct, info, setting or tuning) of this param
+        inline kind_t kind() const {
+            switch (mIndex & KIND_MASK) {
+                case KIND_STRUCT: return STRUCT;
+                case KIND_INFO: return INFO;
+                case KIND_SETTING: return SETTING;
+                case KIND_TUNING: return TUNING;
                 default: return NONE; // should not happen
             }
         }
 
         /// constructor/conversion from uint32_t
-        inline Type(uint32_t index) : BaseIndex(index) { }
+        inline Type(uint32_t index) : CoreIndex(index) { }
 
         // no conversion from uint64_t
         inline Type(uint64_t index) = delete;
 
+        DEFINE_FIELD_AND_MASK_BASED_COMPARISON_OPERATORS(Type, mIndex, ~STREAM_ID_MASK)
+
     private:
         friend struct C2Param;   // for setPort()
-        friend struct C2Tuning;  // for kTypeTuning
-        friend struct C2Setting; // for kTypeSetting
-        friend struct C2Info;    // for kTypeInfo
-        // for kDirGlobal
+        friend struct C2Tuning;  // for KIND_TUNING
+        friend struct C2Setting; // for KIND_SETTING
+        friend struct C2Info;    // for KIND_INFO
+        // for DIR_GLOBAL
         template<typename T, typename S, int I, class F> friend struct C2GlobalParam;
         template<typename T, typename S, int I, class F> friend struct C2PortParam;   // for kDir*
         template<typename T, typename S, int I, class F> friend struct C2StreamParam; // for kDir*
@@ -238,7 +276,7 @@
             if (isGlobal()) {
                 return false;
             } else {
-                mIndex = (mIndex & ~kDirMask) | (output ? kDirOutput : kDirInput);
+                mIndex = (mIndex & ~DIR_MASK) | (output ? DIR_OUTPUT : DIR_INPUT);
                 return true;
             }
         }
@@ -262,6 +300,8 @@
             return forStream() ? rawStream() : ~0U;
         }
 
+        DEFINE_FIELD_BASED_COMPARISON_OPERATORS(Index, mIndex)
+
     private:
         friend struct C2Param;           // for setStream, makeStreamId, isValid
         friend struct _C2ParamInspector; // for testing
@@ -273,23 +313,23 @@
         inline bool isValid() const {
             // there is no Type::isValid (even though some of this check could be
             // performed on types) as this is only used on index...
-            return (forStream() ? rawStream() < kStreamIdMax : rawStream() == 0)
-                    && (mIndex & kDirMask) != kDirUndefined;
+            return (forStream() ? rawStream() < MAX_STREAM_ID : rawStream() == 0)
+                    && (mIndex & DIR_MASK) != DIR_UNDEFINED;
         }
 
         /// returns the raw stream ID field
         inline unsigned rawStream() const {
-            return (mIndex & kStreamIdMask) >> kStreamIdShift;
+            return (mIndex & STREAM_ID_MASK) >> STREAM_ID_SHIFT;
         }
 
         /// returns the streamId bitfield for a given |stream|. If stream is invalid,
         /// returns an invalid bitfield.
         inline static uint32_t makeStreamId(unsigned stream) {
             // saturate stream ID (max value is invalid)
-            if (stream > kStreamIdMax) {
-                stream = kStreamIdMax;
+            if (stream > MAX_STREAM_ID) {
+                stream = MAX_STREAM_ID;
             }
-            return (stream << kStreamIdShift) & kStreamIdMask;
+            return (stream << STREAM_ID_SHIFT) & STREAM_ID_MASK;
         }
 
         /**
@@ -298,8 +338,8 @@
          */
         inline bool setStream(unsigned stream) {
             if (forStream()) {
-                mIndex = (mIndex & ~kStreamIdMask) | makeStreamId(stream);
-                return this->stream() < kStreamIdMax;
+                mIndex = (mIndex & ~STREAM_ID_MASK) | makeStreamId(stream);
+                return this->stream() < MAX_STREAM_ID;
             }
             return false;
         }
@@ -328,10 +368,17 @@
     inline unsigned stream() const { return _mIndex.stream(); }
 
     /// returns the parameter type: the parameter index without the stream ID
-    inline uint32_t type() const { return _mIndex.type(); }
+    inline Type type() const { return _mIndex.type(); }
+
+    /// returns the index of this parameter
+    /// \todo: should we restrict this to C2ParamField?
+    inline uint32_t index() const { return (uint32_t)_mIndex; }
+
+    /// returns the core index of this parameter
+    inline CoreIndex coreIndex() const { return _mIndex.coreIndex(); }
 
     /// returns the kind of this parameter
-    inline Kind kind() const { return _mIndex.kind(); }
+    inline kind_t kind() const { return _mIndex.kind(); }
 
     /// returns the size of the parameter or 0 if the parameter is invalid
     inline size_t size() const { return _mSize; }
@@ -362,6 +409,17 @@
         return param;
     }
 
+    /// Returns a managed heap-allocated clone of |orig|.
+    inline static std::unique_ptr<C2Param> Copy(const C2Param &orig) {
+        if (orig.size() == 0) {
+            return nullptr;
+        }
+        void *mem = ::operator new (orig.size());
+        C2Param *param = new (mem) C2Param(orig.size(), orig._mIndex);
+        param->updateFrom(orig);
+        return std::unique_ptr<C2Param>(param);
+    }
+
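
A minimal usage sketch of the Copy() helper above (existing is an assumed, valid C2Param
reference):

~~~~~~~~~~~~~ (.cpp)
// Sketch only: existing is an assumed valid C2Param reference.
std::unique_ptr<C2Param> clone = C2Param::Copy(existing);
if (clone) {
    // clone has the same index and size as existing; a zero-sized (invalid)
    // parameter yields nullptr instead
}
~~~~~~~~~~~~~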
 #if 0
     template<typename P, class=decltype(C2Param(P()))>
     P *As() { return P::From(this); }
@@ -410,7 +468,7 @@
         } else if (o->_mIndex.isGlobal()) {
             return nullptr;
         } else {
-            return ((o->_mIndex.type() ^ type.mIndex) & ~Type::kDirMask) ? nullptr : o;
+            return ((o->_mIndex.type() ^ type.mIndex) & ~Type::DIR_MASK) ? nullptr : o;
         }
     }
 
@@ -438,10 +496,6 @@
 private:
     friend struct _C2ParamInspector; // for testing
 
-    /// returns the base type: the index for the underlying struct (for testing
-    /// as this can be gotten by the baseIndex enum)
-    inline uint32_t _baseIndex() const { return _mIndex.baseIndex(); }
-
     /// returns true iff |o| has the same size and index as this. This performs the
     /// basic check for equality.
     inline bool equals(const C2Param &o) const {
@@ -467,7 +521,7 @@
     template<typename ...Args>
     inline C2Setting(const Args(&... args)) : C2Param(args...) { }
 public: // TODO
-    enum : uint32_t { indexFlags = Type::kTypeSetting };
+    enum : uint32_t { PARAM_KIND = Type::KIND_SETTING };
 };
 
 /**
@@ -478,7 +532,7 @@
     template<typename ...Args>
     inline C2Tuning(const Args(&... args)) : C2Setting(args...) { }
 public: // TODO
-    enum : uint32_t { indexFlags = Type::kTypeTuning };
+    enum : uint32_t { PARAM_KIND = Type::KIND_TUNING };
 };
 
 /**
@@ -489,7 +543,7 @@
     template<typename ...Args>
     inline C2Info(const Args(&... args)) : C2Param(args...) { }
 public: // TODO
-    enum : uint32_t { indexFlags = Type::kTypeInfo };
+    enum : uint32_t { PARAM_KIND = Type::KIND_INFO };
 };
 
 /**
@@ -537,7 +591,6 @@
     /**
      * Constructor used to identify a field in an object.
      *
-     * \param U[type] pointer to the object that contains this field
      * \param pm[im] member pointer to the field
      */
     template<typename R, typename T, typename B=typename std::remove_extent<R>::type>
@@ -572,23 +625,93 @@
 };
 
 /**
- * Structure uniquely specifying a field in a configuration
+ * Structure uniquely specifying a 'field' in a configuration. The field
+ * can be a field of a configuration, a subfield of a field of a configuration,
+ * and even the whole configuration. Moreover, the field can point to an
+ * element in an array field, or to the entire array field.
+ *
+ * This structure is used for querying supported values for a field, as well
+ * as communicating configuration failures and conflicts when trying to change
+ * a configuration for a component/interface or a store.
  */
 struct C2ParamField {
 //public:
-    // TODO: fix what this is for T[] (for now size becomes T[1])
+    /**
+     * Create a field identifier using a configuration parameter (variable),
+     * and a pointer to member.
+     *
+     * ~~~~~~~~~~~~~ (.cpp)
+     *
+     * struct C2SomeParam {
+     *   uint32_t mField;
+     *   uint32_t mArray[2];
+     *   C2OtherStruct mStruct;
+     *   uint32_t mFlexArray[];
+     * } *mParam;
+     *
+     * C2ParamField(mParam, &mParam->mField);
+     * C2ParamField(mParam, &mParam->mArray);
+     * C2ParamField(mParam, &mParam->mArray[0]);
+     * C2ParamField(mParam, &mParam->mStruct.mSubField);
+     * C2ParamField(mParam, &mParam->mFlexArray);
+     * C2ParamField(mParam, &mParam->mFlexArray[2]);
+     *
+     * ~~~~~~~~~~~~~
+     *
+     * \todo fix what this is for T[] (for now size becomes T[1])
+     *
+     * \param param pointer to parameter
+     * \param offset member pointer
+     */
     template<typename S, typename T>
     inline C2ParamField(S* param, T* offset)
         : _mIndex(param->index()),
           _mFieldId(offset) {}
 
+    /**
+     * Create a field identifier using a configuration parameter (variable),
+     * and a member pointer. This method cannot be used to refer to an
+     * array element or a subfield.
+     *
+     * ~~~~~~~~~~~~~ (.cpp)
+     *
+     * C2SomeParam mParam;
+     * C2ParamField(&mParam, &C2SomeParam::mMemberField);
+     *
+     * ~~~~~~~~~~~~~
+     *
+     * \param p pointer to parameter
+     * \param pm member pointer to the field member
+     */
     template<typename R, typename T, typename U>
-    inline C2ParamField(U *p, R T::* pm) : _mIndex(p->type()), _mFieldId(p, pm) { }
+    inline C2ParamField(U *p, R T::* pm) : _mIndex(p->index()), _mFieldId(p, pm) { }
 
+    /**
+     * Create a field identifier to a configuration parameter (variable).
+     *
+     * ~~~~~~~~~~~~~ (.cpp)
+     *
+     * C2SomeParam mParam;
+     * C2ParamField(&mParam);
+     *
+     * ~~~~~~~~~~~~~
+     *
+     * \param param pointer to parameter
+     */
+    template<typename S>
+    inline C2ParamField(S* param)
+        : _mIndex(param->index()), _mFieldId(0u, param->size()) {}
+
+    /**
+     * Equality operator.
+     */
     inline bool operator==(const C2ParamField &other) const {
         return _mIndex == other._mIndex && _mFieldId == other._mFieldId;
     }
 
+    /**
+     * Ordering operator.
+     */
     inline bool operator<(const C2ParamField &other) const {
         return _mIndex < other._mIndex ||
             (_mIndex == other._mIndex && _mFieldId < other._mFieldId);
@@ -597,8 +720,8 @@
     DEFINE_OTHER_COMPARISON_OPERATORS(C2ParamField)
 
 private:
-    C2Param::Index _mIndex;
-    _C2FieldId _mFieldId;
+    C2Param::Index _mIndex; ///< parameter index
+    _C2FieldId _mFieldId;   ///< field identifier
 };
 
 /**
@@ -628,7 +751,7 @@
         template<typename T> const T &ref() const;
     };
 
-    enum Type {
+    enum type_t : uint32_t {
         NO_INIT,
         INT32,
         UINT32,
@@ -637,41 +760,41 @@
         FLOAT,
     };
 
-    template<typename T> static constexpr Type typeFor();
+    template<typename T> static constexpr type_t typeFor();
 
     // constructors - implicit
     template<typename T>
-    C2Value(T value)  : mType(typeFor<T>()),  mValue(value) { }
+    C2Value(T value)  : _mType(typeFor<T>()), _mValue(value) { }
 
-    C2Value() : mType(NO_INIT) { }
+    C2Value() : _mType(NO_INIT) { }
 
-    inline Type type() const { return mType; }
+    inline type_t type() const { return _mType; }
 
     template<typename T>
     inline bool get(T *value) const {
-        if (mType == typeFor<T>()) {
-            *value = mValue.ref<T>();
+        if (_mType == typeFor<T>()) {
+            *value = _mValue.ref<T>();
             return true;
         }
         return false;
     }
 
 private:
-    Type mType;
-    Primitive mValue;
+    type_t _mType;
+    Primitive _mValue;
 };
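
A small sketch of C2Value round-tripping a primitive; get() only succeeds when the requested
type matches the stored one:

~~~~~~~~~~~~~ (.cpp)
// Sketch only: store a uint32_t and read it back.
C2Value value(uint32_t(100));     // type() == C2Value::UINT32
uint32_t u32 = 0;
int32_t  i32 = 0;
bool ok  = value.get(&u32);       // true: the stored type matches
bool bad = value.get(&i32);       // false: requesting a different type fails
~~~~~~~~~~~~~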
 
-template<> const int32_t &C2Value::Primitive::ref<int32_t>() const { return i32; }
-template<> const int64_t &C2Value::Primitive::ref<int64_t>() const { return i64; }
-template<> const uint32_t &C2Value::Primitive::ref<uint32_t>() const { return u32; }
-template<> const uint64_t &C2Value::Primitive::ref<uint64_t>() const { return u64; }
-template<> const float &C2Value::Primitive::ref<float>() const { return fp; }
+template<> inline const int32_t &C2Value::Primitive::ref<int32_t>() const { return i32; }
+template<> inline const int64_t &C2Value::Primitive::ref<int64_t>() const { return i64; }
+template<> inline const uint32_t &C2Value::Primitive::ref<uint32_t>() const { return u32; }
+template<> inline const uint64_t &C2Value::Primitive::ref<uint64_t>() const { return u64; }
+template<> inline const float &C2Value::Primitive::ref<float>() const { return fp; }
 
-template<> constexpr C2Value::Type C2Value::typeFor<int32_t>() { return INT32; }
-template<> constexpr C2Value::Type C2Value::typeFor<int64_t>() { return INT64; }
-template<> constexpr C2Value::Type C2Value::typeFor<uint32_t>() { return UINT32; }
-template<> constexpr C2Value::Type C2Value::typeFor<uint64_t>() { return UINT64; }
-template<> constexpr C2Value::Type C2Value::typeFor<float>() { return FLOAT; }
+template<> constexpr C2Value::type_t C2Value::typeFor<int32_t>() { return INT32; }
+template<> constexpr C2Value::type_t C2Value::typeFor<int64_t>() { return INT64; }
+template<> constexpr C2Value::type_t C2Value::typeFor<uint32_t>() { return UINT32; }
+template<> constexpr C2Value::type_t C2Value::typeFor<uint64_t>() { return UINT64; }
+template<> constexpr C2Value::type_t C2Value::typeFor<float>() { return FLOAT; }
 
 /**
  * field descriptor. A field is uniquely defined by an index into a parameter.
@@ -686,7 +809,7 @@
      * \note: only 32-bit and 64-bit fields are supported (e.g. no boolean, as that
      * is represented using INT32).
      */
-    enum Type : uint32_t {
+    enum type_t : uint32_t {
         // primitive types
         INT32   = C2Value::INT32,  ///< 32-bit signed integer
         UINT32  = C2Value::UINT32, ///< 32-bit unsigned integer
@@ -700,7 +823,7 @@
                         ///< however, bytes cannot be individually addressed by clients.
 
         // complex types
-        STRUCT_FLAG = 0x10000, ///< structs. Marked with this flag in addition to their baseIndex.
+        STRUCT_FLAG = 0x20000, ///< structs. Marked with this flag in addition to their coreIndex.
     };
 
     typedef std::pair<C2String, C2Value::Primitive> named_value_type;
@@ -718,7 +841,7 @@
     static named_values_type namedValuesFor(const B &);
 
     inline C2FieldDescriptor(uint32_t type, uint32_t length, C2StringLiteral name, size_t offset, size_t size)
-        : _mType((Type)type), _mLength(length), _mName(name), _mFieldId(offset, size) { }
+        : _mType((type_t)type), _mLength(length), _mName(name), _mFieldId(offset, size) { }
 
     template<typename T, class B=typename std::remove_extent<T>::type>
     inline C2FieldDescriptor(const T* offset, const char *name)
@@ -746,7 +869,7 @@
           _mFieldId(&(((S*)0)->*field)) {}
 
     /// returns the type of this field
-    inline Type type() const { return _mType; }
+    inline type_t type() const { return _mType; }
     /// returns the length of the field in case it is an array. Returns 0 for
     /// T[] arrays, returns 1 for T[1] arrays as well as if the field is not an array.
     inline size_t length() const { return _mLength; }
@@ -762,7 +885,7 @@
 #endif
 
 private:
-    const Type _mType;
+    const type_t _mType;
     const uint32_t _mLength; // the last member can be arbitrary length if it is T[] array,
                        // extending to the end of the parameter (this is marked with
                        // 0). T[0]-s are not fields.
@@ -776,27 +899,27 @@
     // 2) this is at parameter granularity.
 
     // type resolution
-    inline static Type getType(int32_t*)  { return INT32; }
-    inline static Type getType(uint32_t*) { return UINT32; }
-    inline static Type getType(int64_t*)  { return INT64; }
-    inline static Type getType(uint64_t*) { return UINT64; }
-    inline static Type getType(float*)    { return FLOAT; }
-    inline static Type getType(char*)     { return STRING; }
-    inline static Type getType(uint8_t*)  { return BLOB; }
+    inline static type_t getType(int32_t*)  { return INT32; }
+    inline static type_t getType(uint32_t*) { return UINT32; }
+    inline static type_t getType(int64_t*)  { return INT64; }
+    inline static type_t getType(uint64_t*) { return UINT64; }
+    inline static type_t getType(float*)    { return FLOAT; }
+    inline static type_t getType(char*)     { return STRING; }
+    inline static type_t getType(uint8_t*)  { return BLOB; }
 
     template<typename T,
              class=typename std::enable_if<std::is_enum<T>::value>::type>
-    inline static Type getType(T*) {
+    inline static type_t getType(T*) {
         typename std::underlying_type<T>::type underlying(0);
         return getType(&underlying);
     }
 
-    // verify C2Struct by having a fieldList and a baseIndex.
+    // verify C2Struct by having a FIELD_LIST and a CORE_INDEX.
     template<typename T,
-             class=decltype(T::baseIndex + 1), class=decltype(T::fieldList)>
-    inline static Type getType(T*) {
+             class=decltype(T::CORE_INDEX + 1), class=decltype(T::FIELD_LIST)>
+    inline static type_t getType(T*) {
         static_assert(!std::is_base_of<C2Param, T>::value, "cannot use C2Params as fields");
-        return (Type)(T::baseIndex | STRUCT_FLAG);
+        return (type_t)(T::CORE_INDEX | STRUCT_FLAG);
     }
 };
 
@@ -820,33 +943,33 @@
  */
 struct C2StructDescriptor {
 public:
-    /// Returns the parameter type
-    inline C2Param::BaseIndex baseIndex() const { return _mType.baseIndex(); }
+    /// Returns the core index of the struct
+    inline C2Param::CoreIndex coreIndex() const { return _mType.coreIndex(); }
 
-    // Returns the number of fields in this param (not counting any recursive fields).
-    // Must be at least 1 for valid params.
+    // Returns the number of fields in this struct (not counting any recursive fields).
+    // Must be at least 1 for valid structs.
     inline size_t numFields() const { return _mFields.size(); }
 
-    // Returns the list of immediate fields (not counting any recursive fields).
+    // Returns the list of direct fields (not counting any recursive fields).
     typedef std::vector<const C2FieldDescriptor>::const_iterator field_iterator;
     inline field_iterator cbegin() const { return _mFields.cbegin(); }
     inline field_iterator cend() const { return _mFields.cend(); }
 
-    // only supplying const iterator - but these are needed for range based loops
+    // only supplying const iterator - but these names are needed for range based loops
     inline field_iterator begin() const { return _mFields.cbegin(); }
     inline field_iterator end() const { return _mFields.cend(); }
 
     template<typename T>
     inline C2StructDescriptor(T*)
-        : C2StructDescriptor(T::baseIndex, T::fieldList) { }
+        : C2StructDescriptor(T::CORE_INDEX, T::FIELD_LIST) { }
 
     inline C2StructDescriptor(
-            C2Param::BaseIndex type,
+            C2Param::CoreIndex type,
             std::initializer_list<const C2FieldDescriptor> fields)
         : _mType(type), _mFields(fields) { }
 
 private:
-    const C2Param::BaseIndex _mType;
+    const C2Param::CoreIndex _mType;
     const std::vector<const C2FieldDescriptor> _mFields;
 };
 
@@ -886,7 +1009,7 @@
         : _mIsRequired(isRequired),
           _mIsPersistent(true),
           _mName(name),
-          _mType(T::typeIndex) { }
+          _mType(T::PARAM_TYPE) { }
 
     inline C2ParamDescriptor(
             bool isRequired, C2StringLiteral name, C2Param::Type type)
@@ -903,35 +1026,48 @@
 };
 
 /// \ingroup internal
-/// Define a structure without baseIndex.
-#define DEFINE_C2STRUCT_NO_BASE(name) \
+/// Define a structure without CORE_INDEX.
+#define DEFINE_BASE_C2STRUCT(name) \
 public: \
     typedef C2##name##Struct _type; /**< type name shorthand */ \
-    const static std::initializer_list<const C2FieldDescriptor> fieldList; /**< structure fields */
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST; /**< structure fields */
 
-/// Define a structure with matching baseIndex.
+/// Define a structure with matching CORE_INDEX.
 #define DEFINE_C2STRUCT(name) \
 public: \
-    enum : uint32_t { baseIndex = kParamIndex##name }; \
-    DEFINE_C2STRUCT_NO_BASE(name)
+    enum : uint32_t { CORE_INDEX = kParamIndex##name }; \
+    DEFINE_BASE_C2STRUCT(name)
 
-/// Define a flexible structure with matching baseIndex.
+/// Define a flexible structure without CORE_INDEX.
+#define DEFINE_BASE_FLEX_C2STRUCT(name, flexMember) \
+public: \
+    FLEX(C2##name##Struct, flexMember) \
+    DEFINE_BASE_C2STRUCT(name)
+
+/// Define a flexible structure with matching CORE_INDEX.
 #define DEFINE_FLEX_C2STRUCT(name, flexMember) \
 public: \
     FLEX(C2##name##Struct, flexMember) \
-    enum : uint32_t { baseIndex = kParamIndex##name | C2Param::BaseIndex::_kFlexibleFlag }; \
-    DEFINE_C2STRUCT_NO_BASE(name)
+    enum : uint32_t { CORE_INDEX = kParamIndex##name | C2Param::CoreIndex::IS_FLEX_FLAG }; \
+    DEFINE_BASE_C2STRUCT(name)
 
+#ifdef __C2_GENERATE_GLOBAL_VARS__
 /// \ingroup internal
 /// Describe a structure of a templated structure.
 #define DESCRIBE_TEMPLATED_C2STRUCT(strukt, list) \
     template<> \
-    const std::initializer_list<const C2FieldDescriptor> strukt::fieldList = list;
+    const std::initializer_list<const C2FieldDescriptor> strukt::FIELD_LIST = list;
 
 /// \deprecated
 /// Describe the fields of a structure using an initializer list.
 #define DESCRIBE_C2STRUCT(name, list) \
-    const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::fieldList = list;
+    const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::FIELD_LIST = list;
+#else
+/// \if 0
+#define DESCRIBE_TEMPLATED_C2STRUCT(strukt, list)
+#define DESCRIBE_C2STRUCT(name, list)
+/// \endif
+#endif
 
 /**
  * Describe a field of a structure.
@@ -941,26 +1077,26 @@
  *
  *  ~~~~~~~~~~~~~ (.cpp)
  *  struct C2VideoWidthStruct {
- *      int32_t mWidth;
+ *      int32_t width;
  *      C2VideoWidthStruct() {} // optional default constructor
- *      C2VideoWidthStruct(int32_t _width) : mWidth(_width) {}
+ *      C2VideoWidthStruct(int32_t _width) : width(_width) {}
  *
  *      DEFINE_AND_DESCRIBE_C2STRUCT(VideoWidth)
- *      C2FIELD(mWidth, "width")
+ *      C2FIELD(width, "width")
  *  };
  *  ~~~~~~~~~~~~~
  *
  *  ~~~~~~~~~~~~~ (.cpp)
  *  struct C2VideoWidthStruct {
- *      int32_t mWidth;
+ *      int32_t width;
  *      C2VideoWidthStruct() = default; // optional default constructor
- *      C2VideoWidthStruct(int32_t _width) : mWidth(_width) {}
+ *      C2VideoWidthStruct(int32_t _width) : width(_width) {}
  *
  *      DEFINE_C2STRUCT(VideoWidth)
  *  } C2_PACK;
  *
  *  DESCRIBE_C2STRUCT(VideoWidth, {
- *      C2FIELD(mWidth, "width")
+ *      C2FIELD(width, "width")
  *  })
  *  ~~~~~~~~~~~~~
  *
@@ -968,7 +1104,7 @@
  *
  *  ~~~~~~~~~~~~~ (.cpp)
  *  struct C2VideoFlexWidthsStruct {
- *      int32_t mWidths[];
+ *      int32_t widths[];
  *      C2VideoFlexWidthsStruct(); // must have a default constructor
  *
  *  private:
@@ -977,7 +1113,7 @@
  *      //   C2VideoFlexWidthsGlobalParam::alloc_unique(size_t, int32_t);
  *      C2VideoFlexWidthsStruct(size_t flexCount, int32_t value) {
  *          for (size_t i = 0; i < flexCount; ++i) {
- *              mWidths[i] = value;
+ *              widths[i] = value;
  *          }
  *      }
  *
@@ -988,12 +1124,12 @@
  *      template<unsigned N>
  *      C2VideoFlexWidthsStruct(size_t flexCount, const int32_t(&init)[N]) {
  *          for (size_t i = 0; i < flexCount; ++i) {
- *              mWidths[i] = init[i];
+ *              widths[i] = init[i];
  *          }
  *      }
  *
- *      DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(VideoFlexWidths, mWidths)
- *      C2FIELD(mWidths, "widths")
+ *      DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(VideoFlexWidths, widths)
+ *      C2FIELD(widths, "widths")
  *  };
  *  ~~~~~~~~~~~~~
  *
@@ -1011,6 +1147,7 @@
  *  ~~~~~~~~~~~~~
  *
  */
+#ifdef __C2_GENERATE_GLOBAL_VARS__
 #define C2FIELD(member, name) \
   C2FieldDescriptor(&((_type*)(nullptr))->member, name),
 
@@ -1018,17 +1155,56 @@
 #define C2SOLE_FIELD(member, name) \
   C2FieldDescriptor(&_type::member, name, 0)
 
-/// Define a structure with matching baseIndex and start describing its fields.
+/// Define a structure with matching CORE_INDEX and start describing its fields.
 /// This must be at the end of the structure definition.
 #define DEFINE_AND_DESCRIBE_C2STRUCT(name) \
-    DEFINE_C2STRUCT(name) }  C2_PACK; \
-    const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::fieldList = {
+    DEFINE_C2STRUCT(name) } C2_PACK; \
+    const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::FIELD_LIST = {
 
-/// Define a flexible structure with matching baseIndex and start describing its fields.
+/// Define a flexible structure with matching CORE_INDEX and start describing its fields.
 /// This must be at the end of the structure definition.
 #define DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(name, flexMember) \
     DEFINE_FLEX_C2STRUCT(name, flexMember) } C2_PACK; \
-    const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::fieldList = {
+    const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::FIELD_LIST = {
+
+/// Define a base structure (with no CORE_INDEX) and start describing its fields.
+/// This must be at the end of the structure definition.
+#define DEFINE_AND_DESCRIBE_BASE_C2STRUCT(name) \
+    DEFINE_BASE_C2STRUCT(name) } C2_PACK; \
+    const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::FIELD_LIST = {
+
+/// Define a flexible base structure (with no CORE_INDEX) and start describing its fields.
+/// This must be at the end of the structure definition.
+#define DEFINE_AND_DESCRIBE_BASE_FLEX_C2STRUCT(name, flexMember) \
+    DEFINE_BASE_FLEX_C2STRUCT(name, flexMember) } C2_PACK; \
+    const std::initializer_list<const C2FieldDescriptor> C2##name##Struct::FIELD_LIST = {
+
+#else
+/// \if 0
+/* Alternate declaration of field definitions in case no field list is to be generated.
+   TRICKY: use namespace declaration to handle closing bracket that is normally after
+   these macros. */
+#define C2FIELD(member, name)
+/// \deprecated
+#define C2SOLE_FIELD(member, name)
+/// Define a structure with matching CORE_INDEX and start describing its fields.
+/// This must be at the end of the structure definition.
+#define DEFINE_AND_DESCRIBE_C2STRUCT(name) \
+    DEFINE_C2STRUCT(name) }  C2_PACK; namespace ignored {
+/// Define a flexible structure with matching CORE_INDEX and start describing its fields.
+/// This must be at the end of the structure definition.
+#define DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(name, flexMember) \
+    DEFINE_FLEX_C2STRUCT(name, flexMember) } C2_PACK; namespace ignored {
+/// Define a base structure (with no CORE_INDEX) and start describing its fields.
+/// This must be at the end of the structure definition.
+#define DEFINE_AND_DESCRIBE_BASE_C2STRUCT(name) \
+    DEFINE_BASE_C2STRUCT(name) } C2_PACK; namespace ignored {
+/// Define a flexible base structure (with no CORE_INDEX) and start describing its fields.
+/// This must be at the end of the structure definition.
+#define DEFINE_AND_DESCRIBE_BASE_FLEX_C2STRUCT(name, flexMember) \
+    DEFINE_BASE_FLEX_C2STRUCT(name, flexMember) } C2_PACK; namespace ignored {
+/// \endif
+#endif
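
For orientation, a minimal sketch of how client code uses the two expansions above; C2MyWidthStruct and kParamIndexMyWidth are illustration-only names and not part of this change. The same source builds whether or not __C2_GENERATE_GLOBAL_VARS__ is defined: with it, the C2FIELD entries populate C2MyWidthStruct::FIELD_LIST; without it they expand to nothing and the trailing }; closes the dummy namespace instead of the field-list initializer.

~~~~~~~~~~~~~ (.cpp)
// Illustration only; assumes a kParamIndexMyWidth constant is declared, as in the
// earlier examples in this header.
struct C2MyWidthStruct {
    int32_t width;

    C2MyWidthStruct() : width(0) {}
    C2MyWidthStruct(int32_t _width) : width(_width) {}

    DEFINE_AND_DESCRIBE_C2STRUCT(MyWidth)
    C2FIELD(width, "width")
};
~~~~~~~~~~~~~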
 
 /**
  * Parameter reflector class.
@@ -1043,7 +1219,8 @@
     /**
      *  Describes a parameter structure.
      *
-     *  \param[in] paramIndex the base index of the parameter structure
+     *  \param[in] coreIndex the core index of the parameter structure containing at least the
+     *  core index
      *
      *  \return the description of the parameter structure
      *  \retval nullptr if the parameter is not supported by this reflector
@@ -1056,55 +1233,13 @@
      *  descriptions, but we want to conserve memory if client only wants the description
      *  of a few indices.
      */
-    virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::BaseIndex paramIndex) = 0;
+    virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::CoreIndex coreIndex) = 0;
 
 protected:
     virtual ~C2ParamReflector() = default;
 };
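
A trivial, non-normative skeleton of a reflector implementing the updated signature; returning nullptr is the documented way to report an unsupported core index, and the construction of a C2StructDescriptor itself is outside this hunk.

~~~~~~~~~~~~~ (.cpp)
// Sketch only.
class MyReflector : public C2ParamReflector {
public:
    virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::CoreIndex coreIndex) override {
        (void)coreIndex;   // a real reflector would switch on the core index here
        return nullptr;    // nullptr == parameter not supported by this reflector
    }
};
~~~~~~~~~~~~~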
 
 /**
- * A useable supported values for a field.
- *
- * This can be either a range or a set of values. The range can be linear or geometric with a
- * clear minimum and maximum value, and can have an optional step size or geometric ratio. Values
- * can optionally represent flags.
- *
- * \note Do not use flags to represent bitfields. Use individual values or separate fields instead.
- */
-template<typename T>
-struct C2TypedFieldSupportedValues {
-//public:
-    enum Type {
-        RANGE,      ///< a numeric range that can be continuous or discrete
-        VALUES,     ///< a list of values
-        FLAGS       ///< a list of flags that can be OR-ed
-    };
-
-    Type type;
-
-    struct {
-        T min;
-        T max;
-        T step;
-        T nom;
-        T denom;
-    } range;
-    std::vector<T> values;
-
-    C2TypedFieldSupportedValues(T min, T max, T step = T(std::is_floating_point<T>::value ? 0 : 1))
-        : type(RANGE),
-          range{min, max, step, (T)1, (T)1} { }
-
-    C2TypedFieldSupportedValues(T min, T max, T nom, T den) :
-        type(RANGE),
-        range{min, max, (T)0, nom, den} { }
-
-    C2TypedFieldSupportedValues(bool flags, std::initializer_list<T> list) :
-        type(flags ? FLAGS : VALUES),
-        values(list) {}
-};
-
-/**
  * Generic supported values for a field.
  *
  * This can be either a range or a set of values. The range can be linear or geometric with a
@@ -1115,13 +1250,14 @@
  */
 struct C2FieldSupportedValues {
 //public:
-    enum Type {
+    enum type_t {
+        EMPTY,      ///< no supported values
         RANGE,      ///< a numeric range that can be continuous or discrete
         VALUES,     ///< a list of values
         FLAGS       ///< a list of flags that can be OR-ed
     };
 
-    Type type;
+    type_t type;
 
     typedef C2Value::Primitive Primitive;
 
@@ -1134,6 +1270,10 @@
     } range;
     std::vector<Primitive> values;
 
+    C2FieldSupportedValues()
+        : type(EMPTY) {
+    }
+
     template<typename T>
     C2FieldSupportedValues(T min, T max, T step = T(std::is_floating_point<T>::value ? 0 : 1))
         : type(RANGE),
@@ -1153,6 +1293,18 @@
         }
     }
 
+    template<typename T>
+    C2FieldSupportedValues(bool flags, const std::vector<T>& list)
+        : type(flags ? FLAGS : VALUES),
+          range{(T)0, (T)0, (T)0, (T)0, (T)0} {
+        for (T value : list) {
+            values.emplace_back(value);
+        }
+    }
+
+    /// \internal
+    /// \todo: create separate values vs. flags initializer as for flags we want
+    /// to list both allowed and disallowed flags
     template<typename T, typename E=decltype(C2FieldDescriptor::namedValuesFor(*(T*)0))>
     C2FieldSupportedValues(bool flags, const T*)
         : type(flags ? FLAGS : VALUES),
@@ -1164,6 +1316,21 @@
     }
 };
 
+/**
+ * Supported values for a specific field.
+ *
+ * This is a pair of the field specifier together with an optional supported values object.
+ * This structure is used when reporting parameter configuration failures and conflicts.
+ */
+struct C2ParamFieldValues {
+    C2ParamField paramOrField; ///< the field or parameter
+    /// optional supported values for the field if paramOrField specifies an actual field that is
+    /// numeric (non struct, blob or string). Supported values for arrays (including string and
+    /// blobs) describe the supported values for each element (character for string, and bytes for
+    /// blobs). It is optional for read-only strings and blobs.
+    std::unique_ptr<C2FieldSupportedValues> values;
+};
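
As a quick, non-normative illustration, the constructors declared above can be exercised as follows; only overloads visible in this hunk are used, and the pairing with a concrete C2ParamField is omitted because its construction is not part of this hunk.

~~~~~~~~~~~~~ (.cpp)
C2FieldSupportedValues unrestricted;                        // type == EMPTY
C2FieldSupportedValues levels(0, 100, 5);                   // RANGE: [0, 100] in steps of 5
std::vector<int32_t> allowed = { 1, 2, 4, 8 };
C2FieldSupportedValues choices(false /* flags */, allowed); // VALUES: discrete list
~~~~~~~~~~~~~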
+
 /// @}
 
 }  // namespace android
diff --git a/media/libstagefright/codec2/include/C2ParamDef.h b/media/libstagefright/codec2/include/C2ParamDef.h
index f369617..b5834f2 100644
--- a/media/libstagefright/codec2/include/C2ParamDef.h
+++ b/media/libstagefright/codec2/include/C2ParamDef.h
@@ -59,44 +59,44 @@
                         || decltype(_C2Comparable_impl::__testNE<S>(0))::value> {
 };
 
-///  Helper class that checks if a type has a baseIndex constant.
-struct C2_HIDE _C2BaseIndexHelper_impl
+///  Helper class that checks if a type has a CORE_INDEX constant.
+struct C2_HIDE _C2CoreIndexHelper_impl
 {
-    template<typename S, int=S::baseIndex>
-    static std::true_type __testBaseIndex(int);
+    template<typename S, int=S::CORE_INDEX>
+    static std::true_type __testCoreIndex(int);
     template<typename>
-    static std::false_type __testBaseIndex(...);
+    static std::false_type __testCoreIndex(...);
 };
 
-/// Helper template that verifies a type's baseIndex and creates it if the type does not have one.
-template<typename S, int BaseIndex,
-        bool HasBase=decltype(_C2BaseIndexHelper_impl::__testBaseIndex<S>(0))::value>
-struct C2_HIDE C2BaseIndexOverride {
-    // TODO: what if we allow structs without baseIndex?
-    static_assert(BaseIndex == S::baseIndex, "baseIndex differs from structure");
+/// Helper template that verifies a type's CORE_INDEX and creates it if the type does not have one.
+template<typename S, int CoreIndex,
+        bool HasBase=decltype(_C2CoreIndexHelper_impl::__testCoreIndex<S>(0))::value>
+struct C2_HIDE C2CoreIndexOverride {
+    // TODO: what if we allow structs without CORE_INDEX?
+    static_assert(CoreIndex == S::CORE_INDEX, "CORE_INDEX differs from structure");
 };
 
-/// Specialization for types without a baseIndex.
-template<typename S, int BaseIndex>
-struct C2_HIDE C2BaseIndexOverride<S, BaseIndex, false> {
+/// Specialization for types without a CORE_INDEX.
+template<typename S, int CoreIndex>
+struct C2_HIDE C2CoreIndexOverride<S, CoreIndex, false> {
 public:
     enum : uint32_t {
-        baseIndex = BaseIndex, ///< baseIndex override.
+        CORE_INDEX = CoreIndex, ///< CORE_INDEX override.
     };
 };
 
-/// Helper template that adds a baseIndex to a type if it does not have one.
-template<typename S, int BaseIndex>
-struct C2_HIDE C2AddBaseIndex : public S, public C2BaseIndexOverride<S, BaseIndex> {};
+/// Helper template that adds a CORE_INDEX to a type if it does not have one.
+template<typename S, int CoreIndex>
+struct C2_HIDE C2AddCoreIndex : public S, public C2CoreIndexOverride<S, CoreIndex> {};
 
 /**
  * \brief Helper class to check struct requirements for parameters.
  *
  * Features:
  *  - verify default constructor, no virtual methods, and no equality operators.
- *  - expose typeIndex, and non-flex flexSize.
+ *  - expose PARAM_TYPE, and non-flex FLEX_SIZE.
  */
-template<typename S, int BaseIndex, unsigned TypeIndex>
+template<typename S, int CoreIndex, unsigned TypeFlags>
 struct C2_HIDE C2StructCheck {
     static_assert(
             std::is_default_constructible<S>::value, "C2 structure must have default constructor");
@@ -105,31 +105,31 @@
 
 public:
     enum : uint32_t {
-        typeIndex = BaseIndex | TypeIndex
+        PARAM_TYPE = CoreIndex | TypeFlags
     };
 
 protected:
     enum : uint32_t {
-        flexSize = 0, // TODO: is this still needed? this may be confusing.
+        FLEX_SIZE = 0, // TODO: is this still needed? this may be confusing.
     };
 };
 
-/// Helper class that checks if a type has an integer flexSize member.
+/// Helper class that checks if a type has an integer FLEX_SIZE member.
 struct C2_HIDE _C2Flexible_impl {
-    /// specialization for types that have a flexSize member
-    template<typename S, unsigned=S::flexSize>
+    /// specialization for types that have a FLEX_SIZE member
+    template<typename S, unsigned=S::FLEX_SIZE>
     static std::true_type __testFlexSize(int);
     template<typename>
     static std::false_type __testFlexSize(...);
 };
 
-/// Helper template that returns if a type has an integer flexSize member.
+/// Helper template that returns if a type has an integer FLEX_SIZE member.
 template<typename S>
 struct C2_HIDE _C2Flexible
     : public std::integral_constant<bool, decltype(_C2Flexible_impl::__testFlexSize<S>(0))::value> {
 };
 
-/// Macro to test if a type is flexible (has a flexSize member).
+/// Macro to test if a type is flexible (has a FLEX_SIZE member).
 #define IF_FLEXIBLE(S) ENABLE_IF(_C2Flexible<S>::value)
 /// Shorthand for std::enable_if
 #define ENABLE_IF(cond) typename std::enable_if<cond>::type
@@ -137,74 +137,76 @@
 /// Helper template that exposes the flexible subtype of a struct.
 template<typename S, typename E=void>
 struct C2_HIDE _C2FlexHelper {
-    typedef void flexType;
-    enum : uint32_t { flexSize = 0 };
+    typedef void FlexType;
+    enum : uint32_t { FLEX_SIZE = 0 };
 };
 
 /// Specialization for flexible types.
 template<typename S>
 struct C2_HIDE _C2FlexHelper<S,
         typename std::enable_if<!std::is_void<typename S::flexMemberType>::value>::type> {
-    typedef typename _C2FlexHelper<typename S::flexMemberType>::flexType flexType;
-    enum : uint32_t { flexSize = _C2FlexHelper<typename S::flexMemberType>::flexSize };
+    typedef typename _C2FlexHelper<typename S::flexMemberType>::FlexType FlexType;
+    enum : uint32_t { FLEX_SIZE = _C2FlexHelper<typename S::flexMemberType>::FLEX_SIZE };
 };
 
 /// Specialization for flex arrays.
 template<typename S>
 struct C2_HIDE _C2FlexHelper<S[],
-        typename std::enable_if<std::is_void<typename _C2FlexHelper<S>::flexType>::value>::type> {
-    typedef S flexType;
-    enum : uint32_t { flexSize = sizeof(S) };
+        typename std::enable_if<std::is_void<typename _C2FlexHelper<S>::FlexType>::value>::type> {
+    typedef S FlexType;
+    enum : uint32_t { FLEX_SIZE = sizeof(S) };
 };
 
 /**
  * \brief Helper class to check flexible struct requirements and add common operations.
  *
  * Features:
- *  - expose baseIndex and fieldList (this is normally inherited from the struct, but flexible
+ *  - expose CORE_INDEX and FIELD_LIST (this is normally inherited from the struct, but flexible
  *    structs cannot be base classes and thus inherited from)
  *  - disable copy assignment and construction (TODO: this is already done in the FLEX macro for the
  *    flexible struct, so may not be needed here)
  */
-template<typename S, int BaseIndex, unsigned TypeIndex>
-struct C2_HIDE C2FlexStructCheck : public C2StructCheck<S, BaseIndex, TypeIndex> {
+template<typename S, int ParamIndex, unsigned TypeFlags>
+struct C2_HIDE C2FlexStructCheck :
+// add flexible flag as C2StructCheck defines PARAM_TYPE
+        public C2StructCheck<S, ParamIndex | C2Param::CoreIndex::IS_FLEX_FLAG, TypeFlags> {
 public:
     enum : uint32_t {
         /// \hideinitializer
-        baseIndex = BaseIndex | C2Param::BaseIndex::_kFlexibleFlag, ///< flexible struct base-index
+        CORE_INDEX = ParamIndex | C2Param::CoreIndex::IS_FLEX_FLAG, ///< flexible struct core-index
     };
 
-    const static std::initializer_list<const C2FieldDescriptor> fieldList; // TODO assign here
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST; // TODO assign here
 
     // default constructor needed because of the disabled copy constructor
     inline C2FlexStructCheck() = default;
 
 protected:
     // cannot copy flexible params
-    C2FlexStructCheck(const C2FlexStructCheck<S, BaseIndex, TypeIndex> &) = delete;
-    C2FlexStructCheck& operator= (const C2FlexStructCheck<S, BaseIndex, TypeIndex> &) = delete;
+    C2FlexStructCheck(const C2FlexStructCheck<S, ParamIndex, TypeFlags> &) = delete;
+    C2FlexStructCheck& operator= (const C2FlexStructCheck<S, ParamIndex, TypeFlags> &) = delete;
 
     // constants used for helper methods
     enum : uint32_t {
         /// \hideinitializer
-        flexSize = _C2FlexHelper<S>::flexSize, ///< size of flexible type
+        FLEX_SIZE = _C2FlexHelper<S>::FLEX_SIZE, ///< size of flexible type
         /// \hideinitializer
-        maxSize = (uint32_t)std::min((size_t)UINT32_MAX, SIZE_MAX), // TODO: is this always u32 max?
+        MAX_SIZE = (uint32_t)std::min((size_t)UINT32_MAX, SIZE_MAX), // TODO: is this always u32 max?
         /// \hideinitializer
-        baseSize = sizeof(S) + sizeof(C2Param), ///< size of the base param
+        BASE_SIZE = sizeof(S) + sizeof(C2Param), ///< size of the base param
     };
 
     /// returns the allocated size of this param with flexCount, or 0 if it would overflow.
-    inline static size_t calcSize(size_t flexCount, size_t size = baseSize) {
-        if (flexCount <= (maxSize - size) / S::flexSize) {
-            return size + S::flexSize * flexCount;
+    inline static size_t calcSize(size_t flexCount, size_t size = BASE_SIZE) {
+        if (flexCount <= (MAX_SIZE - size) / S::FLEX_SIZE) {
+            return size + S::FLEX_SIZE * flexCount;
         }
         return 0;
     }
 
     /// dynamic new operator usable for params of type S
     inline void* operator new(size_t size, size_t flexCount) noexcept {
-        // TODO: assert(size == baseSize);
+        // TODO: assert(size == BASE_SIZE);
         size = calcSize(flexCount, size);
         if (size > 0) {
             return ::operator new(size);
@@ -214,21 +216,22 @@
 };
 
 // TODO: this probably does not work.
-/// Expose fieldList from subClass;
-template<typename S, int BaseIndex, unsigned TypeIndex>
-const std::initializer_list<const C2FieldDescriptor> C2FlexStructCheck<S, BaseIndex, TypeIndex>::fieldList = S::fieldList;
+/// Expose FIELD_LIST from subClass;
+template<typename S, int ParamIndex, unsigned TypeFlags>
+const std::initializer_list<const C2FieldDescriptor>
+C2FlexStructCheck<S, ParamIndex, TypeFlags>::FIELD_LIST = S::FIELD_LIST;
 
 /// Define From() cast operators for params.
-#define DEFINE_CAST_OPERATORS(_type) \
-    inline static _type* From(C2Param *other) { \
-        return (_type*)C2Param::ifSuitable( \
-                other, sizeof(_type),_type::typeIndex, _type::flexSize, \
-                (_type::typeIndex & T::Index::kDirUndefined) != T::Index::kDirUndefined); \
+#define DEFINE_CAST_OPERATORS(_Type) \
+    inline static _Type* From(C2Param *other) { \
+        return (_Type*)C2Param::ifSuitable( \
+                other, sizeof(_Type),_Type::PARAM_TYPE, _Type::FLEX_SIZE, \
+                (_Type::PARAM_TYPE & T::Index::DIR_UNDEFINED) != T::Index::DIR_UNDEFINED); \
     } \
-    inline static const _type* From(const C2Param *other) { \
-        return const_cast<const _type*>(From(const_cast<C2Param *>(other))); \
+    inline static const _Type* From(const C2Param *other) { \
+        return const_cast<const _Type*>(From(const_cast<C2Param *>(other))); \
     } \
-    inline static _type* From(std::nullptr_t) { return nullptr; } \
+    inline static _Type* From(std::nullptr_t) { return nullptr; } \
 
 /**
  * Define flexible allocators (alloc_shared or alloc_unique) for flexible params.
@@ -238,38 +241,38 @@
  *  - P::alloc_xyz(args..., std::initializer_list<T>): allocate for size of (and with) initializer
  *    list.
  */
-#define DEFINE_FLEXIBLE_ALLOC(_type, S, ptr) \
+#define DEFINE_FLEXIBLE_ALLOC(_Type, S, ptr) \
     template<typename ...Args> \
-    inline static std::ptr##_ptr<_type> alloc_##ptr(size_t flexCount, const Args(&... args)) { \
-        return std::ptr##_ptr<_type>(new(flexCount) _type(flexCount, args...)); \
+    inline static std::ptr##_ptr<_Type> alloc_##ptr(size_t flexCount, const Args(&... args)) { \
+        return std::ptr##_ptr<_Type>(new(flexCount) _Type(flexCount, args...)); \
     } \
     /* NOTE: unfortunately this is not supported by clang yet */ \
-    template<typename ...Args, typename U=typename S::flexType, unsigned N> \
-    inline static std::ptr##_ptr<_type> alloc_##ptr(const Args(&... args), const U(&init)[N]) { \
-        return std::ptr##_ptr<_type>(new(N) _type(N, args..., init)); \
+    template<typename ...Args, typename U=typename S::FlexType, unsigned N> \
+    inline static std::ptr##_ptr<_Type> alloc_##ptr(const Args(&... args), const U(&init)[N]) { \
+        return std::ptr##_ptr<_Type>(new(N) _Type(N, args..., init)); \
     } \
     /* so for now, specialize for no args */ \
-    template<typename U=typename S::flexType, unsigned N> \
-    inline static std::ptr##_ptr<_type> alloc_##ptr(const U(&init)[N]) { \
-        return std::ptr##_ptr<_type>(new(N) _type(N, init)); \
+    template<typename U=typename S::FlexType, unsigned N> \
+    inline static std::ptr##_ptr<_Type> alloc_##ptr(const U(&init)[N]) { \
+        return std::ptr##_ptr<_Type>(new(N) _Type(N, init)); \
     } \
-    template<typename ...Args, typename U=typename S::flexType> \
-    inline static std::ptr##_ptr<_type> alloc_##ptr( \
+    template<typename ...Args, typename U=typename S::FlexType> \
+    inline static std::ptr##_ptr<_Type> alloc_##ptr( \
             const Args(&... args), const std::initializer_list<U> &init) { \
-        return std::ptr##_ptr<_type>(new(init.size()) _type(init.size(), args..., init)); \
+        return std::ptr##_ptr<_Type>(new(init.size()) _Type(init.size(), args..., init)); \
     } \
 
 /**
  * Define flexible methods alloc_shared, alloc_unique and flexCount.
  */
-#define DEFINE_FLEXIBLE_METHODS(_type, S) \
-    DEFINE_FLEXIBLE_ALLOC(_type, S, shared) \
-    DEFINE_FLEXIBLE_ALLOC(_type, S, unique) \
+#define DEFINE_FLEXIBLE_METHODS(_Type, S) \
+    DEFINE_FLEXIBLE_ALLOC(_Type, S, shared) \
+    DEFINE_FLEXIBLE_ALLOC(_Type, S, unique) \
     inline size_t flexCount() const { \
-        static_assert(sizeof(_type) == _type::baseSize, "incorrect baseSize"); \
+        static_assert(sizeof(_Type) == _Type::BASE_SIZE, "incorrect BASE_SIZE"); \
         size_t sz = this->size(); \
-        if (sz >= sizeof(_type)) { \
-            return (sz - sizeof(_type)) / _type::flexSize; \
+        if (sz >= sizeof(_Type)) { \
+            return (sz - sizeof(_Type)) / _Type::FLEX_SIZE; \
         } \
         return 0; \
     } \
@@ -286,11 +289,11 @@
     typedef decltype(m) flexMemberType; \
 public: \
     /* constexpr static flexMemberType cls::* flexMember = &cls::m; */ \
-    typedef typename _C2FlexHelper<flexMemberType>::flexType flexType; \
+    typedef typename _C2FlexHelper<flexMemberType>::FlexType FlexType; \
     static_assert(\
-            !std::is_void<flexType>::value, \
+            !std::is_void<FlexType>::value, \
             "member is not flexible, or a flexible array of a flexible type"); \
-    enum : uint32_t { flexSize = _C2FlexHelper<flexMemberType>::flexSize }; \
+    enum : uint32_t { FLEX_SIZE = _C2FlexHelper<flexMemberType>::FLEX_SIZE }; \
     /** \endif */ \
 
 /// @}
@@ -299,7 +302,7 @@
  * Global-parameter template.
  *
  * Base template to define a global setting/tuning or info based on a structure and
- * an optional BaseIndex. Global parameters are not tied to a port (input or output).
+ * an optional ParamIndex. Global parameters are not tied to a port (input or output).
  *
  * Parameters wrap structures by prepending a (parameter) header. The fields of the wrapped
  * structure can be accessed directly, and constructors and potential public methods are also
@@ -307,64 +310,67 @@
  *
  * \tparam T param type C2Setting, C2Tuning or C2Info
  * \tparam S wrapped structure
- * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ * \tparam ParamIndex optional parameter index override. Must be specified for base/reused
+ * structures.
  */
-template<typename T, typename S, int BaseIndex=S::baseIndex, class Flex=void>
-struct C2_HIDE C2GlobalParam : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
-        public C2StructCheck<S, BaseIndex, T::indexFlags | T::Type::kDirGlobal> {
+template<typename T, typename S, int ParamIndex=S::CORE_INDEX, class Flex=void>
+struct C2_HIDE C2GlobalParam : public T, public S, public C2CoreIndexOverride<S, ParamIndex>,
+        public C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Type::DIR_GLOBAL> {
 private:
-    typedef C2GlobalParam<T, S, BaseIndex> _type;
+    typedef C2GlobalParam<T, S, ParamIndex> _Type;
 
 public:
     /// Wrapper around base structure's constructor.
     template<typename ...Args>
-    inline C2GlobalParam(const Args(&... args)) : T(sizeof(_type), _type::typeIndex), S(args...) { }
+    inline C2GlobalParam(const Args(&... args)) : T(sizeof(_Type), _Type::PARAM_TYPE), S(args...) { }
 
-    DEFINE_CAST_OPERATORS(_type)
+    DEFINE_CAST_OPERATORS(_Type)
 };
 
 /**
  * Global-parameter template for flexible structures.
  *
  * Base template to define a global setting/tuning or info based on a flexible structure and
- * an optional BaseIndex. Global parameters are not tied to a port (input or output).
+ * an optional ParamIndex. Global parameters are not tied to a port (input or output).
  *
  * \tparam T param type C2Setting, C2Tuning or C2Info
  * \tparam S wrapped flexible structure
- * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ * \tparam ParamIndex optional parameter index override. Must be specified for base/reused
+ *         structures.
  *
  * Parameters wrap structures by prepending a (parameter) header. The fields and methods of flexible
  * structures can be accessed via the m member variable; however, the constructors of the structure
  * are wrapped directly. (This is because flexible types cannot be subclassed.)
  */
-template<typename T, typename S, int BaseIndex>
-struct C2_HIDE C2GlobalParam<T, S, BaseIndex, IF_FLEXIBLE(S)>
-    : public T, public C2FlexStructCheck<S, BaseIndex, T::indexFlags | T::Type::kDirGlobal> {
+template<typename T, typename S, int ParamIndex>
+struct C2_HIDE C2GlobalParam<T, S, ParamIndex, IF_FLEXIBLE(S)>
+    : public T, public C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Type::DIR_GLOBAL> {
 private:
-    typedef C2GlobalParam<T, S, BaseIndex> _type;
+    typedef C2GlobalParam<T, S, ParamIndex> _Type;
 
     /// Wrapper around base structure's constructor.
     template<typename ...Args>
     inline C2GlobalParam(size_t flexCount, const Args(&... args))
-        : T(_type::calcSize(flexCount), _type::typeIndex), m(flexCount, args...) { }
+        : T(_Type::calcSize(flexCount), _Type::PARAM_TYPE), m(flexCount, args...) { }
 
 public:
     S m; ///< wrapped flexible structure
 
-    DEFINE_FLEXIBLE_METHODS(_type, S)
-    DEFINE_CAST_OPERATORS(_type)
+    DEFINE_FLEXIBLE_METHODS(_Type, S)
+    DEFINE_CAST_OPERATORS(_Type)
 };
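
A sketch of how the flexible variant is typically consumed, reusing the C2VideoFlexWidthsStruct sample from the documentation earlier in this header; the choice of C2Info and the concrete values are illustrative only.

~~~~~~~~~~~~~ (.cpp)
typedef C2GlobalParam<C2Info, C2VideoFlexWidthsStruct> C2VideoFlexWidthsGlobalInfo;

// Flexible params are created through the generated allocators; flexCount is the
// number of int32_t entries reachable through m.widths.
std::unique_ptr<C2VideoFlexWidthsGlobalInfo> widths =
        C2VideoFlexWidthsGlobalInfo::alloc_unique(3u /* flexCount */, 0 /* fill value */);
widths->m.widths[0] = 1280;
~~~~~~~~~~~~~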
 
 /**
  * Port-parameter template.
  *
  * Base template to define a port setting/tuning or info based on a structure and
- * an optional BaseIndex. Port parameters are tied to a port (input or output), but not to a
+ * an optional ParamIndex. Port parameters are tied to a port (input or output), but not to a
  * specific stream.
  *
  * \tparam T param type C2Setting, C2Tuning or C2Info
  * \tparam S wrapped structure
- * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ * \tparam ParamIndex optional parameter index override. Must be specified for base/reused
+ *         structures.
  *
  * Parameters wrap structures by prepending a (parameter) header. The fields of the wrapped
  * structure can be accessed directly, and constructors and potential public methods are also
@@ -373,41 +379,41 @@
  * There are 3 flavors of port parameters: unspecified, input and output. Parameters with
  * unspecified port expose a setPort method, and add an initial port parameter to the constructor.
  */
-template<typename T, typename S, int BaseIndex=S::baseIndex, class Flex=void>
-struct C2_HIDE C2PortParam : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
-        private C2StructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirUndefined> {
+template<typename T, typename S, int ParamIndex=S::CORE_INDEX, class Flex=void>
+struct C2_HIDE C2PortParam : public T, public S, public C2CoreIndexOverride<S, ParamIndex>,
+        private C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_UNDEFINED> {
 private:
-    typedef C2PortParam<T, S, BaseIndex> _type;
+    typedef C2PortParam<T, S, ParamIndex> _Type;
 
 public:
     /// Default constructor.
-    inline C2PortParam() : T(sizeof(_type), _type::typeIndex) { }
+    inline C2PortParam() : T(sizeof(_Type), _Type::PARAM_TYPE) { }
     template<typename ...Args>
     /// Wrapper around base structure's constructor while specifying port/direction.
     inline C2PortParam(bool _output, const Args(&... args))
-        : T(sizeof(_type), _output ? output::typeIndex : input::typeIndex), S(args...) { }
+        : T(sizeof(_Type), _output ? output::PARAM_TYPE : input::PARAM_TYPE), S(args...) { }
     /// Set port/direction.
     inline void setPort(bool output) { C2Param::setPort(output); }
 
-    DEFINE_CAST_OPERATORS(_type)
+    DEFINE_CAST_OPERATORS(_Type)
 
     /// Specialization for an input port parameter.
-    struct input : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
-            public C2StructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirInput> {
+    struct input : public T, public S, public C2CoreIndexOverride<S, ParamIndex>,
+            public C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
         /// Wrapper around base structure's constructor.
         template<typename ...Args>
-        inline input(const Args(&... args)) : T(sizeof(_type), input::typeIndex), S(args...) { }
+        inline input(const Args(&... args)) : T(sizeof(_Type), input::PARAM_TYPE), S(args...) { }
 
         DEFINE_CAST_OPERATORS(input)
 
     };
 
     /// Specialization for an output port parameter.
-    struct output : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
-            public C2StructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirOutput> {
+    struct output : public T, public S, public C2CoreIndexOverride<S, ParamIndex>,
+            public C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
         /// Wrapper around base structure's constructor.
         template<typename ...Args>
-        inline output(const Args(&... args)) : T(sizeof(_type), output::typeIndex), S(args...) { }
+        inline output(const Args(&... args)) : T(sizeof(_Type), output::PARAM_TYPE), S(args...) { }
 
         DEFINE_CAST_OPERATORS(output)
     };
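
A sketch of the three port flavors, built on the C2Int32Value helper defined later in C2ParamDef.h; kParamIndexMyLatency is a made-up index used only for illustration.

~~~~~~~~~~~~~ (.cpp)
typedef C2PortParam<C2Setting, C2Int32Value, kParamIndexMyLatency> C2MyLatencySetting;

C2MyLatencySetting::input  inLatency(5);              // bound to the input port
C2MyLatencySetting::output outLatency(5);             // bound to the output port
C2MyLatencySetting anyLatency(true /* output */, 5);  // port chosen at construction...
anyLatency.setPort(false /* output */);               // ...and changeable afterwards
~~~~~~~~~~~~~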
@@ -417,12 +423,13 @@
  * Port-parameter template for flexible structures.
  *
  * Base template to define a port setting/tuning or info based on a flexible structure and
- * an optional BaseIndex. Port parameters are tied to a port (input or output), but not to a
+ * an optional ParamIndex. Port parameters are tied to a port (input or output), but not to a
  * specific stream.
  *
  * \tparam T param type C2Setting, C2Tuning or C2Info
  * \tparam S wrapped flexible structure
- * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ * \tparam ParamIndex optional parameter index override. Must be specified for base/reused
+ *         structures.
  *
  * Parameters wrap structures by prepending a (parameter) header. The fields and methods of flexible
  * structures can be accessed via the m member variable; however, the constructors of the structure
@@ -431,18 +438,18 @@
  * There are 3 flavors of port parameters: unspecified, input and output. Parameters with
  * unspecified port expose a setPort method, and add an initial port parameter to the constructor.
  */
-template<typename T, typename S, int BaseIndex>
-struct C2_HIDE C2PortParam<T, S, BaseIndex, IF_FLEXIBLE(S)>
-    : public T, public C2FlexStructCheck<S, BaseIndex, T::indexFlags | T::Type::kDirUndefined> {
+template<typename T, typename S, int ParamIndex>
+struct C2_HIDE C2PortParam<T, S, ParamIndex, IF_FLEXIBLE(S)>
+    : public T, public C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Type::DIR_UNDEFINED> {
 private:
-    typedef C2PortParam<T, S, BaseIndex> _type;
+    typedef C2PortParam<T, S, ParamIndex> _Type;
 
     /// Default constructor for basic allocation: new(flexCount) P.
-    inline C2PortParam(size_t flexCount) : T(_type::calcSize(flexCount), _type::typeIndex) { }
+    inline C2PortParam(size_t flexCount) : T(_Type::calcSize(flexCount), _Type::PARAM_TYPE) { }
     template<typename ...Args>
     /// Wrapper around base structure's constructor while also specifying port/direction.
     inline C2PortParam(size_t flexCount, bool _output, const Args(&... args))
-        : T(_type::calcSize(flexCount), _output ? output::typeIndex : input::typeIndex),
+        : T(_Type::calcSize(flexCount), _output ? output::PARAM_TYPE : input::PARAM_TYPE),
           m(flexCount, args...) { }
 
 public:
@@ -451,17 +458,17 @@
 
     S m; ///< wrapped flexible structure
 
-    DEFINE_FLEXIBLE_METHODS(_type, S)
-    DEFINE_CAST_OPERATORS(_type)
+    DEFINE_FLEXIBLE_METHODS(_Type, S)
+    DEFINE_CAST_OPERATORS(_Type)
 
     /// Specialization for an input port parameter.
-    struct input : public T, public C2BaseIndexOverride<S, BaseIndex>,
-            public C2FlexStructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirInput> {
+    struct input : public T,
+            public C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
     private:
         /// Wrapper around base structure's constructor while also specifying port/direction.
         template<typename ...Args>
         inline input(size_t flexCount, const Args(&... args))
-            : T(_type::calcSize(flexCount), input::typeIndex), m(flexCount, args...) { }
+            : T(_Type::calcSize(flexCount), input::PARAM_TYPE), m(flexCount, args...) { }
 
     public:
         S m; ///< wrapped flexible structure
@@ -471,13 +478,13 @@
     };
 
     /// Specialization for an output port parameter.
-    struct output : public T, public C2BaseIndexOverride<S, BaseIndex>,
-            public C2FlexStructCheck<S, BaseIndex, T::indexFlags | T::Index::kDirOutput> {
+    struct output : public T,
+            public C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
     private:
         /// Wrapper around base structure's constructor while also specifying port/direction.
         template<typename ...Args>
         inline output(size_t flexCount, const Args(&... args))
-            : T(_type::calcSize(flexCount), output::typeIndex), m(flexCount, args...) { }
+            : T(_Type::calcSize(flexCount), output::PARAM_TYPE), m(flexCount, args...) { }
 
     public:
         S m; ///< wrapped flexible structure
@@ -491,12 +498,13 @@
  * Stream-parameter template.
  *
  * Base template to define a stream setting/tuning or info based on a structure and
- * an optional BaseIndex. Stream parameters are tied to a specific stream on a port (input or
+ * an optional ParamIndex. Stream parameters are tied to a specific stream on a port (input or
  * output).
  *
  * \tparam T param type C2Setting, C2Tuning or C2Info
  * \tparam S wrapped structure
- * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ * \tparam ParamIndex optional parameter index override. Must be specified for base/reused
+ *         structures.
  *
  * Parameters wrap structures by prepending a (parameter) header. The fields of the wrapped
  * structure can be accessed directly, and constructors and potential public methods are also
@@ -507,39 +515,39 @@
  * parameters with unspecified port expose a setPort method, and add an additional initial port
  * parameter to the constructor.
  */
-template<typename T, typename S, int BaseIndex=S::baseIndex, class Flex=void>
-struct C2_HIDE C2StreamParam : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
-        private C2StructCheck<S, BaseIndex,
-                T::indexFlags | T::Index::kStreamFlag | T::Index::kDirUndefined> {
+template<typename T, typename S, int ParamIndex=S::CORE_INDEX, class Flex=void>
+struct C2_HIDE C2StreamParam : public T, public S, public C2CoreIndexOverride<S, ParamIndex>,
+        private C2StructCheck<S, ParamIndex,
+                T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Index::DIR_UNDEFINED> {
 private:
-    typedef C2StreamParam<T, S, BaseIndex> _type;
+    typedef C2StreamParam<T, S, ParamIndex> _Type;
 
 public:
     /// Default constructor. Port/direction and stream-ID is undefined.
-    inline C2StreamParam() : T(sizeof(_type), _type::typeIndex) { }
+    inline C2StreamParam() : T(sizeof(_Type), _Type::PARAM_TYPE) { }
     /// Wrapper around base structure's constructor while also specifying port/direction and
     /// stream-ID.
     template<typename ...Args>
     inline C2StreamParam(bool _output, unsigned stream, const Args(&... args))
-        : T(sizeof(_type), _output ? output::typeIndex : input::typeIndex, stream),
+        : T(sizeof(_Type), _output ? output::PARAM_TYPE : input::PARAM_TYPE, stream),
           S(args...) { }
     /// Set port/direction.
     inline void setPort(bool output) { C2Param::setPort(output); }
     /// Set stream-id. \retval true if the stream-id was successfully set.
     inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
-    DEFINE_CAST_OPERATORS(_type)
+    DEFINE_CAST_OPERATORS(_Type)
 
     /// Specialization for an input stream parameter.
-    struct input : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
-            public C2StructCheck<S, BaseIndex,
-                    T::indexFlags | T::Index::kStreamFlag | T::Type::kDirInput> {
+    struct input : public T, public S, public C2CoreIndexOverride<S, ParamIndex>,
+            public C2StructCheck<S, ParamIndex,
+                    T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
         /// Default constructor. Stream-ID is undefined.
-        inline input() : T(sizeof(_type), input::typeIndex) { }
+        inline input() : T(sizeof(_Type), input::PARAM_TYPE) { }
         /// Wrapper around base structure's constructor while also specifying stream-ID.
         template<typename ...Args>
         inline input(unsigned stream, const Args(&... args))
-            : T(sizeof(_type), input::typeIndex, stream), S(args...) { }
+            : T(sizeof(_Type), input::PARAM_TYPE, stream), S(args...) { }
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
@@ -547,15 +555,15 @@
     };
 
     /// Specialization for an output stream parameter.
-    struct output : public T, public S, public C2BaseIndexOverride<S, BaseIndex>,
-            public C2StructCheck<S, BaseIndex,
-                    T::indexFlags | T::Index::kStreamFlag | T::Type::kDirOutput> {
+    struct output : public T, public S, public C2CoreIndexOverride<S, ParamIndex>,
+            public C2StructCheck<S, ParamIndex,
+                    T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
         /// Default constructor. Stream-ID is undefined.
-        inline output() : T(sizeof(_type), output::typeIndex) { }
+        inline output() : T(sizeof(_Type), output::PARAM_TYPE) { }
         /// Wrapper around base structure's constructor while also specifying stream-ID.
         template<typename ...Args>
         inline output(unsigned stream, const Args(&... args))
-            : T(sizeof(_type), output::typeIndex, stream), S(args...) { }
+            : T(sizeof(_Type), output::PARAM_TYPE, stream), S(args...) { }
         /// Set stream-id. \retval true if the stream-id was successfully set.
         inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
@@ -567,12 +575,13 @@
  * Stream-parameter template for flexible structures.
  *
  * Base template to define a stream setting/tuning or info based on a flexible structure and
- * an optional BaseIndex. Stream parameters are tied to a specific stream on a port (input or
+ * an optional ParamIndex. Stream parameters are tied to a specific stream on a port (input or
  * output).
  *
  * \tparam T param type C2Setting, C2Tuning or C2Info
  * \tparam S wrapped flexible structure
- * \tparam BaseIndex optional base-index override. Must be specified for common/reused structures.
+ * \tparam ParamIndex optional parameter index override. Must be specified for base/reused
+ *         structures.
  *
  * Parameters wrap structures by prepending a (parameter) header. The fields and methods of flexible
  * structures can be accessed via the m member variable; however, the constructors of the structure
@@ -583,20 +592,20 @@
  * parameters with unspecified port expose a setPort method, and add an additional initial port
  * parameter to the constructor.
  */
-template<typename T, typename S, int BaseIndex>
-struct C2_HIDE C2StreamParam<T, S, BaseIndex, IF_FLEXIBLE(S)>
-    : public T, public C2BaseIndexOverride<S, BaseIndex>,
-      private C2FlexStructCheck<S, BaseIndex,
-              T::indexFlags | T::Index::kStreamFlag | T::Index::kDirUndefined> {
+template<typename T, typename S, int ParamIndex>
+struct C2_HIDE C2StreamParam<T, S, ParamIndex, IF_FLEXIBLE(S)>
+    : public T,
+      public C2FlexStructCheck<S, ParamIndex,
+              T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Index::DIR_UNDEFINED> {
 private:
-    typedef C2StreamParam<T, S> _type;
+    typedef C2StreamParam<T, S, ParamIndex> _Type;
     /// Default constructor. Port/direction and stream-ID is undefined.
-    inline C2StreamParam(size_t flexCount) : T(_type::calcSize(flexCount), _type::typeIndex, 0u) { }
+    inline C2StreamParam(size_t flexCount) : T(_Type::calcSize(flexCount), _Type::PARAM_TYPE, 0u) { }
     /// Wrapper around base structure's constructor while also specifying port/direction and
     /// stream-ID.
     template<typename ...Args>
     inline C2StreamParam(size_t flexCount, bool _output, unsigned stream, const Args(&... args))
-        : T(_type::calcSize(flexCount), _output ? output::typeIndex : input::typeIndex, stream),
+        : T(_Type::calcSize(flexCount), _output ? output::PARAM_TYPE : input::PARAM_TYPE, stream),
           m(flexCount, args...) { }
 
 public:
@@ -607,20 +616,20 @@
     /// Set stream-id. \retval true if the stream-id was successfully set.
     inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
 
-    DEFINE_FLEXIBLE_METHODS(_type, S)
-    DEFINE_CAST_OPERATORS(_type)
+    DEFINE_FLEXIBLE_METHODS(_Type, S)
+    DEFINE_CAST_OPERATORS(_Type)
 
     /// Specialization for an input stream parameter.
-    struct input : public T, public C2BaseIndexOverride<S, BaseIndex>,
-            public C2FlexStructCheck<S, BaseIndex,
-                    T::indexFlags | T::Index::kStreamFlag | T::Type::kDirInput> {
+    struct input : public T,
+            public C2FlexStructCheck<S, ParamIndex,
+                    T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
     private:
         /// Default constructor. Stream-ID is undefined.
-        inline input(size_t flexCount) : T(_type::calcSize(flexCount), input::typeIndex) { }
+        inline input(size_t flexCount) : T(_Type::calcSize(flexCount), input::PARAM_TYPE) { }
         /// Wrapper around base structure's constructor while also specifying stream-ID.
         template<typename ...Args>
         inline input(size_t flexCount, unsigned stream, const Args(&... args))
-            : T(_type::calcSize(flexCount), input::typeIndex, stream), m(flexCount, args...) { }
+            : T(_Type::calcSize(flexCount), input::PARAM_TYPE, stream), m(flexCount, args...) { }
 
     public:
         S m; ///< wrapped flexible structure
@@ -633,16 +642,16 @@
     };
 
     /// Specialization for an output stream parameter.
-    struct output : public T, public C2BaseIndexOverride<S, BaseIndex>,
-            public C2FlexStructCheck<S, BaseIndex,
-                    T::indexFlags | T::Index::kStreamFlag | T::Type::kDirOutput> {
+    struct output : public T,
+            public C2FlexStructCheck<S, ParamIndex,
+                    T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
     private:
         /// Default constructor. Stream-ID is undefined.
-        inline output(size_t flexCount) : T(_type::calcSize(flexCount), output::typeIndex) { }
+        inline output(size_t flexCount) : T(_Type::calcSize(flexCount), output::PARAM_TYPE) { }
         /// Wrapper around base structure's constructor while also specifying stream-ID.
         template<typename ...Args>
         inline output(size_t flexCount, unsigned stream, const Args(&... args))
-            : T(_type::calcSize(flexCount), output::typeIndex, stream), m(flexCount, args...) { }
+            : T(_Type::calcSize(flexCount), output::PARAM_TYPE, stream), m(flexCount, args...) { }
 
     public:
         S m; ///< wrapped flexible structure
@@ -659,16 +668,16 @@
 
 /**
  * \ingroup internal
- * A structure template encapsulating a single element with default constructors and no base-index.
+ * A structure template encapsulating a single element with default constructors and no core-index.
  */
 template<typename T>
 struct C2SimpleValueStruct {
-    T mValue; ///< simple value of the structure
+    T value; ///< simple value of the structure
     // Default constructor.
     inline C2SimpleValueStruct() = default;
     // Constructor with an initial value.
-    inline C2SimpleValueStruct(T value) : mValue(value) {}
-    DEFINE_C2STRUCT_NO_BASE(SimpleValue)
+    inline C2SimpleValueStruct(T value) : value(value) {}
+    DEFINE_BASE_C2STRUCT(SimpleValue)
 };
 
 // TODO: move this and next to some generic place
@@ -695,16 +704,16 @@
  */
 template<typename T>
 struct C2ConstMemoryBlock : public C2MemoryBlock<T> {
-    virtual const T * data() const { return mData; }
-    virtual size_t size() const { return mSize; }
+    virtual const T * data() const { return _mData; }
+    virtual size_t size() const { return _mSize; }
 
     /// Constructor.
     template<unsigned N>
-    inline constexpr C2ConstMemoryBlock(const T(&init)[N]) : mData(init), mSize(N) {}
+    inline constexpr C2ConstMemoryBlock(const T(&init)[N]) : _mData(init), _mSize(N) {}
 
 private:
-    const T *mData;
-    const size_t mSize;
+    const T *_mData;
+    const size_t _mSize;
 };
 
 /// \addtogroup internal
@@ -757,34 +766,34 @@
 
 /**
  * Specialization for a flexible blob and string arrays. A structure template encapsulating a single
- * flexible array member with default flexible constructors and no base-index. This type cannot be
+ * flexible array member with default flexible constructors and no core-index. This type cannot be
  * constructed on its own as it's size is 0.
  *
  * \internal This is different from C2SimpleArrayStruct<T[]> simply because its member has the name
- * as mValue to reflect this is a single value.
+ * value, to reflect that this is a single value.
  */
 template<typename T>
 struct C2SimpleValueStruct<T[]> {
     static_assert(std::is_same<T, char>::value || std::is_same<T, uint8_t>::value,
                   "C2SimpleValueStruct<T[]> is only for BLOB or STRING");
-    T mValue[];
+    T value[];
 
     inline C2SimpleValueStruct() = default;
-    DEFINE_C2STRUCT_NO_BASE(SimpleValue)
-    FLEX(C2SimpleValueStruct, mValue)
+    DEFINE_BASE_C2STRUCT(SimpleValue)
+    FLEX(C2SimpleValueStruct, value)
 
 private:
     inline C2SimpleValueStruct(size_t flexCount, const C2MemoryBlock<T> &block) {
-        _C2ValueArrayHelper::init(mValue, flexCount, block);
+        _C2ValueArrayHelper::init(value, flexCount, block);
     }
 
     inline C2SimpleValueStruct(size_t flexCount, const std::initializer_list<T> &init) {
-        _C2ValueArrayHelper::init(mValue, flexCount, init);
+        _C2ValueArrayHelper::init(value, flexCount, init);
     }
 
     template<unsigned N>
     inline C2SimpleValueStruct(size_t flexCount, const T(&init)[N]) {
-        _C2ValueArrayHelper::init(mValue, flexCount, init);
+        _C2ValueArrayHelper::init(value, flexCount, init);
     }
 };
 
@@ -792,7 +801,7 @@
 
 /**
  * A structure template encapsulating a single flexible array element of a specific type (T) with
- * default constructors and no base-index. This type cannot be constructed on its own as it's size
+ * default constructors and no core-index. This type cannot be constructed on its own as its size
  * is 0. Instead, it is meant to be used as a parameter, e.g.
  *
  *   typedef C2StreamParam<C2Info, C2SimpleArrayStruct<C2MyFancyStruct>,
@@ -803,30 +812,30 @@
     static_assert(!std::is_same<T, char>::value && !std::is_same<T, uint8_t>::value,
                   "use C2SimpleValueStruct<T[]> is for BLOB or STRING");
 
-    T mValues[]; ///< array member
+    T values[]; ///< array member
     /// Default constructor
     inline C2SimpleArrayStruct() = default;
-    DEFINE_C2STRUCT_NO_BASE(SimpleArray)
-    FLEX(C2SimpleArrayStruct, mValues)
+    DEFINE_BASE_FLEX_C2STRUCT(SimpleArray, values)
+    //FLEX(C2SimpleArrayStruct, values)
 
 private:
     /// Construct from a C2MemoryBlock.
     /// Used only by the flexible parameter allocators (alloc_unique & alloc_shared).
     inline C2SimpleArrayStruct(size_t flexCount, const C2MemoryBlock<T> &block) {
-        _C2ValueArrayHelper::init(mValues, flexCount, block);
+        _C2ValueArrayHelper::init(values, flexCount, block);
     }
 
     /// Construct from an initializer list.
     /// Used only by the flexible parameter allocators (alloc_unique & alloc_shared).
     inline C2SimpleArrayStruct(size_t flexCount, const std::initializer_list<T> &init) {
-        _C2ValueArrayHelper::init(mValues, flexCount, init);
+        _C2ValueArrayHelper::init(values, flexCount, init);
     }
 
     /// Construct from another flexible array.
     /// Used only by the flexible parameter allocators (alloc_unique & alloc_shared).
     template<unsigned N>
     inline C2SimpleArrayStruct(size_t flexCount, const T(&init)[N]) {
-        _C2ValueArrayHelper::init(mValues, flexCount, init);
+        _C2ValueArrayHelper::init(values, flexCount, init);
     }
 };
 
@@ -842,54 +851,54 @@
  *   typedef C2PortParam<C2Tuning, C2Int32Value, kParamIndexMyIntegerPortParam>
  *           C2MyIntegerPortParamTuning;
  *
- * They contain a single member (mValue or mValues) that is described as "value" or "values".
+ * They contain a single member (value or values) that is described as "value" or "values".
  */
-/// A 32-bit signed integer parameter in mValue, described as "value"
+/// A 32-bit signed integer parameter in value, described as "value"
 typedef C2SimpleValueStruct<int32_t> C2Int32Value;
-/// A 32-bit signed integer array parameter in mValues, described as "values"
+/// A 32-bit signed integer array parameter in values, described as "values"
 typedef C2SimpleArrayStruct<int32_t> C2Int32Array;
-/// A 32-bit unsigned integer parameter in mValue, described as "value"
+/// A 32-bit unsigned integer parameter in value, described as "value"
 typedef C2SimpleValueStruct<uint32_t> C2Uint32Value;
-/// A 32-bit unsigned integer array parameter in mValues, described as "values"
+/// A 32-bit unsigned integer array parameter in values, described as "values"
 typedef C2SimpleArrayStruct<uint32_t> C2Uint32Array;
-/// A 64-bit signed integer parameter in mValue, described as "value"
+/// A 64-bit signed integer parameter in value, described as "value"
 typedef C2SimpleValueStruct<int64_t> C2Int64Value;
-/// A 64-bit signed integer array parameter in mValues, described as "values"
+/// A 64-bit signed integer array parameter in values, described as "values"
 typedef C2SimpleArrayStruct<int64_t> C2Int64Array;
-/// A 64-bit unsigned integer parameter in mValue, described as "value"
+/// A 64-bit unsigned integer parameter in value, described as "value"
 typedef C2SimpleValueStruct<uint64_t> C2Uint64Value;
-/// A 64-bit unsigned integer array parameter in mValues, described as "values"
+/// A 64-bit unsigned integer array parameter in values, described as "values"
 typedef C2SimpleArrayStruct<uint64_t> C2Uint64Array;
-/// A float parameter in mValue, described as "value"
+/// A float parameter in value, described as "value"
 typedef C2SimpleValueStruct<float> C2FloatValue;
-/// A float array parameter in mValues, described as "values"
+/// A float array parameter in values, described as "values"
 typedef C2SimpleArrayStruct<float> C2FloatArray;
-/// A blob flexible parameter in mValue, described as "value"
+/// A blob flexible parameter in value, described as "value"
 typedef C2SimpleValueStruct<uint8_t[]> C2BlobValue;
-/// A string flexible parameter in mValue, described as "value"
+/// A string flexible parameter in value, described as "value"
 typedef C2SimpleValueStruct<char[]> C2StringValue;
 
 #if 1
 template<typename T>
-const std::initializer_list<const C2FieldDescriptor> C2SimpleValueStruct<T>::fieldList = { C2FIELD(mValue, "value") };
+const std::initializer_list<const C2FieldDescriptor> C2SimpleValueStruct<T>::FIELD_LIST = { C2FIELD(value, "value") };
 template<typename T>
-const std::initializer_list<const C2FieldDescriptor> C2SimpleValueStruct<T[]>::fieldList = { C2FIELD(mValue, "value") };
+const std::initializer_list<const C2FieldDescriptor> C2SimpleValueStruct<T[]>::FIELD_LIST = { C2FIELD(value, "value") };
 template<typename T>
-const std::initializer_list<const C2FieldDescriptor> C2SimpleArrayStruct<T>::fieldList = { C2FIELD(mValues, "values") };
+const std::initializer_list<const C2FieldDescriptor> C2SimpleArrayStruct<T>::FIELD_LIST = { C2FIELD(values, "values") };
 #else
 // This seem to be able to be handled by the template above
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<int32_t>, { C2FIELD(mValue, "value") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<uint32_t>, { C2FIELD(mValue, "value") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<int64_t>, { C2FIELD(mValue, "value") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<uint64_t>, { C2FIELD(mValue, "value") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<float>, { C2FIELD(mValue, "value") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<uint8_t[]>, { C2FIELD(mValue, "value") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<char[]>, { C2FIELD(mValue, "value") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<int32_t>, { C2FIELD(mValues, "values") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<uint32_t>, { C2FIELD(mValues, "values") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<int64_t>, { C2FIELD(mValues, "values") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<uint64_t>, { C2FIELD(mValues, "values") });
-DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<float>, { C2FIELD(mValues, "values") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<int32_t>, { C2FIELD(value, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<uint32_t>, { C2FIELD(value, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<int64_t>, { C2FIELD(value, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<uint64_t>, { C2FIELD(value, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<float>, { C2FIELD(value, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<uint8_t[]>, { C2FIELD(value, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleValueStruct<char[]>, { C2FIELD(value, "value") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<int32_t>, { C2FIELD(values, "values") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<uint32_t>, { C2FIELD(values, "values") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<int64_t>, { C2FIELD(values, "values") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<uint64_t>, { C2FIELD(values, "values") });
+DESCRIBE_TEMPLATED_C2STRUCT(C2SimpleArrayStruct<float>, { C2FIELD(values, "values") });
 #endif
 
 /// @}
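
Tying the simple value structures above to the stream-parameter template, a brief sketch (kParamIndexMyGain is hypothetical):

~~~~~~~~~~~~~ (.cpp)
typedef C2StreamParam<C2Tuning, C2FloatValue, kParamIndexMyGain> C2MyGainTuning;

C2MyGainTuning::output gain(0u /* stream */, 0.5f);   // gain.value == 0.5f on output stream 0
gain.setStream(1u);                                   // retarget to output stream 1
~~~~~~~~~~~~~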
diff --git a/media/libstagefright/codec2/include/C2Work.h b/media/libstagefright/codec2/include/C2Work.h
index a42d11a..105cf81 100644
--- a/media/libstagefright/codec2/include/C2Work.h
+++ b/media/libstagefright/codec2/include/C2Work.h
@@ -28,42 +28,56 @@
 #include <list>
 #include <vector>
 
-typedef int status_t;
-
 namespace android {
 
 /// \defgroup work Work and data processing
 /// @{
 
+/**
+ * Information describing the reason a parameter setting may fail, or
+ * may be overridden.
+ */
 struct C2SettingResult {
-    enum Failure {
+    enum Failure : uint32_t {
         READ_ONLY,  ///< parameter is read-only and cannot be set
         MISMATCH,   ///< parameter mismatches input data
         BAD_VALUE,  ///< parameter does not accept value
         BAD_TYPE,   ///< parameter is not supported
         BAD_PORT,   ///< parameter is not supported on the specific port
         BAD_INDEX,  ///< parameter is not supported on the specific stream
-        CONFLICT,   ///< parameter is in conflict with another setting
+        CONFLICT,   ///< parameter is in conflict with an/other setting(s)
+        /// parameter is out of range due to other settings (this failure mode
+        /// can only be used for strict parameters)
+        UNSUPPORTED,
+
+
+        /// requested parameter value is in conflict with an/other setting(s)
+        /// and has been corrected to the closest supported value. This failure
+        /// mode is given to provide a suggestion to the client as to how to
+        /// enable the requested parameter value.
+        INFO_CONFLICT,
     };
 
-    C2ParamField field;
-    Failure failure;
-    std::unique_ptr<C2FieldSupportedValues> supportedValues; //< if different from normal (e.g. in conflict w/another param or input data)
-    std::list<C2ParamField> conflictingFields;
+    Failure failure;    ///< failure code
+
+    /// Failing (or corrected) field, together with the currently supported values for the field.
+    /// The supported values are set only if they differ from the globally supported values (e.g.
+    /// due to restrictions by another parameter or by the input data).
+    /// \todo need to define suggestions for masks to be set and unset.
+    C2ParamFieldValues field;
+
+    /// Conflicting parameters or fields, with (optional) suggested values for any conflicting
+    /// fields to avoid the conflict.
+    std::list<C2ParamFieldValues> conflicts;
 };
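
A non-normative sketch of how a client might inspect one of these results; it uses only the members and enumerators visible in this hunk.

~~~~~~~~~~~~~ (.cpp)
void logSettingResult(const C2SettingResult &result) {
    switch (result.failure) {
    case C2SettingResult::BAD_VALUE:
        // result.field.values, when present, lists the values currently accepted for the field
        break;
    case C2SettingResult::CONFLICT:
        // result.conflicts names the other fields involved, optionally with suggested values
        break;
    default:
        break;
    }
}
~~~~~~~~~~~~~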
 
 // ================================================================================================
 //  WORK
 // ================================================================================================
 
-// node_id-s
-typedef uint32_t node_id;
-
-enum flags_t : uint32_t {
-    BUFFERFLAG_CODEC_CONFIG,
-    BUFFERFLAG_DROP_FRAME,
-    BUFFERFLAG_END_OF_STREAM,
-};
+// c2_node_id_t-s
+typedef uint32_t c2_node_id_t;
+typedef c2_node_id_t c2_node_id_t;
 
 enum {
     kParamIndexWorkOrdinal,
@@ -82,6 +96,12 @@
 
 struct C2BufferPack {
 //public:
+    enum flags_t : uint32_t {
+        FLAG_CODEC_CONFIG  = (1 << 0),
+        FLAG_DROP_FRAME    = (1 << 1),
+        FLAG_END_OF_STREAM = (1 << 2),
+    };
+
     flags_t  flags;
     C2WorkOrdinalStruct ordinal;
     std::vector<std::shared_ptr<C2Buffer>> buffers;
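
Because flags_t is a plain enum, combining flags requires a cast back to flags_t; a small sketch:

~~~~~~~~~~~~~ (.cpp)
C2BufferPack pack;
pack.flags = (C2BufferPack::flags_t)(
        C2BufferPack::FLAG_CODEC_CONFIG | C2BufferPack::FLAG_END_OF_STREAM);
~~~~~~~~~~~~~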
@@ -94,13 +114,13 @@
 struct C2Worklet {
 //public:
     // IN
-    node_id component;
+    c2_node_id_t component;
 
     std::list<std::unique_ptr<C2Param>> tunings; //< tunings to be applied before processing this
                                                  // worklet
     std::list<C2Param::Type> requestedInfos;
-    std::vector<std::shared_ptr<C2BlockAllocator>> allocators; //< This vector shall be the same size as
-                                                          //< output.buffers.
+    std::vector<std::shared_ptr<C2BlockPool>> allocators; //< This vector shall be the same size as
+                                                          //< output.buffers. \deprecated
 
     // OUT
     C2BufferPack output;
@@ -146,13 +166,13 @@
     std::list<std::unique_ptr<C2Worklet>> worklets;
 
     uint32_t worklets_processed;
-    status_t result;
+    c2_status_t result;
 };
 
 struct C2WorkOutline {
 //public:
     C2WorkOrdinalStruct ordinal;
-    std::list<node_id> chain;
+    std::list<c2_node_id_t> chain;
 };
 
 /// @}
diff --git a/media/libstagefright/codec2/include/SimpleC2Component.h b/media/libstagefright/codec2/include/SimpleC2Component.h
new file mode 100644
index 0000000..a4b6ee1
--- /dev/null
+++ b/media/libstagefright/codec2/include/SimpleC2Component.h
@@ -0,0 +1,195 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SIMPLE_C2_COMPONENT_H_
+#define SIMPLE_C2_COMPONENT_H_
+
+#include <list>
+#include <thread>
+#include <unordered_map>
+
+#include <C2Component.h>
+
+#include <media/stagefright/foundation/Mutexed.h>
+
+namespace android {
+
+class SimpleC2Component
+        : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
+public:
+    SimpleC2Component(
+            const std::shared_ptr<C2ComponentInterface> &intf);
+    virtual ~SimpleC2Component() = default;
+
+    // From C2Component
+    virtual c2_status_t setListener_vb(
+            const std::shared_ptr<Listener> &listener, c2_blocking_t mayBlock) override;
+    virtual c2_status_t queue_nb(std::list<std::unique_ptr<C2Work>>* const items) override;
+    virtual c2_status_t announce_nb(const std::vector<C2WorkOutline> &items) override;
+    virtual c2_status_t flush_sm(
+            flush_mode_t mode, std::list<std::unique_ptr<C2Work>>* const flushedWork) override;
+    virtual c2_status_t drain_nb(drain_mode_t mode) override;
+    virtual c2_status_t start() override;
+    virtual c2_status_t stop() override;
+    virtual c2_status_t reset() override;
+    virtual c2_status_t release() override;
+    virtual std::shared_ptr<C2ComponentInterface> intf() override;
+
+    // for thread
+    inline bool exitRequested() { return mExitRequested; }
+    void processQueue();
+
+protected:
+    /**
+     * Initialize internal states of the component according to the config set
+     * in the interface.
+     *
+     * This method is called during start(), but only at the first invocation or
+     * after reset().
+     */
+    virtual c2_status_t onInit() = 0;
+
+    /**
+     * Stop the component.
+     */
+    virtual c2_status_t onStop() = 0;
+
+    /**
+     * Reset the component.
+     */
+    virtual void onReset() = 0;
+
+    /**
+     * Release the component.
+     */
+    virtual void onRelease() = 0;
+
+    /**
+     * Flush the component.
+     */
+    virtual c2_status_t onFlush_sm() = 0;
+
+    /**
+     * Process the given work and finish pending work using finish().
+     *
+     * \param[in,out]   work    the work to process
+     * \param[in]       pool    the pool to use for allocating output blocks.
+     */
+    virtual void process(
+            const std::unique_ptr<C2Work> &work,
+            const std::shared_ptr<C2BlockPool> &pool) = 0;
+
+    /**
+     * Drain the component and finish pending work using finish().
+     *
+     * \param[in]   drainMode   mode of drain.
+     * \param[in]   pool        the pool to use for allocating output blocks.
+     *
+     * \retval C2_OK            The component has drained all pending output
+     *                          work.
+     * \retval C2_OMITTED       Unsupported mode (e.g. DRAIN_CHAIN)
+     */
+    virtual c2_status_t drain(
+            uint32_t drainMode,
+            const std::shared_ptr<C2BlockPool> &pool) = 0;
+
+    // for derived classes
+    /**
+     * Finish pending work.
+     *
+     * This method will retrieve the pending work according to |frameIndex| and
+     * feed the work into |fillWork| function. |fillWork| must be
+     * "non-blocking". Once |fillWork| returns the filled work will be returned
+     * to the client.
+     *
+     * \param[in]   frameIndex    the index of the pending work
+     * \param[in]   fillWork      the function to fill the retrieved work.
+     */
+    void finish(uint64_t frameIndex, std::function<void(const std::unique_ptr<C2Work> &)> fillWork);
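+    // A derived class typically calls finish() from process(); for instance
+    // (sketch only; |outBuffer| is a hypothetical, already-created C2Buffer):
+    //
+    //   finish(frameIndex, [&](const std::unique_ptr<C2Work> &work) {
+    //       work->worklets.front()->output.buffers.push_back(outBuffer);
+    //       work->result = C2_OK;
+    //   });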
+
+    std::shared_ptr<C2Buffer> createLinearBuffer(
+            const std::shared_ptr<C2LinearBlock> &block);
+
+    std::shared_ptr<C2Buffer> createLinearBuffer(
+            const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size);
+
+    std::shared_ptr<C2Buffer> createGraphicBuffer(
+            const std::shared_ptr<C2GraphicBlock> &block);
+
+    std::shared_ptr<C2Buffer> createGraphicBuffer(
+            const std::shared_ptr<C2GraphicBlock> &block,
+            const C2Rect &crop);
+
+    static constexpr uint32_t NO_DRAIN = ~0u;
+
+private:
+    const std::shared_ptr<C2ComponentInterface> mIntf;
+    std::atomic_bool mExitRequested;
+
+    enum {
+        UNINITIALIZED,
+        STOPPED,
+        RUNNING,
+    };
+
+    struct ExecState {
+        ExecState() : mState(UNINITIALIZED) {}
+
+        int mState;
+        std::thread mThread;
+        std::shared_ptr<C2Component::Listener> mListener;
+    };
+    Mutexed<ExecState> mExecState;
+
+    class WorkQueue {
+    public:
+        inline WorkQueue() : mGeneration(0ul) {}
+
+        inline uint64_t generation() const { return mGeneration; }
+        inline void incGeneration() { ++mGeneration; }
+
+        std::unique_ptr<C2Work> pop_front();
+        void push_back(std::unique_ptr<C2Work> work);
+        bool empty() const;
+        uint32_t drainMode() const;
+        void markDrain(uint32_t drainMode);
+        void clear();
+
+        Condition mCondition;
+
+    private:
+        struct Entry {
+            std::unique_ptr<C2Work> work;
+            uint32_t drainMode;
+        };
+
+        uint64_t mGeneration;
+        std::list<Entry> mQueue;
+    };
+    Mutexed<WorkQueue> mWorkQueue;
+
+    typedef std::unordered_map<uint64_t, std::unique_ptr<C2Work>> PendingWork;
+    Mutexed<PendingWork> mPendingWork;
+
+    std::shared_ptr<C2BlockPool> mOutputBlockPool;
+
+    SimpleC2Component() = delete;
+};
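+// Concrete codecs derive from SimpleC2Component, pass their C2ComponentInterface to the
+// constructor, and implement the pure virtual hooks above (onInit(), onStop(), onReset(),
+// onRelease(), onFlush_sm(), process() and drain()); the base class provides the processing
+// thread and the work / pending-work bookkeeping.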
+
+}  // namespace android
+
+#endif  // SIMPLE_C2_COMPONENT_H_
diff --git a/media/libstagefright/codec2/include/SimpleC2Interface.h b/media/libstagefright/codec2/include/SimpleC2Interface.h
new file mode 100644
index 0000000..3796b0b
--- /dev/null
+++ b/media/libstagefright/codec2/include/SimpleC2Interface.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SIMPLE_C2_INTERFACE_H_
+#define SIMPLE_C2_INTERFACE_H_
+
+#include <C2Component.h>
+
+namespace android {
+
+class SimpleC2Interface : public C2ComponentInterface {
+public:
+    class Builder {
+    public:
+        inline Builder(
+                const char *name,
+                c2_node_id_t id)
+            : mIntf(new SimpleC2Interface(name, id)) {}
+
+        inline Builder(
+                const char *name,
+                c2_node_id_t id,
+                std::function<void(::android::SimpleC2Interface*)> deleter)
+            : mIntf(new SimpleC2Interface(name, id), deleter) {}
+
+        inline Builder &inputFormat(C2FormatKind input) {
+            mIntf->mInputFormat.value = input;
+            return *this;
+        }
+
+        inline Builder &outputFormat(C2FormatKind output) {
+            mIntf->mOutputFormat.value = output;
+            return *this;
+        }
+
+        inline std::shared_ptr<SimpleC2Interface> build() {
+            return mIntf;
+        }
+    private:
+        std::shared_ptr<SimpleC2Interface> mIntf;
+    };
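+    // Typical construction (sketch; the C2FormatKind constants shown here are assumptions):
+    //
+    //   std::shared_ptr<SimpleC2Interface> intf =
+    //           SimpleC2Interface::Builder("c2.example.decoder", 0)
+    //                   .inputFormat(C2FormatCompressed)
+    //                   .outputFormat(C2FormatVideo)
+    //                   .build();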
+
+    virtual ~SimpleC2Interface() = default;
+
+    // From C2ComponentInterface
+    inline C2String getName() const override { return mName; }
+    inline c2_node_id_t getId() const override { return mId; }
+    c2_status_t query_vb(
+            const std::vector<C2Param* const> &stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
+    inline c2_status_t config_vb(
+            const std::vector<C2Param* const> &,
+            c2_blocking_t,
+            std::vector<std::unique_ptr<C2SettingResult>>* const) override {
+        return C2_OMITTED;
+    }
+    inline c2_status_t createTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+    inline c2_status_t releaseTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+    inline c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> * const) const override {
+        return C2_OMITTED;
+    }
+    c2_status_t querySupportedValues_vb(
+            std::vector<C2FieldSupportedValuesQuery> &,
+            c2_blocking_t) const override {
+        return C2_OMITTED;
+    }
+
+private:
+    inline SimpleC2Interface(const char *name, c2_node_id_t id)
+        : mName(name), mId(id), mInputFormat(0u), mOutputFormat(0u) {}
+
+    const C2String mName;
+    const c2_node_id_t mId;
+    C2StreamFormatConfig::input mInputFormat;
+    C2StreamFormatConfig::output mOutputFormat;
+
+    SimpleC2Interface() = delete;
+};
+
+}  // namespace android
+
+#endif  // SIMPLE_C2_INTERFACE_H_
diff --git a/media/libstagefright/codec2/tests/Android.bp b/media/libstagefright/codec2/tests/Android.bp
new file mode 100644
index 0000000..f26fbd0
--- /dev/null
+++ b/media/libstagefright/codec2/tests/Android.bp
@@ -0,0 +1,90 @@
+cc_test {
+    name: "codec2_param_test",
+
+    tags: [
+        "tests",
+    ],
+
+    srcs: [
+        "C2Param_test.cpp",
+        "vndk/C2UtilTest.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/codec2/include",
+        "frameworks/av/media/libstagefright/codec2/vndk/include",
+    ],
+
+    // param tests must not depend on any codec2 libraries as all params should be templated
+    shared_libs: [
+    ],
+
+    static_libs: [
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-std=c++14",
+    ],
+}
+
+cc_test {
+    name: "codec2_test",
+
+    tags: [
+        "tests",
+    ],
+
+    srcs: [
+        "vndk/C2BufferTest.cpp",
+        "C2_test.cpp",
+    ],
+
+    include_dirs: [
+    ],
+
+    shared_libs: [
+        "libcutils",
+        "liblog",
+        "libstagefright_codec2",
+        "libstagefright_codec2_vndk",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-std=c++14",
+    ],
+}
+
+cc_test {
+    name: "codec2_interface_test",
+
+    tags: [
+        "tests",
+    ],
+
+    srcs: [
+        "C2ComponentInterface_test.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/native/include/media/openmax",
+    ],
+
+    shared_libs: [
+        "libcutils",
+        "liblog",
+        "libstagefright_codec2",
+        "libstagefright_codec2_vndk",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-std=c++14",
+    ],
+}
diff --git a/media/libstagefright/codec2/tests/Android.mk b/media/libstagefright/codec2/tests/Android.mk
deleted file mode 100644
index 49c4253..0000000
--- a/media/libstagefright/codec2/tests/Android.mk
+++ /dev/null
@@ -1,37 +0,0 @@
-# Build the unit tests.
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
-
-LOCAL_MODULE := codec2_test
-
-LOCAL_MODULE_TAGS := tests
-
-LOCAL_SRC_FILES := \
-	vndk/C2UtilTest.cpp \
-	C2_test.cpp \
-	C2Param_test.cpp \
-
-LOCAL_SHARED_LIBRARIES := \
-	libcutils \
-	libstagefright_codec2 \
-	liblog
-
-LOCAL_C_INCLUDES := \
-	frameworks/av/media/libstagefright/codec2/include \
-	frameworks/av/media/libstagefright/codec2/vndk/include \
-	$(TOP)/frameworks/native/include/media/openmax \
-
-LOCAL_CFLAGS += -Werror -Wall -std=c++14
-LOCAL_CLANG := true
-
-include $(BUILD_NATIVE_TEST)
-
-# Include subdirectory makefiles
-# ============================================================
-
-# If we're building with ONE_SHOT_MAKEFILE (mm, mmm), then what the framework
-# team really wants is to build the stuff defined by this makefile.
-ifeq (,$(ONE_SHOT_MAKEFILE))
-include $(call first-makefiles-under,$(LOCAL_PATH))
-endif
diff --git a/media/libstagefright/codec2/tests/C2ComponentInterface_test.cpp b/media/libstagefright/codec2/tests/C2ComponentInterface_test.cpp
new file mode 100644
index 0000000..339f927
--- /dev/null
+++ b/media/libstagefright/codec2/tests/C2ComponentInterface_test.cpp
@@ -0,0 +1,714 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "C2ComponentInterface_test"
+
+#include <dlfcn.h>
+#include <stdio.h>
+
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+#include <C2Component.h>
+#include <C2Param.h>
+
+#if !defined(UNUSED)
+#define UNUSED(expr)                                                           \
+  do {                                                                         \
+      (void)(expr);                                                            \
+  } while (0)
+
+#endif //!defined(UNUSED)
+
+namespace android {
+
+template <class T> std::unique_ptr<T> alloc_unique_cstr(const char *cstr) {
+    size_t len = strlen(cstr);
+    std::unique_ptr<T> ptr = T::alloc_unique(len);
+    memcpy(ptr->m.value, cstr, len);
+    return ptr;
+}
+
+class C2CompIntfTest : public ::testing::Test {
+protected:
+    C2CompIntfTest() {}
+    ~C2CompIntfTest() override {}
+
+    void setComponent(std::shared_ptr<C2ComponentInterface> intf) {
+        mIntf = intf;
+    }
+
+    void resetResults() {
+        mIntf = nullptr;
+        mParamResults.clear();
+    }
+
+    template <typename T> void testUnsupportedParam();
+
+    template <typename T> void testSupportedParam();
+
+    // testReadOnlyParam() and testWritableParam() are the main functions for testing a parameter.
+    // A caller should find out whether the tested parameter is read-only or writable before
+    // calling them, and must call the corresponding one.
+
+    // Called if a parameter is read-only.
+    // Tests the read-only parameter |preParam|. The test expects config() with |newParam| to
+    // fail, and makes sure |preParam| stays unchanged.
+    template <typename T>
+    void testReadOnlyParam(const T &preParam, const T &newParam);
+
+    // Called if a parameter is writable.
+    // Tests one field |writableField| of the given writable parameter |param|.
+    // |validValues| contains all values obtained from querySupportedValues() for |writableField|.
+    // The test config()s each valid value and makes sure it was applied by query()ing it back.
+    // |invalidValues| contains some values which are not in |validValues|.
+    // The test expects C2_BAD_VALUE when config()ing these values,
+    // and |param| should stay unchanged.
+    template <typename TParam, typename TRealField, typename TField>
+    void testWritableParam(TParam *const param, TRealField *const writableField,
+                           const std::vector<TField> &validValues,
+                           const std::vector<TField> &invalidValues);
+
+    // Test all the defined parameters in C2Param.h.
+    void testMain(std::shared_ptr<C2ComponentInterface> intf,
+                  const std::string &componentName);
+
+    // Check the permission (writable/read-only/unsupported) of parameter type |T| on the tested
+    // interface. This should be called first for each parameter type, so that the appropriate
+    // testing process can be applied according to the permission.
+    template <typename T>
+    void checkParamPermission(
+            int *const writable,
+            const std::vector<std::shared_ptr<C2ParamDescriptor>> &supportedParams);
+
+private:
+    enum ParamPermission : int {
+        WRITABLE,
+        READONLY,
+        UNSUPPORTED,
+    };
+
+    struct paramTestInfo {
+        std::string name;
+        int result;
+        paramTestInfo(const char *name_, int result_)
+            : name(name_), result(result_) {}
+    };
+
+    // queryOnStack() and queryOnHeap() both call an interface's query_vb() and
+    // check if a component has a parameter whose type is |T|.
+    // If the component has it, the value is copied into the output argument, that is
+    // |p| in queryOnStack() and |heapParams| in queryOnHeap().
+    // The return value is a c2_status_t (e.g. C2_OK).
+    template <typename T> c2_status_t queryOnStack(T *const p);
+
+    template <typename T>
+    c2_status_t queryOnHeap(const T &p,
+                         std::vector<std::unique_ptr<C2Param>> *const heapParams);
+
+    // Get a value whose type is |T| in a component. The value is copied to |param|.
+    // This should be called only if a component has the parameter.
+    template <typename T> void getValue(T *const param);
+
+    // Check that the parameter's value in the component is equal to |expected| and that
+    // queryOnStack() and queryOnHeap() succeed. When this function is called,
+    // the component is guaranteed to have the parameter.
+    template <typename T> void queryParamAsExpected(const T &expected);
+
+    // Test that the query functions work correctly for supported parameters.
+    // "Supported" here means the component has the parameter.
+    template <typename T> void querySupportedParam();
+
+    // Test that the query functions work correctly for unsupported parameters.
+    // "Unsupported" here means the component doesn't have the parameter.
+    template <typename T> void queryUnsupportedParam();
+
+    // Execute an interface's config_vb(). |T| is a single parameter type, not std::vector.
+    // config() creates std::vector<C2Param *const> {p} and passes it to config_vb().
+    template <typename T>
+    c2_status_t
+    config(T *const p,
+           std::vector<std::unique_ptr<C2SettingResult>> *const failures);
+
+    // Test that config works correctly for read-only parameters.
+    // Because config() is expected to fail, the value of |newParam| doesn't matter.
+    template <typename T> void configReadOnlyParam(const T &newParam);
+
+    // Test that config works correctly for writable parameters.
+    // This changes the parameter's value to |newParam|.
+    // |stConfig| receives the return value of config().
+    template <typename T> void configWritableParamValidValue(const T &newParam, c2_status_t *stConfig);
+
+    // Test that config works correctly in the case where an invalid value |newParam| is written
+    // to a writable parameter.
+    template <typename T> void configWritableParamInvalidValue(const T &newParam);
+
+    // Create values for testing from |validValueInfos|. The values are returned through the
+    // output arguments.
+    // |validValues| : valid values, which can be written to the parameter.
+    // |invalidValues| : invalid values, which cannot be written to the parameter.
+    //                   config() should fail if these values are used as new values.
+    // This function should be called only for writable and supported parameters.
+    template <typename TField>
+    void getTestValues(const C2FieldSupportedValues &validValueInfos,
+                       std::vector<TField> *const validValues,
+                       std::vector<TField> *const invalidValues);
+
+    // Output the summary of test results, categorizing parameters by their permission.
+    void outputResults(const std::string &name);
+
+    std::shared_ptr<C2ComponentInterface> mIntf;
+    std::vector<paramTestInfo> mParamResults;
+    std::string mCurrentParamName;
+};
+
+// factory function
+// TODO(hiroh): Add factory functions for other types.
+template <typename T> std::unique_ptr<T> makeParam() {
+    return std::make_unique<T>();
+}
+
+template <> std::unique_ptr<C2PortMimeConfig::input> makeParam() {
+    // TODO(hiroh): Set more precise length.
+    return C2PortMimeConfig::input::alloc_unique(100);
+}
+
+#define TRACED_FAILURE(func)                            \
+    do {                                                \
+        SCOPED_TRACE(mCurrentParamName);                \
+        func;                                           \
+        if (::testing::Test::HasFatalFailure()) {       \
+            return;                                     \
+        }                                               \
+    } while (false)
+
+template <typename T> c2_status_t C2CompIntfTest::queryOnStack(T *const p) {
+    std::vector<C2Param *const> stackParams{p};
+    return mIntf->query_vb(stackParams, {}, C2_DONT_BLOCK, nullptr);
+}
+
+template <typename T>
+c2_status_t C2CompIntfTest::queryOnHeap(
+        const T &p, std::vector<std::unique_ptr<C2Param>> *const heapParams) {
+    uint32_t index = p.index() & ~0x03FE0000;
+    if (p.forStream()) {
+        index |= ((p.stream() << 17) & 0x01FE0000) | 0x02000000;
+    }
+    return mIntf->query_vb({}, {index}, C2_DONT_BLOCK, heapParams);
+}
+
+template <typename T> void C2CompIntfTest::getValue(T *const param) {
+    // When getValue() is called, a component has to have the parameter.
+    ASSERT_EQ(C2_OK, queryOnStack(param));
+}
+
+template <typename T>
+void C2CompIntfTest::queryParamAsExpected(const T &expected) {
+    // TODO(hiroh): Don't create param on stack and call queryOnStack for flex params.
+    // Note that all the current supported parameters are non-flex params.
+    T stack;
+    std::unique_ptr<T> pHeap = makeParam<T>();
+    std::vector<std::unique_ptr<C2Param>> heapParams;
+
+    ASSERT_EQ(C2_OK, queryOnStack(&stack));
+
+    // |stack| is a parameter value. The parameter size shouldn't be 0.
+    EXPECT_NE(0u, stack.size());
+    EXPECT_EQ(stack, expected);
+
+    ASSERT_EQ(C2_OK, queryOnHeap(*pHeap, &heapParams));
+
+    // |*heapParams[0]| is a parameter value. The size of |heapParams| has to be one.
+    ASSERT_EQ(1u, heapParams.size());
+    EXPECT_TRUE(heapParams[0]);
+    EXPECT_EQ(*heapParams[0], expected);
+}
+
+template <typename T> void C2CompIntfTest::querySupportedParam() {
+    std::unique_ptr<T> param = makeParam<T>();
+    // The current parameter's value is acquired by getValue(), which should succeed.
+    getValue(param.get());
+    queryParamAsExpected(*param);
+}
+
+template <typename T> void C2CompIntfTest::queryUnsupportedParam() {
+    // TODO(hiroh): Don't create param on stack and call queryOnStack for flex params.
+    // Note that all the current supported parameters are non-flex params.
+    T stack;
+    std::unique_ptr<T> pHeap = makeParam<T>();
+    std::vector<std::unique_ptr<C2Param>> heapParams;
+    // If a component doesn't have the parameter, queryOnStack() and queryOnHeap()
+    // should return C2_BAD_INDEX.
+    ASSERT_EQ(C2_BAD_INDEX, queryOnStack(&stack));
+    EXPECT_FALSE(stack);
+    ASSERT_EQ(C2_BAD_INDEX, queryOnHeap(*pHeap, &heapParams));
+    EXPECT_EQ(0u, heapParams.size());
+}
+
+template <typename T>
+c2_status_t C2CompIntfTest::config(
+        T *const p, std::vector<std::unique_ptr<C2SettingResult>> *const failures) {
+    std::vector<C2Param *const> params{p};
+    return mIntf->config_vb(params, C2_DONT_BLOCK, failures);
+}
+
+// Create a new parameter copied from |p|.
+template <typename T> std::unique_ptr<T> makeParamFrom(const T &p) {
+    std::unique_ptr<T> retP = makeParam<T>();
+    EXPECT_TRUE(retP->updateFrom(p));
+    EXPECT_TRUE(memcmp(retP.get(), &p, sizeof(T)) == 0);
+    return retP;
+}
+
+template <typename T>
+void C2CompIntfTest::configReadOnlyParam(const T &newParam) {
+    std::unique_ptr<T> p = makeParamFrom(newParam);
+
+    std::vector<C2Param *const> params{p.get()};
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+
+    // config_vb should fail because the parameter is read-only.
+    ASSERT_EQ(C2_BAD_VALUE, mIntf->config_vb(params, C2_DONT_BLOCK, &failures));
+    ASSERT_EQ(1u, failures.size());
+    EXPECT_EQ(C2SettingResult::READ_ONLY, failures[0]->failure);
+}
+
+template <typename T>
+void C2CompIntfTest::configWritableParamValidValue(const T &newParam, c2_status_t *configResult) {
+    std::unique_ptr<T> p = makeParamFrom(newParam);
+
+    std::vector<C2Param *const> params{p.get()};
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    // In most cases, config_vb returns C2_OK and the parameter's value is changed
+    // to |newParam|, which is confirmed by the caller of configWritableParamValidValue().
+    // However, the call can also fail without changing the parameter's value,
+    // because there may be dependent limitations between fields or between parameters.
+    // TODO(hiroh): Pin down the return value. The comments in C2Component.h don't mention
+    // the return value when a conflict happens; C2_BAD_VALUE is assumed here temporarily.
+    c2_status_t stConfig = mIntf->config_vb(params, C2_DONT_BLOCK, &failures);
+    if (stConfig == C2_OK) {
+        EXPECT_EQ(0u, failures.size());
+    } else {
+        ASSERT_EQ(C2_BAD_VALUE, stConfig);
+        EXPECT_EQ(1u, failures.size());
+        EXPECT_EQ(C2SettingResult::CONFLICT, failures[0]->failure);
+    }
+    *configResult = stConfig;
+}
+
+template <typename T>
+void C2CompIntfTest::configWritableParamInvalidValue(const T &newParam) {
+    std::unique_ptr<T> p = makeParamFrom(newParam);
+
+    std::vector<C2Param *const> params{p.get()};
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    // Although the parameter is writable, config_vb should fail
+    // because the new value is invalid.
+    ASSERT_EQ(C2_BAD_VALUE, mIntf->config_vb(params, C2_DONT_BLOCK, &failures));
+    ASSERT_EQ(1u, failures.size());
+    EXPECT_EQ(C2SettingResult::BAD_VALUE, failures[0]->failure);
+}
+
+// C2DomainKind is the only enum type currently used as a field type.
+// If another enum field type is added, a corresponding specialization must be added for it.
+template <>
+void C2CompIntfTest::getTestValues(
+        const C2FieldSupportedValues &validValueInfos,
+        std::vector<C2DomainKind> *const validValues,
+        std::vector<C2DomainKind> *const invalidValues) {
+    UNUSED(validValueInfos);
+    validValues->emplace_back(C2DomainVideo);
+    validValues->emplace_back(C2DomainAudio);
+    validValues->emplace_back(C2DomainOther);
+
+    // There is no invalid value.
+    UNUSED(invalidValues);
+}
+
+template <typename TField>
+void C2CompIntfTest::getTestValues(
+        const C2FieldSupportedValues &validValueInfos,
+        std::vector<TField> *const validValues,
+        std::vector<TField> *const invalidValues) {
+
+    // The supported values are represented as C2Value::Primitive and need to
+    // be converted to a primitive value of type TField. This lambda does that.
+    auto prim2Value = [](const C2Value::Primitive &prim) -> TField {
+        if (std::is_same<TField, int32_t>::value) {
+            return prim.i32;
+        } else if (std::is_same<TField, uint32_t>::value) {
+            return prim.u32;
+        } else if (std::is_same<TField, int64_t>::value) {
+            return prim.i64;
+        } else if (std::is_same<TField, uint64_t>::value) {
+            return prim.u64;
+        } else if (std::is_same<TField, float>::value) {
+            return prim.fp;
+        }
+        static_assert(std::is_same<TField, int32_t>::value ||
+                      std::is_same<TField, uint32_t>::value ||
+                      std::is_same<TField, int64_t>::value ||
+                      std::is_same<TField, uint64_t>::value ||
+                      std::is_same<TField, float>::value, "Invalid TField type.");
+        return 0;
+    };
+
+    // |validValueInfos| describes the supported values of a single field.
+    const auto &c2FSV = validValueInfos;
+
+    switch (c2FSV.type) {
+    case C2FieldSupportedValues::type_t::EMPTY: {
+        invalidValues->emplace_back(TField(0));
+        // TODO(hiroh) : Should other invalid values be tested?
+        break;
+    }
+    case C2FieldSupportedValues::type_t::RANGE: {
+        const auto &range = c2FSV.range;
+        auto rmin = prim2Value(range.min);
+        auto rmax = prim2Value(range.max);
+        auto rstep = prim2Value(range.step);
+
+        ASSERT_LE(rmin, rmax);
+
+        if (rstep != 0) {
+            // Linearly increasing values
+            for (auto v = rmin; v <= rmax; v += rstep) {
+                validValues->emplace_back(v);
+            }
+            if (rmin > std::numeric_limits<TField>::min()) {
+                invalidValues->emplace_back(rmin - 1);
+            }
+            if (rmax < std::numeric_limits<TField>::max()) {
+                invalidValues->emplace_back(rmax + 1);
+            }
+            const unsigned int N = validValues->size();
+            if (N >= 2) {
+                if (std::is_same<TField, float>::value) {
+                    invalidValues->emplace_back((validValues->at(0) + validValues->at(1)) / 2);
+                    invalidValues->emplace_back((validValues->at(N - 2) + validValues->at(N - 1)) / 2);
+                } else {
+                    if (rstep > 1) {
+                        invalidValues->emplace_back(validValues->at(0) + 1);
+                        invalidValues->emplace_back(validValues->at(N - 1) - 1);
+                    }
+                }
+            }
+        } else {
+            // Besides the linear case, there are two geometric cases:
+            // 1. integer geometric case
+            // 2. float geometric case
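+            // For example (illustrative): a range of {min=1, max=16, step=0, nom=2, denom=1}
+            // yields the valid values 1, 2, 4, 8, 16.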
+
+            auto nom = prim2Value(range.nom);
+            auto denom = prim2Value(range.denom);
+
+            // If both range.nom and range.denom are 1 and step is 0, we should use
+            // VALUES, shouldn't we?
+            ASSERT_FALSE(nom == 1 && denom == 1);
+
+            // (nom / denom) is not less than 1.
+            ASSERT_FALSE(denom == 0);
+            ASSERT_LE(denom, nom);
+            for (auto v = rmin; v <= rmax; v = v * nom / denom) {
+                validValues->emplace_back(v);
+            }
+
+            if (rmin > std::numeric_limits<TField>::min()) {
+                invalidValues->emplace_back(rmin - 1);
+            }
+            if (rmax < std::numeric_limits<TField>::max()) {
+                invalidValues->emplace_back(rmax + 1);
+            }
+
+            const unsigned int N = validValues->size();
+            if (N >= 2) {
+                if (std::is_same<TField, float>::value) {
+                    invalidValues->emplace_back((validValues->at(0) + validValues->at(1)) / 2);
+                    invalidValues->emplace_back((validValues->at(N - 2) + validValues->at(N - 1)) / 2);
+                } else {
+                    if (validValues->at(1) - validValues->at(0) > 1) {
+                        invalidValues->emplace_back(validValues->at(0) + 1);
+                    }
+                    if (validValues->at(N - 1) - validValues->at(N - 2) > 1) {
+                        invalidValues->emplace_back(validValues->at(N - 1) - 1);
+                    }
+                }
+            }
+        }
+        break;
+    }
+    case C2FieldSupportedValues::type_t::VALUES: {
+        for (const C2Value::Primitive &prim : c2FSV.values) {
+            validValues->emplace_back(prim2Value(prim));
+        }
+        auto minv = *std::min_element(validValues->begin(), validValues->end());
+        auto maxv = *std::max_element(validValues->begin(), validValues->end());
+        if (minv - 1 > std::numeric_limits<TField>::min()) {
+            invalidValues->emplace_back(minv - 1);
+        }
+        if (maxv + 1 < std::numeric_limits<TField>::max()) {
+            invalidValues->emplace_back(maxv + 1);
+        }
+        break;
+    }
+    case C2FieldSupportedValues::type_t::FLAGS: {
+        // TODO(hiroh) : Implement the case that param.type is FLAGS.
+        break;
+    }
+    }
+}
+
+template <typename T>
+void C2CompIntfTest::testReadOnlyParam(const T &preParam, const T &newParam) {
+    TRACED_FAILURE(configReadOnlyParam(newParam));
+    // Parameter value must not be changed
+    TRACED_FAILURE(queryParamAsExpected(preParam));
+}
+
+template <typename TParam, typename TRealField, typename TField>
+void C2CompIntfTest::testWritableParam(
+        TParam *const param, TRealField *const writableField,
+        const std::vector<TField> &validValues,
+        const std::vector<TField> &invalidValues) {
+    c2_status_t stConfig;
+
+    // Get the parameter's value at the beginning so it can be restored at the end.
+    TRACED_FAILURE(getValue(param));
+    std::unique_ptr<TParam> defaultParam = makeParamFrom(*param);
+
+    // Test valid values
+    for (const auto &val : validValues) {
+        std::unique_ptr<TParam> preParam = makeParamFrom(*param);
+
+        // Try to change the param.
+        *writableField = val;
+        TRACED_FAILURE(configWritableParamValidValue(*param, &stConfig));
+        if (stConfig == C2_OK) {
+            TRACED_FAILURE(queryParamAsExpected(*param));
+        } else {
+            // Param is unchanged because the field value conflicts with another field or parameter.
+            TRACED_FAILURE(queryParamAsExpected(*preParam));
+        }
+    }
+
+    // Store the current parameter in order to verify that |param| stays unchanged
+    // after trying to write an invalid value.
+    std::unique_ptr<TParam> lastValidParam = makeParamFrom(*param);
+
+    // Test invalid values
+    for (const auto &val : invalidValues) {
+        // Try to change the param to an invalid value.
+        *writableField = val;
+        TRACED_FAILURE(configWritableParamInvalidValue(*param));
+        TRACED_FAILURE(queryParamAsExpected(*lastValidParam));
+    }
+    // Reset the parameter by config().
+    TRACED_FAILURE(configWritableParamValidValue(*defaultParam, &stConfig));
+}
+
+template <typename T> void C2CompIntfTest::testUnsupportedParam() {
+    TRACED_FAILURE(queryUnsupportedParam<T>());
+}
+
+template <typename T> void C2CompIntfTest::testSupportedParam() {
+    TRACED_FAILURE(querySupportedParam<T>());
+}
+
+bool isSupportedParam(
+        const C2Param &param,
+        const std::vector<std::shared_ptr<C2ParamDescriptor>> &sParams) {
+    for (const auto &pd : sParams) {
+        if (param.type() == pd->type().type()) {
+            return true;
+        }
+    }
+    return false;
+}
+
+template <typename T>
+void C2CompIntfTest::checkParamPermission(
+    int *const result,
+    const std::vector<std::shared_ptr<C2ParamDescriptor>> &supportedParams) {
+    std::unique_ptr<T> param = makeParam<T>();
+
+    if (!isSupportedParam(*param, supportedParams)) {
+        // If a parameter isn't supported, just finish after calling testUnsupportedParam().
+        testUnsupportedParam<T>();
+        *result = ParamPermission::UNSUPPORTED;
+        return;
+    }
+
+    testSupportedParam<T>();
+
+    TRACED_FAILURE(getValue(param.get()));
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    // This config does not change the parameter, because |param| holds the current value.
+    // It is executed only to find out whether the parameter is read-only or writable.
+    c2_status_t stStack = config(param.get(), &failures);
+    if (stStack == C2_BAD_VALUE) {
+        // Read-only
+        std::unique_ptr<T> newParam = makeParam<T>();
+        testReadOnlyParam(*param, *newParam);
+        *result = ParamPermission::READONLY;
+    } else {
+        // Writable
+        EXPECT_EQ(stStack, C2_OK);
+        *result = ParamPermission::WRITABLE;
+    }
+}
+
+void C2CompIntfTest::outputResults(const std::string &name) {
+    std::vector<std::string> params[3];
+    for (const auto &testInfo : mParamResults) {
+        int result = testInfo.result;
+        ASSERT_TRUE(0 <= result && result <= 2);
+        params[result].emplace_back(testInfo.name);
+    }
+    const char *resultString[] = {"Writable", "Read-Only", "Unsupported"};
+    printf("\n----TEST RESULTS (%s)----\n\n", name.c_str());
+    for (int i = 0; i < 3; i++) {
+        printf("[ %s ]\n", resultString[i]);
+        for (const auto &t : params[i]) {
+            printf("%s\n", t.c_str());
+        }
+        printf("\n");
+    }
+}
+
+#define TEST_GENERAL_WRITABLE_FIELD(TParam_, field_type_name_, field_name_) \
+    do {                                                                \
+        std::unique_ptr<TParam_> param = makeParam<TParam_>();          \
+        std::vector<C2FieldSupportedValuesQuery> validValueInfos = {    \
+            C2FieldSupportedValuesQuery::Current(                       \
+                    C2ParamField(param.get(), &field_type_name_::field_name_)) \
+        };                                                              \
+        ASSERT_EQ(C2_OK,                                                \
+                  mIntf->querySupportedValues_vb(validValueInfos, C2_DONT_BLOCK));     \
+        ASSERT_EQ(1u, validValueInfos.size());                          \
+        std::vector<decltype(param->field_name_)> validValues;          \
+        std::vector<decltype(param->field_name_)> invalidValues;        \
+        getTestValues(validValueInfos[0].values, &validValues, &invalidValues);   \
+        testWritableParam(param.get(), &param->field_name_, validValues,\
+                          invalidValues);                               \
+    } while (0)
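+// The macro above queries the currently supported values of one writable field, derives
+// valid and invalid test values from them via getTestValues(), and exercises the field
+// with testWritableParam().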
+
+#define TEST_VSSTRUCT_WRITABLE_FIELD(TParam_, field_type_name_)         \
+    do {                                                                \
+        TEST_GENERAL_WRITABLE_FIELD(TParam_, field_type_name_, width);  \
+        TEST_GENERAL_WRITABLE_FIELD(TParam_, field_type_name_, height); \
+    } while (0)
+
+#define TEST_U32_WRITABLE_FIELD(TParam_, field_type_name_)              \
+  TEST_GENERAL_WRITABLE_FIELD(TParam_, field_type_name_, value)
+
+#define TEST_ENUM_WRITABLE_FIELD(TParam_, field_type_name_)             \
+  TEST_GENERAL_WRITABLE_FIELD(TParam_, field_type_name_, value)
+
+// TODO(hiroh): Support parameters based on char[] and uint32_t[].
+//#define TEST_STRING_WRITABLE_FIELD(TParam_, field_type_name_)
+// TEST_GENERAL_WRITABLE_FIELD(TParam_, field_type_name_, m.value)
+//#define TEST_U32ARRAY_WRITABLE_FIELD(Tparam_, field_type_name_)
+// TEST_GENERAL_WRITABLE_FIELD(Tparam_, uint32_t[], field_type_name_, values)
+
+#define EACH_TEST(TParam_, field_type_name_, test_name)                 \
+    do {                                                                \
+        int result = 0;                                                 \
+        this->mCurrentParamName = #TParam_;                             \
+        checkParamPermission<TParam_>(&result, supportedParams);        \
+        if (result == ParamPermission::WRITABLE) {                      \
+            test_name(TParam_, field_type_name_);                       \
+        }                                                               \
+        mParamResults.emplace_back(#TParam_, result);                   \
+    } while (0)
+
+#define EACH_TEST_SELF(type_, test_name) EACH_TEST(type_, type_, test_name)
+#define EACH_TEST_INPUT(type_, test_name) EACH_TEST(type_::input, type_, test_name)
+#define EACH_TEST_OUTPUT(type_, test_name) EACH_TEST(type_::output, type_, test_name)
+void C2CompIntfTest::testMain(std::shared_ptr<C2ComponentInterface> intf,
+                              const std::string &componentName) {
+    setComponent(intf);
+
+    std::vector<std::shared_ptr<C2ParamDescriptor>> supportedParams;
+    ASSERT_EQ(C2_OK, mIntf->querySupportedParams_nb(&supportedParams));
+
+    EACH_TEST_SELF(C2ComponentLatencyInfo, TEST_U32_WRITABLE_FIELD);
+    EACH_TEST_SELF(C2ComponentTemporalInfo, TEST_U32_WRITABLE_FIELD);
+    EACH_TEST_INPUT(C2PortLatencyInfo, TEST_U32_WRITABLE_FIELD);
+    EACH_TEST_OUTPUT(C2PortLatencyInfo, TEST_U32_WRITABLE_FIELD);
+    EACH_TEST_INPUT(C2StreamFormatConfig, TEST_U32_WRITABLE_FIELD);
+    EACH_TEST_OUTPUT(C2StreamFormatConfig, TEST_U32_WRITABLE_FIELD);
+    EACH_TEST_INPUT(C2PortStreamCountConfig, TEST_U32_WRITABLE_FIELD);
+    EACH_TEST_OUTPUT(C2PortStreamCountConfig, TEST_U32_WRITABLE_FIELD);
+
+    EACH_TEST_SELF(C2ComponentDomainInfo, TEST_ENUM_WRITABLE_FIELD);
+
+    // TODO(hiroh): Support parameters based on uint32_t[] and char[].
+    // EACH_TEST_INPUT(C2PortMimeConfig, TEST_STRING_WRITABLE_FIELD);
+    // EACH_TEST_OUTPUT(C2PortMimeConfig, TEST_STRING_WRITABLE_FIELD);
+    // EACH_TEST_INPUT(C2StreamMimeConfig, TEST_STRING_WRITABLE_FIELD);
+    // EACH_TEST_OUTPUT(C2StreamMimeConfig, TEST_STRING_WRITABLE_FIELD);
+
+    // EACH_TEST_SELF(C2SupportedParamsInfo, TEST_U32ARRAY_WRITABLE_FIELD);
+    // EACH_TEST_SELF(C2RequiredParamsInfo, TEST_U32ARRAY_WRITABLE_FIELD);
+    // EACH_TEST_SELF(C2ReadOnlyParamsInfo, TEST_U32ARRAY_WRITABLE_FIELD);
+    // EACH_TEST_SELF(C2RequestedInfosInfo, TEST_U32ARRAY_WRITABLE_FIELD);
+
+    EACH_TEST_INPUT(C2VideoSizeStreamInfo, TEST_VSSTRUCT_WRITABLE_FIELD);
+    EACH_TEST_OUTPUT(C2VideoSizeStreamInfo, TEST_VSSTRUCT_WRITABLE_FIELD);
+    EACH_TEST_INPUT(C2VideoSizeStreamTuning, TEST_VSSTRUCT_WRITABLE_FIELD);
+    EACH_TEST_OUTPUT(C2VideoSizeStreamTuning, TEST_VSSTRUCT_WRITABLE_FIELD);
+    EACH_TEST_INPUT(C2MaxVideoSizeHintPortSetting, TEST_VSSTRUCT_WRITABLE_FIELD);
+    EACH_TEST_OUTPUT(C2MaxVideoSizeHintPortSetting, TEST_VSSTRUCT_WRITABLE_FIELD);
+
+    outputResults(componentName);
+    resetResults();
+}
+
+TEST_F(C2CompIntfTest, C2V4L2CodecIntf) {
+
+    // Load the shared object library.
+    void* compLib = dlopen("system/lib/libv4l2_codec2.so", RTLD_NOW);
+
+    if (!compLib) {
+        printf("Cannot open library: %s.\n", dlerror());
+        FAIL();
+        return;
+    }
+
+    typedef C2ComponentStore* create_t();
+    create_t* create_store = (create_t*) dlsym(compLib, "create_store");
+    const char* dlsym_error = dlerror();
+    if (dlsym_error) {
+        printf("Cannot load symbol create: %s.\n", dlsym_error);
+        FAIL();
+        return;
+    }
+
+    typedef void destroy_t(C2ComponentStore*);
+    destroy_t* destroy_store = (destroy_t*) dlsym(compLib, "destroy_store");
+    dlsym_error = dlerror();
+    if (dlsym_error) {
+        printf("Cannot load symbol destroy: %s.\n", dlsym_error);
+        FAIL();
+        return;
+    }
+
+    std::shared_ptr<C2ComponentStore> componentStore(create_store(), destroy_store);
+    std::shared_ptr<C2ComponentInterface> componentIntf;
+    componentStore->createInterface("v4l2.decoder", &componentIntf);
+    auto componentName = "C2V4L2Codec";
+    testMain(componentIntf, componentName);
+}
+
+} // namespace android
diff --git a/media/libstagefright/codec2/tests/C2Param_test.cpp b/media/libstagefright/codec2/tests/C2Param_test.cpp
index ec82c84..8ebc584 100644
--- a/media/libstagefright/codec2/tests/C2Param_test.cpp
+++ b/media/libstagefright/codec2/tests/C2Param_test.cpp
@@ -19,6 +19,7 @@
 
 #include <gtest/gtest.h>
 
+#define __C2_GENERATE_GLOBAL_VARS__
 #include <util/C2ParamUtils.h>
 #include <C2ParamDef.h>
 
@@ -56,11 +57,11 @@
     *os << "*" << fd.length() << ")";
 }
 
-enum C2ParamIndexType {
+enum C2ParamIndexType : C2Param::type_index_t {
     kParamIndexNumber,
     kParamIndexNumbers,
     kParamIndexNumber2,
-    kParamIndexVendorStart = C2Param::BaseIndex::kVendorStart,
+    kParamIndexVendorStart = C2Param::TYPE_INDEX_VENDOR_START,
     kParamIndexVendorNumbers,
 };
 
@@ -90,11 +91,11 @@
 };
 
 struct C2SizeStruct {
-    int32_t mNumber;
-    int32_t mHeight;
-    enum : uint32_t { baseIndex = kParamIndexSize };                        // <= needed for C2FieldDescriptor
-    const static std::initializer_list<const C2FieldDescriptor> fieldList;  // <= needed for C2FieldDescriptor
-    const static FD::Type TYPE = (FD::Type)(baseIndex | FD::STRUCT_FLAG);
+    int32_t width;
+    int32_t height;
+    enum : uint32_t { CORE_INDEX = kParamIndexSize };                        // <= needed for C2FieldDescriptor
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST;  // <= needed for C2FieldDescriptor
+    const static FD::type_t TYPE = (FD::type_t)(CORE_INDEX | FD::STRUCT_FLAG);
 };
 
 DEFINE_NO_NAMED_VALUES_FOR(C2SizeStruct)
@@ -109,22 +110,22 @@
 }
 
 struct C2TestStruct_A {
-    int32_t mSigned32;
-    int64_t mSigned64[2];
-    uint32_t mUnsigned32[1];
-    uint64_t mUnsigned64;
-    float mFloat;
-    C2SizeStruct mSize[3];
-    uint8_t mBlob[100];
-    char mString[100];
-    bool mYesNo[100];
+    int32_t signed32;
+    int64_t signed64[2];
+    uint32_t unsigned32[1];
+    uint64_t unsigned64;
+    float fp32;
+    C2SizeStruct sz[3];
+    uint8_t blob[100];
+    char string[100];
+    bool yesNo[100];
 
-    const static std::initializer_list<const C2FieldDescriptor> fieldList;
-    // enum : uint32_t { baseIndex = kParamIndexTest };
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST;
+    // enum : uint32_t { CORE_INDEX = kParamIndexTest };
     // typedef C2TestStruct_A _type;
 } __attribute__((packed));
 
-const std::initializer_list<const C2FieldDescriptor> C2TestStruct_A::fieldList =
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_A::FIELD_LIST =
     { { FD::INT32,    1, "s32",   0, 4 },
       { FD::INT64,    2, "s64",   4, 8 },
       { FD::UINT32,   1, "u32",  20, 4 },
@@ -136,7 +137,7 @@
       { FD::BLOB,   100, "y-n", 260, 1 } };
 
 TEST_P(C2ParamTest_ParamFieldList, VerifyStruct) {
-    std::vector<const C2FieldDescriptor> fields = GetParam(), expected = C2TestStruct_A::fieldList;
+    std::vector<const C2FieldDescriptor> fields = GetParam(), expected = C2TestStruct_A::FIELD_LIST;
 
     // verify first field descriptor
     EXPECT_EQ(FD::INT32, fields[0].type());
@@ -157,34 +158,34 @@
     }
 }
 
-INSTANTIATE_TEST_CASE_P(InitializerList, C2ParamTest_ParamFieldList, ::testing::Values(C2TestStruct_A::fieldList));
+INSTANTIATE_TEST_CASE_P(InitializerList, C2ParamTest_ParamFieldList, ::testing::Values(C2TestStruct_A::FIELD_LIST));
 
 // define fields using C2FieldDescriptor pointer constructor
 const std::initializer_list<const C2FieldDescriptor> C2TestStruct_A_FD_PTR_fieldList =
-    { C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mSigned32,   "s32"),
-      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mSigned64,   "s64"),
-      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mUnsigned32, "u32"),
-      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mUnsigned64, "u64"),
-      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mFloat,      "fp"),
-      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mSize,       "size"),
-      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mBlob,       "blob"),
-      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mString,     "str"),
-    //  C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->mYesNo,      "y-n")
+    { C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->signed32,   "s32"),
+      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->signed64,   "s64"),
+      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->unsigned32, "u32"),
+      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->unsigned64, "u64"),
+      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->fp32,      "fp"),
+      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->sz,       "size"),
+      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->blob,       "blob"),
+      C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->string,     "str"),
+    //  C2FieldDescriptor(&((C2TestStruct_A*)(nullptr))->yesNo,      "y-n")
     };
 
 INSTANTIATE_TEST_CASE_P(PointerConstructor, C2ParamTest_ParamFieldList, ::testing::Values(C2TestStruct_A_FD_PTR_fieldList));
 
 // define fields using C2FieldDescriptor member-pointer constructor
 const std::initializer_list<const C2FieldDescriptor> C2TestStruct_A_FD_MEM_PTR_fieldList =
-    { C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mSigned32,   "s32"),
-      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mSigned64,   "s64"),
-      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mUnsigned32, "u32"),
-      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mUnsigned64, "u64"),
-      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mFloat,      "fp"),
-      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mSize,       "size"),
-      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mBlob,       "blob"),
-      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mString,     "str"),
-    //  C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::mYesNo,      "y-n")
+    { C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::signed32,   "s32"),
+      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::signed64,   "s64"),
+      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::unsigned32, "u32"),
+      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::unsigned64, "u64"),
+      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::fp32,      "fp"),
+      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::sz,       "size"),
+      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::blob,       "blob"),
+      C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::string,     "str"),
+    //  C2FieldDescriptor((C2TestStruct_A*)0, &C2TestStruct_A::yesNo,      "y-n")
     };
 
 INSTANTIATE_TEST_CASE_P(MemberPointerConstructor, C2ParamTest_ParamFieldList, ::testing::Values(C2TestStruct_A_FD_MEM_PTR_fieldList));
@@ -192,75 +193,75 @@
 // Test 2. define a structure with two-step helper methods
 
 struct C2TestAStruct {
-    int32_t mSigned32;
-    int64_t mSigned64[2];
-    uint32_t mUnsigned32[1];
-    uint64_t mUnsigned64;
-    float mFloat;
-    C2SizeStruct mSize[3];
-    uint8_t mBlob[100];
-    char mString[100];
-    bool mYesNo[100];
+    int32_t signed32;
+    int64_t signed64[2];
+    uint32_t unsigned32[1];
+    uint64_t unsigned64;
+    float fp32;
+    C2SizeStruct sz[3];
+    uint8_t blob[100];
+    char string[100];
+    bool yesNo[100];
 
 private: // test access level
     DEFINE_C2STRUCT(TestA)
 } C2_PACK;
 
 DESCRIBE_C2STRUCT(TestA, {
-    C2FIELD(mSigned32, "s32")
-    C2FIELD(mSigned64, "s64")
-    C2FIELD(mUnsigned32, "u32")
-    C2FIELD(mUnsigned64, "u64")
-    C2FIELD(mFloat, "fp")
-    C2FIELD(mSize, "size")
-    C2FIELD(mBlob, "blob")
-    C2FIELD(mString, "str")
-    // C2FIELD(mYesNo, "y-n")
+    C2FIELD(signed32, "s32")
+    C2FIELD(signed64, "s64")
+    C2FIELD(unsigned32, "u32")
+    C2FIELD(unsigned64, "u64")
+    C2FIELD(fp32, "fp")
+    C2FIELD(sz, "size")
+    C2FIELD(blob, "blob")
+    C2FIELD(string, "str")
+    // C2FIELD(yesNo, "y-n")
 }) // ; optional
 
-INSTANTIATE_TEST_CASE_P(DescribeStruct2Step, C2ParamTest_ParamFieldList, ::testing::Values(C2TestAStruct::fieldList));
+INSTANTIATE_TEST_CASE_P(DescribeStruct2Step, C2ParamTest_ParamFieldList, ::testing::Values(C2TestAStruct::FIELD_LIST));
 
 // Test 3. define a structure with one-step helper method
 
 struct C2TestBStruct {
-    int32_t mSigned32;
-    int64_t mSigned64[2];
-    uint32_t mUnsigned32[1];
-    uint64_t mUnsigned64;
-    float mFloat;
-    C2SizeStruct mSize[3];
-    uint8_t mBlob[100];
-    char mString[100];
-    bool mYesNo[100];
+    int32_t signed32;
+    int64_t signed64[2];
+    uint32_t unsigned32[1];
+    uint64_t unsigned64;
+    float fp32;
+    C2SizeStruct sz[3];
+    uint8_t blob[100];
+    char string[100];
+    bool yesNo[100];
 
 private: // test access level
     DEFINE_AND_DESCRIBE_C2STRUCT(TestB)
 
-    C2FIELD(mSigned32, "s32")
-    C2FIELD(mSigned64, "s64")
-    C2FIELD(mUnsigned32, "u32")
-    C2FIELD(mUnsigned64, "u64")
-    C2FIELD(mFloat, "fp")
-    C2FIELD(mSize, "size")
-    C2FIELD(mBlob, "blob")
-    C2FIELD(mString, "str")
-    // C2FIELD(mYesNo, "y-n")
+    C2FIELD(signed32, "s32")
+    C2FIELD(signed64, "s64")
+    C2FIELD(unsigned32, "u32")
+    C2FIELD(unsigned64, "u64")
+    C2FIELD(fp32, "fp")
+    C2FIELD(sz, "size")
+    C2FIELD(blob, "blob")
+    C2FIELD(string, "str")
+    // C2FIELD(yesNo, "y-n")
 };
 
-INSTANTIATE_TEST_CASE_P(DescribeStruct1Step, C2ParamTest_ParamFieldList, ::testing::Values(C2TestBStruct::fieldList));
+INSTANTIATE_TEST_CASE_P(DescribeStruct1Step, C2ParamTest_ParamFieldList, ::testing::Values(C2TestBStruct::FIELD_LIST));
 
 // Test 4. flexible members
 
 template<typename T>
 class C2ParamTest_FlexParamFieldList : public ::testing::Test {
 protected:
-    using Type=FD::Type;
+    using type_t=FD::type_t;
 
     // static std::initializer_list<std::initializer_list<const C2FieldDescriptor>>
     static std::vector<std::vector<const C2FieldDescriptor>>
             GetLists();
 
-    constexpr static Type flexType =
+    constexpr static type_t FlexType =
             std::is_same<T, int32_t>::value ? FD::INT32 :
             std::is_same<T, int64_t>::value ? FD::INT64 :
             std::is_same<T, uint32_t>::value ? FD::UINT32 :
@@ -268,8 +269,8 @@
             std::is_same<T, float>::value ? FD::FLOAT :
             std::is_same<T, uint8_t>::value ? FD::BLOB :
             std::is_same<T, char>::value ? FD::STRING :
-            std::is_same<T, C2SizeStruct>::value ? C2SizeStruct::TYPE : (Type)0;
-    constexpr static size_t flexSize = sizeof(T);
+            std::is_same<T, C2SizeStruct>::value ? C2SizeStruct::TYPE : (type_t)0;
+    constexpr static size_t FLEX_SIZE = sizeof(T);
 };
 
 typedef ::testing::Types<int32_t, int64_t, C2SizeStruct> FlexTypes;
@@ -281,11 +282,11 @@
         if (fields.size() > 1) {
             EXPECT_EQ(2u, fields.size());
             EXPECT_EQ(C2FieldDescriptor(FD::INT32, 1, "s32", 0, 4), fields[0]);
-            EXPECT_EQ(C2FieldDescriptor(this->flexType, 0, "flex", 4, this->flexSize),
+            EXPECT_EQ(C2FieldDescriptor(this->FlexType, 0, "flex", 4, this->FLEX_SIZE),
                       fields[1]);
         } else {
             EXPECT_EQ(1u, fields.size());
-            EXPECT_EQ(C2FieldDescriptor(this->flexType, 0, "flex", 0, this->flexSize),
+            EXPECT_EQ(C2FieldDescriptor(this->FlexType, 0, "flex", 0, this->FLEX_SIZE),
                       fields[0]);
         }
     }
@@ -294,33 +295,33 @@
 struct C2TestStruct_FlexS32 {
     int32_t mFlex[];
 
-    const static std::initializer_list<const C2FieldDescriptor> fieldList;
-    // enum : uint32_t { baseIndex = kParamIndexTestFlex, flexSize = 4 };
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST;
+    // enum : uint32_t { CORE_INDEX = kParamIndexTestFlex, FLEX_SIZE = 4 };
     // typedef C2TestStruct_FlexS32 _type;
-    // typedef int32_t flexType;
+    // typedef int32_t FlexType;
 };
 
-const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexS32::fieldList = {
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexS32::FIELD_LIST = {
     { FD::INT32, 0, "flex", 0, 4 }
 };
 
 struct C2TestStruct_FlexEndS32 {
-    int32_t mSigned32;
+    int32_t signed32;
     int32_t mFlex[];
 
-    const static std::initializer_list<const C2FieldDescriptor> fieldList;
-    // enum : uint32_t { baseIndex = kParamIndexTestFlexEnd, flexSize = 4 };
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST;
+    // enum : uint32_t { CORE_INDEX = kParamIndexTestFlexEnd, FLEX_SIZE = 4 };
     // typedef C2TestStruct_FlexEnd _type;
-    // typedef int32_t flexType;
+    // typedef int32_t FlexType;
 };
 
-const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndS32::fieldList = {
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndS32::FIELD_LIST = {
     { FD::INT32, 1, "s32", 0, 4 },
     { FD::INT32, 0, "flex", 4, 4 },
 };
 
 const static std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndS32_ptr_fieldList = {
-    C2FieldDescriptor(&((C2TestStruct_FlexEndS32*)0)->mSigned32, "s32"),
+    C2FieldDescriptor(&((C2TestStruct_FlexEndS32*)0)->signed32, "s32"),
     C2FieldDescriptor(&((C2TestStruct_FlexEndS32*)0)->mFlex, "flex"),
 };
 
@@ -334,7 +335,7 @@
 };
 
 struct C2TestFlexEndS32Struct {
-    int32_t mSigned32;
+    int32_t signed32;
     int32_t mFlexSigned32[];
 private: // test access level
     C2TestFlexEndS32Struct() {}
@@ -343,7 +344,7 @@
 } C2_PACK;
 
 DESCRIBE_C2STRUCT(TestFlexEndS32, {
-    C2FIELD(mSigned32, "s32")
+    C2FIELD(signed32, "s32")
     C2FIELD(mFlexSigned32, "flex")
 }) // ; optional
 
@@ -352,38 +353,38 @@
 //std::initializer_list<std::initializer_list<const C2FieldDescriptor>>
 C2ParamTest_FlexParamFieldList<int32_t>::GetLists() {
     return {
-        C2TestStruct_FlexS32::fieldList,
-        C2TestStruct_FlexEndS32::fieldList,
+        C2TestStruct_FlexS32::FIELD_LIST,
+        C2TestStruct_FlexEndS32::FIELD_LIST,
         C2TestStruct_FlexEndS32_ptr_fieldList,
-        C2TestFlexS32Struct::fieldList,
-        C2TestFlexEndS32Struct::fieldList,
+        C2TestFlexS32Struct::FIELD_LIST,
+        C2TestFlexEndS32Struct::FIELD_LIST,
     };
 }
 
 struct C2TestStruct_FlexS64 {
     int64_t mFlexSigned64[];
 
-    const static std::initializer_list<const C2FieldDescriptor> fieldList;
-    // enum : uint32_t { baseIndex = kParamIndexTestFlexS64, flexSize = 8 };
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST;
+    // enum : uint32_t { CORE_INDEX = kParamIndexTestFlexS64, FLEX_SIZE = 8 };
     // typedef C2TestStruct_FlexS64 _type;
-    // typedef int64_t flexType;
+    // typedef int64_t FlexType;
 };
 
-const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexS64::fieldList = {
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexS64::FIELD_LIST = {
     { FD::INT64, 0, "flex", 0, 8 }
 };
 
 struct C2TestStruct_FlexEndS64 {
-    int32_t mSigned32;
+    int32_t signed32;
     int64_t mSigned64Flex[];
 
-    const static std::initializer_list<const C2FieldDescriptor> fieldList;
-    // enum : uint32_t { baseIndex = C2TestStruct_FlexEndS64, flexSize = 8 };
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST;
+    // enum : uint32_t { CORE_INDEX = C2TestStruct_FlexEndS64, FLEX_SIZE = 8 };
     // typedef C2TestStruct_FlexEndS64 _type;
-    // typedef int64_t flexType;
+    // typedef int64_t FlexType;
 };
 
-const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndS64::fieldList = {
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndS64::FIELD_LIST = {
     { FD::INT32, 1, "s32", 0, 4 },
     { FD::INT64, 0, "flex", 4, 8 },
 };
@@ -397,7 +398,7 @@
 };
 
 struct C2TestFlexEndS64Struct {
-    int32_t mSigned32;
+    int32_t signed32;
     int64_t mFlexSigned64[];
     C2TestFlexEndS64Struct() {}
 
@@ -405,7 +406,7 @@
 } C2_PACK;
 
 DESCRIBE_C2STRUCT(TestFlexEndS64, {
-    C2FIELD(mSigned32, "s32")
+    C2FIELD(signed32, "s32")
     C2FIELD(mFlexSigned64, "flex")
 }) // ; optional
 
@@ -414,37 +415,37 @@
 //std::initializer_list<std::initializer_list<const C2FieldDescriptor>>
 C2ParamTest_FlexParamFieldList<int64_t>::GetLists() {
     return {
-        C2TestStruct_FlexS64::fieldList,
-        C2TestStruct_FlexEndS64::fieldList,
-        C2TestFlexS64Struct::fieldList,
-        C2TestFlexEndS64Struct::fieldList,
+        C2TestStruct_FlexS64::FIELD_LIST,
+        C2TestStruct_FlexEndS64::FIELD_LIST,
+        C2TestFlexS64Struct::FIELD_LIST,
+        C2TestFlexEndS64Struct::FIELD_LIST,
     };
 }
 
 struct C2TestStruct_FlexSize {
     C2SizeStruct mFlexSize[];
 
-    const static std::initializer_list<const C2FieldDescriptor> fieldList;
-    // enum : uint32_t { baseIndex = kParamIndexTestFlexSize, flexSize = 8 };
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST;
+    // enum : uint32_t { CORE_INDEX = kParamIndexTestFlexSize, FLEX_SIZE = 8 };
     // typedef C2TestStruct_FlexSize _type;
-    // typedef C2SizeStruct flexType;
+    // typedef C2SizeStruct FlexType;
 };
 
-const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexSize::fieldList = {
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexSize::FIELD_LIST = {
     { C2SizeStruct::TYPE, 0, "flex", 0, sizeof(C2SizeStruct) }
 };
 
 struct C2TestStruct_FlexEndSize {
-    int32_t mSigned32;
+    int32_t signed32;
     C2SizeStruct mSizeFlex[];
 
-    const static std::initializer_list<const C2FieldDescriptor> fieldList;
-    // enum : uint32_t { baseIndex = C2TestStruct_FlexEndSize, flexSize = 8 };
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST;
+    // enum : uint32_t { CORE_INDEX = C2TestStruct_FlexEndSize, FLEX_SIZE = 8 };
     // typedef C2TestStruct_FlexEndSize _type;
-    // typedef C2SizeStruct flexType;
+    // typedef C2SizeStruct FlexType;
 };
 
-const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndSize::fieldList = {
+const std::initializer_list<const C2FieldDescriptor> C2TestStruct_FlexEndSize::FIELD_LIST = {
     { FD::INT32, 1, "s32", 0, 4 },
     { C2SizeStruct::TYPE, 0, "flex", 4, sizeof(C2SizeStruct) },
 };
@@ -458,7 +459,7 @@
 };
 
 struct C2TestFlexEndSizeStruct {
-    int32_t mSigned32;
+    int32_t signed32;
     C2SizeStruct mFlexSize[];
     C2TestFlexEndSizeStruct() {}
 
@@ -466,97 +467,137 @@
 } C2_PACK;
 
 DESCRIBE_C2STRUCT(TestFlexEndSize, {
-    C2FIELD(mSigned32, "s32")
+    C2FIELD(signed32, "s32")
     C2FIELD(mFlexSize, "flex")
 }) // ; optional
 
+struct C2TestBaseFlexEndSizeStruct {
+    int32_t signed32;
+    C2SizeStruct mFlexSize[];
+    C2TestBaseFlexEndSizeStruct() {}
+
+    DEFINE_BASE_FLEX_C2STRUCT(TestBaseFlexEndSize, mFlexSize)
+} C2_PACK;
+
+DESCRIBE_C2STRUCT(TestBaseFlexEndSize, {
+    C2FIELD(signed32, "s32")
+    C2FIELD(mFlexSize, "flex")
+}) // ; optional
+
+struct C2TestBaseFlexEndSize2Struct {
+    int32_t signed32;
+    C2SizeStruct mFlexSize[];
+    C2TestBaseFlexEndSize2Struct() {}
+
+    DEFINE_AND_DESCRIBE_BASE_FLEX_C2STRUCT(TestBaseFlexEndSize2, mFlexSize)
+    C2FIELD(signed32, "s32")
+    C2FIELD(mFlexSize, "flex")
+};
+
 template<>
 std::vector<std::vector<const C2FieldDescriptor>>
 //std::initializer_list<std::initializer_list<const C2FieldDescriptor>>
 C2ParamTest_FlexParamFieldList<C2SizeStruct>::GetLists() {
     return {
-        C2TestStruct_FlexSize::fieldList,
-        C2TestStruct_FlexEndSize::fieldList,
-        C2TestFlexSizeStruct::fieldList,
-        C2TestFlexEndSizeStruct::fieldList,
+        C2TestStruct_FlexSize::FIELD_LIST,
+        C2TestStruct_FlexEndSize::FIELD_LIST,
+        C2TestFlexSizeStruct::FIELD_LIST,
+        C2TestFlexEndSizeStruct::FIELD_LIST,
+        C2TestBaseFlexEndSizeStruct::FIELD_LIST,
+        C2TestBaseFlexEndSize2Struct::FIELD_LIST,
     };
 }
 
 TEST_F(C2ParamTest, FieldId) {
     // pointer constructor
-    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestStruct_A*)0)->mSigned32));
-    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestStruct_A*)0)->mSigned64));
-    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&((C2TestStruct_A*)0)->mUnsigned32));
-    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&((C2TestStruct_A*)0)->mUnsigned64));
-    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&((C2TestStruct_A*)0)->mFloat));
-    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&((C2TestStruct_A*)0)->mSize));
-    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&((C2TestStruct_A*)0)->mBlob));
-    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&((C2TestStruct_A*)0)->mString));
-    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&((C2TestStruct_A*)0)->mYesNo));
+    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestStruct_A*)0)->signed32));
+    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestStruct_A*)0)->signed64));
+    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&((C2TestStruct_A*)0)->unsigned32));
+    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&((C2TestStruct_A*)0)->unsigned64));
+    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&((C2TestStruct_A*)0)->fp32));
+    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&((C2TestStruct_A*)0)->sz));
+    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&((C2TestStruct_A*)0)->blob));
+    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&((C2TestStruct_A*)0)->string));
+    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&((C2TestStruct_A*)0)->yesNo));
 
-    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->mSigned32));
+    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->signed32));
     EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->mFlexSize));
 
-    // member pointer constructor
-    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mSigned32));
-    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mSigned64));
-    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mUnsigned32));
-    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mUnsigned64));
-    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mFloat));
-    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mSize));
-    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mBlob));
-    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mString));
-    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::mYesNo));
+    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestBaseFlexEndSizeStruct*)0)->signed32));
+    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestBaseFlexEndSizeStruct*)0)->mFlexSize));
 
-    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::mSigned32));
+    // member pointer constructor
+    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed32));
+    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed64));
+    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned32));
+    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned64));
+    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::fp32));
+    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::sz));
+    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::blob));
+    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::string));
+    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::yesNo));
+
+    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::signed32));
     EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::mFlexSize));
 
-    // member pointer sans type pointer
-    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestStruct_A::mSigned32));
-    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestStruct_A::mSigned64));
-    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&C2TestStruct_A::mUnsigned32));
-    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&C2TestStruct_A::mUnsigned64));
-    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&C2TestStruct_A::mFloat));
-    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&C2TestStruct_A::mSize));
-    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&C2TestStruct_A::mBlob));
-    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&C2TestStruct_A::mString));
-    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&C2TestStruct_A::mYesNo));
+    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestBaseFlexEndSizeStruct*)0, &C2TestBaseFlexEndSizeStruct::signed32));
+    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestBaseFlexEndSizeStruct*)0, &C2TestBaseFlexEndSizeStruct::mFlexSize));
 
-    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestFlexEndSizeStruct::mSigned32));
+    // member pointer sans type pointer
+    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestStruct_A::signed32));
+    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestStruct_A::signed64));
+    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&C2TestStruct_A::unsigned32));
+    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&C2TestStruct_A::unsigned64));
+    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&C2TestStruct_A::fp32));
+    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&C2TestStruct_A::sz));
+    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&C2TestStruct_A::blob));
+    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&C2TestStruct_A::string));
+    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&C2TestStruct_A::yesNo));
+
+    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestFlexEndSizeStruct::signed32));
     EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestFlexEndSizeStruct::mFlexSize));
 
+    EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestBaseFlexEndSizeStruct::signed32));
+    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestBaseFlexEndSizeStruct::mFlexSize));
+
     typedef C2GlobalParam<C2Info, C2TestAStruct> C2TestAInfo;
     typedef C2GlobalParam<C2Info, C2TestFlexEndSizeStruct> C2TestFlexEndSizeInfo;
+    typedef C2GlobalParam<C2Info, C2TestBaseFlexEndSizeStruct, kParamIndexTestFlexEndSize> C2TestFlexEndSizeInfoFromBase;
 
     // pointer constructor in C2Param
-    EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestAInfo*)0)->mSigned32));
-    EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestAInfo*)0)->mSigned64));
-    EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId(&((C2TestAInfo*)0)->mUnsigned32));
-    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&((C2TestAInfo*)0)->mUnsigned64));
-    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&((C2TestAInfo*)0)->mFloat));
-    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&((C2TestAInfo*)0)->mSize));
-    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&((C2TestAInfo*)0)->mBlob));
-    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&((C2TestAInfo*)0)->mString));
-    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&((C2TestAInfo*)0)->mYesNo));
+    EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestAInfo*)0)->signed32));
+    EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestAInfo*)0)->signed64));
+    EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId(&((C2TestAInfo*)0)->unsigned32));
+    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&((C2TestAInfo*)0)->unsigned64));
+    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&((C2TestAInfo*)0)->fp32));
+    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&((C2TestAInfo*)0)->sz));
+    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&((C2TestAInfo*)0)->blob));
+    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&((C2TestAInfo*)0)->string));
+    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&((C2TestAInfo*)0)->yesNo));
 
-    EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.mSigned32));
+    EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.signed32));
     EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.mFlexSize));
 
+    EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestFlexEndSizeInfoFromBase*)0)->m.signed32));
+    EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestFlexEndSizeInfoFromBase*)0)->m.mFlexSize));
+
     // member pointer in C2Param
-    EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mSigned32));
-    EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mSigned64));
-    EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mUnsigned32));
-    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mUnsigned64));
-    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mFloat));
-    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mSize));
-    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mBlob));
-    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mString));
-    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::mYesNo));
+    EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed32));
+    EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed64));
+    EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned32));
+    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned64));
+    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::fp32));
+    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::sz));
+    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::blob));
+    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::string));
+    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::yesNo));
 
     // NOTE: cannot use a member pointer for flex params due to introduction of 'm'
-    // EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&C2TestFlexEndSizeInfo::m.mSigned32));
+    // EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&C2TestFlexEndSizeInfo::m.signed32));
     // EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&C2TestFlexEndSizeInfo::m.mFlexSize));
 
+    // EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&C2TestFlexEndSizeInfoFromBase::m.signed32));
+    // EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&C2TestFlexEndSizeInfoFromBase::m.mFlexSize));
 
 
 }
@@ -590,33 +631,33 @@
 };
 
 void compiledStatic_arrayTypePropagationTest() {
-    (void)S32(&((C2TestFlexEndS32Struct *)0)->mSigned32);
+    (void)S32(&((C2TestFlexEndS32Struct *)0)->signed32);
     (void)FLX(&((C2TestFlexEndS32Struct *)0)->mFlexSigned32, (int32_t*)0);
     (void)FLX(&((C2TestFlexS32Struct *)0)->mFlexSigned32, (int32_t*)0);
 
     typedef C2GlobalParam<C2Info, C2TestAStruct> C2TestAInfo;
 
     // TRICKY: &derivedClass::baseMember has type of baseClass::*
-    static_assert(std::is_same<decltype(&C2TestAInfo::mSigned32), int32_t C2TestAStruct::*>::value,
+    static_assert(std::is_same<decltype(&C2TestAInfo::signed32), int32_t C2TestAStruct::*>::value,
                   "base member pointer should have base class in type");
 
     // therefore, member pointer expands to baseClass::* in templates
-    (void)MP(&C2TestAInfo::mSigned32,
+    (void)MP(&C2TestAInfo::signed32,
              (C2TestAStruct*)0 /* expected */, (C2TestAInfo*)0 /* unexpected */);
     // but can be cast to derivedClass::*
-    (void)MP((int32_t C2TestAInfo::*)&C2TestAInfo::mSigned32,
+    (void)MP((int32_t C2TestAInfo::*)&C2TestAInfo::signed32,
              (C2TestAInfo*)0 /* expected */, (C2TestAStruct*)0 /* unexpected */);
 
     // TRICKY: baseClass::* does not autoconvert to derivedClass::* even in templates
-    // (void)MP(&C2TestAInfo::mSigned32, (C2TestAInfo*)0);
+    // (void)MP(&C2TestAInfo::signed32, (C2TestAInfo*)0);
 }
 
 TEST_F(C2ParamTest, MemberPointerCast) {
     typedef C2GlobalParam<C2Info, C2TestAStruct> C2TestAInfo;
 
-    static_assert(offsetof(C2TestAInfo, mSigned32) == 8, "offset should be 8");
-    constexpr int32_t C2TestAStruct::* s32ptr = &C2TestAInfo::mSigned32;
-    constexpr int32_t C2TestAInfo::* s32ptr_derived = (int32_t C2TestAStruct::*)&C2TestAInfo::mSigned32;
+    static_assert(offsetof(C2TestAInfo, signed32) == 8, "offset should be 8");
+    constexpr int32_t C2TestAStruct::* s32ptr = &C2TestAInfo::signed32;
+    constexpr int32_t C2TestAInfo::* s32ptr_derived = (int32_t C2TestAStruct::*)&C2TestAInfo::signed32;
     constexpr int32_t C2TestAInfo::* s32ptr_cast2derived = (int32_t C2TestAInfo::*)s32ptr;
     C2TestAInfo *info = (C2TestAInfo *)256;
     C2TestAStruct *strukt = (C2TestAStruct *)info;
@@ -633,8 +674,12 @@
     EXPECT_EQ(264u, (uintptr_t)strukt_s32);
 
     typedef C2GlobalParam<C2Info, C2TestFlexEndSizeStruct> C2TestFlexEndSizeInfo;
-    static_assert(offsetof(C2TestFlexEndSizeInfo, m.mSigned32) == 8, "offset should be 8");
+    static_assert(offsetof(C2TestFlexEndSizeInfo, m.signed32) == 8, "offset should be 8");
     static_assert(offsetof(C2TestFlexEndSizeInfo, m.mFlexSize) == 12, "offset should be 12");
+
+    typedef C2GlobalParam<C2Info, C2TestBaseFlexEndSizeStruct, kParamIndexTestFlexEndSize> C2TestFlexEndSizeInfoFromBase;
+    static_assert(offsetof(C2TestFlexEndSizeInfoFromBase, m.signed32) == 8, "offset should be 8");
+    static_assert(offsetof(C2TestFlexEndSizeInfoFromBase, m.mFlexSize) == 12, "offset should be 12");
 }
 
 /* ===================================== PARAM USAGE TESTS ===================================== */
@@ -648,6 +693,15 @@
     C2FIELD(mNumber, "number")
 };
 
+struct C2NumberBaseStruct {
+    int32_t mNumber;
+    C2NumberBaseStruct() {}
+    C2NumberBaseStruct(int32_t _number) : mNumber(_number) {}
+
+    DEFINE_AND_DESCRIBE_BASE_C2STRUCT(NumberBase)
+    C2FIELD(mNumber, "number")
+};
+
 struct C2NumbersStruct {
     int32_t mNumbers[];
     C2NumbersStruct() {}
@@ -657,6 +711,8 @@
 };
 static_assert(sizeof(C2NumbersStruct) == 0, "C2NumbersStruct has incorrect size");
 
+typedef C2GlobalParam<C2Info, C2NumberStruct> C2NumberInfo;
+
 typedef C2GlobalParam<C2Tuning, C2NumberStruct> C2NumberTuning;
 typedef   C2PortParam<C2Tuning, C2NumberStruct> C2NumberPortTuning;
 typedef C2StreamParam<C2Tuning, C2NumberStruct> C2NumberStreamTuning;
@@ -670,7 +726,7 @@
 
 void test() {
     C2NumberStruct s(10);
-    (void)C2NumberStruct::fieldList;
+    (void)C2NumberStruct::FIELD_LIST;
 };
 
 typedef C2StreamParam<C2Tuning, C2Int64Value, kParamIndexNumberB> C2NumberConfig4;
@@ -679,19 +735,19 @@
 
 void test3() {
     C2NumberConfig3 s(10);
-    s.mValue = 11;
+    s.value = 11;
     s = 12;
-    (void)C2NumberConfig3::fieldList;
+    (void)C2NumberConfig3::FIELD_LIST;
     std::shared_ptr<C2VideoNameConfig> n = C2VideoNameConfig::alloc_shared(25);
-    strcpy(n->m.mValue, "lajos");
+    strcpy(n->m.value, "lajos");
     C2NumberConfig4 t(false, 0, 11);
-    t.mValue = 15;
+    t.value = 15;
 };
 
 struct C2NumbersStruct {
     int32_t mNumbers[];
-    enum { baseIndex = kParamIndexNumber };
-    const static std::initializer_list<const C2FieldDescriptor> fieldList;
+    enum { CORE_INDEX = kParamIndexNumber };
+    const static std::initializer_list<const C2FieldDescriptor> FIELD_LIST;
     C2NumbersStruct() {}
 
     FLEX(C2NumbersStruct, mNumbers);
@@ -702,13 +758,13 @@
 
 typedef C2GlobalParam<C2Info, C2NumbersStruct> C2NumbersInfo;
 
-const std::initializer_list<const C2FieldDescriptor> C2NumbersStruct::fieldList =
+const std::initializer_list<const C2FieldDescriptor> C2NumbersStruct::FIELD_LIST =
 //    { { FD::INT32, 0, "widths" } };
     { C2FieldDescriptor(&((C2NumbersStruct*)(nullptr))->mNumbers, "number") };
 
 typedef C2PortParam<C2Tuning, C2NumberStruct> C2NumberConfig;
 
-std::list<const C2FieldDescriptor> myList = C2NumberConfig::fieldList;
+std::list<const C2FieldDescriptor> myList = C2NumberConfig::FIELD_LIST;
 
     std::unique_ptr<android::C2ParamDescriptor> __test_describe(uint32_t paramType) {
         std::list<const C2FieldDescriptor> fields = describeC2Params<C2NumberConfig>();
@@ -724,7 +780,7 @@
 
         C2Param::Index index(paramType);
         switch (paramType) {
-        case C2NumberConfig::baseIndex:
+        case C2NumberConfig::CORE_INDEX:
             return std::unique_ptr<C2ParamDescriptor>(new C2ParamDescriptor{
                 true /* isRequired */,
                 "number",
@@ -773,20 +829,22 @@
 
 struct _C2ParamInspector {
     static void StaticTest();
+    static void StaticFromBaseTest();
     static void StaticFlexTest();
+    static void StaticFlexFromBaseTest();
 };
 
 // TEST_F(_C2ParamInspector, StaticTest) {
 void _C2ParamInspector::StaticTest() {
     typedef C2Param::Index I;
 
-    // C2NumberStruct: baseIndex = kIndex                          (args)
-    static_assert(C2NumberStruct::baseIndex == kParamIndexNumber, "bad index");
+    // C2NumberStruct: CORE_INDEX = kIndex                          (args)
+    static_assert(C2NumberStruct::CORE_INDEX == kParamIndexNumber, "bad index");
     static_assert(sizeof(C2NumberStruct) == 4, "bad size");
 
     // C2NumberTuning:             kIndex | tun | global           (args)
-    static_assert(C2NumberTuning::baseIndex == kParamIndexNumber, "bad index");
-    static_assert(C2NumberTuning::typeIndex == (kParamIndexNumber | I::kTypeTuning | I::kDirGlobal), "bad index");
+    static_assert(C2NumberTuning::CORE_INDEX == kParamIndexNumber, "bad index");
+    static_assert(C2NumberTuning::PARAM_TYPE == (kParamIndexNumber | I::KIND_TUNING | I::DIR_GLOBAL), "bad index");
     static_assert(sizeof(C2NumberTuning) == 12, "bad size");
 
     static_assert(offsetof(C2NumberTuning, _mSize) == 0, "bad size");
@@ -797,14 +855,14 @@
     static_assert(sizeof(C2NumberPortTuning) == 12, "bad size");
     // C2NumberPortTuning::input:  kIndex | tun | port | input     (args)
     // C2NumberPortTuning::output: kIndex | tun | port | output    (args)
-    static_assert(C2NumberPortTuning::input::baseIndex ==
+    static_assert(C2NumberPortTuning::input::CORE_INDEX ==
                   kParamIndexNumber, "bad index");
-    static_assert(C2NumberPortTuning::input::typeIndex ==
-                  (kParamIndexNumber | I::kTypeTuning | I::kDirInput), "bad index");
-    static_assert(C2NumberPortTuning::output::baseIndex ==
+    static_assert(C2NumberPortTuning::input::PARAM_TYPE ==
+                  (kParamIndexNumber | I::KIND_TUNING | I::DIR_INPUT), "bad index");
+    static_assert(C2NumberPortTuning::output::CORE_INDEX ==
                   kParamIndexNumber, "bad index");
-    static_assert(C2NumberPortTuning::output::typeIndex ==
-                  (kParamIndexNumber | I::kTypeTuning | I::kDirOutput), "bad index");
+    static_assert(C2NumberPortTuning::output::PARAM_TYPE ==
+                  (kParamIndexNumber | I::KIND_TUNING | I::DIR_OUTPUT), "bad index");
     static_assert(sizeof(C2NumberPortTuning::input) == 12, "bad size");
     static_assert(sizeof(C2NumberPortTuning::output) == 12, "bad size");
     static_assert(offsetof(C2NumberPortTuning::input, _mSize) == 0, "bad size");
@@ -818,14 +876,14 @@
     static_assert(sizeof(C2NumberStreamTuning) == 12u, "bad size");
     // C2NumberStreamTuning::input kIndex | tun | str | input      (int, args)
     // C2NumberStreamTuning::output kIx   | tun | str | output     (int, args)
-    static_assert(C2NumberStreamTuning::input::baseIndex ==
+    static_assert(C2NumberStreamTuning::input::CORE_INDEX ==
                   kParamIndexNumber, "bad index");
-    static_assert(C2NumberStreamTuning::input::typeIndex ==
-                  (kParamIndexNumber | I::kTypeTuning | I::kDirInput | I::kStreamFlag), "bad index");
-    static_assert(C2NumberStreamTuning::output::baseIndex ==
+    static_assert(C2NumberStreamTuning::input::PARAM_TYPE ==
+                  (kParamIndexNumber | I::KIND_TUNING | I::DIR_INPUT | I::IS_STREAM_FLAG), "bad index");
+    static_assert(C2NumberStreamTuning::output::CORE_INDEX ==
                   kParamIndexNumber, "bad index");
-    static_assert(C2NumberStreamTuning::output::typeIndex ==
-                  (kParamIndexNumber | I::kTypeTuning | I::kDirOutput | I::kStreamFlag), "bad index");
+    static_assert(C2NumberStreamTuning::output::PARAM_TYPE ==
+                  (kParamIndexNumber | I::KIND_TUNING | I::DIR_OUTPUT | I::IS_STREAM_FLAG), "bad index");
     static_assert(sizeof(C2NumberStreamTuning::input) == 12u, "bad size");
     static_assert(sizeof(C2NumberStreamTuning::output) == 12u, "bad size");
     static_assert(offsetof(C2NumberStreamTuning::input, _mSize) == 0, "bad size");
@@ -836,16 +894,81 @@
     static_assert(offsetof(C2NumberStreamTuning::output, mNumber) == 8, "bad offset");
 }
 
+void _C2ParamInspector::StaticFromBaseTest() {
+    enum { kParamIndexMy = 3102 };
+    typedef C2NumberBaseStruct C2MyStruct;
+    typedef C2GlobalParam<C2Setting, C2MyStruct, kParamIndexMy> C2MySetting;
+    typedef   C2PortParam<C2Setting, C2MyStruct, kParamIndexMy> C2MyPortSetting;
+    typedef C2StreamParam<C2Setting, C2MyStruct, kParamIndexMy> C2MyStreamSetting;
+
+    typedef C2Param::Index I;
+
+    // C2MyStruct has no CORE_INDEX
+    //static_assert(C2MyStruct::CORE_INDEX == kParamIndexMy, "bad index");
+    static_assert(sizeof(C2MyStruct) == 4, "bad size");
+
+    // C2MySetting:             kIndex | set | global           (args)
+    static_assert(C2MySetting::CORE_INDEX == kParamIndexMy, "bad index");
+    static_assert(C2MySetting::PARAM_TYPE == (kParamIndexMy | I::KIND_SETTING | I::DIR_GLOBAL), "bad index");
+    static_assert(sizeof(C2MySetting) == 12, "bad size");
+
+    static_assert(offsetof(C2MySetting, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MySetting, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MySetting, mNumber) == 8, "bad offset");
+
+    // C2MyPortSetting:         kIndex | set | port             (bool, args)
+    static_assert(sizeof(C2MyPortSetting) == 12, "bad size");
+    // C2MyPortSetting::input:  kIndex | set | port | input     (args)
+    // C2MyPortSetting::output: kIndex | set | port | output    (args)
+    static_assert(C2MyPortSetting::input::CORE_INDEX ==
+                  kParamIndexMy, "bad index");
+    static_assert(C2MyPortSetting::input::PARAM_TYPE ==
+                  (kParamIndexMy | I::KIND_SETTING | I::DIR_INPUT), "bad index");
+    static_assert(C2MyPortSetting::output::CORE_INDEX ==
+                  kParamIndexMy, "bad index");
+    static_assert(C2MyPortSetting::output::PARAM_TYPE ==
+                  (kParamIndexMy | I::KIND_SETTING | I::DIR_OUTPUT), "bad index");
+    static_assert(sizeof(C2MyPortSetting::input) == 12, "bad size");
+    static_assert(sizeof(C2MyPortSetting::output) == 12, "bad size");
+    static_assert(offsetof(C2MyPortSetting::input, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MyPortSetting::input, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MyPortSetting::input, mNumber) == 8, "bad offset");
+    static_assert(offsetof(C2MyPortSetting::output, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MyPortSetting::output, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MyPortSetting::output, mNumber) == 8, "bad offset");
+
+    // C2MyStreamSetting:       kIndex | set | str              (bool, uint, args)
+    static_assert(sizeof(C2MyStreamSetting) == 12u, "bad size");
+    // C2MyStreamSetting::input kIndex | set | str | input      (int, args)
+    // C2MyStreamSetting::output kIx   | set | str | output     (int, args)
+    static_assert(C2MyStreamSetting::input::CORE_INDEX ==
+                  kParamIndexMy, "bad index");
+    static_assert(C2MyStreamSetting::input::PARAM_TYPE ==
+                  (kParamIndexMy | I::KIND_SETTING | I::DIR_INPUT | I::IS_STREAM_FLAG), "bad index");
+    static_assert(C2MyStreamSetting::output::CORE_INDEX ==
+                  kParamIndexMy, "bad index");
+    static_assert(C2MyStreamSetting::output::PARAM_TYPE ==
+                  (kParamIndexMy | I::KIND_SETTING | I::DIR_OUTPUT | I::IS_STREAM_FLAG), "bad index");
+    static_assert(sizeof(C2MyStreamSetting::input) == 12u, "bad size");
+    static_assert(sizeof(C2MyStreamSetting::output) == 12u, "bad size");
+    static_assert(offsetof(C2MyStreamSetting::input, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MyStreamSetting::input, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MyStreamSetting::input, mNumber) == 8, "bad offset");
+    static_assert(offsetof(C2MyStreamSetting::output, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MyStreamSetting::output, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MyStreamSetting::output, mNumber) == 8, "bad offset");
+}
+
 void _C2ParamInspector::StaticFlexTest() {
     typedef C2Param::Index I;
 
-    // C2NumbersStruct: baseIndex = kIndex                          (args)
-    static_assert(C2NumbersStruct::baseIndex == (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
+    // C2NumbersStruct: CORE_INDEX = kIndex                          (args)
+    static_assert(C2NumbersStruct::CORE_INDEX == (I::IS_FLEX_FLAG | kParamIndexNumbers), "bad index");
     static_assert(sizeof(C2NumbersStruct) == 0, "bad size");
 
     // C2NumbersTuning:             kIndex | tun | global           (args)
-    static_assert(C2NumbersTuning::baseIndex == (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
-    static_assert(C2NumbersTuning::typeIndex == (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirGlobal), "bad index");
+    static_assert(C2NumbersTuning::CORE_INDEX == (I::IS_FLEX_FLAG | kParamIndexNumbers), "bad index");
+    static_assert(C2NumbersTuning::PARAM_TYPE == (I::IS_FLEX_FLAG | kParamIndexNumbers | I::KIND_TUNING | I::DIR_GLOBAL), "bad index");
     static_assert(sizeof(C2NumbersTuning) == 8, "bad size");
 
     static_assert(offsetof(C2NumbersTuning, _mSize) == 0, "bad size");
@@ -856,14 +979,14 @@
     static_assert(sizeof(C2NumbersPortTuning) == 8, "bad size");
     // C2NumbersPortTuning::input:  kIndex | tun | port | input     (args)
     // C2NumbersPortTuning::output: kIndex | tun | port | output    (args)
-    static_assert(C2NumbersPortTuning::input::baseIndex ==
-                  (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
-    static_assert(C2NumbersPortTuning::input::typeIndex ==
-                  (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirInput), "bad index");
-    static_assert(C2NumbersPortTuning::output::baseIndex ==
-                  (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
-    static_assert(C2NumbersPortTuning::output::typeIndex ==
-                  (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirOutput), "bad index");
+    static_assert(C2NumbersPortTuning::input::CORE_INDEX ==
+                  (I::IS_FLEX_FLAG | kParamIndexNumbers), "bad index");
+    static_assert(C2NumbersPortTuning::input::PARAM_TYPE ==
+                  (I::IS_FLEX_FLAG | kParamIndexNumbers | I::KIND_TUNING | I::DIR_INPUT), "bad index");
+    static_assert(C2NumbersPortTuning::output::CORE_INDEX ==
+                  (I::IS_FLEX_FLAG | kParamIndexNumbers), "bad index");
+    static_assert(C2NumbersPortTuning::output::PARAM_TYPE ==
+                  (I::IS_FLEX_FLAG | kParamIndexNumbers | I::KIND_TUNING | I::DIR_OUTPUT), "bad index");
     static_assert(sizeof(C2NumbersPortTuning::input) == 8, "bad size");
     static_assert(sizeof(C2NumbersPortTuning::output) == 8, "bad size");
     static_assert(offsetof(C2NumbersPortTuning::input, _mSize) == 0, "bad size");
@@ -877,14 +1000,14 @@
     static_assert(sizeof(C2NumbersStreamTuning) == 8, "bad size");
     // C2NumbersStreamTuning::input kIndex | tun | str | input      (int, args)
     // C2NumbersStreamTuning::output kIx   | tun | str | output     (int, args)
-    static_assert(C2NumbersStreamTuning::input::baseIndex ==
-                  (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
-    static_assert(C2NumbersStreamTuning::input::typeIndex ==
-                  (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirInput | I::kStreamFlag), "bad index");
-    static_assert(C2NumbersStreamTuning::output::baseIndex ==
-                  (I::kFlexibleFlag | kParamIndexNumbers), "bad index");
-    static_assert(C2NumbersStreamTuning::output::typeIndex ==
-                  (I::kFlexibleFlag | kParamIndexNumbers | I::kTypeTuning | I::kDirOutput | I::kStreamFlag), "bad index");
+    static_assert(C2NumbersStreamTuning::input::CORE_INDEX ==
+                  (I::IS_FLEX_FLAG | kParamIndexNumbers), "bad index");
+    static_assert(C2NumbersStreamTuning::input::PARAM_TYPE ==
+                  (I::IS_FLEX_FLAG | kParamIndexNumbers | I::KIND_TUNING | I::DIR_INPUT | I::IS_STREAM_FLAG), "bad index");
+    static_assert(C2NumbersStreamTuning::output::CORE_INDEX ==
+                  (I::IS_FLEX_FLAG | kParamIndexNumbers), "bad index");
+    static_assert(C2NumbersStreamTuning::output::PARAM_TYPE ==
+                  (I::IS_FLEX_FLAG | kParamIndexNumbers | I::KIND_TUNING | I::DIR_OUTPUT | I::IS_STREAM_FLAG), "bad index");
     static_assert(sizeof(C2NumbersStreamTuning::input) == 8, "bad size");
     static_assert(sizeof(C2NumbersStreamTuning::output) == 8, "bad size");
     static_assert(offsetof(C2NumbersStreamTuning::input, _mSize) == 0, "bad size");
@@ -895,6 +1018,80 @@
     static_assert(offsetof(C2NumbersStreamTuning::output, m.mNumbers) == 8, "bad offset");
 }
 
+template<bool, unsigned ...N>
+struct _print_as_warning { };
+
+template<unsigned ...N>
+struct _print_as_warning<true, N...> : std::true_type { };
+
+#define static_assert_equals(a, b, msg) \
+static_assert(_print_as_warning<(a) == (b), a, b>::value, msg)
+
+void _C2ParamInspector::StaticFlexFromBaseTest() {
+    enum { kParamIndexMy = 1203 };
+    typedef C2TestBaseFlexEndSizeStruct C2MyStruct;
+    typedef C2GlobalParam<C2Info, C2MyStruct, kParamIndexMy> C2MyInfo;
+    typedef   C2PortParam<C2Info, C2MyStruct, kParamIndexMy> C2MyPortInfo;
+    typedef C2StreamParam<C2Info, C2MyStruct, kParamIndexMy> C2MyStreamInfo;
+
+    typedef C2Param::Index I;
+
+    // C2MyStruct has no CORE_INDEX
+    //static_assert(C2MyStruct::CORE_INDEX == (I::IS_FLEX_FLAG | kParamIndexMy), "bad index");
+    static_assert(sizeof(C2MyStruct) == 4, "bad size");
+
+    // C2MyInfo:             kIndex | inf | global           (args)
+    static_assert_equals(C2MyInfo::CORE_INDEX, (I::IS_FLEX_FLAG | kParamIndexMy), "bad index");
+    static_assert_equals(C2MyInfo::PARAM_TYPE, (I::IS_FLEX_FLAG | kParamIndexMy | I::KIND_INFO | I::DIR_GLOBAL), "bad index");
+    static_assert(sizeof(C2MyInfo) == 12, "bad size");
+
+    static_assert(offsetof(C2MyInfo, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MyInfo, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MyInfo, m.signed32) == 8, "bad offset");
+
+    // C2MyPortInfo:         kIndex | inf | port             (bool, args)
+    static_assert(sizeof(C2MyPortInfo) == 12, "bad size");
+    // C2MyPortInfo::input:  kIndex | inf | port | input     (args)
+    // C2MyPortInfo::output: kIndex | inf | port | output    (args)
+    static_assert(C2MyPortInfo::input::CORE_INDEX ==
+                  (I::IS_FLEX_FLAG | kParamIndexMy), "bad index");
+    static_assert(C2MyPortInfo::input::PARAM_TYPE ==
+                  (I::IS_FLEX_FLAG | kParamIndexMy | I::KIND_INFO | I::DIR_INPUT), "bad index");
+    static_assert(C2MyPortInfo::output::CORE_INDEX ==
+                  (I::IS_FLEX_FLAG | kParamIndexMy), "bad index");
+    static_assert(C2MyPortInfo::output::PARAM_TYPE ==
+                  (I::IS_FLEX_FLAG | kParamIndexMy | I::KIND_INFO | I::DIR_OUTPUT), "bad index");
+    static_assert(sizeof(C2MyPortInfo::input) == 12, "bad size");
+    static_assert(sizeof(C2MyPortInfo::output) == 12, "bad size");
+    static_assert(offsetof(C2MyPortInfo::input, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MyPortInfo::input, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MyPortInfo::input, m.signed32) == 8, "bad offset");
+    static_assert(offsetof(C2MyPortInfo::output, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MyPortInfo::output, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MyPortInfo::output, m.signed32) == 8, "bad offset");
+
+    // C2MyStreamInfo:       kIndex | inf | str              (bool, uint, args)
+    static_assert(sizeof(C2MyStreamInfo) == 12, "bad size");
+    // C2MyStreamInfo::input kIndex | inf | str | input      (int, args)
+    // C2MyStreamInfo::output kIx   | inf | str | output     (int, args)
+    static_assert(C2MyStreamInfo::input::CORE_INDEX ==
+                  (I::IS_FLEX_FLAG | kParamIndexMy), "bad index");
+    static_assert(C2MyStreamInfo::input::PARAM_TYPE ==
+                  (I::IS_FLEX_FLAG | kParamIndexMy | I::KIND_INFO | I::DIR_INPUT | I::IS_STREAM_FLAG), "bad index");
+    static_assert(C2MyStreamInfo::output::CORE_INDEX ==
+                  (I::IS_FLEX_FLAG | kParamIndexMy), "bad index");
+    static_assert(C2MyStreamInfo::output::PARAM_TYPE ==
+                  (I::IS_FLEX_FLAG | kParamIndexMy | I::KIND_INFO | I::DIR_OUTPUT | I::IS_STREAM_FLAG), "bad index");
+    static_assert(sizeof(C2MyStreamInfo::input) == 12, "bad size");
+    static_assert(sizeof(C2MyStreamInfo::output) == 12, "bad size");
+    static_assert(offsetof(C2MyStreamInfo::input, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MyStreamInfo::input, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MyStreamInfo::input, m.signed32) == 8, "bad offset");
+    static_assert(offsetof(C2MyStreamInfo::output, _mSize) == 0, "bad size");
+    static_assert(offsetof(C2MyStreamInfo::output, _mIndex) == 4, "bad offset");
+    static_assert(offsetof(C2MyStreamInfo::output, m.signed32) == 8, "bad offset");
+}
+
 TEST_F(C2ParamTest, ParamOpsTest) {
     const C2NumberStruct str(100);
     C2NumberStruct bstr;
@@ -903,17 +1100,27 @@
         EXPECT_EQ(100, str.mNumber);
         bstr.mNumber = 100;
 
-        C2Param::BaseIndex index = C2NumberStruct::baseIndex;
+        C2Param::CoreIndex index = C2NumberStruct::CORE_INDEX;
         EXPECT_FALSE(index.isVendor());
         EXPECT_FALSE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumber);
     }
 
     const C2NumberTuning tun(100);
     C2NumberTuning btun;
 
     {
+        C2NumberInfo inf(100);
+        std::unique_ptr<C2NumbersTuning> tun_ = C2NumbersTuning::alloc_unique(1);
+
+        EXPECT_EQ(tun.coreIndex(), inf.coreIndex());
+        EXPECT_NE(tun.coreIndex(), tun_->coreIndex());
+        EXPECT_NE(tun.type(), inf.type());
+        EXPECT_NE(tun.type(), tun_->type());
+    }
+
+    {
         // flags & invariables
         for (const auto &p : { tun, btun }) {
             EXPECT_TRUE((bool)p);
@@ -940,18 +1147,18 @@
         EXPECT_EQ(tun, btun);
 
         // index
-        EXPECT_EQ(C2Param::Type(tun.type()).baseIndex(), C2NumberStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(tun.type()).paramIndex(), kParamIndexNumber);
-        EXPECT_EQ(tun.type(), C2NumberTuning::typeIndex);
+        EXPECT_EQ(C2Param::Type(tun.type()).coreIndex(), C2NumberStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(tun.type()).typeIndex(), kParamIndexNumber);
+        EXPECT_EQ(tun.type(), C2NumberTuning::PARAM_TYPE);
         EXPECT_EQ(tun.stream(), ~0u);
 
-        C2Param::BaseIndex index = C2NumberTuning::baseIndex;
+        C2Param::CoreIndex index = C2NumberTuning::CORE_INDEX;
         EXPECT_FALSE(index.isVendor());
         EXPECT_FALSE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumber);
 
-        C2Param::Type type = C2NumberTuning::typeIndex;
+        C2Param::Type type = C2NumberTuning::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_FALSE(type.isFlexible());
         EXPECT_TRUE(type.isGlobal());
@@ -968,6 +1175,10 @@
         EXPECT_EQ(C2NumberStreamTuning::From(&tun), nullptr);
         EXPECT_EQ(C2NumberStreamTuning::input::From(&tun), nullptr);
         EXPECT_EQ(C2NumberStreamTuning::output::From(&tun), nullptr);
+
+        EXPECT_EQ(*(C2Param::Copy(btun)), btun);
+        btun.invalidate();
+        EXPECT_FALSE(C2Param::Copy(btun));
     }
 
     const C2NumberPortTuning outp1(true, 100), inp1(false, 100);
@@ -977,6 +1188,22 @@
     const C2NumberPortTuning::output outp2(100);
     C2NumberPortTuning::output boutp2;
 
+    EXPECT_EQ(inp1.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(outp1.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(binp1.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(boutp1.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(inp2.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(outp2.coreIndex(), tun.coreIndex());
+
+    EXPECT_EQ(inp1.type(), inp2.type());
+    EXPECT_EQ(outp1.type(), outp2.type());
+    EXPECT_NE(inp1.type(), outp1.type());
+    EXPECT_NE(inp2.type(), outp2.type());
+    EXPECT_NE(inp1.type(), binp1.type());
+    EXPECT_NE(outp1.type(), boutp1.type());
+    EXPECT_NE(inp1.type(), tun.type());
+    EXPECT_NE(inp2.type(), tun.type());
+
     {
         static_assert(canCallSetPort(binp3), "should be able to");
         static_assert(canCallSetPort(binp1), "should be able to");
@@ -1090,39 +1317,39 @@
         EXPECT_TRUE(inp1 == boutp1);
 
         // index
-        EXPECT_EQ(C2Param::Type(inp1.type()).baseIndex(), C2NumberStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(inp1.type()).paramIndex(), kParamIndexNumber);
-        EXPECT_EQ(inp1.type(), C2NumberPortTuning::input::typeIndex);
+        EXPECT_EQ(C2Param::Type(inp1.type()).coreIndex(), C2NumberStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(inp1.type()).typeIndex(), kParamIndexNumber);
+        EXPECT_EQ(inp1.type(), C2NumberPortTuning::input::PARAM_TYPE);
         EXPECT_EQ(inp1.stream(), ~0u);
 
-        EXPECT_EQ(C2Param::Type(inp2.type()).baseIndex(), C2NumberStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(inp2.type()).paramIndex(), kParamIndexNumber);
-        EXPECT_EQ(inp2.type(), C2NumberPortTuning::input::typeIndex);
+        EXPECT_EQ(C2Param::Type(inp2.type()).coreIndex(), C2NumberStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(inp2.type()).typeIndex(), kParamIndexNumber);
+        EXPECT_EQ(inp2.type(), C2NumberPortTuning::input::PARAM_TYPE);
         EXPECT_EQ(inp2.stream(), ~0u);
 
-        EXPECT_EQ(C2Param::Type(outp1.type()).baseIndex(), C2NumberStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(outp1.type()).paramIndex(), kParamIndexNumber);
-        EXPECT_EQ(outp1.type(), C2NumberPortTuning::output::typeIndex);
+        EXPECT_EQ(C2Param::Type(outp1.type()).coreIndex(), C2NumberStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(outp1.type()).typeIndex(), kParamIndexNumber);
+        EXPECT_EQ(outp1.type(), C2NumberPortTuning::output::PARAM_TYPE);
         EXPECT_EQ(outp1.stream(), ~0u);
 
-        EXPECT_EQ(C2Param::Type(outp2.type()).baseIndex(), C2NumberStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(outp2.type()).paramIndex(), kParamIndexNumber);
-        EXPECT_EQ(outp2.type(), C2NumberPortTuning::output::typeIndex);
+        EXPECT_EQ(C2Param::Type(outp2.type()).coreIndex(), C2NumberStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(outp2.type()).typeIndex(), kParamIndexNumber);
+        EXPECT_EQ(outp2.type(), C2NumberPortTuning::output::PARAM_TYPE);
         EXPECT_EQ(outp2.stream(), ~0u);
 
-        C2Param::BaseIndex index = C2NumberPortTuning::input::typeIndex;
+        C2Param::CoreIndex index = C2NumberPortTuning::input::PARAM_TYPE;
         EXPECT_FALSE(index.isVendor());
         EXPECT_FALSE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumber);
 
-        index = C2NumberPortTuning::output::typeIndex;
+        index = C2NumberPortTuning::output::PARAM_TYPE;
         EXPECT_FALSE(index.isVendor());
         EXPECT_FALSE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumber);
 
-        C2Param::Type type = C2NumberPortTuning::input::typeIndex;
+        C2Param::Type type = C2NumberPortTuning::input::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_FALSE(type.isFlexible());
         EXPECT_FALSE(type.isGlobal());
@@ -1131,7 +1358,7 @@
         EXPECT_FALSE(type.forStream());
         EXPECT_TRUE(type.forPort());
 
-        type = C2NumberPortTuning::output::typeIndex;
+        type = C2NumberPortTuning::output::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_FALSE(type.isFlexible());
         EXPECT_FALSE(type.isGlobal());
@@ -1171,6 +1398,11 @@
         EXPECT_EQ(C2NumberStreamTuning::output::From(&inp2), nullptr);
         EXPECT_EQ(C2NumberStreamTuning::output::From(&outp1), nullptr);
         EXPECT_EQ(C2NumberStreamTuning::output::From(&outp2), nullptr);
+
+        EXPECT_EQ(*(C2Param::Copy(inp1)), inp1);
+        EXPECT_EQ(*(C2Param::Copy(inp2)), inp2);
+        EXPECT_EQ(*(C2Param::Copy(outp1)), outp1);
+        EXPECT_EQ(*(C2Param::Copy(outp2)), outp2);
     }
 
     const C2NumberStreamTuning outs1(true, 1u, 100), ins1(false, 1u, 100);
@@ -1180,6 +1412,24 @@
     const C2NumberStreamTuning::output outs2(1u, 100);
     C2NumberStreamTuning::output bouts2;
 
+    EXPECT_EQ(ins1.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(outs1.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(bins1.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(bouts1.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(ins2.coreIndex(), tun.coreIndex());
+    EXPECT_EQ(outs2.coreIndex(), tun.coreIndex());
+
+    EXPECT_EQ(ins1.type(), ins2.type());
+    EXPECT_EQ(ins1.type(), bins2.type());
+    EXPECT_EQ(outs1.type(), outs2.type());
+    EXPECT_EQ(outs1.type(), bouts2.type());
+    EXPECT_NE(ins1.type(), outs1.type());
+    EXPECT_NE(ins2.type(), outs2.type());
+    EXPECT_NE(ins1.type(), bins1.type());
+    EXPECT_NE(outs1.type(), bouts1.type());
+    EXPECT_NE(ins1.type(), tun.type());
+    EXPECT_NE(ins2.type(), tun.type());
+
     {
         static_assert(canCallSetPort(bins3), "should be able to");
         static_assert(canCallSetPort(bins1), "should be able to");
@@ -1305,35 +1555,35 @@
         EXPECT_TRUE(ins1 == bouts1);
 
         // index
-        EXPECT_EQ(C2Param::Type(ins1.type()).baseIndex(), C2NumberStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(ins1.type()).paramIndex(), kParamIndexNumber);
-        EXPECT_EQ(ins1.type(), C2NumberStreamTuning::input::typeIndex);
+        EXPECT_EQ(C2Param::Type(ins1.type()).coreIndex(), C2NumberStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(ins1.type()).typeIndex(), kParamIndexNumber);
+        EXPECT_EQ(ins1.type(), C2NumberStreamTuning::input::PARAM_TYPE);
 
-        EXPECT_EQ(C2Param::Type(ins2.type()).baseIndex(), C2NumberStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(ins2.type()).paramIndex(), kParamIndexNumber);
-        EXPECT_EQ(ins2.type(), C2NumberStreamTuning::input::typeIndex);
+        EXPECT_EQ(C2Param::Type(ins2.type()).coreIndex(), C2NumberStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(ins2.type()).typeIndex(), kParamIndexNumber);
+        EXPECT_EQ(ins2.type(), C2NumberStreamTuning::input::PARAM_TYPE);
 
-        EXPECT_EQ(C2Param::Type(outs1.type()).baseIndex(), C2NumberStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(outs1.type()).paramIndex(), kParamIndexNumber);
-        EXPECT_EQ(outs1.type(), C2NumberStreamTuning::output::typeIndex);
+        EXPECT_EQ(C2Param::Type(outs1.type()).coreIndex(), C2NumberStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(outs1.type()).typeIndex(), kParamIndexNumber);
+        EXPECT_EQ(outs1.type(), C2NumberStreamTuning::output::PARAM_TYPE);
 
-        EXPECT_EQ(C2Param::Type(outs2.type()).baseIndex(), C2NumberStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(outs2.type()).paramIndex(), kParamIndexNumber);
-        EXPECT_EQ(outs2.type(), C2NumberStreamTuning::output::typeIndex);
+        EXPECT_EQ(C2Param::Type(outs2.type()).coreIndex(), C2NumberStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(outs2.type()).typeIndex(), kParamIndexNumber);
+        EXPECT_EQ(outs2.type(), C2NumberStreamTuning::output::PARAM_TYPE);
 
-        C2Param::BaseIndex index = C2NumberStreamTuning::input::typeIndex;
+        C2Param::CoreIndex index = C2NumberStreamTuning::input::PARAM_TYPE;
         EXPECT_FALSE(index.isVendor());
         EXPECT_FALSE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumber);
 
-        index = C2NumberStreamTuning::output::typeIndex;
+        index = C2NumberStreamTuning::output::PARAM_TYPE;
         EXPECT_FALSE(index.isVendor());
         EXPECT_FALSE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumber);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumber);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumber);
 
-        C2Param::Type type = C2NumberStreamTuning::input::typeIndex;
+        C2Param::Type type = C2NumberStreamTuning::input::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_FALSE(type.isFlexible());
         EXPECT_FALSE(type.isGlobal());
@@ -1342,7 +1592,7 @@
         EXPECT_TRUE(type.forStream());
         EXPECT_FALSE(type.forPort());
 
-        type = C2NumberStreamTuning::output::typeIndex;
+        type = C2NumberStreamTuning::output::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_FALSE(type.isFlexible());
         EXPECT_FALSE(type.isGlobal());
@@ -1383,14 +1633,18 @@
         EXPECT_EQ(C2NumberStreamTuning::output::From(&outs1), (C2NumberStreamTuning::output*)&outs1);
         EXPECT_EQ(C2NumberStreamTuning::output::From(&outs2), &outs2);
 
+        EXPECT_EQ(*(C2Param::Copy(ins1)), ins1);
+        EXPECT_EQ(*(C2Param::Copy(ins2)), ins2);
+        EXPECT_EQ(*(C2Param::Copy(outs1)), outs1);
+        EXPECT_EQ(*(C2Param::Copy(outs2)), outs2);
     }
 
     {
-        uint32_t videoWidth[] = { 12u, C2NumberStreamTuning::output::typeIndex, 100 };
+        uint32_t videoWidth[] = { 12u, C2NumberStreamTuning::output::PARAM_TYPE, 100 };
         C2Param *p1 = C2Param::From(videoWidth, sizeof(videoWidth));
         EXPECT_NE(p1, nullptr);
         EXPECT_EQ(12u, p1->size());
-        EXPECT_EQ(p1->type(), C2NumberStreamTuning::output::typeIndex);
+        EXPECT_EQ(p1->type(), C2NumberStreamTuning::output::PARAM_TYPE);
 
         p1 = C2Param::From(videoWidth, sizeof(videoWidth) + 2);
         EXPECT_EQ(p1, nullptr);
@@ -1406,11 +1660,11 @@
     }
 }
 
-void StaticTestAddBaseIndex() {
+void StaticTestAddCoreIndex() {
     struct nobase {};
-    struct base { enum : uint32_t { baseIndex = 1 }; };
-    static_assert(C2AddBaseIndex<nobase, 2>::baseIndex == 2, "should be 2");
-    static_assert(C2AddBaseIndex<base, 1>::baseIndex == 1, "should be 1");
+    struct base { enum : uint32_t { CORE_INDEX = 1 }; };
+    static_assert(C2AddCoreIndex<nobase, 2>::CORE_INDEX == 2, "should be 2");
+    static_assert(C2AddCoreIndex<base, 1>::CORE_INDEX == 1, "should be 1");
 }
 
 class TestFlexHelper {
@@ -1432,11 +1686,11 @@
 
 
     static void StaticTest() {
-        static_assert(std::is_same<_C2FlexHelper<char>::flexType, void>::value, "should be void");
-        static_assert(std::is_same<_C2FlexHelper<char[]>::flexType, char>::value, "should be char");
-        static_assert(std::is_same<_C2FlexHelper<_Flex>::flexType, char>::value, "should be char");
+        static_assert(std::is_same<_C2FlexHelper<char>::FlexType, void>::value, "should be void");
+        static_assert(std::is_same<_C2FlexHelper<char[]>::FlexType, char>::value, "should be char");
+        static_assert(std::is_same<_C2FlexHelper<_Flex>::FlexType, char>::value, "should be char");
 
-        static_assert(std::is_same<_C2FlexHelper<_BoFlex>::flexType, char>::value, "should be void");
+        static_assert(std::is_same<_C2FlexHelper<_BoFlex>::FlexType, char>::value, "should be char");
 
         static_assert(_C2Flexible<_Flex>::value, "should be flexible");
         static_assert(!_C2Flexible<_NonFlex>::value, "should not be flexible");
@@ -1450,11 +1704,11 @@
 //        EXPECT_EQ(100, str->m.mNumbers[0]);
         (void)&bstr.mNumbers[0];
 
-        C2Param::BaseIndex index = C2NumbersStruct::baseIndex;
+        C2Param::CoreIndex index = C2NumbersStruct::CORE_INDEX;
         EXPECT_FALSE(index.isVendor());
         EXPECT_TRUE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumbers | C2Param::CoreIndex::IS_FLEX_FLAG);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumbers);
     }
 
     std::unique_ptr<C2NumbersTuning> tun_ = C2NumbersTuning::alloc_unique(1);
@@ -1490,18 +1744,18 @@
         EXPECT_EQ(*tun, *btun);
 
         // index
-        EXPECT_EQ(C2Param::Type(tun->type()).baseIndex(), C2NumbersStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(tun->type()).paramIndex(), kParamIndexNumbers);
-        EXPECT_EQ(tun->type(), C2NumbersTuning::typeIndex);
+        EXPECT_EQ(C2Param::Type(tun->type()).coreIndex(), C2NumbersStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(tun->type()).typeIndex(), kParamIndexNumbers);
+        EXPECT_EQ(tun->type(), C2NumbersTuning::PARAM_TYPE);
         EXPECT_EQ(tun->stream(), ~0u);
 
-        C2Param::BaseIndex index = C2NumbersTuning::baseIndex;
+        C2Param::CoreIndex index = C2NumbersTuning::CORE_INDEX;
         EXPECT_FALSE(index.isVendor());
         EXPECT_TRUE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumbers | C2Param::CoreIndex::IS_FLEX_FLAG);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumbers);
 
-        C2Param::Type type = C2NumbersTuning::typeIndex;
+        C2Param::Type type = C2NumbersTuning::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_TRUE(type.isFlexible());
         EXPECT_TRUE(type.isGlobal());
@@ -1518,6 +1772,8 @@
         EXPECT_EQ(C2NumbersStreamTuning::From(tun.get()), nullptr);
         EXPECT_EQ(C2NumbersStreamTuning::input::From(tun.get()), nullptr);
         EXPECT_EQ(C2NumbersStreamTuning::output::From(tun.get()), nullptr);
+
+        EXPECT_EQ(*(C2Param::Copy(*tun)), *tun);
     }
 
     std::unique_ptr<C2NumbersPortTuning> outp1_(C2NumbersPortTuning::alloc_unique(1, true)),
@@ -1657,39 +1913,39 @@
         EXPECT_TRUE(*inp1 == *boutp1);
 
         // index
-        EXPECT_EQ(C2Param::Type(inp1->type()).baseIndex(), C2NumbersStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(inp1->type()).paramIndex(), kParamIndexNumbers);
-        EXPECT_EQ(inp1->type(), C2NumbersPortTuning::input::typeIndex);
+        EXPECT_EQ(C2Param::Type(inp1->type()).coreIndex(), C2NumbersStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(inp1->type()).typeIndex(), kParamIndexNumbers);
+        EXPECT_EQ(inp1->type(), C2NumbersPortTuning::input::PARAM_TYPE);
         EXPECT_EQ(inp1->stream(), ~0u);
 
-        EXPECT_EQ(C2Param::Type(inp2->type()).baseIndex(), C2NumbersStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(inp2->type()).paramIndex(), kParamIndexNumbers);
-        EXPECT_EQ(inp2->type(), C2NumbersPortTuning::input::typeIndex);
+        EXPECT_EQ(C2Param::Type(inp2->type()).coreIndex(), C2NumbersStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(inp2->type()).typeIndex(), kParamIndexNumbers);
+        EXPECT_EQ(inp2->type(), C2NumbersPortTuning::input::PARAM_TYPE);
         EXPECT_EQ(inp2->stream(), ~0u);
 
-        EXPECT_EQ(C2Param::Type(outp1->type()).baseIndex(), C2NumbersStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(outp1->type()).paramIndex(), kParamIndexNumbers);
-        EXPECT_EQ(outp1->type(), C2NumbersPortTuning::output::typeIndex);
+        EXPECT_EQ(C2Param::Type(outp1->type()).coreIndex(), C2NumbersStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(outp1->type()).typeIndex(), kParamIndexNumbers);
+        EXPECT_EQ(outp1->type(), C2NumbersPortTuning::output::PARAM_TYPE);
         EXPECT_EQ(outp1->stream(), ~0u);
 
-        EXPECT_EQ(C2Param::Type(outp2->type()).baseIndex(), C2NumbersStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(outp2->type()).paramIndex(), kParamIndexNumbers);
-        EXPECT_EQ(outp2->type(), C2NumbersPortTuning::output::typeIndex);
+        EXPECT_EQ(C2Param::Type(outp2->type()).coreIndex(), C2NumbersStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(outp2->type()).typeIndex(), kParamIndexNumbers);
+        EXPECT_EQ(outp2->type(), C2NumbersPortTuning::output::PARAM_TYPE);
         EXPECT_EQ(outp2->stream(), ~0u);
 
-        C2Param::BaseIndex index = C2NumbersPortTuning::input::typeIndex;
+        C2Param::CoreIndex index = C2NumbersPortTuning::input::PARAM_TYPE;
         EXPECT_FALSE(index.isVendor());
         EXPECT_TRUE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumbers | C2Param::CoreIndex::IS_FLEX_FLAG);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumbers);
 
-        index = C2NumbersPortTuning::output::typeIndex;
+        index = C2NumbersPortTuning::output::PARAM_TYPE;
         EXPECT_FALSE(index.isVendor());
         EXPECT_TRUE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumbers | C2Param::CoreIndex::IS_FLEX_FLAG);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumbers);
 
-        C2Param::Type type = C2NumbersPortTuning::input::typeIndex;
+        C2Param::Type type = C2NumbersPortTuning::input::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_TRUE(type.isFlexible());
         EXPECT_FALSE(type.isGlobal());
@@ -1698,7 +1954,7 @@
         EXPECT_FALSE(type.forStream());
         EXPECT_TRUE(type.forPort());
 
-        type = C2NumbersPortTuning::output::typeIndex;
+        type = C2NumbersPortTuning::output::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_TRUE(type.isFlexible());
         EXPECT_FALSE(type.isGlobal());
@@ -1739,6 +1995,10 @@
         EXPECT_EQ(C2NumbersStreamTuning::output::From(outp1.get()), nullptr);
         EXPECT_EQ(C2NumbersStreamTuning::output::From(outp2.get()), nullptr);
 
+        EXPECT_EQ(*(C2Param::Copy(*inp1)), *inp1);
+        EXPECT_EQ(*(C2Param::Copy(*inp2)), *inp2);
+        EXPECT_EQ(*(C2Param::Copy(*outp1)), *outp1);
+        EXPECT_EQ(*(C2Param::Copy(*outp2)), *outp2);
     }
 
     std::unique_ptr<C2NumbersStreamTuning> outs1_(C2NumbersStreamTuning::alloc_unique(1, true, 1u));
@@ -1890,35 +2150,35 @@
         EXPECT_TRUE(*ins1 == *bouts1);
 
         // index
-        EXPECT_EQ(C2Param::Type(ins1->type()).baseIndex(), C2NumbersStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(ins1->type()).paramIndex(), kParamIndexNumbers);
-        EXPECT_EQ(ins1->type(), C2NumbersStreamTuning::input::typeIndex);
+        EXPECT_EQ(C2Param::Type(ins1->type()).coreIndex(), C2NumbersStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(ins1->type()).typeIndex(), kParamIndexNumbers);
+        EXPECT_EQ(ins1->type(), C2NumbersStreamTuning::input::PARAM_TYPE);
 
-        EXPECT_EQ(C2Param::Type(ins2->type()).baseIndex(), C2NumbersStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(ins2->type()).paramIndex(), kParamIndexNumbers);
-        EXPECT_EQ(ins2->type(), C2NumbersStreamTuning::input::typeIndex);
+        EXPECT_EQ(C2Param::Type(ins2->type()).coreIndex(), C2NumbersStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(ins2->type()).typeIndex(), kParamIndexNumbers);
+        EXPECT_EQ(ins2->type(), C2NumbersStreamTuning::input::PARAM_TYPE);
 
-        EXPECT_EQ(C2Param::Type(outs1->type()).baseIndex(), C2NumbersStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(outs1->type()).paramIndex(), kParamIndexNumbers);
-        EXPECT_EQ(outs1->type(), C2NumbersStreamTuning::output::typeIndex);
+        EXPECT_EQ(C2Param::Type(outs1->type()).coreIndex(), C2NumbersStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(outs1->type()).typeIndex(), kParamIndexNumbers);
+        EXPECT_EQ(outs1->type(), C2NumbersStreamTuning::output::PARAM_TYPE);
 
-        EXPECT_EQ(C2Param::Type(outs2->type()).baseIndex(), C2NumbersStruct::baseIndex);
-        EXPECT_EQ(C2Param::Type(outs2->type()).paramIndex(), kParamIndexNumbers);
-        EXPECT_EQ(outs2->type(), C2NumbersStreamTuning::output::typeIndex);
+        EXPECT_EQ(C2Param::Type(outs2->type()).coreIndex(), C2NumbersStruct::CORE_INDEX);
+        EXPECT_EQ(C2Param::Type(outs2->type()).typeIndex(), kParamIndexNumbers);
+        EXPECT_EQ(outs2->type(), C2NumbersStreamTuning::output::PARAM_TYPE);
 
-        C2Param::BaseIndex index = C2NumbersStreamTuning::input::typeIndex;
+        C2Param::CoreIndex index = C2NumbersStreamTuning::input::PARAM_TYPE;
         EXPECT_FALSE(index.isVendor());
         EXPECT_TRUE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumbers | C2Param::CoreIndex::IS_FLEX_FLAG);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumbers);
 
-        index = C2NumbersStreamTuning::output::typeIndex;
+        index = C2NumbersStreamTuning::output::PARAM_TYPE;
         EXPECT_FALSE(index.isVendor());
         EXPECT_TRUE(index.isFlexible());
-        EXPECT_EQ(index.baseIndex(), kParamIndexNumbers | C2Param::BaseIndex::_kFlexibleFlag);
-        EXPECT_EQ(index.paramIndex(), kParamIndexNumbers);
+        EXPECT_EQ(index.coreIndex(), kParamIndexNumbers | C2Param::CoreIndex::IS_FLEX_FLAG);
+        EXPECT_EQ(index.typeIndex(), kParamIndexNumbers);
 
-        C2Param::Type type = C2NumbersStreamTuning::input::typeIndex;
+        C2Param::Type type = C2NumbersStreamTuning::input::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_TRUE(type.isFlexible());
         EXPECT_FALSE(type.isGlobal());
@@ -1927,7 +2187,7 @@
         EXPECT_TRUE(type.forStream());
         EXPECT_FALSE(type.forPort());
 
-        type = C2NumbersStreamTuning::output::typeIndex;
+        type = C2NumbersStreamTuning::output::PARAM_TYPE;
         EXPECT_FALSE(type.isVendor());
         EXPECT_TRUE(type.isFlexible());
         EXPECT_FALSE(type.isGlobal());
@@ -1968,13 +2228,17 @@
         EXPECT_EQ(C2NumbersStreamTuning::output::From(outs1.get()), (C2NumbersStreamTuning::output*)outs1.get());
         EXPECT_EQ(C2NumbersStreamTuning::output::From(outs2.get()), outs2.get());
 
+        EXPECT_EQ(*(C2Param::Copy(*ins1)), *ins1);
+        EXPECT_EQ(*(C2Param::Copy(*ins2)), *ins2);
+        EXPECT_EQ(*(C2Param::Copy(*outs1)), *outs1);
+        EXPECT_EQ(*(C2Param::Copy(*outs2)), *outs2);
     }
 
     {
         C2Int32Value int32Value(INT32_MIN);
-        static_assert(std::is_same<decltype(int32Value.mValue), int32_t>::value, "should be int32_t");
-        EXPECT_EQ(INT32_MIN, int32Value.mValue);
-        std::list<const C2FieldDescriptor> fields = int32Value.fieldList;
+        static_assert(std::is_same<decltype(int32Value.value), int32_t>::value, "should be int32_t");
+        EXPECT_EQ(INT32_MIN, int32Value.value);
+        std::list<const C2FieldDescriptor> fields = int32Value.FIELD_LIST;
         EXPECT_EQ(1u, fields.size());
         EXPECT_EQ(FD::INT32, fields.cbegin()->type());
         EXPECT_EQ(1u, fields.cbegin()->length());
@@ -1983,9 +2247,9 @@
 
     {
         C2Uint32Value uint32Value(UINT32_MAX);
-        static_assert(std::is_same<decltype(uint32Value.mValue), uint32_t>::value, "should be uint32_t");
-        EXPECT_EQ(UINT32_MAX, uint32Value.mValue);
-        std::list<const C2FieldDescriptor> fields = uint32Value.fieldList;
+        static_assert(std::is_same<decltype(uint32Value.value), uint32_t>::value, "should be uint32_t");
+        EXPECT_EQ(UINT32_MAX, uint32Value.value);
+        std::list<const C2FieldDescriptor> fields = uint32Value.FIELD_LIST;
         EXPECT_EQ(1u, fields.size());
         EXPECT_EQ(FD::UINT32, fields.cbegin()->type());
         EXPECT_EQ(1u, fields.cbegin()->length());
@@ -1994,9 +2258,9 @@
 
     {
         C2Int64Value int64Value(INT64_MIN);
-        static_assert(std::is_same<decltype(int64Value.mValue), int64_t>::value, "should be int64_t");
-        EXPECT_EQ(INT64_MIN, int64Value.mValue);
-        std::list<const C2FieldDescriptor> fields = int64Value.fieldList;
+        static_assert(std::is_same<decltype(int64Value.value), int64_t>::value, "should be int64_t");
+        EXPECT_EQ(INT64_MIN, int64Value.value);
+        std::list<const C2FieldDescriptor> fields = int64Value.FIELD_LIST;
         EXPECT_EQ(1u, fields.size());
         EXPECT_EQ(FD::INT64, fields.cbegin()->type());
         EXPECT_EQ(1u, fields.cbegin()->length());
@@ -2005,9 +2269,9 @@
 
     {
         C2Uint64Value uint64Value(UINT64_MAX);
-        static_assert(std::is_same<decltype(uint64Value.mValue), uint64_t>::value, "should be uint64_t");
-        EXPECT_EQ(UINT64_MAX, uint64Value.mValue);
-        std::list<const C2FieldDescriptor> fields = uint64Value.fieldList;
+        static_assert(std::is_same<decltype(uint64Value.value), uint64_t>::value, "should be uint64_t");
+        EXPECT_EQ(UINT64_MAX, uint64Value.value);
+        std::list<const C2FieldDescriptor> fields = uint64Value.FIELD_LIST;
         EXPECT_EQ(1u, fields.size());
         EXPECT_EQ(FD::UINT64, fields.cbegin()->type());
         EXPECT_EQ(1u, fields.cbegin()->length());
@@ -2016,9 +2280,9 @@
 
     {
         C2FloatValue floatValue(123.4f);
-        static_assert(std::is_same<decltype(floatValue.mValue), float>::value, "should be float");
-        EXPECT_EQ(123.4f, floatValue.mValue);
-        std::list<const C2FieldDescriptor> fields = floatValue.fieldList;
+        static_assert(std::is_same<decltype(floatValue.value), float>::value, "should be float");
+        EXPECT_EQ(123.4f, floatValue.value);
+        std::list<const C2FieldDescriptor> fields = floatValue.FIELD_LIST;
         EXPECT_EQ(1u, fields.size());
         EXPECT_EQ(FD::FLOAT, fields.cbegin()->type());
         EXPECT_EQ(1u, fields.cbegin()->length());
@@ -2029,17 +2293,17 @@
         uint8_t initValue[] = "ABCD";
         typedef C2GlobalParam<C2Setting, C2BlobValue, 0> BlobSetting;
         std::unique_ptr<BlobSetting> blobValue = BlobSetting::alloc_unique(6, C2ConstMemoryBlock<uint8_t>(initValue));
-        static_assert(std::is_same<decltype(blobValue->m.mValue), uint8_t[]>::value, "should be uint8_t[]");
-        EXPECT_EQ(0, memcmp(blobValue->m.mValue, "ABCD\0", 6));
+        static_assert(std::is_same<decltype(blobValue->m.value), uint8_t[]>::value, "should be uint8_t[]");
+        EXPECT_EQ(0, memcmp(blobValue->m.value, "ABCD\0", 6));
         EXPECT_EQ(6u, blobValue->flexCount());
-        std::list<const C2FieldDescriptor> fields = blobValue->fieldList;
+        std::list<const C2FieldDescriptor> fields = blobValue->FIELD_LIST;
         EXPECT_EQ(1u, fields.size());
         EXPECT_EQ(FD::BLOB, fields.cbegin()->type());
         EXPECT_EQ(0u, fields.cbegin()->length());
         EXPECT_EQ(C2String("value"), fields.cbegin()->name());
 
         blobValue = BlobSetting::alloc_unique(3, C2ConstMemoryBlock<uint8_t>(initValue));
-        EXPECT_EQ(0, memcmp(blobValue->m.mValue, "ABC", 3));
+        EXPECT_EQ(0, memcmp(blobValue->m.value, "ABC", 3));
         EXPECT_EQ(3u, blobValue->flexCount());
     }
 
@@ -2048,38 +2312,38 @@
         typedef C2GlobalParam<C2Setting, C2StringValue, 0> StringSetting;
         std::unique_ptr<StringSetting> stringValue = StringSetting::alloc_unique(6, C2ConstMemoryBlock<char>(initValue));
         stringValue = StringSetting::alloc_unique(6, initValue);
-        static_assert(std::is_same<decltype(stringValue->m.mValue), char[]>::value, "should be char[]");
-        EXPECT_EQ(0, memcmp(stringValue->m.mValue, "ABCD\0", 6));
+        static_assert(std::is_same<decltype(stringValue->m.value), char[]>::value, "should be char[]");
+        EXPECT_EQ(0, memcmp(stringValue->m.value, "ABCD\0", 6));
         EXPECT_EQ(6u, stringValue->flexCount());
-        std::list<const C2FieldDescriptor> fields = stringValue->fieldList;
+        std::list<const C2FieldDescriptor> fields = stringValue->FIELD_LIST;
         EXPECT_EQ(1u, fields.size());
         EXPECT_EQ(FD::STRING, fields.cbegin()->type());
         EXPECT_EQ(0u, fields.cbegin()->length());
         EXPECT_EQ(C2String("value"), fields.cbegin()->name());
 
         stringValue = StringSetting::alloc_unique(3, C2ConstMemoryBlock<char>(initValue));
-        EXPECT_EQ(0, memcmp(stringValue->m.mValue, "AB", 3));
+        EXPECT_EQ(0, memcmp(stringValue->m.value, "AB", 3));
         EXPECT_EQ(3u, stringValue->flexCount());
 
         stringValue = StringSetting::alloc_unique(11, "initValue");
-        EXPECT_EQ(0, memcmp(stringValue->m.mValue, "initValue\0", 11));
+        EXPECT_EQ(0, memcmp(stringValue->m.value, "initValue\0", 11));
         EXPECT_EQ(11u, stringValue->flexCount());
 
         stringValue = StringSetting::alloc_unique(initValue);
-        EXPECT_EQ(0, memcmp(stringValue->m.mValue, "ABCD", 5));
+        EXPECT_EQ(0, memcmp(stringValue->m.value, "ABCD", 5));
         EXPECT_EQ(5u, stringValue->flexCount());
 
         stringValue = StringSetting::alloc_unique({ 'A', 'B', 'C', 'D' });
-        EXPECT_EQ(0, memcmp(stringValue->m.mValue, "ABC", 4));
+        EXPECT_EQ(0, memcmp(stringValue->m.value, "ABC", 4));
         EXPECT_EQ(4u, stringValue->flexCount());
     }
 
     {
-        uint32_t videoWidth[] = { 12u, C2NumbersStreamTuning::output::typeIndex, 100 };
+        uint32_t videoWidth[] = { 12u, C2NumbersStreamTuning::output::PARAM_TYPE, 100 };
         C2Param *p1 = C2Param::From(videoWidth, sizeof(videoWidth));
         EXPECT_NE(nullptr, p1);
         EXPECT_EQ(12u, p1->size());
-        EXPECT_EQ(C2NumbersStreamTuning::output::typeIndex, p1->type());
+        EXPECT_EQ(p1->type(), C2NumbersStreamTuning::output::PARAM_TYPE);
 
         C2NumbersStreamTuning::output *vst = C2NumbersStreamTuning::output::From(p1);
         EXPECT_NE(nullptr, vst);
@@ -2102,12 +2366,12 @@
     }
 
     {
-        uint32_t videoWidth[] = { 16u, C2NumbersPortTuning::input::typeIndex, 101, 102 };
+        uint32_t videoWidth[] = { 16u, C2NumbersPortTuning::input::PARAM_TYPE, 101, 102 };
 
         C2Param *p1 = C2Param::From(videoWidth, sizeof(videoWidth));
         EXPECT_NE(nullptr, p1);
         EXPECT_EQ(16u, p1->size());
-        EXPECT_EQ(C2NumbersPortTuning::input::typeIndex, p1->type());
+        EXPECT_EQ(p1->type(), C2NumbersPortTuning::input::PARAM_TYPE);
 
         C2NumbersPortTuning::input *vpt = C2NumbersPortTuning::input::From(p1);
         EXPECT_NE(nullptr, vpt);
@@ -2157,100 +2421,66 @@
 };
 
 struct C2VideoConfigStruct {
-    int32_t mWidth;
-    uint32_t mHeight;
-    MetadataType mMetadataType;
-    int32_t mSupportedFormats[];
+    int32_t width;
+    uint32_t height;
+    MetadataType metadataType;
+    int32_t supportedFormats[];
 
     C2VideoConfigStruct() {}
 
-    DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(VideoConfig, mSupportedFormats)
-    C2FIELD(mWidth, "width")
-    C2FIELD(mHeight, "height")
-    C2FIELD(mMetadataType, "metadata-type")
-    C2FIELD(mSupportedFormats, "formats")
+    DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(VideoConfig, supportedFormats)
+    C2FIELD(width, "width")
+    C2FIELD(height, "height")
+    C2FIELD(metadataType, "metadata-type")
+    C2FIELD(supportedFormats, "formats")
 };
 
 typedef C2PortParam<C2Tuning, C2VideoConfigStruct> C2VideoConfigPortTuning;
 
-class MyReflector : public C2ParamReflector {
-private:
-    std::unique_ptr<C2VideoConfigPortTuning::input> inputVideoConfigTuning;
-    std::unique_ptr<C2VideoConfigPortTuning::output> outputVideoConfigTuning;
-
-public:
-    void describeSupportedValues() {
-        C2TypedFieldSupportedValues<int32_t> supportedWidths(16, 1920, 8);
-        C2FieldSupportedValues supportedWidths2(16, 1920, 8);
-
-
-        std::list<C2FieldSupportedValues> supported;
-        //supported.emplace_push(inputVideoConfigTuning->mNumber, range(16, 1920, 8));
-        //supported.emplace_push(inputVideoConfigTuning->mHeight, range(16, 1088, 8));
-        //supported.emplace_push(inputVideoConfigTuning->mMetadataType, all_enums);
-        //supported.emplace_push(inputVideoConfigTuning->mSupportedFormats, { 0, 1, 5, 7 });
-    }
-
-    virtual std::unique_ptr<android::C2StructDescriptor> describe(C2Param::BaseIndex paramType) {
-        switch (paramType.baseIndex()) {
-        case C2VideoConfigPortTuning::baseIndex:
-            return std::unique_ptr<C2StructDescriptor>(new C2StructDescriptor{
-                paramType.baseIndex(),
-                C2VideoConfigPortTuning::fieldList,
-            });
-        }
-        return nullptr;
-    }
-};
-
 class MyComponentInstance : public C2ComponentInterface {
 public:
-    virtual C2String getName() const {
+    virtual C2String getName() const override {
         /// \todo this seems too specific
         return "sample.interface";
     };
 
-    virtual node_id getId() const {
+    virtual c2_node_id_t getId() const override {
         /// \todo how are these shared?
         return 0;
     }
 
-    virtual status_t commit_sm(
+    virtual c2_status_t config_vb(
             const std::vector<C2Param* const> &params,
-            std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override {
         (void)params;
         (void)failures;
-        return C2_UNSUPPORTED;
+        (void)mayBlock;
+        return C2_OMITTED;
     }
 
-    virtual status_t config_nb(
-            const std::vector<C2Param* const> &params,
-            std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
-        (void)params;
-        (void)failures;
-        return C2_UNSUPPORTED;
-    }
-
-    virtual status_t createTunnel_sm(node_id targetComponent) {
+    virtual c2_status_t createTunnel_sm(c2_node_id_t targetComponent) override {
         (void)targetComponent;
-        return C2_UNSUPPORTED;
+        return C2_OMITTED;
     }
 
-    virtual status_t query_nb(
+    virtual c2_status_t query_vb(
             const std::vector<C2Param* const> &stackParams,
             const std::vector<C2Param::Index> &heapParamIndices,
-            std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override {
         for (C2Param* const param : stackParams) {
+            (void)mayBlock;
             if (!*param) { // param is already invalid - remember it
                 continue;
             }
 
             // note: this does not handle stream params (should use index...)
-            if (!mMyParams.count(param->type())) {
+            if (!mMyParams.count(param->index())) {
                 continue; // not my param
             }
 
-            C2Param & myParam = mMyParams.find(param->type())->second;
+            C2Param & myParam = mMyParams.find(param->index())->second;
             if (myParam.size() != param->size()) { // incorrect size
                 param->invalidate();
                 continue;
@@ -2262,7 +2492,7 @@
         for (const C2Param::Index index : heapParamIndices) {
             if (mMyParams.count(index)) {
                 C2Param & myParam = mMyParams.find(index)->second;
-                std::unique_ptr<C2Param> paramCopy(C2Param::From(&myParam, myParam.size()));
+                std::unique_ptr<C2Param> paramCopy(C2Param::Copy(myParam));
                 heapParams->push_back(std::move(paramCopy));
             }
         }
@@ -2275,12 +2505,12 @@
     C2ComponentDomainInfo mDomainInfo;
 
     MyComponentInstance() {
-        mMyParams.insert({mDomainInfo.type(), mDomainInfo});
+        mMyParams.insert({mDomainInfo.index(), mDomainInfo});
     }
 
-    virtual status_t releaseTunnel_sm(node_id targetComponent) {
+    virtual c2_status_t releaseTunnel_sm(c2_node_id_t targetComponent) override {
         (void)targetComponent;
-        return C2_UNSUPPORTED;
+        return C2_OMITTED;
     }
 
     class MyParamReflector : public C2ParamReflector {
@@ -2289,51 +2519,53 @@
     public:
         MyParamReflector(const MyComponentInstance *i) : instance(i) { }
 
-        virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::BaseIndex paramIndex) {
-            switch (paramIndex.baseIndex()) {
-            case decltype(instance->mDomainInfo)::baseIndex:
+        virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::CoreIndex paramIndex) override {
+            switch (paramIndex.typeIndex()) {
+            case decltype(instance->mDomainInfo)::CORE_INDEX:
             default:
                 return std::unique_ptr<C2StructDescriptor>(new C2StructDescriptor{
                     instance->mDomainInfo.type(),
-                    decltype(instance->mDomainInfo)::fieldList,
+                    decltype(instance->mDomainInfo)::FIELD_LIST,
                 });
             }
             return nullptr;
         }
     };
 
-    virtual status_t getSupportedValues(
-            const std::vector<const C2ParamField> fields,
-            std::vector<C2FieldSupportedValues>* const values) const {
-        for (const C2ParamField &field : fields) {
-            if (field == C2ParamField(&mDomainInfo, &C2ComponentDomainInfo::mValue)) {
-                values->push_back(C2FieldSupportedValues(
+    virtual c2_status_t querySupportedValues_vb(
+            std::vector<C2FieldSupportedValuesQuery> &fields,
+            c2_blocking_t mayBlock) const override {
+        (void)mayBlock;
+        for (C2FieldSupportedValuesQuery &query : fields) {
+            if (query.field == C2ParamField(&mDomainInfo, &C2ComponentDomainInfo::value)) {
+                query.values = C2FieldSupportedValues(
                     false /* flag */,
-                    &mDomainInfo.mValue
+                    &mDomainInfo.value
                     //,
                     //{(int32_t)C2DomainVideo}
-                ));
+                );
+                query.status = C2_OK;
+            } else {
+                query.status = C2_BAD_INDEX;
             }
         }
         return C2_OK;
     }
 
-    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const {
+    std::shared_ptr<C2ParamReflector> getParamReflector() const {
         return std::shared_ptr<C2ParamReflector>(new MyParamReflector(this));
     }
 
-    virtual status_t getSupportedParams(std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const {
+    virtual c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const override {
         params->push_back(std::make_shared<C2ParamDescriptor>(
                 true /* required */, "_domain", &mDomainInfo));
-        return C2_OK;
-    }
-
-    status_t getSupportedParams2(std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) {
         params->push_back(std::shared_ptr<C2ParamDescriptor>(
                 new C2ParamDescriptor(true /* required */, "_domain", &mDomainInfo)));
         return C2_OK;
     }
 
+    virtual ~MyComponentInstance() override = default;
 };
 
 template<typename E, bool S=std::is_enum<E>::value>
@@ -2382,9 +2614,9 @@
 template<typename T>
 void dumpFSV(const C2FieldSupportedValues &sv, T*t) {
     using namespace std;
-    cout << (std::is_enum<T>::value ? (std::is_signed<typename std::underlying_type<T>::type>::value ? "i" : "u")
+    cout << (std::is_enum<T>::value ? (std::is_signed<typename lax_underlying_type<T>::type>::value ? "i" : "u")
              : std::is_integral<T>::value ? std::is_signed<T>::value ? "i" : "u" : "f")
-        << (8 * sizeof(T));
+         << (8 * sizeof(T));
     if (sv.type == sv.RANGE) {
         cout << ".range(" << get(sv.range.min, t);
         if (get(sv.range.step, t) != std::is_integral<T>::value) {
@@ -2425,7 +2657,7 @@
         cout << "Flex";
     }
 
-    cout << type.paramIndex();
+    cout << type.typeIndex();
 
     switch (type.kind()) {
     case C2Param::INFO: cout << "Info"; break;
@@ -2436,17 +2668,17 @@
     }
 }
 
-void dumpType(C2Param::BaseIndex type) {
+void dumpType(C2Param::CoreIndex type) {
     using namespace std;
     cout << (type.isVendor() ? "Vendor" : "C2");
     if (type.isFlexible()) {
         cout << "Flex";
     }
 
-    cout << type.paramIndex() << "Struct";
+    cout << type.typeIndex() << "Struct";
 }
 
-void dumpType(FD::Type type) {
+void dumpType(FD::type_t type) {
     using namespace std;
     switch (type) {
     case FD::BLOB: cout << "blob "; break;
@@ -2466,7 +2698,7 @@
 void dumpStruct(const C2StructDescriptor &sd) {
     using namespace std;
     cout << "struct ";
-    dumpType(sd.baseIndex());
+    dumpType(sd.coreIndex());
     cout << " {" << endl;
     //C2FieldDescriptor &f;
     for (const C2FieldDescriptor &f : sd) {
@@ -2510,22 +2742,41 @@
 
 TEST_F(C2ParamTest, ReflectorTest) {
     C2ComponentDomainInfo domainInfo;
-    std::shared_ptr<C2ComponentInterface> comp(new MyComponentInstance);
-    std::vector<C2FieldSupportedValues> values;
+    std::shared_ptr<MyComponentInstance> myComp(new MyComponentInstance);
+    std::shared_ptr<C2ComponentInterface> comp = myComp;
 
     std::unique_ptr<C2StructDescriptor> desc{
-        comp->getParamReflector()->describe(C2ComponentDomainInfo::indexFlags)};
+        myComp->getParamReflector()->describe(C2ComponentDomainInfo::CORE_INDEX)};
     dumpStruct(*desc);
 
-    EXPECT_EQ(
-        C2_OK,
-        comp->getSupportedValues(
-            { C2ParamField(&domainInfo, &C2ComponentDomainInfo::mValue) },
-            &values)
-    );
+    std::vector<C2FieldSupportedValuesQuery> query = {
+        { C2ParamField(&domainInfo, &C2ComponentDomainInfo::value),
+          C2FieldSupportedValuesQuery::CURRENT },
+        C2FieldSupportedValuesQuery(C2ParamField(&domainInfo, &C2ComponentDomainInfo::value),
+          C2FieldSupportedValuesQuery::CURRENT),
+        C2FieldSupportedValuesQuery::Current(C2ParamField(&domainInfo, &C2ComponentDomainInfo::value)),
+    };
+
+    EXPECT_EQ(C2_OK, comp->querySupportedValues_vb(query, C2_DONT_BLOCK));
+
+    for (const C2FieldSupportedValuesQuery &q : query) {
+        dumpFSV(q.values, &domainInfo.value);
+    }
+}
+
+TEST_F(C2ParamTest, FieldSupportedValuesTest) {
+    typedef C2GlobalParam<C2Info, C2Uint32Value, 0> Uint32TestInfo;
+    Uint32TestInfo t;
+    std::vector<C2FieldSupportedValues> values;
+    values.push_back(C2FieldSupportedValues(0, 10, 1));  // min, max, step
+    values.push_back(C2FieldSupportedValues(1, 64, 2, 1));  // min, max, nom, den
+    values.push_back(C2FieldSupportedValues(false, {1, 2, 3}));  // flags, std::initializer_list
+    uint32_t val[] = {1, 3, 5, 7};
+    std::vector<uint32_t> v(std::begin(val), std::end(val));
+    values.push_back(C2FieldSupportedValues(false, v));  // flags, std::vector
 
     for (const C2FieldSupportedValues &sv : values) {
-        dumpFSV(sv, &domainInfo.mValue);
+        dumpFSV(sv, &t.value);
     }
 }
 
diff --git a/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp b/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp
new file mode 100644
index 0000000..f6e6478
--- /dev/null
+++ b/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp
@@ -0,0 +1,533 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include <C2AllocatorIon.h>
+#include <C2AllocatorGralloc.h>
+#include <C2Buffer.h>
+#include <C2BufferPriv.h>
+#include <C2ParamDef.h>
+
+#include <system/graphics.h>
+
+namespace android {
+
+class C2BufferTest : public ::testing::Test {
+public:
+    C2BufferTest()
+        : mLinearAllocator(std::make_shared<C2AllocatorIon>()),
+          mSize(0u),
+          mAddr(nullptr),
+          mGraphicAllocator(std::make_shared<C2AllocatorGralloc>()) {
+    }
+
+    ~C2BufferTest() = default;
+
+    void allocateLinear(size_t capacity) {
+        c2_status_t err = mLinearAllocator->newLinearAllocation(
+                capacity,
+                { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+                &mLinearAllocation);
+        if (err != C2_OK) {
+            mLinearAllocation.reset();
+            FAIL() << "C2Allocator::newLinearAllocation() failed: " << err;
+        }
+    }
+
+    void mapLinear(size_t offset, size_t size, uint8_t **addr) {
+        ASSERT_TRUE(mLinearAllocation);
+        c2_status_t err = mLinearAllocation->map(
+                offset,
+                size,
+                { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+                // TODO: fence
+                nullptr,
+                &mAddr);
+        if (err != C2_OK) {
+            mAddr = nullptr;
+            FAIL() << "C2LinearAllocation::map() failed: " << err;
+        }
+        ASSERT_NE(nullptr, mAddr);
+        mSize = size;
+        *addr = (uint8_t *)mAddr;
+    }
+
+    void unmapLinear() {
+        ASSERT_TRUE(mLinearAllocation);
+        ASSERT_NE(nullptr, mAddr);
+        ASSERT_NE(0u, mSize);
+
+        // TODO: fence
+        ASSERT_EQ(C2_OK, mLinearAllocation->unmap(mAddr, mSize, nullptr));
+        mSize = 0u;
+        mAddr = nullptr;
+    }
+
+    std::shared_ptr<C2BlockPool> makeLinearBlockPool() {
+        return std::make_shared<C2BasicLinearBlockPool>(mLinearAllocator);
+    }
+
+    void allocateGraphic(uint32_t width, uint32_t height) {
+        c2_status_t err = mGraphicAllocator->newGraphicAllocation(
+                width,
+                height,
+                HAL_PIXEL_FORMAT_YCBCR_420_888,
+                { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+                &mGraphicAllocation);
+        if (err != C2_OK) {
+            mGraphicAllocation.reset();
+            FAIL() << "C2Allocator::newGraphicAllocation() failed: " << err;
+        }
+    }
+
+    void mapGraphic(C2Rect rect, C2PlanarLayout *layout, uint8_t **addr) {
+        ASSERT_TRUE(mGraphicAllocation);
+        c2_status_t err = mGraphicAllocation->map(
+                rect,
+                { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+                // TODO: fence
+                nullptr,
+                layout,
+                addr);
+        if (err != C2_OK) {
+            addr[C2PlanarLayout::PLANE_Y] = nullptr;
+            addr[C2PlanarLayout::PLANE_U] = nullptr;
+            addr[C2PlanarLayout::PLANE_V] = nullptr;
+            FAIL() << "C2GraphicAllocation::map() failed: " << err;
+        }
+    }
+
+    void unmapGraphic() {
+        ASSERT_TRUE(mGraphicAllocation);
+
+        // TODO: fence
+        ASSERT_EQ(C2_OK, mGraphicAllocation->unmap(nullptr));
+    }
+
+    std::shared_ptr<C2BlockPool> makeGraphicBlockPool() {
+        return std::make_shared<C2BasicGraphicBlockPool>(mGraphicAllocator);
+    }
+
+private:
+    std::shared_ptr<C2Allocator> mLinearAllocator;
+    std::shared_ptr<C2LinearAllocation> mLinearAllocation;
+    size_t mSize;
+    void *mAddr;
+
+    std::shared_ptr<C2Allocator> mGraphicAllocator;
+    std::shared_ptr<C2GraphicAllocation> mGraphicAllocation;
+};
+
+TEST_F(C2BufferTest, LinearAllocationTest) {
+    constexpr size_t kCapacity = 1024u * 1024u;
+
+    allocateLinear(kCapacity);
+
+    uint8_t *addr = nullptr;
+    mapLinear(0u, kCapacity, &addr);
+    ASSERT_NE(nullptr, addr);
+
+    for (size_t i = 0; i < kCapacity; ++i) {
+        addr[i] = i % 100u;
+    }
+
+    unmapLinear();
+    addr = nullptr;
+
+    mapLinear(kCapacity / 3, kCapacity / 3, &addr);
+    ASSERT_NE(nullptr, addr);
+    for (size_t i = 0; i < kCapacity / 3; ++i) {
+        ASSERT_EQ((i + kCapacity / 3) % 100, addr[i]) << " at i = " << i;
+    }
+}
+
+TEST_F(C2BufferTest, BlockPoolTest) {
+    constexpr size_t kCapacity = 1024u * 1024u;
+
+    std::shared_ptr<C2BlockPool> blockPool(makeLinearBlockPool());
+
+    std::shared_ptr<C2LinearBlock> block;
+    ASSERT_EQ(C2_OK, blockPool->fetchLinearBlock(
+            kCapacity,
+            { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+            &block));
+    ASSERT_TRUE(block);
+
+    C2Acquirable<C2WriteView> writeViewHolder = block->map();
+    C2WriteView writeView = writeViewHolder.get();
+    ASSERT_EQ(C2_OK, writeView.error());
+    ASSERT_EQ(kCapacity, writeView.capacity());
+    ASSERT_EQ(0u, writeView.offset());
+    ASSERT_EQ(kCapacity, writeView.size());
+
+    uint8_t *data = writeView.data();
+    ASSERT_NE(nullptr, data);
+    for (size_t i = 0; i < writeView.size(); ++i) {
+        data[i] = i % 100u;
+    }
+
+    C2Fence fence;
+    C2ConstLinearBlock constBlock = block->share(
+            kCapacity / 3, kCapacity / 3, fence);
+
+    C2Acquirable<C2ReadView> readViewHolder = constBlock.map();
+    C2ReadView readView = readViewHolder.get();
+    ASSERT_EQ(C2_OK, readView.error());
+    ASSERT_EQ(kCapacity / 3, readView.capacity());
+
+    // TODO: fence
+    const uint8_t *constData = readView.data();
+    ASSERT_NE(nullptr, constData);
+    for (size_t i = 0; i < readView.capacity(); ++i) {
+        ASSERT_EQ((i + kCapacity / 3) % 100u, constData[i]) << " at i = " << i
+                << "; data = " << static_cast<void *>(data)
+                << "; constData = " << static_cast<const void *>(constData);
+    }
+
+    readView = readView.subView(333u, 100u);
+    ASSERT_EQ(C2_OK, readView.error());
+    ASSERT_EQ(100u, readView.capacity());
+
+    constData = readView.data();
+    ASSERT_NE(nullptr, constData);
+    for (size_t i = 0; i < readView.capacity(); ++i) {
+        ASSERT_EQ((i + 333u + kCapacity / 3) % 100u, constData[i]) << " at i = " << i;
+    }
+}
+
+void fillPlane(const C2Rect rect, const C2PlaneInfo info, uint8_t *addr, uint8_t value) {
+    for (uint32_t row = 0; row < rect.height / info.rowSampling; ++row) {
+        int32_t rowOffset = (row + rect.top / info.rowSampling) * info.rowInc;
+        for (uint32_t col = 0; col < rect.width / info.colSampling; ++col) {
+            int32_t colOffset = (col + rect.left / info.colSampling) * info.colInc;
+            addr[rowOffset + colOffset] = value;
+        }
+    }
+}
+
+bool verifyPlane(const C2Rect rect, const C2PlaneInfo info, const uint8_t *addr, uint8_t value) {
+    for (uint32_t row = 0; row < rect.height / info.rowSampling; ++row) {
+        int32_t rowOffset = (row + rect.top / info.rowSampling) * info.rowInc;
+        for (uint32_t col = 0; col < rect.width / info.colSampling; ++col) {
+            int32_t colOffset = (col + rect.left / info.colSampling) * info.colInc;
+            if (addr[rowOffset + colOffset] != value) {
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
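For reference, the offset arithmetic used by fillPlane()/verifyPlane() above can be checked in isolation. The sketch below assumes a hypothetical 4:2:0 chroma plane; rowInc, colInc and the crop values are made-up illustration numbers, not taken from any real gralloc layout, and row/col are indices in the subsampled domain just as in the loops above.

    // Standalone arithmetic check mirroring fillPlane()'s offset math.
    #include <cassert>
    #include <cstdint>

    int main() {
        const uint32_t rowSampling = 2, colSampling = 2;  // assumed 4:2:0 chroma subsampling
        const int32_t rowInc = 320;                       // assumed bytes between chroma rows
        const int32_t colInc = 2;                         // assumed bytes between chroma samples
        const uint32_t top = 2, left = 4;                 // crop origin in pixels
        const uint32_t row = 3, col = 6;                  // indices in the subsampled domain

        const int32_t rowOffset = (row + top / rowSampling) * rowInc;   // (3 + 1) * 320
        const int32_t colOffset = (col + left / colSampling) * colInc;  // (6 + 2) * 2

        assert(rowOffset == 1280);
        assert(colOffset == 16);
        return 0;
    }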
+TEST_F(C2BufferTest, GraphicAllocationTest) {
+    constexpr uint32_t kWidth = 320;
+    constexpr uint32_t kHeight = 240;
+
+    allocateGraphic(kWidth, kHeight);
+
+    uint8_t *addr[C2PlanarLayout::MAX_NUM_PLANES];
+    C2Rect rect{ 0, 0, kWidth, kHeight };
+    C2PlanarLayout layout;
+    mapGraphic(rect, &layout, addr);
+    ASSERT_NE(nullptr, addr[C2PlanarLayout::PLANE_Y]);
+    ASSERT_NE(nullptr, addr[C2PlanarLayout::PLANE_U]);
+    ASSERT_NE(nullptr, addr[C2PlanarLayout::PLANE_V]);
+
+    uint8_t *y = addr[C2PlanarLayout::PLANE_Y];
+    C2PlaneInfo yInfo = layout.planes[C2PlanarLayout::PLANE_Y];
+    uint8_t *u = addr[C2PlanarLayout::PLANE_U];
+    C2PlaneInfo uInfo = layout.planes[C2PlanarLayout::PLANE_U];
+    uint8_t *v = addr[C2PlanarLayout::PLANE_V];
+    C2PlaneInfo vInfo = layout.planes[C2PlanarLayout::PLANE_V];
+
+    fillPlane(rect, yInfo, y, 0);
+    fillPlane(rect, uInfo, u, 0);
+    fillPlane(rect, vInfo, v, 0);
+    fillPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, yInfo, y, 0x12);
+    fillPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, uInfo, u, 0x34);
+    fillPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, vInfo, v, 0x56);
+
+    unmapGraphic();
+
+    mapGraphic(rect, &layout, addr);
+    ASSERT_NE(nullptr, addr[C2PlanarLayout::PLANE_Y]);
+    ASSERT_NE(nullptr, addr[C2PlanarLayout::PLANE_U]);
+    ASSERT_NE(nullptr, addr[C2PlanarLayout::PLANE_V]);
+
+    y = addr[C2PlanarLayout::PLANE_Y];
+    yInfo = layout.planes[C2PlanarLayout::PLANE_Y];
+    u = addr[C2PlanarLayout::PLANE_U];
+    uInfo = layout.planes[C2PlanarLayout::PLANE_U];
+    v = addr[C2PlanarLayout::PLANE_V];
+    vInfo = layout.planes[C2PlanarLayout::PLANE_V];
+
+    ASSERT_TRUE(verifyPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, yInfo, y, 0x12));
+    ASSERT_TRUE(verifyPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, uInfo, u, 0x34));
+    ASSERT_TRUE(verifyPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, vInfo, v, 0x56));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth, kHeight / 4 }, yInfo, y, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth, kHeight / 4 }, uInfo, u, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth, kHeight / 4 }, vInfo, v, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth / 4, kHeight }, yInfo, y, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth / 4, kHeight }, uInfo, u, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth / 4, kHeight }, vInfo, v, 0));
+}
+
+TEST_F(C2BufferTest, GraphicBlockPoolTest) {
+    constexpr uint32_t kWidth = 320;
+    constexpr uint32_t kHeight = 240;
+
+    std::shared_ptr<C2BlockPool> blockPool(makeGraphicBlockPool());
+
+    std::shared_ptr<C2GraphicBlock> block;
+    ASSERT_EQ(C2_OK, blockPool->fetchGraphicBlock(
+            kWidth,
+            kHeight,
+            HAL_PIXEL_FORMAT_YCBCR_420_888,
+            { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+            &block));
+    ASSERT_TRUE(block);
+
+    C2Acquirable<C2GraphicView> graphicViewHolder = block->map();
+    C2GraphicView graphicView = graphicViewHolder.get();
+    ASSERT_EQ(C2_OK, graphicView.error());
+    ASSERT_EQ(kWidth, graphicView.width());
+    ASSERT_EQ(kHeight, graphicView.height());
+
+    uint8_t *const *data = graphicView.data();
+    C2PlanarLayout layout = graphicView.layout();
+    ASSERT_NE(nullptr, data);
+
+    uint8_t *y = data[C2PlanarLayout::PLANE_Y];
+    C2PlaneInfo yInfo = layout.planes[C2PlanarLayout::PLANE_Y];
+    uint8_t *u = data[C2PlanarLayout::PLANE_U];
+    C2PlaneInfo uInfo = layout.planes[C2PlanarLayout::PLANE_U];
+    uint8_t *v = data[C2PlanarLayout::PLANE_V];
+    C2PlaneInfo vInfo = layout.planes[C2PlanarLayout::PLANE_V];
+
+    fillPlane({ 0, 0, kWidth, kHeight }, yInfo, y, 0);
+    fillPlane({ 0, 0, kWidth, kHeight }, uInfo, u, 0);
+    fillPlane({ 0, 0, kWidth, kHeight }, vInfo, v, 0);
+    fillPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, yInfo, y, 0x12);
+    fillPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, uInfo, u, 0x34);
+    fillPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, vInfo, v, 0x56);
+
+    C2Fence fence;
+    C2ConstGraphicBlock constBlock = block->share(
+            { 0, 0, kWidth, kHeight }, fence);
+    block.reset();
+
+    C2Acquirable<const C2GraphicView> constViewHolder = constBlock.map();
+    const C2GraphicView constGraphicView = constViewHolder.get();
+    ASSERT_EQ(C2_OK, constGraphicView.error());
+    ASSERT_EQ(kWidth, constGraphicView.width());
+    ASSERT_EQ(kHeight, constGraphicView.height());
+
+    const uint8_t *const *constData = constGraphicView.data();
+    layout = graphicView.layout();
+    ASSERT_NE(nullptr, constData);
+
+    const uint8_t *cy = constData[C2PlanarLayout::PLANE_Y];
+    yInfo = layout.planes[C2PlanarLayout::PLANE_Y];
+    const uint8_t *cu = constData[C2PlanarLayout::PLANE_U];
+    uInfo = layout.planes[C2PlanarLayout::PLANE_U];
+    const uint8_t *cv = constData[C2PlanarLayout::PLANE_V];
+    vInfo = layout.planes[C2PlanarLayout::PLANE_V];
+
+    ASSERT_TRUE(verifyPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, yInfo, cy, 0x12));
+    ASSERT_TRUE(verifyPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, uInfo, cu, 0x34));
+    ASSERT_TRUE(verifyPlane({ kWidth / 4, kHeight / 4, kWidth / 2, kHeight / 2 }, vInfo, cv, 0x56));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth, kHeight / 4 }, yInfo, cy, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth, kHeight / 4 }, uInfo, cu, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth, kHeight / 4 }, vInfo, cv, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth / 4, kHeight }, yInfo, cy, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth / 4, kHeight }, uInfo, cu, 0));
+    ASSERT_TRUE(verifyPlane({ 0, 0, kWidth / 4, kHeight }, vInfo, cv, 0));
+}
+
+class BufferData : public C2BufferData {
+public:
+    explicit BufferData(const std::list<C2ConstLinearBlock> &blocks) : C2BufferData(blocks) {}
+    explicit BufferData(const std::list<C2ConstGraphicBlock> &blocks) : C2BufferData(blocks) {}
+};
+
+class Buffer : public C2Buffer {
+public:
+    explicit Buffer(const std::list<C2ConstLinearBlock> &blocks) : C2Buffer(blocks) {}
+    explicit Buffer(const std::list<C2ConstGraphicBlock> &blocks) : C2Buffer(blocks) {}
+};
+
+TEST_F(C2BufferTest, BufferDataTest) {
+    std::shared_ptr<C2BlockPool> linearBlockPool(makeLinearBlockPool());
+    std::shared_ptr<C2BlockPool> graphicBlockPool(makeGraphicBlockPool());
+
+    constexpr uint32_t kWidth1 = 320;
+    constexpr uint32_t kHeight1 = 240;
+    constexpr C2Rect kCrop1(kWidth1, kHeight1);
+    constexpr uint32_t kWidth2 = 176;
+    constexpr uint32_t kHeight2 = 144;
+    constexpr C2Rect kCrop2(kWidth2, kHeight2);
+    constexpr size_t kCapacity1 = 1024u;
+    constexpr size_t kCapacity2 = 2048u;
+
+    std::shared_ptr<C2LinearBlock> linearBlock1;
+    std::shared_ptr<C2LinearBlock> linearBlock2;
+    ASSERT_EQ(C2_OK, linearBlockPool->fetchLinearBlock(
+            kCapacity1,
+            { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+            &linearBlock1));
+    ASSERT_EQ(C2_OK, linearBlockPool->fetchLinearBlock(
+            kCapacity2,
+            { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+            &linearBlock2));
+    std::shared_ptr<C2GraphicBlock> graphicBlock1;
+    std::shared_ptr<C2GraphicBlock> graphicBlock2;
+    ASSERT_EQ(C2_OK, graphicBlockPool->fetchGraphicBlock(
+            kWidth1,
+            kHeight1,
+            HAL_PIXEL_FORMAT_YCBCR_420_888,
+            { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+            &graphicBlock1));
+    ASSERT_EQ(C2_OK, graphicBlockPool->fetchGraphicBlock(
+            kWidth2,
+            kHeight2,
+            HAL_PIXEL_FORMAT_YCBCR_420_888,
+            { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+            &graphicBlock2));
+
+    std::shared_ptr<C2BufferData> data(new BufferData({ linearBlock1->share(0, kCapacity1, C2Fence()) }));
+    EXPECT_EQ(C2BufferData::LINEAR, data->type());
+    ASSERT_EQ(1u, data->linearBlocks().size());
+    EXPECT_EQ(linearBlock1->handle(), data->linearBlocks().front().handle());
+    EXPECT_TRUE(data->graphicBlocks().empty());
+
+    data.reset(new BufferData({
+        linearBlock1->share(0, kCapacity1, C2Fence()),
+        linearBlock2->share(0, kCapacity2, C2Fence()),
+    }));
+    EXPECT_EQ(C2BufferData::LINEAR_CHUNKS, data->type());
+    ASSERT_EQ(2u, data->linearBlocks().size());
+    EXPECT_EQ(linearBlock1->handle(), data->linearBlocks().front().handle());
+    EXPECT_EQ(linearBlock2->handle(), data->linearBlocks().back().handle());
+    EXPECT_TRUE(data->graphicBlocks().empty());
+
+    data.reset(new BufferData({ graphicBlock1->share(kCrop1, C2Fence()) }));
+    EXPECT_EQ(C2BufferData::GRAPHIC, data->type());
+    ASSERT_EQ(1u, data->graphicBlocks().size());
+    EXPECT_EQ(graphicBlock1->handle(), data->graphicBlocks().front().handle());
+    EXPECT_TRUE(data->linearBlocks().empty());
+
+    data.reset(new BufferData({
+        graphicBlock1->share(kCrop1, C2Fence()),
+        graphicBlock2->share(kCrop2, C2Fence()),
+    }));
+    EXPECT_EQ(C2BufferData::GRAPHIC_CHUNKS, data->type());
+    ASSERT_EQ(2u, data->graphicBlocks().size());
+    EXPECT_EQ(graphicBlock1->handle(), data->graphicBlocks().front().handle());
+    EXPECT_EQ(graphicBlock2->handle(), data->graphicBlocks().back().handle());
+    EXPECT_TRUE(data->linearBlocks().empty());
+}
+
+void DestroyCallback(const C2Buffer * /* buf */, void *arg) {
+    std::function<void(void)> *cb = (std::function<void(void)> *)arg;
+    (*cb)();
+}
+
+enum : uint32_t {
+    kParamIndexNumber1,
+    kParamIndexNumber2,
+};
+
+typedef C2GlobalParam<C2Info, C2Int32Value, kParamIndexNumber1> C2Number1Info;
+typedef C2GlobalParam<C2Info, C2Int32Value, kParamIndexNumber2> C2Number2Info;
+
+TEST_F(C2BufferTest, BufferTest) {
+    std::shared_ptr<C2BlockPool> alloc(makeLinearBlockPool());
+    constexpr size_t kCapacity = 1024u;
+    std::shared_ptr<C2LinearBlock> block;
+
+    ASSERT_EQ(C2_OK, alloc->fetchLinearBlock(
+            kCapacity,
+            { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+            &block));
+
+    std::atomic_bool destroyed(false);
+    std::function<void(void)> arg = [&destroyed](){ destroyed = true; };
+
+    std::shared_ptr<C2Buffer> buffer(new Buffer( { block->share(0, kCapacity, C2Fence()) }));
+    ASSERT_EQ(C2_OK, buffer->registerOnDestroyNotify(&DestroyCallback, &arg));
+    EXPECT_FALSE(destroyed);
+    ASSERT_EQ(C2_DUPLICATE, buffer->registerOnDestroyNotify(&DestroyCallback, &arg));
+    buffer.reset();
+    EXPECT_TRUE(destroyed);
+
+    buffer.reset(new Buffer( { block->share(0, kCapacity, C2Fence()) }));
+    destroyed = false;
+    ASSERT_EQ(C2_OK, buffer->registerOnDestroyNotify(&DestroyCallback, &arg));
+    EXPECT_FALSE(destroyed);
+    ASSERT_EQ(C2_NOT_FOUND, buffer->unregisterOnDestroyNotify(&DestroyCallback, nullptr));
+    ASSERT_EQ(C2_OK, buffer->unregisterOnDestroyNotify(&DestroyCallback, &arg));
+    EXPECT_FALSE(destroyed);
+    ASSERT_EQ(C2_NOT_FOUND, buffer->unregisterOnDestroyNotify(&DestroyCallback, &arg));
+    buffer.reset();
+    EXPECT_FALSE(destroyed);
+
+    std::shared_ptr<C2Info> info1(new C2Number1Info(1));
+    std::shared_ptr<C2Info> info2(new C2Number2Info(2));
+    buffer.reset(new Buffer( { block->share(0, kCapacity, C2Fence()) }));
+    EXPECT_TRUE(buffer->infos().empty());
+    EXPECT_FALSE(buffer->hasInfo(info1->type()));
+    EXPECT_FALSE(buffer->hasInfo(info2->type()));
+
+    ASSERT_EQ(C2_OK, buffer->setInfo(info1));
+    EXPECT_EQ(1u, buffer->infos().size());
+    EXPECT_EQ(*info1, *buffer->infos().front());
+    EXPECT_TRUE(buffer->hasInfo(info1->type()));
+    EXPECT_FALSE(buffer->hasInfo(info2->type()));
+
+    ASSERT_EQ(C2_OK, buffer->setInfo(info2));
+    EXPECT_EQ(2u, buffer->infos().size());
+    EXPECT_TRUE(buffer->hasInfo(info1->type()));
+    EXPECT_TRUE(buffer->hasInfo(info2->type()));
+
+    std::shared_ptr<C2Info> removed = buffer->removeInfo(info1->type());
+    ASSERT_TRUE(removed);
+    EXPECT_EQ(*removed, *info1);
+    EXPECT_EQ(1u, buffer->infos().size());
+    EXPECT_EQ(*info2, *buffer->infos().front());
+    EXPECT_FALSE(buffer->hasInfo(info1->type()));
+    EXPECT_TRUE(buffer->hasInfo(info2->type()));
+
+    removed = buffer->removeInfo(info1->type());
+    ASSERT_FALSE(removed);
+    EXPECT_EQ(1u, buffer->infos().size());
+    EXPECT_FALSE(buffer->hasInfo(info1->type()));
+    EXPECT_TRUE(buffer->hasInfo(info2->type()));
+
+    std::shared_ptr<C2Info> info3(new C2Number2Info(3));
+    ASSERT_EQ(C2_OK, buffer->setInfo(info3));
+    EXPECT_EQ(1u, buffer->infos().size());
+    EXPECT_FALSE(buffer->hasInfo(info1->type()));
+    EXPECT_TRUE(buffer->hasInfo(info2->type()));
+
+    removed = buffer->removeInfo(info2->type());
+    ASSERT_TRUE(removed);
+    EXPECT_EQ(*info3, *removed);
+    EXPECT_TRUE(buffer->infos().empty());
+    EXPECT_FALSE(buffer->hasInfo(info1->type()));
+    EXPECT_FALSE(buffer->hasInfo(info2->type()));
+}
+
+} // namespace android
diff --git a/media/libstagefright/codec2/vndk/Android.bp b/media/libstagefright/codec2/vndk/Android.bp
new file mode 100644
index 0000000..cc79dc0
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/Android.bp
@@ -0,0 +1,42 @@
+cc_library {
+    name: "libstagefright_codec2_vndk",
+
+    srcs: [
+        "C2AllocatorIon.cpp",
+        "C2AllocatorGralloc.cpp",
+        "C2Buffer.cpp",
+        "C2Config.cpp",
+        "C2Store.cpp",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/codec2/include",
+        "frameworks/native/include/media/hardware",
+    ],
+
+    shared_libs: [
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.mapper@2.0",
+        "libbinder",
+        "libcutils",
+        "libdl",
+        "libhardware",
+        "libhidlbase",
+        "libion",
+        "liblog",
+        "libmedia",
+        "libstagefright_foundation",
+        "libui",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-std=c++14",
+    ],
+}
diff --git a/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
new file mode 100644
index 0000000..18db3e9
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
@@ -0,0 +1,495 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2AllocatorGralloc"
+#include <utils/Log.h>
+
+#include <android/hardware/graphics/allocator/2.0/IAllocator.h>
+#include <android/hardware/graphics/mapper/2.0/IMapper.h>
+#include <cutils/native_handle.h>
+#include <hardware/gralloc.h>
+
+#include <C2AllocatorGralloc.h>
+#include <C2Buffer.h>
+
+namespace android {
+
+using ::android::hardware::graphics::allocator::V2_0::IAllocator;
+using ::android::hardware::graphics::common::V1_0::BufferUsage;
+using ::android::hardware::graphics::common::V1_0::PixelFormat;
+using ::android::hardware::graphics::mapper::V2_0::BufferDescriptor;
+using ::android::hardware::graphics::mapper::V2_0::Error;
+using ::android::hardware::graphics::mapper::V2_0::IMapper;
+using ::android::hardware::graphics::mapper::V2_0::YCbCrLayout;
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_vec;
+
+/* ===================================== GRALLOC ALLOCATION ==================================== */
+static c2_status_t maperr2error(Error maperr) {
+    switch (maperr) {
+        case Error::NONE:           return C2_OK;
+        case Error::BAD_DESCRIPTOR: return C2_BAD_VALUE;
+        case Error::BAD_BUFFER:     return C2_BAD_VALUE;
+        case Error::BAD_VALUE:      return C2_BAD_VALUE;
+        case Error::NO_RESOURCES:   return C2_NO_MEMORY;
+        case Error::UNSUPPORTED:    return C2_CANNOT_DO;
+    }
+    return C2_CORRUPTED;
+}
+
+static
+bool native_handle_is_invalid(const native_handle_t *const handle) {
+    // perform basic validation of a native handle
+    if (handle == nullptr) {
+        // null handle is considered valid
+        return false;
+    }
+    return ((size_t)handle->version != sizeof(native_handle_t) ||
+            handle->numFds < 0 ||
+            handle->numInts < 0 ||
+            // for sanity assume handles must occupy less memory than INT_MAX bytes
+            handle->numFds > int((INT_MAX - handle->version) / sizeof(int)) - handle->numInts);
+}
+
+class C2HandleGralloc : public C2Handle {
+private:
+    struct ExtraData {
+        uint32_t width;
+        uint32_t height;
+        uint32_t format;
+        uint32_t usage_lo;
+        uint32_t usage_hi;
+        uint32_t magic;
+    };
+
+    enum {
+        NUM_INTS = sizeof(ExtraData) / sizeof(int),
+    };
+    const static uint32_t MAGIC = '\xc2gr\x00';
+
+    static
+    const ExtraData* getExtraData(const C2Handle *const handle) {
+        if (handle == nullptr
+                || native_handle_is_invalid(handle)
+                || handle->numInts < NUM_INTS) {
+            return nullptr;
+        }
+        return reinterpret_cast<const ExtraData*>(
+                &handle->data[handle->numFds + handle->numInts - NUM_INTS]);
+    }
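+    // Layout note (illustrative, assuming a wrapped handle with numFds = 1 and an original
+    // gralloc handle carrying 4 ints): after wrapping, numInts = 4 + NUM_INTS = 10, so data[]
+    // holds the fd at index 0, the original ints at indices 1..4, and ExtraData at indices
+    // 5..10, i.e. starting at numFds + numInts - NUM_INTS as computed above.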
+
+    static
+    ExtraData *getExtraData(C2Handle *const handle) {
+        return const_cast<ExtraData *>(getExtraData(const_cast<const C2Handle *const>(handle)));
+    }
+
+public:
+    static bool isValid(const C2Handle *const o) {
+        if (o == nullptr) { // null handle is always valid
+            return true;
+        }
+        const ExtraData *xd = getExtraData(o);
+        // we cannot validate width/height/format/usage without accessing gralloc driver
+        return xd != nullptr && xd->magic == MAGIC;
+    }
+
+    static C2HandleGralloc* WrapNativeHandle(
+            const native_handle_t *const handle,
+            uint32_t width, uint32_t height, uint32_t format, uint64_t usage) {
+        //CHECK(handle != nullptr);
+        if (native_handle_is_invalid(handle) ||
+            handle->numInts > int((INT_MAX - handle->version) / sizeof(int)) - NUM_INTS - handle->numFds) {
+            return nullptr;
+        }
+        ExtraData xd = { width, height, format, uint32_t(usage & 0xFFFFFFFF), uint32_t(usage >> 32), MAGIC };
+        native_handle_t *res = native_handle_create(handle->numFds, handle->numInts + NUM_INTS);
+        if (res != nullptr) {
+            memcpy(&res->data, &handle->data, sizeof(int) * (handle->numFds + handle->numInts));
+            *getExtraData(res) = xd;
+        }
+        return reinterpret_cast<C2HandleGralloc *>(res);
+    }
+
+    static native_handle_t* UnwrapNativeHandle(const C2Handle *const handle) {
+        const ExtraData *xd = getExtraData(handle);
+        if (xd == nullptr || xd->magic != MAGIC) {
+            return nullptr;
+        }
+        native_handle_t *res = native_handle_create(handle->numFds, handle->numInts - NUM_INTS);
+        if (res != nullptr) {
+            memcpy(&res->data, &handle->data, sizeof(int) * (res->numFds + res->numInts));
+        }
+        return res;
+    }
+
+    static const C2HandleGralloc* Import(
+            const C2Handle *const handle,
+            uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage) {
+        const ExtraData *xd = getExtraData(handle);
+        if (xd == nullptr) {
+            return nullptr;
+        }
+        *width = xd->width;
+        *height = xd->height;
+        *format = xd->format;
+        *usage = xd->usage_lo | (uint64_t(xd->usage_hi) << 32);
+
+        return reinterpret_cast<const C2HandleGralloc *>(handle);
+    }
+};
+
+native_handle_t* UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
+    return C2HandleGralloc::UnwrapNativeHandle(handle);
+}
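+// Illustrative wrap/import/unwrap flow (a sketch only; `rawHandle`, `width`, `height`, `format`
+// and `usage` are hypothetical caller-supplied values):
+//
+//   C2HandleGralloc *wrapped = C2HandleGralloc::WrapNativeHandle(
+//           rawHandle, width, height, format, usage);          // appends the ExtraData ints
+//   uint32_t w, h, f; uint64_t u;
+//   const C2HandleGralloc *imported = C2HandleGralloc::Import(wrapped, &w, &h, &f, &u);
+//   native_handle_t *unwrapped = UnwrapNativeCodec2GrallocHandle(wrapped);
+//   // `unwrapped` carries the original fds/ints again and is owned by the caller.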
+
+class C2AllocationGralloc : public C2GraphicAllocation {
+public:
+    virtual ~C2AllocationGralloc() override;
+
+    virtual c2_status_t map(
+            C2Rect rect, C2MemoryUsage usage, int *fenceFd,
+            C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) override;
+    virtual c2_status_t unmap(C2Fence *fenceFd /* nullable */) override;
+    virtual bool isValid() const override { return true; }
+    virtual const C2Handle *handle() const override { return mLockedHandle ? : mHandle; }
+    virtual bool equals(const std::shared_ptr<const C2GraphicAllocation> &other) const override;
+
+    // internal methods
+    // |handle| will be moved.
+    C2AllocationGralloc(
+              const IMapper::BufferDescriptorInfo &info,
+              const sp<IMapper> &mapper,
+              hidl_handle &hidlHandle,
+              const C2HandleGralloc *const handle);
+    int dup() const;
+    c2_status_t status() const;
+
+private:
+    const IMapper::BufferDescriptorInfo mInfo;
+    const sp<IMapper> mMapper;
+    const hidl_handle mHidlHandle;
+    const C2HandleGralloc *mHandle;
+    buffer_handle_t mBuffer;
+    const C2HandleGralloc *mLockedHandle;
+    bool mLocked;
+};
+
+C2AllocationGralloc::C2AllocationGralloc(
+          const IMapper::BufferDescriptorInfo &info,
+          const sp<IMapper> &mapper,
+          hidl_handle &hidlHandle,
+          const C2HandleGralloc *const handle)
+    : C2GraphicAllocation(info.width, info.height),
+      mInfo(info),
+      mMapper(mapper),
+      mHidlHandle(std::move(hidlHandle)),
+      mHandle(handle),
+      mBuffer(nullptr),
+      mLockedHandle(nullptr),
+      mLocked(false) {}
+
+C2AllocationGralloc::~C2AllocationGralloc() {
+    if (!mBuffer) {
+        return;
+    }
+    if (mLocked) {
+        unmap(nullptr);
+    }
+    mMapper->freeBuffer(const_cast<native_handle_t *>(mBuffer));
+}
+
+c2_status_t C2AllocationGralloc::map(
+        C2Rect rect, C2MemoryUsage usage, int *fenceFd,
+        C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) {
+    // TODO
+    (void) fenceFd;
+    (void) usage;
+
+    if (mBuffer && mLocked) {
+        return C2_DUPLICATE;
+    }
+    if (!layout || !addr) {
+        return C2_BAD_VALUE;
+    }
+
+    c2_status_t err = C2_OK;
+    if (!mBuffer) {
+        mMapper->importBuffer(
+                mHidlHandle, [&err, this](const auto &maperr, const auto &buffer) {
+                    err = maperr2error(maperr);
+                    if (err == C2_OK) {
+                        mBuffer = static_cast<buffer_handle_t>(buffer);
+                    }
+                });
+        if (err != C2_OK) {
+            return err;
+        }
+        if (mBuffer == nullptr) {
+            return C2_CORRUPTED;
+        }
+        mLockedHandle = C2HandleGralloc::WrapNativeHandle(
+                mBuffer, mInfo.width, mInfo.height, (uint32_t)mInfo.format, mInfo.usage);
+    }
+
+    if (mInfo.format == PixelFormat::YCBCR_420_888 || mInfo.format == PixelFormat::YV12) {
+        YCbCrLayout ycbcrLayout;
+        mMapper->lockYCbCr(
+                const_cast<native_handle_t *>(mBuffer),
+                BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+                { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
+                // TODO: fence
+                hidl_handle(),
+                [&err, &ycbcrLayout](const auto &maperr, const auto &mapLayout) {
+                    err = maperr2error(maperr);
+                    if (err == C2_OK) {
+                        ycbcrLayout = mapLayout;
+                    }
+                });
+        if (err != C2_OK) {
+            return err;
+        }
+        addr[C2PlanarLayout::PLANE_Y] = (uint8_t *)ycbcrLayout.y;
+        addr[C2PlanarLayout::PLANE_U] = (uint8_t *)ycbcrLayout.cb;
+        addr[C2PlanarLayout::PLANE_V] = (uint8_t *)ycbcrLayout.cr;
+        layout->type = C2PlanarLayout::TYPE_YUV;
+        layout->numPlanes = 3;
+        layout->planes[C2PlanarLayout::PLANE_Y] = {
+            C2PlaneInfo::CHANNEL_Y,         // channel
+            1,                              // colInc
+            (int32_t)ycbcrLayout.yStride,   // rowInc
+            1,                              // mColSampling
+            1,                              // mRowSampling
+            8,                              // allocatedDepth
+            8,                              // bitDepth
+            0,                              // rightShift
+            C2PlaneInfo::NATIVE,            // endianness
+        };
+        layout->planes[C2PlanarLayout::PLANE_U] = {
+            C2PlaneInfo::CHANNEL_CB,          // channel
+            (int32_t)ycbcrLayout.chromaStep,  // colInc
+            (int32_t)ycbcrLayout.cStride,     // rowInc
+            2,                                // mColSampling
+            2,                                // mRowSampling
+            8,                                // allocatedDepth
+            8,                                // bitDepth
+            0,                                // rightShift
+            C2PlaneInfo::NATIVE,              // endianness
+        };
+        layout->planes[C2PlanarLayout::PLANE_V] = {
+            C2PlaneInfo::CHANNEL_CR,          // channel
+            (int32_t)ycbcrLayout.chromaStep,  // colInc
+            (int32_t)ycbcrLayout.cStride,     // rowInc
+            2,                                // mColSampling
+            2,                                // mRowSampling
+            8,                                // allocatedDepth
+            8,                                // bitDepth
+            0,                                // rightShift
+            C2PlaneInfo::NATIVE,              // endianness
+        };
+    } else {
+        void *pointer = nullptr;
+        mMapper->lock(
+                const_cast<native_handle_t *>(mBuffer),
+                BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+                { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
+                // TODO: fence
+                hidl_handle(),
+                [&err, &pointer](const auto &maperr, const auto &mapPointer) {
+                    err = maperr2error(maperr);
+                    if (err == C2_OK) {
+                        pointer = mapPointer;
+                    }
+                });
+        if (err != C2_OK) {
+            return err;
+        }
+        // TODO
+        return C2_OMITTED;
+    }
+    mLocked = true;
+
+    return C2_OK;
+}
+
+c2_status_t C2AllocationGralloc::unmap(C2Fence *fenceFd /* nullable */) {
+    // TODO: fence
+    c2_status_t err = C2_OK;
+    mMapper->unlock(
+            const_cast<native_handle_t *>(mBuffer),
+            [&err, &fenceFd](const auto &maperr, const auto &releaseFence) {
+                // TODO
+                (void) fenceFd;
+                (void) releaseFence;
+                err = maperr2error(maperr);
+                if (err == C2_OK) {
+                    // TODO: fence
+                }
+            });
+    if (err == C2_OK) {
+        mLocked = false;
+    }
+    return err;
+}
+
+bool C2AllocationGralloc::equals(const std::shared_ptr<const C2GraphicAllocation> &other) const {
+    return other && other->handle() == handle();
+}
+
+/* ===================================== GRALLOC ALLOCATOR ==================================== */
+class C2AllocatorGralloc::Impl {
+public:
+    Impl();
+
+    id_t getId() const;
+
+    C2String getName() const;
+
+    c2_status_t newGraphicAllocation(
+            uint32_t width, uint32_t height, uint32_t format, const C2MemoryUsage &usage,
+            std::shared_ptr<C2GraphicAllocation> *allocation);
+
+    c2_status_t priorGraphicAllocation(
+            const C2Handle *handle,
+            std::shared_ptr<C2GraphicAllocation> *allocation);
+
+    c2_status_t status() const { return mInit; }
+
+private:
+    c2_status_t mInit;
+    sp<IAllocator> mAllocator;
+    sp<IMapper> mMapper;
+};
+
+C2AllocatorGralloc::Impl::Impl() : mInit(C2_OK) {
+    // TODO: share a global service
+    mAllocator = IAllocator::getService();
+    mMapper = IMapper::getService();
+    if (mAllocator == nullptr || mMapper == nullptr) {
+        mInit = C2_CORRUPTED;
+    }
+}
+
+C2Allocator::id_t C2AllocatorGralloc::Impl::getId() const {
+    return 1; /// \todo implement ID
+}
+
+C2String C2AllocatorGralloc::Impl::getName() const {
+    return "android.allocator.gralloc";
+}
+
+c2_status_t C2AllocatorGralloc::Impl::newGraphicAllocation(
+        uint32_t width, uint32_t height, uint32_t format, const C2MemoryUsage &usage,
+        std::shared_ptr<C2GraphicAllocation> *allocation) {
+    // TODO: buffer usage should be determined according to |usage|
+    (void) usage;
+
+    IMapper::BufferDescriptorInfo info = {
+        width,
+        height,
+        1u,  // layerCount
+        (PixelFormat)format,
+        BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+    };
+    c2_status_t err = C2_OK;
+    BufferDescriptor desc;
+    mMapper->createDescriptor(
+            info, [&err, &desc](const auto &maperr, const auto &descriptor) {
+                err = maperr2error(maperr);
+                if (err == C2_OK) {
+                    desc = descriptor;
+                }
+            });
+    if (err != C2_OK) {
+        return err;
+    }
+
+    // IAllocator shares IMapper error codes.
+    hidl_handle buffer;
+    mAllocator->allocate(
+            desc,
+            1u,
+            [&err, &buffer](const auto &maperr, const auto &stride, auto &buffers) {
+                (void) stride;
+                err = maperr2error(maperr);
+                if (err != C2_OK) {
+                    return;
+                }
+                if (buffers.size() != 1u) {
+                    err = C2_CORRUPTED;
+                    return;
+                }
+                buffer = std::move(buffers[0]);
+            });
+    if (err != C2_OK) {
+        return err;
+    }
+
+    allocation->reset(new C2AllocationGralloc(
+            info, mMapper, buffer,
+            C2HandleGralloc::WrapNativeHandle(
+                    buffer.getNativeHandle(),
+                    info.width, info.height, (uint32_t)info.format, info.usage)));
+    return C2_OK;
+}
+
+c2_status_t C2AllocatorGralloc::Impl::priorGraphicAllocation(
+        const C2Handle *handle,
+        std::shared_ptr<C2GraphicAllocation> *allocation) {
+    IMapper::BufferDescriptorInfo info;
+    info.layerCount = 1u;
+    const C2HandleGralloc *grallocHandle = C2HandleGralloc::Import(
+            handle,
+            &info.width, &info.height, (uint32_t *)&info.format, (uint64_t *)&info.usage);
+    if (grallocHandle == nullptr) {
+        return C2_BAD_VALUE;
+    }
+
+    hidl_handle hidlHandle = C2HandleGralloc::UnwrapNativeHandle(grallocHandle);
+
+    allocation->reset(new C2AllocationGralloc(info, mMapper, hidlHandle, grallocHandle));
+    return C2_OMITTED;
+}
+
+C2AllocatorGralloc::C2AllocatorGralloc() : mImpl(new Impl) {}
+
+C2AllocatorGralloc::~C2AllocatorGralloc() { delete mImpl; }
+
+C2Allocator::id_t C2AllocatorGralloc::getId() const {
+    return mImpl->getId();
+}
+
+C2String C2AllocatorGralloc::getName() const {
+    return mImpl->getName();
+}
+
+c2_status_t C2AllocatorGralloc::newGraphicAllocation(
+        uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
+        std::shared_ptr<C2GraphicAllocation> *allocation) {
+    return mImpl->newGraphicAllocation(width, height, format, usage, allocation);
+}
+
+c2_status_t C2AllocatorGralloc::priorGraphicAllocation(
+        const C2Handle *handle,
+        std::shared_ptr<C2GraphicAllocation> *allocation) {
+    return mImpl->priorGraphicAllocation(handle, allocation);
+}
+
+c2_status_t C2AllocatorGralloc::status() const {
+    return mImpl->status();
+}
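+// Usage sketch for the gralloc allocator (illustrative; the dimensions, format and usage below
+// are arbitrary example values):
+//
+//   C2AllocatorGralloc allocator;
+//   std::shared_ptr<C2GraphicAllocation> alloc;
+//   c2_status_t res = allocator.newGraphicAllocation(
+//           1280, 720, (uint32_t)PixelFormat::YCBCR_420_888,
+//           { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }, &alloc);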
+
+} // namespace android
diff --git a/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp b/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp
new file mode 100644
index 0000000..34c68bb
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp
@@ -0,0 +1,444 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2AllocatorIon"
+#include <utils/Log.h>
+
+#include <ion/ion.h>
+#include <sys/mman.h>
+
+#include <C2AllocatorIon.h>
+#include <C2Buffer.h>
+#include <C2ErrnoUtils.h>
+
+namespace android {
+
+/* size_t <=> int(lo), int(hi) conversions */
+constexpr inline int size2intLo(size_t s) {
+    return int(s & 0xFFFFFFFF);
+}
+
+constexpr inline int size2intHi(size_t s) {
+    // cast to uint64_t as size_t may be 32 bits wide
+    return int((uint64_t(s) >> 32) & 0xFFFFFFFF);
+}
+
+constexpr inline size_t ints2size(int intLo, int intHi) {
+    // convert in 2 stages to 64 bits as intHi may be negative
+    return size_t(unsigned(intLo)) | size_t(uint64_t(unsigned(intHi)) << 32);
+}
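+// Illustrative round-trip (not part of the original change; assumes a 64-bit size_t): for
+// s = 0x123456789, size2intLo(s) == 0x23456789 and size2intHi(s) == 0x1, and ints2size()
+// reconstructs the original value even when the low half is negative as a signed int:
+//
+//   size_t s = 0x123456789;          // hypothetical 64-bit size
+//   assert(ints2size(size2intLo(s), size2intHi(s)) == s);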
+
+/* ========================================= ION HANDLE ======================================== */
+/**
+ * ION handle
+ *
+ * There can be only one ion client per process; this is captured in the ion fd passed to the
+ * constructor, but it should be managed by the ion buffer allocator/mapper.
+ *
+ * ion uses ion_user_handle_t for buffers. We don't store this in the native handle as
+ * it requires an ion_free to decref. Instead, we share the buffer to get an fd that also holds
+ * a refcount.
+ *
+ * This handle will not capture mapped fds, as updating them would require a global mutex.
+ */
+
+struct C2HandleIon : public C2Handle {
+    // ion handle owns ionFd(!) and bufferFd
+    C2HandleIon(int bufferFd, size_t size)
+        : C2Handle(cHeader),
+          mFds{ bufferFd },
+          mInts{ int(size & 0xFFFFFFFF), int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic } { }
+
+    static bool isValid(const C2Handle * const o);
+
+    int bufferFd() const { return mFds.mBuffer; }
+    size_t size() const {
+        return size_t(unsigned(mInts.mSizeLo))
+                | size_t(uint64_t(unsigned(mInts.mSizeHi)) << 32);
+    }
+
+protected:
+    struct {
+        int mBuffer; // shared ion buffer
+    } mFds;
+    struct {
+        int mSizeLo; // low 32-bits of size
+        int mSizeHi; // high 32-bits of size
+        int mMagic;
+    } mInts;
+
+private:
+    typedef C2HandleIon _type;
+    enum {
+        kMagic = '\xc2io\x00',
+        numFds = sizeof(mFds) / sizeof(int),
+        numInts = sizeof(mInts) / sizeof(int),
+        version = sizeof(C2Handle)
+    };
+    //constexpr static C2Handle cHeader = { version, numFds, numInts, {} };
+    const static C2Handle cHeader;
+};
+
+const C2Handle C2HandleIon::cHeader = {
+    C2HandleIon::version,
+    C2HandleIon::numFds,
+    C2HandleIon::numInts,
+    {}
+};
+
+// static
+bool C2HandleIon::isValid(const C2Handle * const o) {
+    if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
+        return false;
+    }
+    const C2HandleIon *other = static_cast<const C2HandleIon*>(o);
+    return other->mInts.mMagic == kMagic;
+}
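+// Usage sketch (illustrative; `bufferFd` stands for a shared ion buffer fd): the handle simply
+// packs the fd and the 64-bit size, and isValid() checks the header and magic.
+//
+//   C2HandleIon handle(bufferFd, size);
+//   assert(C2HandleIon::isValid(&handle));
+//   assert(handle.size() == size);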
+
+// TODO: is the dup of an ion fd identical to ion_share?
+
+/* ======================================= ION ALLOCATION ====================================== */
+class C2AllocationIon : public C2LinearAllocation {
+public:
+    /* Interface methods */
+    virtual c2_status_t map(
+        size_t offset, size_t size, C2MemoryUsage usage, int *fence,
+        void **addr /* nonnull */) override;
+    virtual c2_status_t unmap(void *addr, size_t size, int *fenceFd) override;
+    virtual bool isValid() const override;
+    virtual ~C2AllocationIon() override;
+    virtual const C2Handle *handle() const override;
+    virtual bool equals(const std::shared_ptr<C2LinearAllocation> &other) const override;
+
+    // internal methods
+    C2AllocationIon(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags);
+    C2AllocationIon(int ionFd, size_t size, int shareFd);
+
+    c2_status_t status() const;
+
+protected:
+    class Impl;
+    Impl *mImpl;
+
+    // TODO: we could make this encapsulate shared_ptr and copiable
+    C2_DO_NOT_COPY(C2AllocationIon);
+};
+
+class C2AllocationIon::Impl {
+private:
+    /**
+     * Constructs an ion allocation.
+     *
+     * \note We always create an ion allocation, even if the allocation or import fails,
+     * so that we can capture the error.
+     *
+     * \param ionFd     ion client (ownership transferred to created object)
+     * \param capacity  size of allocation
+     * \param bufferFd  buffer handle (ownership transferred to created object). Must be
+     *                  invalid if err is not 0.
+     * \param buffer    ion buffer user handle (ownership transferred to created object). Must be
+     *                  invalid if err is not 0.
+     * \param err       errno during buffer allocation or import
+     */
+    Impl(int ionFd, size_t capacity, int bufferFd, ion_user_handle_t buffer, int err)
+        : mIonFd(ionFd),
+          mHandle(bufferFd, capacity),
+          mBuffer(buffer),
+          mInit(c2_map_errno<ENOMEM, EACCES, EINVAL>(err)),
+          mMapFd(-1),
+          mMapSize(0) {
+        if (mInit != C2_OK) {
+            // close ionFd now on error
+            if (mIonFd >= 0) {
+                close(mIonFd);
+                mIonFd = -1;
+            }
+            // C2_CHECK(bufferFd < 0);
+            // C2_CHECK(buffer < 0);
+        }
+    }
+
+public:
+    /**
+     * Constructs an ion allocation by importing a shared buffer fd.
+     *
+     * \param ionFd     ion client (ownership transferred to created object)
+     * \param capacity  size of allocation
+     * \param bufferFd  buffer handle (ownership transferred to created object)
+     *
+     * \return created ion allocation (implementation) which may be invalid if the
+     * import failed.
+     */
+    static Impl *Import(int ionFd, size_t capacity, int bufferFd) {
+        ion_user_handle_t buffer = -1;
+        int ret = ion_import(ionFd, bufferFd, &buffer);
+        return new Impl(ionFd, capacity, bufferFd, buffer, ret);
+    }
+
+    /**
+     * Constructs an ion allocation by allocating an ion buffer.
+     *
+     * \param ionFd     ion client (ownership transferred to created object)
+     * \param size      size of allocation
+     * \param align     desired alignment of allocation
+     * \param heapMask  mask of heaps considered
+     * \param flags     ion allocation flags
+     *
+     * \return created ion allocation (implementation) which may be invalid if the
+     * allocation failed.
+     */
+    static Impl *Alloc(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags) {
+        int bufferFd = -1;
+        ion_user_handle_t buffer = -1;
+        int ret = ion_alloc(ionFd, size, align, heapMask, flags, &buffer);
+        if (ret == 0) {
+            // get buffer fd for native handle constructor
+            ret = ion_share(ionFd, buffer, &bufferFd);
+            if (ret != 0) {
+                ion_free(ionFd, buffer);
+                buffer = -1;
+            }
+        }
+        return new Impl(ionFd, size, bufferFd, buffer, ret);
+    }
+
+    c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd, void **addr) {
+        (void)fenceFd; // TODO: wait for fence
+        *addr = nullptr;
+        if (mMapSize > 0) {
+            // TODO: technically we should return DUPLICATE here, but our block views don't
+            // actually unmap, so we end up remapping an ion buffer multiple times.
+            //
+            // return C2_DUPLICATE;
+        }
+        if (size == 0) {
+            return C2_BAD_VALUE;
+        }
+
+        int prot = PROT_NONE;
+        int flags = MAP_PRIVATE;
+        if (usage.consumer & C2MemoryUsage::CPU_READ) {
+            prot |= PROT_READ;
+        }
+        if (usage.producer & C2MemoryUsage::CPU_WRITE) {
+            prot |= PROT_WRITE;
+            flags = MAP_SHARED;
+        }
+
+        size_t alignmentBytes = offset % PAGE_SIZE;
+        size_t mapOffset = offset - alignmentBytes;
+        size_t mapSize = size + alignmentBytes;
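+        // Worked example of the alignment math above (illustrative, assuming PAGE_SIZE == 4096):
+        // offset = 5000 and size = 100 give alignmentBytes = 904, so we map from
+        // mapOffset = 4096 with mapSize = 1004 and return the mapping base + 904, letting the
+        // caller see exactly the requested bytes while the mapping stays page-aligned.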
+
+        c2_status_t err = C2_OK;
+        if (mMapFd == -1) {
+            int ret = ion_map(mIonFd, mBuffer, mapSize, prot,
+                              flags, mapOffset, (unsigned char**)&mMapAddr, &mMapFd);
+            if (ret) {
+                mMapFd = -1;
+                *addr = nullptr;
+                err = c2_map_errno<EINVAL>(-ret);
+            } else {
+                *addr = (uint8_t *)mMapAddr + alignmentBytes;
+                mMapAlignmentBytes = alignmentBytes;
+                mMapSize = mapSize;
+            }
+        } else {
+            mMapAddr = mmap(nullptr, mapSize, prot, flags, mMapFd, mapOffset);
+            if (mMapAddr == MAP_FAILED) {
+                mMapAddr = *addr = nullptr;
+                err = c2_map_errno<EINVAL>(errno);
+            } else {
+                *addr = (uint8_t *)mMapAddr + alignmentBytes;
+                mMapAlignmentBytes = alignmentBytes;
+                mMapSize = mapSize;
+            }
+        }
+        return err;
+    }
+
+    c2_status_t unmap(void *addr, size_t size, int *fenceFd) {
+        if (mMapFd < 0 || mMapSize == 0) {
+            return C2_NOT_FOUND;
+        }
+        if (addr != (uint8_t *)mMapAddr + mMapAlignmentBytes ||
+                size + mMapAlignmentBytes != mMapSize) {
+            return C2_BAD_VALUE;
+        }
+        int err = munmap(mMapAddr, mMapSize);
+        if (err != 0) {
+            return c2_map_errno<EINVAL>(errno);
+        }
+        if (fenceFd) {
+            *fenceFd = -1; // not using fences
+        }
+        mMapSize = 0;
+        return C2_OK;
+    }
+
+    ~Impl() {
+        if (mMapFd >= 0) {
+            close(mMapFd);
+            mMapFd = -1;
+        }
+        if (mInit == C2_OK) {
+            (void)ion_free(mIonFd, mBuffer);
+        }
+        if (mIonFd >= 0) {
+            close(mIonFd);
+        }
+        native_handle_close(&mHandle);
+    }
+
+    c2_status_t status() const {
+        return mInit;
+    }
+
+    const C2Handle *handle() const {
+        return &mHandle;
+    }
+
+private:
+    int mIonFd;
+    C2HandleIon mHandle;
+    ion_user_handle_t mBuffer;
+    c2_status_t mInit;
+    int mMapFd; // only one for now
+    void *mMapAddr;
+    size_t mMapAlignmentBytes;
+    size_t mMapSize;
+};
+
+c2_status_t C2AllocationIon::map(
+    size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd, void **addr) {
+    return mImpl->map(offset, size, usage, fenceFd, addr);
+}
+
+c2_status_t C2AllocationIon::unmap(void *addr, size_t size, int *fenceFd) {
+    return mImpl->unmap(addr, size, fenceFd);
+}
+
+bool C2AllocationIon::isValid() const {
+    return mImpl->status() == C2_OK;
+}
+
+c2_status_t C2AllocationIon::status() const {
+    return mImpl->status();
+}
+
+bool C2AllocationIon::equals(const std::shared_ptr<C2LinearAllocation> &other) const {
+    return other != nullptr &&
+        other->handle(); // TODO
+}
+
+const C2Handle *C2AllocationIon::handle() const {
+    return mImpl->handle();
+}
+
+C2AllocationIon::~C2AllocationIon() {
+    delete mImpl;
+}
+
+C2AllocationIon::C2AllocationIon(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags)
+    : C2LinearAllocation(size),
+      mImpl(Impl::Alloc(ionFd, size, align, heapMask, flags)) { }
+
+C2AllocationIon::C2AllocationIon(int ionFd, size_t size, int shareFd)
+    : C2LinearAllocation(size),
+      mImpl(Impl::Import(ionFd, size, shareFd)) { }
+
+/* ======================================= ION ALLOCATOR ====================================== */
+C2AllocatorIon::C2AllocatorIon() : mInit(C2_OK), mIonFd(ion_open()) {
+    if (mIonFd < 0) {
+        switch (errno) {
+        case ENOENT:    mInit = C2_OMITTED; break;
+        default:        mInit = c2_map_errno<EACCES>(errno); break;
+        }
+    }
+}
+
+C2AllocatorIon::~C2AllocatorIon() {
+    if (mInit == C2_OK) {
+        ion_close(mIonFd);
+    }
+}
+
+C2Allocator::id_t C2AllocatorIon::getId() const {
+    return 0; /// \todo implement ID
+}
+
+C2String C2AllocatorIon::getName() const {
+    return "android.allocator.ion";
+}
+
+c2_status_t C2AllocatorIon::newLinearAllocation(
+        uint32_t capacity, C2MemoryUsage usage, std::shared_ptr<C2LinearAllocation> *allocation) {
+    if (allocation == nullptr) {
+        return C2_BAD_VALUE;
+    }
+
+    allocation->reset();
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+
+    // get align, heapMask and flags
+    //size_t align = 1;
+    size_t align = 0;
+    unsigned heapMask = ~0;
+    unsigned flags = 0;
+    //TODO
+    (void) usage;
+#if 0
+    int err = mUsageMapper(usage, capacity, &align, &heapMask, &flags);
+    if (err < 0) {
+        return c2_map_errno<EINVAL, ENOMEM, EACCES>(-err);
+    }
+#endif
+
+    std::shared_ptr<C2AllocationIon> alloc
+        = std::make_shared<C2AllocationIon>(dup(mIonFd), capacity, align, heapMask, flags);
+    c2_status_t ret = alloc->status();
+    if (ret == C2_OK) {
+        *allocation = alloc;
+    }
+    return ret;
+}
+
+c2_status_t C2AllocatorIon::priorLinearAllocation(
+        const C2Handle *handle, std::shared_ptr<C2LinearAllocation> *allocation) {
+    *allocation = nullptr;
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+
+    if (!C2HandleIon::isValid(handle)) {
+        return C2_BAD_VALUE;
+    }
+
+    // TODO: get capacity and validate it
+    const C2HandleIon *h = static_cast<const C2HandleIon*>(handle);
+    std::shared_ptr<C2AllocationIon> alloc
+        = std::make_shared<C2AllocationIon>(dup(mIonFd), h->size(), h->bufferFd());
+    c2_status_t ret = alloc->status();
+    if (ret == C2_OK) {
+        *allocation = alloc;
+    }
+    return ret;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/codec2/vndk/C2Buffer.cpp b/media/libstagefright/codec2/vndk/C2Buffer.cpp
new file mode 100644
index 0000000..65a271e
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/C2Buffer.cpp
@@ -0,0 +1,747 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2Buffer"
+#include <utils/Log.h>
+
+#include <map>
+
+#include <C2BufferPriv.h>
+
+namespace android {
+
+namespace {
+
+// Inherit from the parent, share with the friend.
+
+class DummyCapacityAspect : public _C2LinearCapacityAspect {
+    using _C2LinearCapacityAspect::_C2LinearCapacityAspect;
+    friend class ::android::C2ReadView;
+    friend class ::android::C2ConstLinearBlock;
+};
+
+class C2DefaultReadView : public C2ReadView {
+    using C2ReadView::C2ReadView;
+    friend class ::android::C2ConstLinearBlock;
+};
+
+class C2DefaultWriteView : public C2WriteView {
+    using C2WriteView::C2WriteView;
+    friend class ::android::C2LinearBlock;
+};
+
+class C2AcquirableReadView : public C2Acquirable<C2ReadView> {
+    using C2Acquirable::C2Acquirable;
+    friend class ::android::C2ConstLinearBlock;
+};
+
+class C2AcquirableWriteView : public C2Acquirable<C2WriteView> {
+    using C2Acquirable::C2Acquirable;
+    friend class ::android::C2LinearBlock;
+};
+
+class C2DefaultConstLinearBlock : public C2ConstLinearBlock {
+    using C2ConstLinearBlock::C2ConstLinearBlock;
+    friend class ::android::C2LinearBlock;
+};
+
+class C2DefaultLinearBlock : public C2LinearBlock {
+    using C2LinearBlock::C2LinearBlock;
+    friend class ::android::C2BasicLinearBlockPool;
+};
+
+class C2DefaultGraphicView : public C2GraphicView {
+    using C2GraphicView::C2GraphicView;
+    friend class ::android::C2ConstGraphicBlock;
+    friend class ::android::C2GraphicBlock;
+};
+
+class C2AcquirableConstGraphicView : public C2Acquirable<const C2GraphicView> {
+    using C2Acquirable::C2Acquirable;
+    friend class ::android::C2ConstGraphicBlock;
+};
+
+class C2AcquirableGraphicView : public C2Acquirable<C2GraphicView> {
+    using C2Acquirable::C2Acquirable;
+    friend class ::android::C2GraphicBlock;
+};
+
+class C2DefaultConstGraphicBlock : public C2ConstGraphicBlock {
+    using C2ConstGraphicBlock::C2ConstGraphicBlock;
+    friend class ::android::C2GraphicBlock;
+};
+
+class C2DefaultGraphicBlock : public C2GraphicBlock {
+    using C2GraphicBlock::C2GraphicBlock;
+    friend class ::android::C2BasicGraphicBlockPool;
+};
+
+class C2DefaultBufferData : public C2BufferData {
+    using C2BufferData::C2BufferData;
+    friend class ::android::C2Buffer;
+};
+
+}  // namespace
+
+/* ========================================== 1D BLOCK ========================================= */
+
+class C2Block1D::Impl {
+public:
+    const C2Handle *handle() const {
+        return mAllocation->handle();
+    }
+
+    Impl(std::shared_ptr<C2LinearAllocation> alloc)
+        : mAllocation(alloc) {}
+
+private:
+    std::shared_ptr<C2LinearAllocation> mAllocation;
+};
+
+const C2Handle *C2Block1D::handle() const {
+    return mImpl->handle();
+}
+
+C2Block1D::C2Block1D(std::shared_ptr<C2LinearAllocation> alloc)
+    : _C2LinearRangeAspect(alloc.get()), mImpl(new Impl(alloc)) {
+}
+
+C2Block1D::C2Block1D(std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size)
+    : _C2LinearRangeAspect(alloc.get(), offset, size), mImpl(new Impl(alloc)) {
+}
+
+class C2ReadView::Impl {
+public:
+    explicit Impl(const uint8_t *data)
+        : mData(data), mError(C2_OK) {}
+
+    explicit Impl(c2_status_t error)
+        : mData(nullptr), mError(error) {}
+
+    const uint8_t *data() const {
+        return mData;
+    }
+
+    c2_status_t error() const {
+        return mError;
+    }
+
+private:
+    const uint8_t *mData;
+    c2_status_t mError;
+};
+
+C2ReadView::C2ReadView(const _C2LinearCapacityAspect *parent, const uint8_t *data)
+    : _C2LinearCapacityAspect(parent), mImpl(std::make_shared<Impl>(data)) {}
+
+C2ReadView::C2ReadView(c2_status_t error)
+    : _C2LinearCapacityAspect(0u), mImpl(std::make_shared<Impl>(error)) {}
+
+const uint8_t *C2ReadView::data() const {
+    return mImpl->data();
+}
+
+C2ReadView C2ReadView::subView(size_t offset, size_t size) const {
+    if (offset > capacity()) {
+        offset = capacity();
+    }
+    if (size > capacity() - offset) {
+        size = capacity() - offset;
+    }
+    // TRICKY: newCapacity will just be used to grab the size.
+    DummyCapacityAspect newCapacity((uint32_t)size);
+    return C2ReadView(&newCapacity, data() + offset);
+}
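+// Clamping example (illustrative): with capacity() == 100, subView(120, 50) yields an empty
+// view anchored at offset 100, while subView(40, 100) yields a 60-byte view starting at 40.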
+
+c2_status_t C2ReadView::error() const {
+    return mImpl->error();
+}
+
+class C2WriteView::Impl {
+public:
+    explicit Impl(uint8_t *base)
+        : mBase(base), mError(C2_OK) {}
+
+    explicit Impl(c2_status_t error)
+        : mBase(nullptr), mError(error) {}
+
+    uint8_t *base() const {
+        return mBase;
+    }
+
+    c2_status_t error() const {
+        return mError;
+    }
+
+private:
+    uint8_t *mBase;
+    c2_status_t mError;
+};
+
+C2WriteView::C2WriteView(const _C2LinearRangeAspect *parent, uint8_t *base)
+    : _C2EditableLinearRange(parent), mImpl(std::make_shared<Impl>(base)) {}
+
+C2WriteView::C2WriteView(c2_status_t error)
+    : _C2EditableLinearRange(nullptr), mImpl(std::make_shared<Impl>(error)) {}
+
+uint8_t *C2WriteView::base() { return mImpl->base(); }
+
+uint8_t *C2WriteView::data() { return mImpl->base() + offset(); }
+
+c2_status_t C2WriteView::error() const { return mImpl->error(); }
+
+class C2ConstLinearBlock::Impl {
+public:
+    explicit Impl(std::shared_ptr<C2LinearAllocation> alloc)
+        : mAllocation(alloc), mBase(nullptr), mSize(0u), mError(C2_CORRUPTED) {}
+
+    ~Impl() {
+        if (mBase != nullptr) {
+            // TODO: fence
+            c2_status_t err = mAllocation->unmap(mBase, mSize, nullptr);
+            if (err != C2_OK) {
+                // TODO: Log?
+            }
+        }
+    }
+
+    C2ConstLinearBlock subBlock(size_t offset, size_t size) const {
+        return C2ConstLinearBlock(mAllocation, offset, size);
+    }
+
+    void map(size_t offset, size_t size) {
+        if (mBase == nullptr) {
+            void *base = nullptr;
+            mError = mAllocation->map(
+                    offset, size, { C2MemoryUsage::CPU_READ, 0 }, nullptr, &base);
+            // TODO: fence
+            if (mError == C2_OK) {
+                mBase = (uint8_t *)base;
+                mSize = size;
+            }
+        }
+    }
+
+    const uint8_t *base() const { return mBase; }
+
+    c2_status_t error() const { return mError; }
+
+private:
+    std::shared_ptr<C2LinearAllocation> mAllocation;
+    uint8_t *mBase;
+    size_t mSize;
+    c2_status_t mError;
+};
+
+C2ConstLinearBlock::C2ConstLinearBlock(std::shared_ptr<C2LinearAllocation> alloc)
+    : C2Block1D(alloc), mImpl(std::make_shared<Impl>(alloc)) {}
+
+C2ConstLinearBlock::C2ConstLinearBlock(
+        std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size)
+    : C2Block1D(alloc, offset, size), mImpl(std::make_shared<Impl>(alloc)) {}
+
+C2Acquirable<C2ReadView> C2ConstLinearBlock::map() const {
+    mImpl->map(offset(), size());
+    if (mImpl->base() == nullptr) {
+        C2DefaultReadView view(mImpl->error());
+        return C2AcquirableReadView(mImpl->error(), mFence, view);
+    }
+    DummyCapacityAspect newCapacity(size());
+    C2DefaultReadView view(&newCapacity, mImpl->base());
+    return C2AcquirableReadView(mImpl->error(), mFence, view);
+}
+
+C2ConstLinearBlock C2ConstLinearBlock::subBlock(size_t offset, size_t size) const {
+    return mImpl->subBlock(offset, size);
+}
+
+class C2LinearBlock::Impl {
+public:
+    Impl(std::shared_ptr<C2LinearAllocation> alloc)
+        : mAllocation(alloc), mBase(nullptr), mSize(0u), mError(C2_CORRUPTED) {}
+
+    ~Impl() {
+        if (mBase != nullptr) {
+            // TODO: fence
+            c2_status_t err = mAllocation->unmap(mBase, mSize, nullptr);
+            if (err != C2_OK) {
+                // TODO: Log?
+            }
+        }
+    }
+
+    void map(size_t capacity) {
+        if (mBase == nullptr) {
+            void *base = nullptr;
+            // TODO: fence
+            mError = mAllocation->map(
+                    0u,
+                    capacity,
+                    { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+                    nullptr,
+                    &base);
+            if (mError == C2_OK) {
+                mBase = (uint8_t *)base;
+                mSize = capacity;
+            }
+        }
+    }
+
+    C2ConstLinearBlock share(size_t offset, size_t size, C2Fence &fence) {
+        // TODO
+        (void) fence;
+        return C2DefaultConstLinearBlock(mAllocation, offset, size);
+    }
+
+    uint8_t *base() const { return mBase; }
+
+    c2_status_t error() const { return mError; }
+
+    C2Fence fence() const { return mFence; }
+
+private:
+    std::shared_ptr<C2LinearAllocation> mAllocation;
+    uint8_t *mBase;
+    size_t mSize;
+    c2_status_t mError;
+    C2Fence mFence;
+};
+
+C2LinearBlock::C2LinearBlock(std::shared_ptr<C2LinearAllocation> alloc)
+    : C2Block1D(alloc),
+      mImpl(new Impl(alloc)) {}
+
+C2LinearBlock::C2LinearBlock(std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size)
+    : C2Block1D(alloc, offset, size),
+      mImpl(new Impl(alloc)) {}
+
+C2Acquirable<C2WriteView> C2LinearBlock::map() {
+    mImpl->map(capacity());
+    if (mImpl->base() == nullptr) {
+        C2DefaultWriteView view(mImpl->error());
+        return C2AcquirableWriteView(mImpl->error(), mImpl->fence(), view);
+    }
+    C2DefaultWriteView view(this, mImpl->base());
+    view.setOffset_be(offset());
+    view.setSize_be(size());
+    return C2AcquirableWriteView(mImpl->error(), mImpl->fence(), view);
+}
+
+C2ConstLinearBlock C2LinearBlock::share(size_t offset, size_t size, C2Fence fence) {
+    return mImpl->share(offset, size, fence);
+}
+
+C2BasicLinearBlockPool::C2BasicLinearBlockPool(
+        const std::shared_ptr<C2Allocator> &allocator)
+  : mAllocator(allocator) {}
+
+c2_status_t C2BasicLinearBlockPool::fetchLinearBlock(
+        uint32_t capacity,
+        C2MemoryUsage usage,
+        std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
+    block->reset();
+
+    std::shared_ptr<C2LinearAllocation> alloc;
+    c2_status_t err = mAllocator->newLinearAllocation(capacity, usage, &alloc);
+    if (err != C2_OK) {
+        return err;
+    }
+
+    block->reset(new C2DefaultLinearBlock(alloc));
+
+    return C2_OK;
+}
+
+/* ========================================== 2D BLOCK ========================================= */
+
+class C2Block2D::Impl {
+public:
+    const C2Handle *handle() const {
+        return mAllocation->handle();
+    }
+
+    Impl(const std::shared_ptr<C2GraphicAllocation> &alloc)
+        : mAllocation(alloc) {}
+
+private:
+    std::shared_ptr<C2GraphicAllocation> mAllocation;
+};
+
+C2Block2D::C2Block2D(const std::shared_ptr<C2GraphicAllocation> &alloc)
+    : _C2PlanarSection(alloc.get()), mImpl(new Impl(alloc)) {}
+
+const C2Handle *C2Block2D::handle() const {
+    return mImpl->handle();
+}
+
+class C2GraphicView::Impl {
+public:
+    Impl(uint8_t *const *data, const C2PlanarLayout &layout)
+        : mData(data), mLayout(layout), mError(C2_OK) {}
+    explicit Impl(c2_status_t error) : mData(nullptr), mError(error) {}
+
+    uint8_t *const *data() const { return mData; }
+    const C2PlanarLayout &layout() const { return mLayout; }
+    c2_status_t error() const { return mError; }
+
+private:
+    uint8_t *const *mData;
+    C2PlanarLayout mLayout;
+    c2_status_t mError;
+};
+
+C2GraphicView::C2GraphicView(
+        const _C2PlanarCapacityAspect *parent,
+        uint8_t *const *data,
+        const C2PlanarLayout& layout)
+    : _C2PlanarSection(parent), mImpl(new Impl(data, layout)) {}
+
+C2GraphicView::C2GraphicView(c2_status_t error)
+    : _C2PlanarSection(nullptr), mImpl(new Impl(error)) {}
+
+const uint8_t *const *C2GraphicView::data() const {
+    return mImpl->data();
+}
+
+uint8_t *const *C2GraphicView::data() {
+    return mImpl->data();
+}
+
+const C2PlanarLayout C2GraphicView::layout() const {
+    return mImpl->layout();
+}
+
+const C2GraphicView C2GraphicView::subView(const C2Rect &rect) const {
+    C2GraphicView view(this, mImpl->data(), mImpl->layout());
+    view.setCrop_be(rect);
+    return view;
+}
+
+C2GraphicView C2GraphicView::subView(const C2Rect &rect) {
+    C2GraphicView view(this, mImpl->data(), mImpl->layout());
+    view.setCrop_be(rect);
+    return view;
+}
+
+c2_status_t C2GraphicView::error() const {
+    return mImpl->error();
+}
+
+class C2ConstGraphicBlock::Impl {
+public:
+    explicit Impl(const std::shared_ptr<C2GraphicAllocation> &alloc)
+        : mAllocation(alloc), mData{ nullptr } {}
+
+    ~Impl() {
+        if (mData[0] != nullptr) {
+            // TODO: fence
+            mAllocation->unmap(nullptr);
+        }
+    }
+
+    c2_status_t map(C2Rect rect) {
+        if (mData[0] != nullptr) {
+            // Already mapped.
+            return C2_OK;
+        }
+        c2_status_t err = mAllocation->map(
+                rect,
+                { C2MemoryUsage::CPU_READ, 0 },
+                nullptr,
+                &mLayout,
+                mData);
+        if (err != C2_OK) {
+            memset(mData, 0, sizeof(mData));
+        }
+        return err;
+    }
+
+    C2ConstGraphicBlock subBlock(const C2Rect &rect, C2Fence fence) const {
+        C2ConstGraphicBlock block(mAllocation, fence);
+        block.setCrop_be(rect);
+        return block;
+    }
+
+    uint8_t *const *data() const {
+        return mData[0] == nullptr ? nullptr : &mData[0];
+    }
+
+    const C2PlanarLayout &layout() const { return mLayout; }
+
+private:
+    std::shared_ptr<C2GraphicAllocation> mAllocation;
+    C2PlanarLayout mLayout;
+    uint8_t *mData[C2PlanarLayout::MAX_NUM_PLANES];
+};
+
+C2ConstGraphicBlock::C2ConstGraphicBlock(
+        const std::shared_ptr<C2GraphicAllocation> &alloc, C2Fence fence)
+    : C2Block2D(alloc), mImpl(new Impl(alloc)), mFence(fence) {}
+
+C2Acquirable<const C2GraphicView> C2ConstGraphicBlock::map() const {
+    c2_status_t err = mImpl->map(crop());
+    if (err != C2_OK) {
+        C2DefaultGraphicView view(err);
+        return C2AcquirableConstGraphicView(err, mFence, view);
+    }
+    C2DefaultGraphicView view(this, mImpl->data(), mImpl->layout());
+    return C2AcquirableConstGraphicView(err, mFence, view);
+}
+
+C2ConstGraphicBlock C2ConstGraphicBlock::subBlock(const C2Rect &rect) const {
+    return mImpl->subBlock(rect, mFence);
+}
+
+class C2GraphicBlock::Impl {
+public:
+    explicit Impl(const std::shared_ptr<C2GraphicAllocation> &alloc)
+        : mAllocation(alloc), mData{ nullptr } {}
+
+    ~Impl() {
+        if (mData[0] != nullptr) {
+            // TODO: fence
+            mAllocation->unmap(nullptr);
+        }
+    }
+
+    c2_status_t map(C2Rect rect) {
+        if (mData[0] != nullptr) {
+            // Already mapped.
+            return C2_OK;
+        }
+        uint8_t *data[C2PlanarLayout::MAX_NUM_PLANES];
+        c2_status_t err = mAllocation->map(
+                rect,
+                { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+                nullptr,
+                &mLayout,
+                data);
+        if (err == C2_OK) {
+            memcpy(mData, data, sizeof(mData));
+        } else {
+            memset(mData, 0, sizeof(mData));
+        }
+        return err;
+    }
+
+    C2ConstGraphicBlock share(const C2Rect &crop, C2Fence fence) const {
+        C2DefaultConstGraphicBlock block(mAllocation, fence);
+        block.setCrop_be(crop);
+        return block;
+    }
+
+    uint8_t *const *data() const {
+        return mData[0] == nullptr ? nullptr : mData;
+    }
+
+    const C2PlanarLayout &layout() const { return mLayout; }
+
+private:
+    std::shared_ptr<C2GraphicAllocation> mAllocation;
+    C2PlanarLayout mLayout;
+    uint8_t *mData[C2PlanarLayout::MAX_NUM_PLANES];
+};
+
+C2GraphicBlock::C2GraphicBlock(const std::shared_ptr<C2GraphicAllocation> &alloc)
+    : C2Block2D(alloc), mImpl(new Impl(alloc)) {}
+
+C2Acquirable<C2GraphicView> C2GraphicBlock::map() {
+    c2_status_t err = mImpl->map(crop());
+    if (err != C2_OK) {
+        C2DefaultGraphicView view(err);
+        // TODO: fence
+        return C2AcquirableGraphicView(err, C2Fence(), view);
+    }
+    C2DefaultGraphicView view(this, mImpl->data(), mImpl->layout());
+    // TODO: fence
+    return C2AcquirableGraphicView(err, C2Fence(), view);
+}
+
+C2ConstGraphicBlock C2GraphicBlock::share(const C2Rect &crop, C2Fence fence) {
+    return mImpl->share(crop, fence);
+}
+
+C2BasicGraphicBlockPool::C2BasicGraphicBlockPool(
+        const std::shared_ptr<C2Allocator> &allocator)
+  : mAllocator(allocator) {}
+
+c2_status_t C2BasicGraphicBlockPool::fetchGraphicBlock(
+        uint32_t width,
+        uint32_t height,
+        uint32_t format,
+        C2MemoryUsage usage,
+        std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+    block->reset();
+
+    std::shared_ptr<C2GraphicAllocation> alloc;
+    c2_status_t err = mAllocator->newGraphicAllocation(width, height, format, usage, &alloc);
+    if (err != C2_OK) {
+        return err;
+    }
+
+    block->reset(new C2DefaultGraphicBlock(alloc));
+
+    return C2_OK;
+}
+
+/* ========================================== BUFFER ========================================= */
+
+class C2BufferData::Impl {
+public:
+    explicit Impl(const std::list<C2ConstLinearBlock> &blocks)
+        : mType(blocks.size() == 1 ? LINEAR : LINEAR_CHUNKS),
+          mLinearBlocks(blocks) {
+    }
+
+    explicit Impl(const std::list<C2ConstGraphicBlock> &blocks)
+        : mType(blocks.size() == 1 ? GRAPHIC : GRAPHIC_CHUNKS),
+          mGraphicBlocks(blocks) {
+    }
+
+    Type type() const { return mType; }
+    const std::list<C2ConstLinearBlock> &linearBlocks() const { return mLinearBlocks; }
+    const std::list<C2ConstGraphicBlock> &graphicBlocks() const { return mGraphicBlocks; }
+
+private:
+    Type mType;
+    std::list<C2ConstLinearBlock> mLinearBlocks;
+    std::list<C2ConstGraphicBlock> mGraphicBlocks;
+};
+
+C2BufferData::C2BufferData(const std::list<C2ConstLinearBlock> &blocks) : mImpl(new Impl(blocks)) {}
+C2BufferData::C2BufferData(const std::list<C2ConstGraphicBlock> &blocks) : mImpl(new Impl(blocks)) {}
+
+C2BufferData::Type C2BufferData::type() const { return mImpl->type(); }
+
+const std::list<C2ConstLinearBlock> C2BufferData::linearBlocks() const {
+    return mImpl->linearBlocks();
+}
+
+const std::list<C2ConstGraphicBlock> C2BufferData::graphicBlocks() const {
+    return mImpl->graphicBlocks();
+}
+
+class C2Buffer::Impl {
+public:
+    Impl(C2Buffer *thiz, const std::list<C2ConstLinearBlock> &blocks)
+        : mThis(thiz), mData(blocks) {}
+    Impl(C2Buffer *thiz, const std::list<C2ConstGraphicBlock> &blocks)
+        : mThis(thiz), mData(blocks) {}
+
+    ~Impl() {
+        for (const auto &pair : mNotify) {
+            pair.first(mThis, pair.second);
+        }
+    }
+
+    const C2BufferData &data() const { return mData; }
+
+    c2_status_t registerOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
+        auto it = std::find_if(
+                mNotify.begin(), mNotify.end(),
+                [onDestroyNotify, arg] (const auto &pair) {
+                    return pair.first == onDestroyNotify && pair.second == arg;
+                });
+        if (it != mNotify.end()) {
+            return C2_DUPLICATE;
+        }
+        mNotify.emplace_back(onDestroyNotify, arg);
+        return C2_OK;
+    }
+
+    c2_status_t unregisterOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
+        auto it = std::find_if(
+                mNotify.begin(), mNotify.end(),
+                [onDestroyNotify, arg] (const auto &pair) {
+                    return pair.first == onDestroyNotify && pair.second == arg;
+                });
+        if (it == mNotify.end()) {
+            return C2_NOT_FOUND;
+        }
+        mNotify.erase(it);
+        return C2_OK;
+    }
+
+    std::list<std::shared_ptr<const C2Info>> infos() const {
+        std::list<std::shared_ptr<const C2Info>> result(mInfos.size());
+        std::transform(
+                mInfos.begin(), mInfos.end(), result.begin(),
+                [] (const auto &elem) { return elem.second; });
+        return result;
+    }
+
+    c2_status_t setInfo(const std::shared_ptr<C2Info> &info) {
+        // To "update" you need to erase the existing one if any, and then insert.
+        (void) mInfos.erase(info->type());
+        (void) mInfos.insert({ info->type(), info });
+        return C2_OK;
+    }
+
+    bool hasInfo(C2Param::Type index) const {
+        return mInfos.count(index.type()) > 0;
+    }
+
+    std::shared_ptr<C2Info> removeInfo(C2Param::Type index) {
+        auto it = mInfos.find(index.type());
+        if (it == mInfos.end()) {
+            return nullptr;
+        }
+        std::shared_ptr<C2Info> ret = it->second;
+        (void) mInfos.erase(it);
+        return ret;
+    }
+
+private:
+    C2Buffer * const mThis;
+    C2DefaultBufferData mData;
+    std::map<C2Param::Type, std::shared_ptr<C2Info>> mInfos;
+    std::list<std::pair<OnDestroyNotify, void *>> mNotify;
+};
+
+C2Buffer::C2Buffer(const std::list<C2ConstLinearBlock> &blocks)
+    : mImpl(new Impl(this, blocks)) {}
+
+C2Buffer::C2Buffer(const std::list<C2ConstGraphicBlock> &blocks)
+    : mImpl(new Impl(this, blocks)) {}
+
+const C2BufferData C2Buffer::data() const { return mImpl->data(); }
+
+c2_status_t C2Buffer::registerOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
+    return mImpl->registerOnDestroyNotify(onDestroyNotify, arg);
+}
+
+c2_status_t C2Buffer::unregisterOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
+    return mImpl->unregisterOnDestroyNotify(onDestroyNotify, arg);
+}
+
+const std::list<std::shared_ptr<const C2Info>> C2Buffer::infos() const {
+    return mImpl->infos();
+}
+
+c2_status_t C2Buffer::setInfo(const std::shared_ptr<C2Info> &info) {
+    return mImpl->setInfo(info);
+}
+
+bool C2Buffer::hasInfo(C2Param::Type index) const {
+    return mImpl->hasInfo(index);
+}
+
+std::shared_ptr<C2Info> C2Buffer::removeInfo(C2Param::Type index) {
+    return mImpl->removeInfo(index);
+}
+
+} // namespace android
diff --git a/media/libstagefright/MediaSource.cpp b/media/libstagefright/codec2/vndk/C2Config.cpp
similarity index 69%
copy from media/libstagefright/MediaSource.cpp
copy to media/libstagefright/codec2/vndk/C2Config.cpp
index a17757a..6acf524 100644
--- a/media/libstagefright/MediaSource.cpp
+++ b/media/libstagefright/codec2/vndk/C2Config.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2009 The Android Open Source Project
+ * Copyright (C) 2017 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,12 +14,11 @@
  * limitations under the License.
  */
 
-#include <media/stagefright/MediaSource.h>
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2Config"
 
-namespace android {
-
-MediaSource::MediaSource() {}
-
-MediaSource::~MediaSource() {}
-
-}  // namespace android
+/**
+ * Define and initialize global config field descriptors in this cpp file
+ */
+#define __C2_GENERATE_GLOBAL_VARS__
+#include <C2Config.h>
diff --git a/media/libstagefright/codec2/vndk/C2Store.cpp b/media/libstagefright/codec2/vndk/C2Store.cpp
new file mode 100644
index 0000000..eb72d17
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/C2Store.cpp
@@ -0,0 +1,530 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <C2AllocatorGralloc.h>
+#include <C2AllocatorIon.h>
+#include <C2BufferPriv.h>
+#include <C2Component.h>
+#include <C2PlatformSupport.h>
+
+#define LOG_TAG "C2Store"
+#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <dlfcn.h>
+
+#include <map>
+#include <memory>
+#include <mutex>
+
+namespace android {
+
+/**
+ * The platform allocator store provides the basic allocator types for the framework, based on
+ * ion and gralloc. Allocators are not meant to be updatable.
+ *
+ * \todo Provide allocator based on ashmem
+ * \todo Move ion allocation into its HIDL or provide some mapping from memory usage to ion flags
+ * \todo Make this allocator store extendable
+ */
+class C2PlatformAllocatorStore : public C2AllocatorStore {
+public:
+    enum : id_t {
+        ION = PLATFORM_START,
+        GRALLOC,
+    };
+
+    C2PlatformAllocatorStore(
+        /* ionmapper */
+    );
+
+    virtual c2_status_t fetchAllocator(
+            id_t id, std::shared_ptr<C2Allocator> *const allocator) override;
+
+    virtual std::vector<std::shared_ptr<const C2Allocator::Traits>> listAllocators_nb()
+            const override {
+        return std::vector<std::shared_ptr<const C2Allocator::Traits>>(); /// \todo
+    }
+
+    virtual C2String getName() const override {
+        return "android.allocator-store";
+    }
+
+private:
+    /// returns a shared-singleton ion allocator
+    std::shared_ptr<C2Allocator> fetchIonAllocator();
+
+    /// returns a shared-singleton gralloc allocator
+    std::shared_ptr<C2Allocator> fetchGrallocAllocator();
+};
+
+C2PlatformAllocatorStore::C2PlatformAllocatorStore() {
+}
+
+c2_status_t C2PlatformAllocatorStore::fetchAllocator(
+        id_t id, std::shared_ptr<C2Allocator> *const allocator) {
+    allocator->reset();
+    switch (id) {
+    // TODO: should we implement a generic registry for all, and use that?
+    case C2PlatformAllocatorStore::ION:
+    case C2AllocatorStore::DEFAULT_LINEAR:
+        *allocator = fetchIonAllocator();
+        break;
+
+    case C2PlatformAllocatorStore::GRALLOC:
+    case C2AllocatorStore::DEFAULT_GRAPHIC:
+        *allocator = fetchGrallocAllocator();
+        break;
+
+    default:
+        return C2_NOT_FOUND;
+    }
+    if (*allocator == nullptr) {
+        return C2_NO_MEMORY;
+    }
+    return C2_OK;
+}
+
+std::shared_ptr<C2Allocator> C2PlatformAllocatorStore::fetchIonAllocator() {
+    static std::mutex mutex;
+    static std::weak_ptr<C2Allocator> ionAllocator;
+    std::lock_guard<std::mutex> lock(mutex);
+    std::shared_ptr<C2Allocator> allocator = ionAllocator.lock();
+    if (allocator == nullptr) {
+        allocator = std::make_shared<C2AllocatorIon>();
+        ionAllocator = allocator;
+    }
+    return allocator;
+}
+
+std::shared_ptr<C2Allocator> C2PlatformAllocatorStore::fetchGrallocAllocator() {
+    static std::mutex mutex;
+    static std::weak_ptr<C2Allocator> grallocAllocator;
+    std::lock_guard<std::mutex> lock(mutex);
+    std::shared_ptr<C2Allocator> allocator = grallocAllocator.lock();
+    if (allocator == nullptr) {
+        allocator = std::make_shared<C2AllocatorGralloc>();
+        grallocAllocator = allocator;
+    }
+    return allocator;
+}
+
+std::shared_ptr<C2AllocatorStore> GetCodec2PlatformAllocatorStore() {
+    return std::make_shared<C2PlatformAllocatorStore>();
+}
+
+c2_status_t GetCodec2BlockPool(
+        C2BlockPool::local_id_t id, std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool) {
+    pool->reset();
+    if (!component) {
+        return C2_BAD_VALUE;
+    }
+    // TODO support pre-registered block pools
+    std::shared_ptr<C2AllocatorStore> allocatorStore = GetCodec2PlatformAllocatorStore();
+    std::shared_ptr<C2Allocator> allocator;
+    c2_status_t res = C2_NOT_FOUND;
+
+    switch (id) {
+    case C2BlockPool::BASIC_LINEAR:
+        res = allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+        if (res == C2_OK) {
+            *pool = std::make_shared<C2BasicLinearBlockPool>(allocator);
+        }
+        break;
+    case C2BlockPool::BASIC_GRAPHIC:
+        res = allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_GRAPHIC, &allocator);
+        if (res == C2_OK) {
+            *pool = std::make_shared<C2BasicGraphicBlockPool>(allocator);
+        }
+        break;
+    default:
+        break;
+    }
+    return res;
+}
+
+class C2PlatformComponentStore : public C2ComponentStore {
+public:
+    virtual std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() override;
+    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const override;
+    virtual C2String getName() const override;
+    virtual c2_status_t querySupportedValues_sm(
+            std::vector<C2FieldSupportedValuesQuery> &fields) const override;
+    virtual c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> *const params) const override;
+    virtual c2_status_t query_sm(
+            const std::vector<C2Param *const> &stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            std::vector<std::unique_ptr<C2Param>> *const heapParams) const override;
+    virtual c2_status_t createInterface(
+            C2String name, std::shared_ptr<C2ComponentInterface> *const interface) override;
+    virtual c2_status_t createComponent(
+            C2String name, std::shared_ptr<C2Component> *const component) override;
+    virtual c2_status_t copyBuffer(
+            std::shared_ptr<C2GraphicBuffer> src, std::shared_ptr<C2GraphicBuffer> dst) override;
+    virtual c2_status_t config_sm(
+            const std::vector<C2Param *const> &params,
+            std::vector<std::unique_ptr<C2SettingResult>> *const failures) override;
+    C2PlatformComponentStore();
+
+    virtual ~C2PlatformComponentStore() override = default;
+
+private:
+
+    /**
+     * An object encapsulating a loaded component module.
+     *
+     * \todo provide a way to add traits to known components here to avoid loading the .so-s
+     * for listComponents
+     */
+    struct ComponentModule : public C2ComponentFactory,
+            public std::enable_shared_from_this<ComponentModule> {
+        virtual c2_status_t createComponent(
+                c2_node_id_t id, std::shared_ptr<C2Component> *component,
+                ComponentDeleter deleter = std::default_delete<C2Component>()) override;
+        virtual c2_status_t createInterface(
+                c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *interface,
+                InterfaceDeleter deleter = std::default_delete<C2ComponentInterface>()) override;
+
+        /**
+         * \returns the traits of the component in this module.
+         */
+        std::shared_ptr<const C2Component::Traits> getTraits();
+
+        /**
+         * Creates an uninitialized component module.
+         *
+         * \note Only used by ComponentLoader.
+         */
+        ComponentModule()
+            : mInit(C2_NO_INIT),
+              mLibHandle(nullptr),
+              createFactory(nullptr),
+              destroyFactory(nullptr),
+              mComponentFactory(nullptr) {}
+
+        /**
+         * Initializes a component module with a given library path. Must be called exactly once.
+         *
+         * \note Only used by ComponentLoader.
+         *
+         * \param libPath[in] library path (or name)
+         *
+         * \retval C2_OK        the component module has been successfully loaded
+         * \retval C2_NO_MEMORY not enough memory to load the component module
+         * \retval C2_NOT_FOUND could not locate the component module
+         * \retval C2_CORRUPTED the component module could not be loaded (unexpected)
+         * \retval C2_REFUSED   permission denied to load the component module (unexpected)
+         * \retval C2_TIMED_OUT could not load the module within the time limit (unexpected)
+         */
+        c2_status_t init(std::string libPath);
+
+        virtual ~ComponentModule() override;
+
+    protected:
+        std::recursive_mutex mLock; ///< lock protecting mTraits
+        std::shared_ptr<C2Component::Traits> mTraits; ///< cached component traits
+
+        c2_status_t mInit; ///< initialization result
+
+        void *mLibHandle; ///< loaded library handle
+        C2ComponentFactory::CreateCodec2FactoryFunc createFactory; ///< loaded create function
+        C2ComponentFactory::DestroyCodec2FactoryFunc destroyFactory; ///< loaded destroy function
+        C2ComponentFactory *mComponentFactory; ///< loaded/created component factory
+    };
+
+    /**
+     * An object encapsulating a loadable component module.
+     *
+     * \todo make this also work for enumerations
+     */
+    struct ComponentLoader {
+        /**
+         * Load the component module.
+         *
+         * This method simply returns the component module if it is currently loaded, or
+         * attempts to load it if it is not.
+         *
+         * \param module[out] pointer to the shared pointer where the loaded module shall be stored.
+         *                    This will be nullptr on error.
+         *
+         * \retval C2_OK        the component module has been successfully loaded
+         * \retval C2_NO_MEMORY not enough memory to load the component module
+         * \retval C2_NOT_FOUND could not locate the component module
+         * \retval C2_CORRUPTED the component module could not be loaded
+         * \retval C2_REFUSED   permission denied to load the component module
+         */
+        c2_status_t fetchModule(std::shared_ptr<ComponentModule> *module) {
+            c2_status_t res = C2_OK;
+            std::lock_guard<std::mutex> lock(mMutex);
+            std::shared_ptr<ComponentModule> localModule = mModule.lock();
+            if (localModule == nullptr) {
+                localModule = std::make_shared<ComponentModule>();
+                res = localModule->init(mLibPath);
+                if (res == C2_OK) {
+                    mModule = localModule;
+                } else {
+                    // match the documented contract: *module is nullptr on error
+                    localModule.reset();
+                }
+            }
+            *module = localModule;
+            return res;
+        }
+
+        /**
+         * Creates a component loader for a specific library path (or name).
+         */
+        ComponentLoader(std::string libPath)
+            : mLibPath(libPath) {}
+
+    private:
+        std::mutex mMutex; ///< mutex guarding the module
+        std::weak_ptr<ComponentModule> mModule; ///< weak reference to the loaded module
+        std::string mLibPath; ///< library path (or name)
+    };
+
+    /**
+     * Retrieves the component loader for a component.
+     *
+     * \param loader[out] non-ref-holding pointer to the component loader; nullptr on failure.
+     *
+     * \retval C2_OK        the component loader has been successfully retrieved
+     * \retval C2_NO_MEMORY not enough memory to locate the component loader
+     * \retval C2_NOT_FOUND could not locate the component to be loaded
+     * \retval C2_CORRUPTED the component loader could not be identified due to some modules being
+     *                      corrupted (this can happen if the name does not refer to an already
+     *                      identified component but some components could not be loaded due to
+     *                      a bad library)
+     * \retval C2_REFUSED   permission denied to find the component loader for the named component
+     *                      (this can happen if the name does not refer to an already identified
+     *                      component but some components could not be loaded due to lack of
+     *                      permissions)
+     */
+    c2_status_t findComponent(C2String name, ComponentLoader **loader);
+
+    std::map<C2String, ComponentLoader> mComponents; ///< list of components
+};
+
+c2_status_t C2PlatformComponentStore::ComponentModule::init(std::string libPath) {
+    ALOGV("in %s", __func__);
+    ALOGV("loading dll");
+    mLibHandle = dlopen(libPath.c_str(), RTLD_NOW|RTLD_NODELETE);
+    if (mLibHandle == nullptr) {
+        // could be access/symbol or simply not being there
+        ALOGD("could not dlopen %s: %s", libPath.c_str(), dlerror());
+        mInit = C2_CORRUPTED;
+    } else {
+        createFactory =
+            (C2ComponentFactory::CreateCodec2FactoryFunc)dlsym(mLibHandle, "CreateCodec2Factory");
+        destroyFactory =
+            (C2ComponentFactory::DestroyCodec2FactoryFunc)dlsym(mLibHandle, "DestroyCodec2Factory");
+
+        if (createFactory == nullptr || destroyFactory == nullptr) {
+            // guard against libraries that do not export the expected entry points
+            ALOGD("missing factory entry points in %s", libPath.c_str());
+            mInit = C2_CORRUPTED;
+        } else {
+            mComponentFactory = createFactory();
+            if (mComponentFactory == nullptr) {
+                ALOGD("could not create factory in %s", libPath.c_str());
+                mInit = C2_NO_MEMORY;
+            } else {
+                mInit = C2_OK;
+            }
+        }
+    }
+    return mInit;
+}
+
+C2PlatformComponentStore::ComponentModule::~ComponentModule() {
+    ALOGV("in %s", __func__);
+    if (destroyFactory && mComponentFactory) {
+        destroyFactory(mComponentFactory);
+    }
+    if (mLibHandle) {
+        ALOGV("unloading dll");
+        dlclose(mLibHandle);
+    }
+}
+
+c2_status_t C2PlatformComponentStore::ComponentModule::createInterface(
+        c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *interface,
+        std::function<void(::android::C2ComponentInterface*)> deleter) {
+    interface->reset();
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+    std::shared_ptr<ComponentModule> module = shared_from_this();
+    c2_status_t res = mComponentFactory->createInterface(
+            id, interface, [module, deleter](C2ComponentInterface *p) mutable {
+                // capture module so that we ensure we still have it while deleting interface
+                deleter(p); // delete interface first
+                module.reset(); // remove module ref (not technically needed)
+    });
+    return res;
+}
+
+c2_status_t C2PlatformComponentStore::ComponentModule::createComponent(
+        c2_node_id_t id, std::shared_ptr<C2Component> *component,
+        std::function<void(::android::C2Component*)> deleter) {
+    component->reset();
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+    std::shared_ptr<ComponentModule> module = shared_from_this();
+    c2_status_t res = mComponentFactory->createComponent(
+            id, component, [module, deleter](C2Component *p) mutable {
+                // capture module so that we ensure we still have it while deleting component
+                deleter(p); // delete component first
+                module.reset(); // remove module ref (not technically needed)
+    });
+    return res;
+}
+
+std::shared_ptr<const C2Component::Traits> C2PlatformComponentStore::ComponentModule::getTraits() {
+    std::unique_lock<std::recursive_mutex> lock(mLock);
+    if (!mTraits) {
+        std::shared_ptr<C2ComponentInterface> intf;
+        c2_status_t res = createInterface(0, &intf);
+        if (res != C2_OK) {
+            return nullptr;
+        }
+
+        std::shared_ptr<C2Component::Traits> traits(new (std::nothrow) C2Component::Traits);
+        if (traits) {
+            // traits->name = intf->getName();
+        }
+
+        mTraits = traits;
+    }
+    return mTraits;
+}
+
+C2PlatformComponentStore::C2PlatformComponentStore() {
+    // TODO: move this also into a .so so it can be updated
+    mComponents.emplace("c2.google.avc.decoder", "libstagefright_soft_c2avcdec.so");
+    mComponents.emplace("c2.google.aac.decoder", "libstagefright_soft_c2aacdec.so");
+}
+
+c2_status_t C2PlatformComponentStore::copyBuffer(
+        std::shared_ptr<C2GraphicBuffer> src, std::shared_ptr<C2GraphicBuffer> dst) {
+    (void)src;
+    (void)dst;
+    return C2_OMITTED;
+}
+
+c2_status_t C2PlatformComponentStore::query_sm(
+        const std::vector<C2Param *const> &stackParams,
+        const std::vector<C2Param::Index> &heapParamIndices,
+        std::vector<std::unique_ptr<C2Param>> *const heapParams) const {
+    // there are no supported configs
+    (void)heapParams;
+    return stackParams.empty() && heapParamIndices.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+c2_status_t C2PlatformComponentStore::config_sm(
+        const std::vector<C2Param *const> &params,
+        std::vector<std::unique_ptr<C2SettingResult>> *const failures) {
+    // there are no supported configs
+    (void)failures;
+    return params.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+std::vector<std::shared_ptr<const C2Component::Traits>> C2PlatformComponentStore::listComponents() {
+    // This method SHALL return within 500ms.
+    std::vector<std::shared_ptr<const C2Component::Traits>> list;
+    for (auto &it : mComponents) {
+        ComponentLoader &loader = it.second;
+        std::shared_ptr<ComponentModule> module;
+        c2_status_t res = loader.fetchModule(&module);
+        if (res == C2_OK) {
+            std::shared_ptr<const C2Component::Traits> traits = module->getTraits();
+            if (traits) {
+                list.push_back(traits);
+            }
+        }
+    }
+    return list;
+}
+
+c2_status_t C2PlatformComponentStore::findComponent(C2String name, ComponentLoader **loader) {
+    *loader = nullptr;
+    auto pos = mComponents.find(name);
+    // TODO: check aliases
+    if (pos == mComponents.end()) {
+        return C2_NOT_FOUND;
+    }
+    *loader = &pos->second;
+    return C2_OK;
+}
+
+c2_status_t C2PlatformComponentStore::createComponent(
+        C2String name, std::shared_ptr<C2Component> *const component) {
+    // This method SHALL return within 100ms.
+    component->reset();
+    ComponentLoader *loader;
+    c2_status_t res = findComponent(name, &loader);
+    if (res == C2_OK) {
+        std::shared_ptr<ComponentModule> module;
+        res = loader->fetchModule(&module);
+        if (res == C2_OK) {
+            // TODO: get a unique node ID
+            res = module->createComponent(0, component);
+        }
+    }
+    return res;
+}
+
+c2_status_t C2PlatformComponentStore::createInterface(
+        C2String name, std::shared_ptr<C2ComponentInterface> *const interface) {
+    // This method SHALL return within 100ms.
+    interface->reset();
+    ComponentLoader *loader;
+    c2_status_t res = findComponent(name, &loader);
+    if (res == C2_OK) {
+        std::shared_ptr<ComponentModule> module;
+        res = loader->fetchModule(&module);
+        if (res == C2_OK) {
+            // TODO: get a unique node ID
+            res = module->createInterface(0, interface);
+        }
+    }
+    return res;
+}
+
+c2_status_t C2PlatformComponentStore::querySupportedParams_nb(
+        std::vector<std::shared_ptr<C2ParamDescriptor>> *const params) const {
+    // there are no supported config params
+    (void)params;
+    return C2_OK;
+}
+
+c2_status_t C2PlatformComponentStore::querySupportedValues_sm(
+        std::vector<C2FieldSupportedValuesQuery> &fields) const {
+    // there are no supported config params
+    return fields.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+C2String C2PlatformComponentStore::getName() const {
+    return "android.componentStore.platform";
+}
+
+std::shared_ptr<C2ParamReflector> C2PlatformComponentStore::getParamReflector() const {
+    // TODO
+    return nullptr;
+}
+
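+// Like the ion and gralloc allocators above, the platform component store is shared
+// process-wide through a weak_ptr-backed singleton: it stays alive while any client holds
+// a reference and is re-created on the next request after the last reference is released.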
+std::shared_ptr<C2ComponentStore> GetCodec2PlatformComponentStore() {
+    static std::mutex mutex;
+    static std::weak_ptr<C2ComponentStore> platformStore;
+    std::lock_guard<std::mutex> lock(mutex);
+    std::shared_ptr<C2ComponentStore> store = platformStore.lock();
+    if (store == nullptr) {
+        store = std::make_shared<C2PlatformComponentStore>();
+        platformStore = store;
+    }
+    return store;
+}
+
+} // namespace android
diff --git a/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
new file mode 100644
index 0000000..5311747
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
@@ -0,0 +1,68 @@
+
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_ALLOCATOR_GRALLOC_H_
+#define STAGEFRIGHT_CODEC2_ALLOCATOR_GRALLOC_H_
+
+#include <functional>
+
+#include <C2Buffer.h>
+
+namespace android {
+
+/**
+ * Unwrap the native handle from a Codec2 handle allocated by C2AllocatorGralloc.
+ *
+ * @param handle a handle allocated by C2AllocatorGralloc. This includes handles returned for
+ * a graphic block allocation.
+ *
+ * @return a new NON-OWNING native handle that must be deleted using native_handle_delete.
+ */
+native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle);
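+//
+// A minimal usage sketch (illustrative; c2Handle stands for a handle obtained from this
+// allocator). Since the returned handle is non-owning, release only the handle structure
+// itself when done with it:
+//
+//   native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(c2Handle);
+//   // ... import or inspect grallocHandle ...
+//   native_handle_delete(grallocHandle);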
+
+class C2AllocatorGralloc : public C2Allocator {
+public:
+    virtual id_t getId() const override;
+
+    virtual C2String getName() const override;
+
+    virtual std::shared_ptr<const Traits> getTraits() const override {
+        return nullptr; // \todo
+    }
+
+    virtual c2_status_t newGraphicAllocation(
+            uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
+            std::shared_ptr<C2GraphicAllocation> *allocation) override;
+
+    virtual c2_status_t priorGraphicAllocation(
+            const C2Handle *handle,
+            std::shared_ptr<C2GraphicAllocation> *allocation) override;
+
+    C2AllocatorGralloc();
+
+    c2_status_t status() const;
+
+    virtual ~C2AllocatorGralloc() override;
+
+private:
+    class Impl;
+    Impl *mImpl;
+};
+
+} // namespace android
+
+#endif // STAGEFRIGHT_CODEC2_ALLOCATOR_GRALLOC_H_
diff --git a/media/libstagefright/codec2/vndk/include/C2AllocatorIon.h b/media/libstagefright/codec2/vndk/include/C2AllocatorIon.h
new file mode 100644
index 0000000..bb815f9
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/include/C2AllocatorIon.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_ALLOCATOR_ION_H_
+#define STAGEFRIGHT_CODEC2_ALLOCATOR_ION_H_
+
+#include <functional>
+
+#include <C2Buffer.h>
+
+namespace android {
+
+class C2AllocatorIon : public C2Allocator {
+public:
+    // (usage, capacity) => (align, heapMask, flags)
+    typedef std::function<int (C2MemoryUsage, size_t,
+                      /* => */ size_t*, unsigned*, unsigned*)> usage_mapper_fn;
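+    //
+    // A minimal mapper sketch (illustrative; the alignment, heap mask and flag values below
+    // are assumptions for the example, not platform requirements):
+    //
+    //   C2AllocatorIon::usage_mapper_fn mapper =
+    //       [](C2MemoryUsage usage, size_t capacity,
+    //          size_t *align, unsigned *heapMask, unsigned *flags) -> int {
+    //           (void)usage; (void)capacity;
+    //           *align = 4096;   // page-align every allocation
+    //           *heapMask = ~0u; // allow any ion heap
+    //           *flags = 0;      // no extra ion flags
+    //           return 0;        // 0 == success
+    //       };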
+
+    virtual id_t getId() const override;
+
+    virtual C2String getName() const override;
+
+    virtual std::shared_ptr<const Traits> getTraits() const override {
+        return nullptr; // \todo
+    }
+
+    virtual c2_status_t newLinearAllocation(
+            uint32_t capacity, C2MemoryUsage usage,
+            std::shared_ptr<C2LinearAllocation> *allocation) override;
+
+    virtual c2_status_t priorLinearAllocation(
+            const C2Handle *handle,
+            std::shared_ptr<C2LinearAllocation> *allocation) override;
+
+    C2AllocatorIon();
+
+    virtual c2_status_t status() const { return mInit; }
+
+    virtual ~C2AllocatorIon() override;
+
+private:
+    c2_status_t mInit;
+    int mIonFd;
+    usage_mapper_fn mUsageMapper;
+};
+
+} // namespace android
+
+#endif // STAGEFRIGHT_CODEC2_ALLOCATOR_ION_H_
diff --git a/media/libstagefright/codec2/vndk/include/C2BufferPriv.h b/media/libstagefright/codec2/vndk/include/C2BufferPriv.h
new file mode 100644
index 0000000..875a8c2
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/include/C2BufferPriv.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_BUFFER_PRIV_H_
+#define STAGEFRIGHT_CODEC2_BUFFER_PRIV_H_
+
+#include <functional>
+
+#include <C2Buffer.h>
+
+namespace android {
+
+class C2BasicLinearBlockPool : public C2BlockPool {
+public:
+    explicit C2BasicLinearBlockPool(const std::shared_ptr<C2Allocator> &allocator);
+
+    virtual ~C2BasicLinearBlockPool() override = default;
+
+    virtual C2Allocator::id_t getAllocatorId() const override {
+        return mAllocator->getId();
+    }
+
+    virtual local_id_t getLocalId() const override {
+        return BASIC_LINEAR;
+    }
+
+    virtual c2_status_t fetchLinearBlock(
+            uint32_t capacity,
+            C2MemoryUsage usage,
+            std::shared_ptr<C2LinearBlock> *block /* nonnull */) override;
+
+    // TODO: fetchCircularBlock
+
+private:
+    const std::shared_ptr<C2Allocator> mAllocator;
+};
+
+class C2BasicGraphicBlockPool : public C2BlockPool {
+public:
+    explicit C2BasicGraphicBlockPool(const std::shared_ptr<C2Allocator> &allocator);
+
+    virtual ~C2BasicGraphicBlockPool() override = default;
+
+    virtual C2Allocator::id_t getAllocatorId() const override {
+        return mAllocator->getId();
+    }
+
+    virtual local_id_t getLocalId() const override {
+        return BASIC_GRAPHIC;
+    }
+
+    virtual c2_status_t fetchGraphicBlock(
+            uint32_t width,
+            uint32_t height,
+            uint32_t format,
+            C2MemoryUsage usage,
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */) override;
+
+private:
+    const std::shared_ptr<C2Allocator> mAllocator;
+};
+
+} // namespace android
+
+#endif // STAGEFRIGHT_CODEC2_BUFFER_PRIV_H_
diff --git a/media/libstagefright/codec2/vndk/include/C2ErrnoUtils.h b/media/libstagefright/codec2/vndk/include/C2ErrnoUtils.h
new file mode 100644
index 0000000..41132b9
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/include/C2ErrnoUtils.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_ERRNO_UTILS_H_
+#define STAGEFRIGHT_CODEC2_ERRNO_UTILS_H_
+
+#include <errno.h>
+#include <C2.h>
+
+namespace android {
+
+// standard ERRNO mappings
+template<int N> constexpr c2_status_t _c2_errno2status_impl();
+template<> constexpr c2_status_t _c2_errno2status_impl<0>()       { return C2_OK; }
+template<> constexpr c2_status_t _c2_errno2status_impl<EINVAL>()  { return C2_BAD_VALUE; }
+template<> constexpr c2_status_t _c2_errno2status_impl<EACCES>()  { return C2_REFUSED; }
+template<> constexpr c2_status_t _c2_errno2status_impl<EPERM>()   { return C2_REFUSED; }
+template<> constexpr c2_status_t _c2_errno2status_impl<ENOMEM>()  { return C2_NO_MEMORY; }
+
+// map standard errno-s to the equivalent c2_status_t
+template<int... N> struct _c2_map_errno_impl;
+template<int E, int ... N> struct _c2_map_errno_impl<E, N...> {
+    static c2_status_t map(int result) {
+        if (result == E) {
+            return _c2_errno2status_impl <E>();
+        } else {
+            return _c2_map_errno_impl<N...>::map(result);
+        }
+    }
+};
+template<> struct _c2_map_errno_impl<> {
+    static c2_status_t map(int result) {
+        return result == 0 ? C2_OK : C2_CORRUPTED;
+    }
+};
+
+template<int... N>
+c2_status_t c2_map_errno(int result) {
+    return _c2_map_errno_impl<N...>::map(result);
+}
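+
+// Usage sketch (illustrative; the failing call is hypothetical and assumed to return
+// -errno on failure and 0 on success). Errnos not listed in the template arguments map
+// to C2_CORRUPTED:
+//
+//   int ret = some_ion_ioctl();  // hypothetical call
+//   c2_status_t res = c2_map_errno<EINVAL, EACCES, EPERM, ENOMEM>(-ret);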
+
+} // namespace android
+
+#endif // STAGEFRIGHT_CODEC2_ERRNO_UTILS_H_
+
diff --git a/media/libstagefright/codec2/vndk/include/C2PlatformSupport.h b/media/libstagefright/codec2/vndk/include/C2PlatformSupport.h
new file mode 100644
index 0000000..2281dab
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/include/C2PlatformSupport.h
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_PLATFORM_SUPPORT_H_
+#define STAGEFRIGHT_CODEC2_PLATFORM_SUPPORT_H_
+
+#include <C2Component.h>
+
+#include <functional>
+#include <memory>
+
+namespace android {
+
+/**
+ * Returns the platform allocator store.
+ * \retval nullptr if the platform allocator store could not be obtained
+ */
+std::shared_ptr<C2AllocatorStore> GetCodec2PlatformAllocatorStore();
+
+/**
+ * Retrieves a block pool for a component.
+ *
+ * \param id        the local ID of the block pool
+ * \param component the component using the block pool (must be non-null)
+ * \param pool      pointer to where the obtained block pool shall be stored on success. nullptr
+ *                  will be stored here on failure
+ *
+ * \retval C2_OK        the operation was successful
+ * \retval C2_BAD_VALUE the component is null
+ * \retval C2_NOT_FOUND if the block pool does not exist
+ * \retval C2_NO_MEMORY not enough memory to fetch the block pool (this return value is only
+ *                      possible for basic pools)
+ * \retval C2_TIMED_OUT the operation timed out (this return value is only possible for basic pools)
+ * \retval C2_REFUSED   no permission to complete any required allocation (this return value is only
+ *                      possible for basic pools)
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation (unexpected,
+ *                      this return value is only possible for basic pools)
+ */
+c2_status_t GetCodec2BlockPool(
+        C2BlockPool::local_id_t id, std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool);
+
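+// Usage sketch (illustrative; `component` stands for the caller's own component instance
+// and the 4096-byte capacity is an arbitrary example value):
+//
+//   std::shared_ptr<C2BlockPool> pool;
+//   if (GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, component, &pool) == C2_OK) {
+//       C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+//       std::shared_ptr<C2LinearBlock> block;
+//       (void)pool->fetchLinearBlock(4096, usage, &block);
+//   }
+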
+/**
+ * Component factory object that enables creating a component and/or interface from a
+ * dynamically linked library. This is needed because components/interfaces are managed
+ * objects, but we cannot safely create a managed object and pass it through a plain C
+ * interface.
+ *
+ * Components/interfaces typically inherit from std::enable_shared_from_this, but C requires
+ * passing a simple pointer, and the shared_ptr constructor needs to know that the class being
+ * constructed derives from enable_shared_from_this.
+ *
+ */
+class C2ComponentFactory {
+public:
+    typedef std::function<void(::android::C2Component*)> ComponentDeleter;
+    typedef std::function<void(::android::C2ComponentInterface*)> InterfaceDeleter;
+
+    /**
+     * Creates a component.
+     *
+     * This method SHALL return within 100ms.
+     *
+     * \param id        component ID for the created component
+     * \param component shared pointer where the created component is stored. Cleared on
+     *                  failure and updated on success.
+     *
+     * \retval C2_OK        the component was created successfully
+     * \retval C2_TIMED_OUT could not create the component within the time limit (unexpected)
+     * \retval C2_CORRUPTED some unknown error prevented the creation of the component (unexpected)
+     *
+     * \retval C2_NO_MEMORY not enough memory to create the component
+     */
+    virtual c2_status_t createComponent(
+            c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+            ComponentDeleter deleter = std::default_delete<C2Component>()) = 0;
+
+    /**
+     * Creates a component interface.
+     *
+     * This method SHALL return within 100ms.
+     *
+     * \param id        component interface ID for the created interface
+     * \param interface shared pointer where the created interface is stored. Cleared on
+     *                  failure and updated on success.
+     *
+     * \retval C2_OK        the component interface was created successfully
+     * \retval C2_TIMED_OUT could not create the component interface within the time limit
+     *                      (unexpected)
+     * \retval C2_CORRUPTED some unknown error prevented the creation of the component interface
+     *                      (unexpected)
+     *
+     * \retval C2_NO_MEMORY not enough memory to create the component interface
+     */
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            InterfaceDeleter deleter = std::default_delete<C2ComponentInterface>()) = 0;
+
+    virtual ~C2ComponentFactory() = default;
+
+    typedef ::android::C2ComponentFactory* (*CreateCodec2FactoryFunc)(void);
+    typedef void (*DestroyCodec2FactoryFunc)(::android::C2ComponentFactory*);
+};
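+
+// Sketch of the entry points a component library is expected to export so that the platform
+// store can resolve them with dlsym() (symbol names as used in C2Store.cpp);
+// MyC2ComponentFactory is a hypothetical implementation:
+//
+//   extern "C" ::android::C2ComponentFactory *CreateCodec2Factory() {
+//       return new MyC2ComponentFactory();
+//   }
+//
+//   extern "C" void DestroyCodec2Factory(::android::C2ComponentFactory *factory) {
+//       delete factory;
+//   }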
+
+/**
+ * Returns the platform component store.
+ * \retval nullptr if the platform component store could not be obtained
+ */
+std::shared_ptr<C2ComponentStore> GetCodec2PlatformComponentStore();
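+//
+// Usage sketch (illustrative; "c2.google.aac.decoder" is one of the names registered by the
+// platform store in C2Store.cpp):
+//
+//   std::shared_ptr<C2ComponentStore> store = GetCodec2PlatformComponentStore();
+//   std::shared_ptr<C2Component> comp;
+//   c2_status_t res = store->createComponent("c2.google.aac.decoder", &comp);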
+
+
+} // namespace android
+
+#endif // STAGEFRIGHT_CODEC2_PLATFORM_SUPPORT_H_
diff --git a/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h b/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h
index edae303..81c5495 100644
--- a/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h
+++ b/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h
@@ -222,7 +222,7 @@
     friend class C2ParamTest_ParamUtilsTest_Test;
 
 public:
-    static std::vector<C2String> getEnumValuesFromString(C2StringLiteral value) {
+    static std::vector<C2String> parseEnumValuesFromString(C2StringLiteral value) {
         std::vector<C2String> foundNames;
         size_t pos = 0, len = strlen(value);
         do {
@@ -278,9 +278,9 @@
 C2_HIDE
 void addC2Params(std::list<const C2FieldDescriptor> &fields, _C2Tuple<T, Params...> *)
 {
-    //C2Param::index_t index = T::baseIndex;
+    //C2Param::CodeIndex index = T::CORE_INDEX;
     //(void)index;
-    fields.insert(fields.end(), T::fieldList);
+    fields.insert(fields.end(), T::FIELD_LIST);
     addC2Params(fields, (_C2Tuple<Params...> *)nullptr);
 }
 
diff --git a/media/libstagefright/codecs/aacdec/Android.bp b/media/libstagefright/codecs/aacdec/Android.bp
index 21c00a1..abf3b1c 100644
--- a/media/libstagefright/codecs/aacdec/Android.bp
+++ b/media/libstagefright/codecs/aacdec/Android.bp
@@ -1,4 +1,45 @@
 cc_library_shared {
+    name: "libstagefright_soft_c2aacdec",
+//    vendor_available: true,
+//    vndk: {
+//        enabled: true,
+//    },
+
+    srcs: [
+        "C2SoftAac.cpp",
+        "DrcPresModeWrap.cpp",
+    ],
+
+    cflags: ["-Werror"],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+            "unsigned-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+
+    static_libs: [
+        "libFraunhoferAAC",
+        "libstagefright_codec2_vndk",
+    ],
+
+    shared_libs: [
+        "libcutils",
+        "libion",
+        "liblog",
+        "libstagefright_codec2",
+        "libstagefright_foundation",
+        "libstagefright_simple_c2component",
+        "libutils",
+    ],
+}
+
+cc_library_shared {
     name: "libstagefright_soft_aacdec",
     vendor_available: true,
     vndk: {
diff --git a/media/libstagefright/codecs/aacdec/C2SoftAac.cpp b/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
new file mode 100644
index 0000000..390f36c
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
@@ -0,0 +1,710 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAac"
+#include <utils/Log.h>
+
+#include "C2SoftAac.h"
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
+
+#include <inttypes.h>
+#include <math.h>
+#include <numeric>
+
+#define FILEREAD_MAX_LAYERS 2
+
+#define DRC_DEFAULT_MOBILE_REF_LEVEL 64  /* 64*-0.25dB = -16 dB below full scale for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_CUT   127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1   /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define MAX_CHANNEL_COUNT            8  /* maximum number of audio channels that can be decoded */
+// names of properties that can be used to override the default DRC settings
+#define PROP_DRC_OVERRIDE_REF_LEVEL  "aac_drc_reference_level"
+#define PROP_DRC_OVERRIDE_CUT        "aac_drc_cut"
+#define PROP_DRC_OVERRIDE_BOOST      "aac_drc_boost"
+#define PROP_DRC_OVERRIDE_HEAVY      "aac_drc_heavy"
+#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
+
+namespace android {
+
+C2SoftAac::C2SoftAac(const char *name, c2_node_id_t id)
+    : SimpleC2Component(
+            SimpleC2Interface::Builder(name, id)
+            .inputFormat(C2FormatCompressed)
+            .outputFormat(C2FormatAudio)
+            .build()),
+      mAACDecoder(NULL),
+      mStreamInfo(NULL),
+      mIsADTS(false),
+      mSignalledError(false),
+      mOutputDelayRingBuffer(NULL) {
+}
+
+C2SoftAac::~C2SoftAac() {
+    onRelease();
+}
+
+c2_status_t C2SoftAac::onInit() {
+    status_t err = initDecoder();
+    return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftAac::onStop() {
+    drainDecoder();
+    // reset the "configured" state
+    mOutputDelayCompensated = 0;
+    mOutputDelayRingBufferWritePos = 0;
+    mOutputDelayRingBufferReadPos = 0;
+    mOutputDelayRingBufferFilled = 0;
+    mBuffersInfo.clear();
+
+    // To make the codec behave the same before and after a reset, we need to invalidate the
+    // streaminfo struct. This does that:
+    mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
+
+    mSignalledError = false;
+
+    return C2_OK;
+}
+
+void C2SoftAac::onReset() {
+    (void)onStop();
+}
+
+void C2SoftAac::onRelease() {
+    if (mAACDecoder) {
+        aacDecoder_Close(mAACDecoder);
+        mAACDecoder = NULL;
+    }
+    if (mOutputDelayRingBuffer) {
+        delete[] mOutputDelayRingBuffer;
+        mOutputDelayRingBuffer = NULL;
+    }
+}
+
+status_t C2SoftAac::initDecoder() {
+    ALOGV("initDecoder()");
+    status_t status = UNKNOWN_ERROR;
+    mAACDecoder = aacDecoder_Open(TT_MP4_ADIF, /* num layers */ 1);
+    if (mAACDecoder != NULL) {
+        mStreamInfo = aacDecoder_GetStreamInfo(mAACDecoder);
+        if (mStreamInfo != NULL) {
+            status = OK;
+        }
+    }
+
+    mOutputDelayCompensated = 0;
+    mOutputDelayRingBufferSize = 2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax;
+    mOutputDelayRingBuffer = new short[mOutputDelayRingBufferSize];
+    mOutputDelayRingBufferWritePos = 0;
+    mOutputDelayRingBufferReadPos = 0;
+    mOutputDelayRingBufferFilled = 0;
+
+    if (mAACDecoder == NULL) {
+        ALOGE("AAC decoder is null. TODO: cannot call aacDecoder_SetParam in the following code");
+    }
+
+    //aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE, 0);
+
+    //init DRC wrapper
+    mDrcWrap.setDecoderHandle(mAACDecoder);
+    mDrcWrap.submitStreamData(mStreamInfo);
+
+    // for streams that contain metadata, use the mobile profile DRC settings unless overridden by platform properties
+    // TODO: change the DRC settings depending on audio output device type (HDMI, loudspeaker, headphone)
+    char value[PROPERTY_VALUE_MAX];
+    //  DRC_PRES_MODE_WRAP_DESIRED_TARGET
+    if (property_get(PROP_DRC_OVERRIDE_REF_LEVEL, value, NULL)) {
+        unsigned refLevel = atoi(value);
+        ALOGV("AAC decoder using desired DRC target reference level of %d instead of %d", refLevel,
+                DRC_DEFAULT_MOBILE_REF_LEVEL);
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, refLevel);
+    } else {
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, DRC_DEFAULT_MOBILE_REF_LEVEL);
+    }
+    //  DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR
+    if (property_get(PROP_DRC_OVERRIDE_CUT, value, NULL)) {
+        unsigned cut = atoi(value);
+        ALOGV("AAC decoder using desired DRC attenuation factor of %d instead of %d", cut,
+                DRC_DEFAULT_MOBILE_DRC_CUT);
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, cut);
+    } else {
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
+    }
+    //  DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR
+    if (property_get(PROP_DRC_OVERRIDE_BOOST, value, NULL)) {
+        unsigned boost = atoi(value);
+        ALOGV("AAC decoder using desired DRC boost factor of %d instead of %d", boost,
+                DRC_DEFAULT_MOBILE_DRC_BOOST);
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, boost);
+    } else {
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
+    }
+    //  DRC_PRES_MODE_WRAP_DESIRED_HEAVY
+    if (property_get(PROP_DRC_OVERRIDE_HEAVY, value, NULL)) {
+        unsigned heavy = atoi(value);
+        ALOGV("AAC decoder using desired DRC heavy compression switch of %d instead of %d", heavy,
+                DRC_DEFAULT_MOBILE_DRC_HEAVY);
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, heavy);
+    } else {
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY);
+    }
+    // DRC_PRES_MODE_WRAP_ENCODER_TARGET
+    if (property_get(PROP_DRC_OVERRIDE_ENC_LEVEL, value, NULL)) {
+        unsigned encoderRefLevel = atoi(value);
+        ALOGV("AAC decoder using encoder-side DRC reference level of %d instead of %d",
+                encoderRefLevel, DRC_DEFAULT_MOBILE_ENC_LEVEL);
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, encoderRefLevel);
+    } else {
+        mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, DRC_DEFAULT_MOBILE_ENC_LEVEL);
+    }
+
+    // By default, the decoder creates a 5.1 channel downmix signal.
+    // For seven and eight channel input streams, enable 6.1 and 7.1 channel output
+    aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
+
+    return status;
+}
+
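+// The output delay ring buffer decouples decoding from output delivery: decoded PCM is
+// appended at mOutputDelayRingBufferWritePos, consumed from mOutputDelayRingBufferReadPos,
+// and mOutputDelayRingBufferFilled tracks how many samples are buffered so that overflow
+// and underrun can be detected by the helpers below.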
+bool C2SoftAac::outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples) {
+    if (numSamples == 0) {
+        return true;
+    }
+    if (outputDelayRingBufferSpaceLeft() < numSamples) {
+        ALOGE("RING BUFFER WOULD OVERFLOW");
+        return false;
+    }
+    if (mOutputDelayRingBufferWritePos + numSamples <= mOutputDelayRingBufferSize
+            && (mOutputDelayRingBufferReadPos <= mOutputDelayRingBufferWritePos
+                    || mOutputDelayRingBufferReadPos > mOutputDelayRingBufferWritePos + numSamples)) {
+        // faster memcopy loop without checks, if the preconditions allow this
+        for (int32_t i = 0; i < numSamples; i++) {
+            mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos++] = samples[i];
+        }
+
+        if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+            mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+        }
+    } else {
+        ALOGV("slow C2SoftAac::outputDelayRingBufferPutSamples()");
+
+        for (int32_t i = 0; i < numSamples; i++) {
+            mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos] = samples[i];
+            mOutputDelayRingBufferWritePos++;
+            if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+                mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+            }
+        }
+    }
+    mOutputDelayRingBufferFilled += numSamples;
+    return true;
+}
+
+int32_t C2SoftAac::outputDelayRingBufferGetSamples(INT_PCM *samples, int32_t numSamples) {
+
+    if (numSamples > mOutputDelayRingBufferFilled) {
+        ALOGE("RING BUFFER WOULD UNDERRUN");
+        return -1;
+    }
+
+    if (mOutputDelayRingBufferReadPos + numSamples <= mOutputDelayRingBufferSize
+            && (mOutputDelayRingBufferWritePos < mOutputDelayRingBufferReadPos
+                    || mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferReadPos + numSamples)) {
+        // faster memcopy loop without checks, if the preconditions allow this
+        if (samples != 0) {
+            for (int32_t i = 0; i < numSamples; i++) {
+                samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos++];
+            }
+        } else {
+            mOutputDelayRingBufferReadPos += numSamples;
+        }
+        if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+            mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+        }
+    } else {
+        ALOGV("slow C2SoftAac::outputDelayRingBufferGetSamples()");
+
+        for (int32_t i = 0; i < numSamples; i++) {
+            if (samples != 0) {
+                samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos];
+            }
+            mOutputDelayRingBufferReadPos++;
+            if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+                mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+            }
+        }
+    }
+    mOutputDelayRingBufferFilled -= numSamples;
+    return numSamples;
+}
+
+int32_t C2SoftAac::outputDelayRingBufferSamplesAvailable() {
+    return mOutputDelayRingBufferFilled;
+}
+
+int32_t C2SoftAac::outputDelayRingBufferSpaceLeft() {
+    return mOutputDelayRingBufferSize - outputDelayRingBufferSamplesAvailable();
+}
+
+void C2SoftAac::drainRingBuffer(
+        const std::unique_ptr<C2Work> &work,
+        const std::shared_ptr<C2BlockPool> &pool,
+        bool eos) {
+    while (!mBuffersInfo.empty() && outputDelayRingBufferSamplesAvailable()
+            >= mStreamInfo->frameSize * mStreamInfo->numChannels) {
+        Info &outInfo = mBuffersInfo.front();
+        ALOGV("outInfo.frameIndex = %" PRIu64, outInfo.frameIndex);
+        int samplesize = mStreamInfo->numChannels * sizeof(int16_t);
+
+        int available = outputDelayRingBufferSamplesAvailable();
+        int numFrames = outInfo.decodedSizes.size();
+        int numSamples = numFrames * (mStreamInfo->frameSize * mStreamInfo->numChannels);
+        if (available < numSamples) {
+            if (eos) {
+                numSamples = available;
+            } else {
+                break;
+            }
+        }
+        ALOGV("%d samples available (%d), or %d frames",
+                numSamples, available, numFrames);
+        ALOGV("getting %d from ringbuffer", numSamples);
+
+        std::shared_ptr<C2LinearBlock> block;
+        C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+        // TODO: error handling, proper usage, etc.
+        c2_status_t err = pool->fetchLinearBlock(numSamples * sizeof(int16_t), usage, &block);
+        if (err != C2_OK) {
+            ALOGE("err = %d", err);
+        }
+
+        C2WriteView wView = block->map().get();
+        // TODO
+        INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(wView.data());
+        int32_t ns = outputDelayRingBufferGetSamples(outBuffer, numSamples);
+        if (ns != numSamples) {
+            ALOGE("not a complete frame of samples available");
+            mSignalledError = true;
+            // TODO: notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+            return;
+        }
+        auto fillWork = [buffer = createLinearBuffer(block)](const std::unique_ptr<C2Work> &work) {
+            work->worklets.front()->output.flags = work->input.flags;
+            work->worklets.front()->output.buffers.clear();
+            work->worklets.front()->output.buffers.push_back(buffer);
+            work->worklets.front()->output.ordinal = work->input.ordinal;
+            work->worklets_processed = 1u;
+        };
+        if (work && work->input.ordinal.frame_index == outInfo.frameIndex) {
+            fillWork(work);
+        } else {
+            finish(outInfo.frameIndex, fillWork);
+        }
+
+        ALOGV("out timestamp %" PRIu64 " / %u", outInfo.timestamp, block->capacity());
+        mBuffersInfo.pop_front();
+    }
+}
+
+void C2SoftAac::process(
+        const std::unique_ptr<C2Work> &work,
+        const std::shared_ptr<C2BlockPool> &pool) {
+    work->worklets_processed = 0u;
+    if (mSignalledError) {
+        return;
+    }
+
+    UCHAR* inBuffer[FILEREAD_MAX_LAYERS];
+    UINT inBufferLength[FILEREAD_MAX_LAYERS] = {0};
+    UINT bytesValid[FILEREAD_MAX_LAYERS] = {0};
+
+    INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+    C2ReadView view = work->input.buffers[0]->data().linearBlocks().front().map().get();
+    size_t offset = 0u;
+    size_t size = view.capacity();
+
+    bool eos = (work->input.flags & C2BufferPack::FLAG_END_OF_STREAM) != 0;
+    bool codecConfig = (work->input.flags & C2BufferPack::FLAG_CODEC_CONFIG) != 0;
+
+    //TODO
+#if 0
+    if (mInputBufferCount == 0 && !codecConfig) {
+        ALOGW("first buffer should have FLAG_CODEC_CONFIG set");
+        codecConfig = true;
+    }
+#endif
+    if (codecConfig) {
+        // const_cast because of libAACdec method signature.
+        inBuffer[0] = const_cast<UCHAR *>(view.data() + offset);
+        inBufferLength[0] = size;
+
+        AAC_DECODER_ERROR decoderErr =
+            aacDecoder_ConfigRaw(mAACDecoder,
+                                 inBuffer,
+                                 inBufferLength);
+
+        if (decoderErr != AAC_DEC_OK) {
+            ALOGE("aacDecoder_ConfigRaw decoderErr = 0x%4.4x", decoderErr);
+            mSignalledError = true;
+            // TODO: error
+            return;
+        }
+
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(nullptr);
+
+        return;
+    }
+
+    Info inInfo;
+    inInfo.frameIndex = work->input.ordinal.frame_index;
+    inInfo.timestamp = work->input.ordinal.timestamp;
+    inInfo.bufferSize = size;
+    inInfo.decodedSizes.clear();
+    while (size > 0u) {
+        ALOGV("size = %zu", size);
+        if (mIsADTS) {
+            size_t adtsHeaderSize = 0;
+            // skip 30 bits, aac_frame_length follows.
+            // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
+
+            const uint8_t *adtsHeader = view.data() + offset;
+
+            bool signalError = false;
+            if (size < 7) {
+                ALOGE("Audio data too short to contain even the ADTS header. "
+                        "Got %zu bytes.", size);
+                hexdump(adtsHeader, size);
+                signalError = true;
+            } else {
+                bool protectionAbsent = (adtsHeader[1] & 1);
+
+                unsigned aac_frame_length =
+                    ((adtsHeader[3] & 3) << 11)
+                    | (adtsHeader[4] << 3)
+                    | (adtsHeader[5] >> 5);
+
+                if (size < aac_frame_length) {
+                    ALOGE("Not enough audio data for the complete frame. "
+                            "Got %zu bytes, frame size according to the ADTS "
+                            "header is %u bytes.",
+                            size, aac_frame_length);
+                    hexdump(adtsHeader, size);
+                    signalError = true;
+                } else {
+                    adtsHeaderSize = (protectionAbsent ? 7 : 9);
+                    if (aac_frame_length < adtsHeaderSize) {
+                        signalError = true;
+                    } else {
+                        // const_cast because of libAACdec method signature.
+                        inBuffer[0] = const_cast<UCHAR *>(adtsHeader + adtsHeaderSize);
+                        inBufferLength[0] = aac_frame_length - adtsHeaderSize;
+
+                        offset += adtsHeaderSize;
+                        size -= adtsHeaderSize;
+                    }
+                }
+            }
+
+            if (signalError) {
+                mSignalledError = true;
+                // TODO: notify(OMX_EventError, OMX_ErrorStreamCorrupt, ERROR_MALFORMED, NULL);
+                return;
+            }
+        } else {
+            // const_cast because of libAACdec method signature.
+            inBuffer[0] = const_cast<UCHAR *>(view.data() + offset);
+            inBufferLength[0] = size;
+        }
+
+        // Fill and decode
+        bytesValid[0] = inBufferLength[0];
+
+        INT prevSampleRate = mStreamInfo->sampleRate;
+        INT prevNumChannels = mStreamInfo->numChannels;
+
+        aacDecoder_Fill(mAACDecoder,
+                        inBuffer,
+                        inBufferLength,
+                        bytesValid);
+
+        // run DRC check
+        mDrcWrap.submitStreamData(mStreamInfo);
+        mDrcWrap.update();
+
+        UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
+        size -= inBufferUsedLength;
+        offset += inBufferUsedLength;
+
+        AAC_DECODER_ERROR decoderErr;
+        do {
+            if (outputDelayRingBufferSpaceLeft() <
+                    (mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+                ALOGV("skipping decode: not enough space left in ringbuffer");
+                break;
+            }
+
+            int numConsumed = mStreamInfo->numTotalBytes;
+            decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
+                                       tmpOutBuffer,
+                                       2048 * MAX_CHANNEL_COUNT,
+                                       0 /* flags */);
+
+            numConsumed = mStreamInfo->numTotalBytes - numConsumed;
+
+            if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
+                break;
+            }
+            inInfo.decodedSizes.push_back(numConsumed);
+
+            if (decoderErr != AAC_DEC_OK) {
+                ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+            }
+
+            if (bytesValid[0] != 0) {
+                ALOGE("bytesValid[0] != 0 should never happen");
+                mSignalledError = true;
+                // TODO: notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+                return;
+            }
+
+            size_t numOutBytes =
+                mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
+
+            if (decoderErr == AAC_DEC_OK) {
+                if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+                        mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+                    mSignalledError = true;
+                    // TODO: notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+                    return;
+                }
+            } else {
+                ALOGW("AAC decoder returned error 0x%4.4x, substituting silence", decoderErr);
+
+                memset(tmpOutBuffer, 0, numOutBytes); // TODO: check for overflow
+
+                if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+                        mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+                    mSignalledError = true;
+                    // TODO: notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+                    return;
+                }
+
+                // Discard input buffer.
+                size = 0;
+
+                aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
+
+                // After an error, replace bufferSize with the sum of the
+                // decodedSizes to resynchronize the in/out lists.
+                inInfo.decodedSizes.pop_back();
+                inInfo.bufferSize = std::accumulate(
+                        inInfo.decodedSizes.begin(), inInfo.decodedSizes.end(), 0);
+
+                // fall through
+            }
+
+            /*
+             * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
+             * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
+             * rate system and the sampling rate in the final output is actually
+             * doubled compared with the core AAC decoder sampling rate.
+             *
+             * Explicit signalling is done by explicitly defining SBR audio object
+             * type in the bitstream. Implicit signalling is done by embedding
+             * SBR content in AAC extension payload specific to SBR, and hence
+             * requires an AAC decoder to perform pre-checks on actual audio frames.
+             *
+             * Thus, we could not say for sure whether a stream is
+             * AAC+/eAAC+ until the first data frame is decoded.
+             */
+            if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
+                // TODO:
+#if 0
+                if ((mInputBufferCount > 2) && (mOutputBufferCount <= 1)) {
+                    ALOGW("Invalid AAC stream");
+                    mSignalledError = true;
+                    // TODO: notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+                    return false;
+                }
+#endif
+            }
+            ALOGV("size = %zu", size);
+        } while (decoderErr == AAC_DEC_OK);
+    }
+
+    int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
+
+    mBuffersInfo.push_back(std::move(inInfo));
+
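+    // The decoder introduces an algorithmic delay; the corresponding leading
+    // samples are discarded from the ring buffer so that the first delivered
+    // output lines up with the first input frame.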
+    if (!eos && mOutputDelayCompensated < outputDelay) {
+        // discard outputDelay at the beginning
+        int32_t toCompensate = outputDelay - mOutputDelayCompensated;
+        int32_t discard = outputDelayRingBufferSamplesAvailable();
+        if (discard > toCompensate) {
+            discard = toCompensate;
+        }
+        int32_t discarded = outputDelayRingBufferGetSamples(0, discard);
+        mOutputDelayCompensated += discarded;
+        return;
+    }
+
+    if (eos) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+    } else {
+        drainRingBuffer(work, pool, false /* not EOS */);
+    }
+}
+
+c2_status_t C2SoftAac::drainInternal(
+        uint32_t drainMode,
+        const std::shared_ptr<C2BlockPool> &pool,
+        const std::unique_ptr<C2Work> &work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    bool eos = (drainMode == DRAIN_COMPONENT_WITH_EOS);
+
+    drainDecoder();
+    drainRingBuffer(work, pool, eos);
+
+    if (eos) {
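+        // At EOS, any inputs still tracked in mBuffersInfo are completed with
+        // empty outputs; the current work item is filled directly if it has not
+        // produced output yet.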
+        auto fillEmptyWork = [](const std::unique_ptr<C2Work> &work) {
+            work->worklets.front()->output.flags = work->input.flags;
+            work->worklets.front()->output.buffers.clear();
+            work->worklets.front()->output.buffers.emplace_back(nullptr);
+            work->worklets.front()->output.ordinal = work->input.ordinal;
+            work->worklets_processed = 1u;
+        };
+        while (mBuffersInfo.size() > 1u) {
+            finish(mBuffersInfo.front().frameIndex, fillEmptyWork);
+            mBuffersInfo.pop_front();
+        }
+        if (work && work->worklets_processed == 0u) {
+            fillEmptyWork(work);
+        }
+        mBuffersInfo.clear();
+    }
+
+    return C2_OK;
+}
+
+c2_status_t C2SoftAac::drain(
+        uint32_t drainMode,
+        const std::shared_ptr<C2BlockPool> &pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+c2_status_t C2SoftAac::onFlush_sm() {
+    drainDecoder();
+    mBuffersInfo.clear();
+
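+    // Drop whatever is still sitting in the output delay ring buffer so the
+    // next stream does not start with stale samples.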
+    int avail;
+    while ((avail = outputDelayRingBufferSamplesAvailable()) > 0) {
+        if (avail > mStreamInfo->frameSize * mStreamInfo->numChannels) {
+            avail = mStreamInfo->frameSize * mStreamInfo->numChannels;
+        }
+        int32_t ns = outputDelayRingBufferGetSamples(0, avail);
+        if (ns != avail) {
+            ALOGW("not a complete frame of samples available");
+            break;
+        }
+    }
+    mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos;
+
+    return C2_OK;
+}
+
+void C2SoftAac::drainDecoder() {
+    // flush decoder until outputDelay is compensated
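+    // AACDEC_FLUSH makes the decoder emit its internally buffered samples
+    // without consuming any new input bitstream data.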
+    while (mOutputDelayCompensated > 0) {
+        // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
+        INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+
+        // run DRC check
+        mDrcWrap.submitStreamData(mStreamInfo);
+        mDrcWrap.update();
+
+        AAC_DECODER_ERROR decoderErr =
+            aacDecoder_DecodeFrame(mAACDecoder,
+                                   tmpOutBuffer,
+                                   2048 * MAX_CHANNEL_COUNT,
+                                   AACDEC_FLUSH);
+        if (decoderErr != AAC_DEC_OK) {
+            ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+        }
+
+        int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
+        if (tmpOutBufferSamples > mOutputDelayCompensated) {
+            tmpOutBufferSamples = mOutputDelayCompensated;
+        }
+        outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
+
+        mOutputDelayCompensated -= tmpOutBufferSamples;
+    }
+}
+
+class C2SoftAacDecFactory : public C2ComponentFactory {
+public:
+    virtual c2_status_t createComponent(
+            c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+            std::function<void(::android::C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(new C2SoftAac("aac", id), deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(::android::C2ComponentInterface*)> deleter) override {
+        *interface =
+                SimpleC2Interface::Builder("aac", id, deleter)
+                .inputFormat(C2FormatCompressed)
+                .outputFormat(C2FormatAudio)
+                .build();
+        return C2_OK;
+    }
+
+    virtual ~C2SoftAacDecFactory() override = default;
+};
+
+}  // namespace android
+
+extern "C" ::android::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftAacDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::android::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
diff --git a/media/libstagefright/codecs/aacdec/C2SoftAac.h b/media/libstagefright/codecs/aacdec/C2SoftAac.h
new file mode 100644
index 0000000..b877635
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/C2SoftAac.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2_SOFT_AAC_H_
+#define C2_SOFT_AAC_H_
+
+#include <SimpleC2Component.h>
+
+#include <media/stagefright/foundation/ABase.h>
+
+#include "aacdecoder_lib.h"
+#include "DrcPresModeWrap.h"
+
+namespace android {
+
+struct C2SoftAac : public SimpleC2Component {
+    C2SoftAac(const char *name, c2_node_id_t id);
+    virtual ~C2SoftAac();
+
+    // From SimpleC2Component
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(
+            const std::unique_ptr<C2Work> &work,
+            const std::shared_ptr<C2BlockPool> &pool) override;
+    c2_status_t drain(
+            uint32_t drainMode,
+            const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+    enum {
+        kNumDelayBlocksMax      = 8,
+    };
+
+    HANDLE_AACDECODER mAACDecoder;
+    CStreamInfo *mStreamInfo;
+    bool mIsADTS;
+    bool mIsFirst;
+    size_t mInputBufferCount;
+    size_t mOutputBufferCount;
+    bool mSignalledError;
+    struct Info {
+        uint64_t frameIndex;
+        size_t bufferSize;
+        uint64_t timestamp;
+        std::vector<int32_t> decodedSizes;
+    };
+    std::list<Info> mBuffersInfo;
+
+    CDrcPresModeWrapper mDrcWrap;
+
+    enum {
+        NONE,
+        AWAITING_DISABLED,
+        AWAITING_ENABLED
+    } mOutputPortSettingsChange;
+
+    void initPorts();
+    status_t initDecoder();
+    bool isConfigured() const;
+    void drainDecoder();
+
+    void drainRingBuffer(
+            const std::unique_ptr<C2Work> &work,
+            const std::shared_ptr<C2BlockPool> &pool,
+            bool eos);
+    c2_status_t drainInternal(
+            uint32_t drainMode,
+            const std::shared_ptr<C2BlockPool> &pool,
+            const std::unique_ptr<C2Work> &work);
+
+    // delay compensation
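+    // Decoded PCM is staged in a ring buffer so that the codec's output delay
+    // can be trimmed before work items are finished.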
+    bool mEndOfInput;
+    bool mEndOfOutput;
+    int32_t mOutputDelayCompensated;
+    int32_t mOutputDelayRingBufferSize;
+    short *mOutputDelayRingBuffer;
+    int32_t mOutputDelayRingBufferWritePos;
+    int32_t mOutputDelayRingBufferReadPos;
+    int32_t mOutputDelayRingBufferFilled;
+    bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
+    int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
+    int32_t outputDelayRingBufferSamplesAvailable();
+    int32_t outputDelayRingBufferSpaceLeft();
+
+    DISALLOW_EVIL_CONSTRUCTORS(C2SoftAac);
+};
+
+}  // namespace android
+
+#endif  // C2_SOFT_AAC_H_
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder.h b/media/libstagefright/codecs/aacenc/SoftAACEncoder.h
deleted file mode 100644
index e64c1b7..0000000
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_AAC_ENCODER_H_
-
-#define SOFT_AAC_ENCODER_H_
-
-#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
-
-struct VO_AUDIO_CODECAPI;
-struct VO_MEM_OPERATOR;
-
-namespace android {
-
-struct SoftAACEncoder : public SimpleSoftOMXComponent {
-    SoftAACEncoder(
-            const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-protected:
-    virtual ~SoftAACEncoder();
-
-    virtual OMX_ERRORTYPE internalGetParameter(
-            OMX_INDEXTYPE index, OMX_PTR params);
-
-    virtual OMX_ERRORTYPE internalSetParameter(
-            OMX_INDEXTYPE index, const OMX_PTR params);
-
-    virtual void onQueueFilled(OMX_U32 portIndex);
-
-    virtual void onReset();
-
-private:
-    enum {
-        kNumBuffers             = 4,
-        kNumSamplesPerFrame     = 1024,
-    };
-
-    void *mEncoderHandle;
-    VO_AUDIO_CODECAPI *mApiHandle;
-    VO_MEM_OPERATOR  *mMemOperator;
-
-    OMX_U32 mNumChannels;
-    OMX_U32 mSampleRate;
-    OMX_U32 mBitRate;
-
-    bool mSentCodecSpecificData;
-    size_t mInputSize;
-    int16_t *mInputFrame;
-    int64_t mInputTimeUs;
-
-    bool mSawInputEOS;
-
-    uint8_t mAudioSpecificConfigData[2];
-
-    bool mSignalledError;
-
-    void initPorts();
-    status_t initEncoder();
-
-    status_t setAudioSpecificConfigData();
-    status_t setAudioParams();
-
-    DISALLOW_EVIL_CONSTRUCTORS(SoftAACEncoder);
-};
-
-}  // namespace android
-
-#endif  // SOFT_AAC_ENCODER_H_
diff --git a/media/libstagefright/codecs/avcdec/Android.bp b/media/libstagefright/codecs/avcdec/Android.bp
index 34db19b..04e5dc1 100644
--- a/media/libstagefright/codecs/avcdec/Android.bp
+++ b/media/libstagefright/codecs/avcdec/Android.bp
@@ -11,7 +11,6 @@
     cflags: [
         "-Wall",
         "-Werror",
-        "-Wno-unused-variable",
     ],
 
     include_dirs: [
@@ -22,7 +21,6 @@
     ],
 
     shared_libs: [
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
@@ -42,3 +40,44 @@
     ldflags: ["-Wl,-Bsymbolic"],
     compile_multilib: "32",
 }
+
+cc_library_shared {
+    name: "libstagefright_soft_c2avcdec",
+
+    static_libs: [
+        "libavcdec",
+    ],
+    srcs: ["C2SoftAvcDec.cpp"],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    include_dirs: [
+        "external/libavc/decoder",
+        "external/libavc/common",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libmedia",
+        "libstagefright_codec2",
+        "libstagefright_codec2_vndk",
+        "libstagefright_foundation",
+        "libstagefright_simple_c2component",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: false, // true,
+        diag: {
+            cfi: false, // true,
+        },
+    },
+
+    ldflags: ["-Wl,-Bsymbolic"],
+}
diff --git a/media/libstagefright/codecs/avcdec/C2AvcConfig.h b/media/libstagefright/codecs/avcdec/C2AvcConfig.h
new file mode 100644
index 0000000..a7e0d95
--- /dev/null
+++ b/media/libstagefright/codecs/avcdec/C2AvcConfig.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2AVCCONFIG_H_
+#define C2AVCCONFIG_H_
+
+#include <C2Config.h>
+
+namespace android {
+
+enum : uint32_t {
+    kParamIndexAvcProfile = kParamIndexParamStart + 1,
+    kParamIndexAvcLevel,
+    kParamIndexBlockSize,
+    kParamIndexAlignment,
+    kParamIndexFramerate,
+    kParamIndexBlocksPerSecond,
+};
+
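+// The profile values below match the profile_idc codes from the H.264 spec
+// (66 = Baseline, 77 = Main, 100 = High). The level values follow level_idc;
+// level 1b is the special case, represented here as 9.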
+enum C2AvcProfileIdc : uint32_t {
+    kAvcProfileUnknown  = 0,
+    kAvcProfileBaseline = 66,
+    kAvcProfileMain     = 77,
+    kAvcProfileExtended = 88,
+    kAvcProfileHigh     = 100,
+    kAvcProfileHigh10   = 110,
+    kAvcProfileHigh422  = 122,
+    kAvcProfileHigh444  = 144,
+};
+
+enum C2AvcLevelIdc : uint32_t {
+    kAvcLevelUnknown = 0,
+    kAvcLevel10      = 10,
+    kAvcLevel1b      = 9,
+    kAvcLevel11      = 11,
+    kAvcLevel12      = 12,
+    kAvcLevel13      = 13,
+    kAvcLevel20      = 20,
+    kAvcLevel21      = 21,
+    kAvcLevel22      = 22,
+    kAvcLevel30      = 30,
+    kAvcLevel31      = 31,
+    kAvcLevel32      = 32,
+    kAvcLevel40      = 40,
+    kAvcLevel41      = 41,
+    kAvcLevel42      = 42,
+    kAvcLevel50      = 50,
+    kAvcLevel51      = 51,
+    kAvcLevel52      = 52,
+};
+
+// profile for AVC video decoder [IN]
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2AvcProfileIdc>, kParamIndexAvcProfile>
+    C2AvcProfileInfo;
+
+// level for AVC video decoder [IN]
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2AvcLevelIdc>, kParamIndexAvcLevel>
+    C2AvcLevelInfo;
+
+// block size [OUT]
+typedef C2StreamParam<C2Info, C2VideoSizeStruct, kParamIndexBlockSize> C2BlockSizeInfo;
+
+// alignment [OUT]
+typedef C2StreamParam<C2Info, C2VideoSizeStruct, kParamIndexAlignment> C2AlignmentInfo;
+
+// frame rate [OUT, hint]
+typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexFramerate> C2FrameRateInfo;
+
+// blocks-per-second [OUT, hint]
+typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexBlocksPerSecond> C2BlocksPerSecondInfo;
+
+} // namespace android
+
+#endif  // C2AVCCONFIG_H_
diff --git a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
new file mode 100644
index 0000000..ffe6332
--- /dev/null
+++ b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
@@ -0,0 +1,1362 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAvcDec"
+#include <utils/Log.h>
+
+#include <cmath>
+#include <thread>
+#include <cinttypes>
+
+#include "ih264_typedefs.h"
+#include "iv.h"
+#include "ivd.h"
+#include "ih264d.h"
+#include "C2SoftAvcDec.h"
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <utils/misc.h>
+
+#include "ih264d_defs.h"
+
+namespace {
+
+template <class T>
+inline int32_t floor32(T arg) {
+   return (int32_t) std::llround(std::floor(arg));
+}
+
+} // namespace
+
+namespace android {
+
+struct iv_obj_t : public ::iv_obj_t {};
+struct ivd_video_decode_ip_t : public ::ivd_video_decode_ip_t {};
+struct ivd_video_decode_op_t : public ::ivd_video_decode_op_t {};
+
+#define PRINT_TIME  ALOGV
+
+#define componentName                   "video_decoder.avc"
+// #define codingType                      OMX_VIDEO_CodingAVC
+#define CODEC_MIME_TYPE                 MEDIA_MIMETYPE_VIDEO_AVC
+
+/** Function and structure definitions to keep code similar for each codec */
+#define ivdec_api_function              ih264d_api_function
+#define ivdext_create_ip_t              ih264d_create_ip_t
+#define ivdext_create_op_t              ih264d_create_op_t
+#define ivdext_delete_ip_t              ih264d_delete_ip_t
+#define ivdext_delete_op_t              ih264d_delete_op_t
+#define ivdext_ctl_set_num_cores_ip_t   ih264d_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t   ih264d_ctl_set_num_cores_op_t
+
+#define IVDEXT_CMD_CTL_SET_NUM_CORES    \
+        (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
+namespace {
+
+#if 0
+using SupportedValuesWithFields = C2SoftAvcDecIntf::SupportedValuesWithFields;
+
+struct ValidateParam {
+    explicit ValidateParam(
+            const std::map<C2ParamField, SupportedValuesWithFields> &supportedValues)
+        : mSupportedValues(supportedValues) {}
+
+    template <class T, bool SIGNED = std::is_signed<T>::value, size_t SIZE = sizeof(T)>
+    struct Getter {
+        static T get(const C2Value::Primitive &) {
+            static_assert(!std::is_arithmetic<T>::value, "non-arithmetic type");
+            static_assert(!std::is_floating_point<T>::value || std::is_same<T, float>::value,
+                    "float is the only supported floating point type");
+            static_assert(sizeof(T) <= 8, "type exceeds 64-bit");
+        }
+    };
+
+    template <class T>
+    bool validateField(
+            const C2FieldSupportedValues &supportedValues, const T &value) {
+        switch (supportedValues.type) {
+        case C2FieldSupportedValues::EMPTY:
+            {
+                return false;
+            }
+        case C2FieldSupportedValues::RANGE:
+            {
+                // TODO: handle step, nom, denom
+                return Getter<T>::get(supportedValues.range.min) <= value
+                        && value <= Getter<T>::get(supportedValues.range.max);
+            }
+        case C2FieldSupportedValues::VALUES:
+            {
+                for (const auto &val : supportedValues.values) {
+                    if (Getter<T>::get(val) == value) {
+                        return true;
+                    }
+                }
+                return false;
+            }
+        case C2FieldSupportedValues::FLAGS:
+            // TODO
+            return false;
+        }
+        return false;
+    }
+
+protected:
+    const std::map<C2ParamField, SupportedValuesWithFields> &mSupportedValues;
+};
+
+template <>
+struct ValidateParam::Getter<float> {
+    static float get(const C2Value::Primitive &value) { return value.fp; }
+};
+template <class T>
+struct ValidateParam::Getter<T, true, 8u> {
+    static int64_t get(const C2Value::Primitive &value) { return value.i64; }
+};
+template <class T>
+struct ValidateParam::Getter<T, true, 4u> {
+    static int32_t get(const C2Value::Primitive &value) { return value.i32; }
+};
+template <class T>
+struct ValidateParam::Getter<T, false, 8u> {
+    static uint64_t get(const C2Value::Primitive &value) { return value.u64; }
+};
+template <class T>
+struct ValidateParam::Getter<T, false, 4u> {
+    static uint32_t get(const C2Value::Primitive &value) { return value.u32; }
+};
+
+template <class T>
+struct ValidateSimpleParam : public ValidateParam {
+    explicit ValidateSimpleParam(
+            const std::map<C2ParamField, SupportedValuesWithFields> &supportedValues)
+        : ValidateParam(supportedValues) {}
+
+    std::unique_ptr<C2SettingResult> operator() (C2Param *c2param) {
+        T* param = (T*)c2param;
+        C2ParamField field(param, &T::value);
+        const C2FieldSupportedValues &supportedValues = mSupportedValues.at(field).supported;
+        if (!validateField(supportedValues, param->value)) {
+            return std::unique_ptr<C2SettingResult>(
+                    new C2SettingResult {C2SettingResult::BAD_VALUE, {field, nullptr}, {}});
+        }
+        return nullptr;
+    }
+};
+
+template <class T>
+struct ValidateVideoSize : public ValidateParam {
+    explicit ValidateVideoSize(
+            const std::map<C2ParamField, SupportedValuesWithFields> &supportedValues)
+        : ValidateParam(supportedValues) {}
+
+    std::unique_ptr<C2SettingResult> operator() (C2Param *c2param) {
+        T* param = (T*)c2param;
+        C2ParamField field(param, &T::width);
+        const C2FieldSupportedValues &supportedWidth = mSupportedValues.at(field).supported;
+        if (!validateField(supportedWidth, param->width)) {
+            return std::unique_ptr<C2SettingResult>(
+                    new C2SettingResult {C2SettingResult::BAD_VALUE, {field, nullptr}, {}});
+        }
+        field = C2ParamField(param, &T::height);
+        const C2FieldSupportedValues &supportedHeight = mSupportedValues.at(field).supported;
+        if (!validateField(supportedHeight, param->height)) {
+            return std::unique_ptr<C2SettingResult>(
+                    new C2SettingResult {C2SettingResult::BAD_VALUE, {field, nullptr}, {}});
+        }
+        return nullptr;
+    }
+};
+
+template <class T>
+struct ValidateCString {
+    explicit ValidateCString(const char *expected) : mExpected(expected) {}
+
+    std::unique_ptr<C2SettingResult> operator() (C2Param *c2param) {
+        T* param = (T*)c2param;
+        if (strncmp(param->m.value, mExpected, param->flexCount()) != 0) {
+            return std::unique_ptr<C2SettingResult>(
+                    new C2SettingResult {C2SettingResult::BAD_VALUE, {C2ParamField(param, &T::m), nullptr}, {}});
+        }
+        return nullptr;
+    }
+
+private:
+    const char *mExpected;
+};
+#endif
+
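+// Completes a work item without producing output: the single null buffer entry
+// keeps the worklet well formed while signalling that no frame was emitted.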
+void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+    uint32_t flags = 0;
+    if ((work->input.flags & C2BufferPack::FLAG_END_OF_STREAM)) {
+        flags |= C2BufferPack::FLAG_END_OF_STREAM;
+    }
+    work->worklets.front()->output.flags = (C2BufferPack::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.buffers.emplace_back(nullptr);
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->worklets_processed = 1u;
+}
+
+}  // namespace
+
+#if 0
+#define CASE(member) \
+    case decltype(component->member)::CORE_INDEX: \
+        return std::unique_ptr<C2StructDescriptor>(new C2StructDescriptor( \
+                static_cast<decltype(component->member) *>(nullptr)))
+
+class C2SoftAvcDecIntf::ParamReflector : public C2ParamReflector {
+public:
+    virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::CoreIndex coreIndex) override {
+        constexpr C2SoftAvcDecIntf *component = nullptr;
+        switch (coreIndex.coreIndex()) {
+        CASE(mDomainInfo);
+        CASE(mInputStreamCount);
+        CASE(mInputStreamFormat);
+        // Output counterparts for the above would be redundant.
+        CASE(mVideoSize);
+        CASE(mMaxVideoSizeHint);
+
+        // port mime configs are stored as unique_ptr.
+        case C2PortMimeConfig::CORE_INDEX:
+            return std::unique_ptr<C2StructDescriptor>(new C2StructDescriptor(
+                    static_cast<C2PortMimeConfig *>(nullptr)));
+        }
+        return nullptr;
+    }
+};
+#undef CASE
+
+// static const CodecProfileLevel kProfileLevels[] = {
+//     { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel52 },
+//     { OMX_VIDEO_AVCProfileMain,     OMX_VIDEO_AVCLevel52 },
+//     { OMX_VIDEO_AVCProfileHigh,     OMX_VIDEO_AVCLevel52 },
+// };
+C2SoftAvcDecIntf::C2SoftAvcDecIntf(const char *name, c2_node_id_t id)
+    : mName(name),
+      mId(id),
+      mDomainInfo(C2DomainVideo),
+      mInputStreamCount(1u),
+      mOutputStreamCount(1u),
+      mInputStreamFormat(0u, C2FormatCompressed),
+      mOutputStreamFormat(0u, C2FormatVideo),
+      mProfile(0u, kAvcProfileUnknown),
+      mLevel(0u, kAvcLevelUnknown),
+      mBlockSize(0u),
+      mAlignment(0u),
+      mFrameRate(0u, 0),
+      mBlocksPerSecond(0u, 0),
+      mParamReflector(new ParamReflector) {
+    ALOGV("in %s", __func__);
+    mInputPortMime = C2PortMimeConfig::input::alloc_unique(strlen(CODEC_MIME_TYPE) + 1);
+    strcpy(mInputPortMime->m.value, CODEC_MIME_TYPE);
+    mOutputPortMime = C2PortMimeConfig::output::alloc_unique(strlen(MEDIA_MIMETYPE_VIDEO_RAW) + 1);
+    strcpy(mOutputPortMime->m.value, MEDIA_MIMETYPE_VIDEO_RAW);
+
+    mVideoSize.width = 320;
+    mVideoSize.height = 240;
+    mBlockSize.width = 16;
+    mBlockSize.height = 16;
+    mAlignment.width = 2;
+    mAlignment.height = 2;
+
+    mMaxVideoSizeHint.width = H264_MAX_FRAME_WIDTH;
+    mMaxVideoSizeHint.height = H264_MAX_FRAME_HEIGHT;
+
+    mOutputBlockPools = C2PortBlockPoolsTuning::output::alloc_unique({});
+
+    auto insertParam = [&params = mParams] (C2Param *param) {
+        params[param->index()] = param;
+    };
+
+    auto markReadOnly = [&supported = mSupportedValues] (auto *param) {
+        supported.emplace(
+                C2ParamField(param, &std::remove_pointer<decltype(param)>::type::value),
+                C2FieldSupportedValues(false /* flags */, {}));
+    };
+
+    auto markReadOnlyVideoSize = [&supported = mSupportedValues] (auto *param) {
+        supported.emplace(
+                C2ParamField(param, &std::remove_pointer<decltype(param)>::type::width),
+                C2FieldSupportedValues(false /* flags */, {}));
+        supported.emplace(
+                C2ParamField(param, &std::remove_pointer<decltype(param)>::type::height),
+                C2FieldSupportedValues(false /* flags */, {}));
+    };
+
+    insertParam(&mDomainInfo);
+    markReadOnly(&mDomainInfo);
+    mFieldVerifiers[mDomainInfo.index()] =
+            ValidateSimpleParam<decltype(mDomainInfo)>(mSupportedValues);
+
+    insertParam(mInputPortMime.get());
+    mFieldVerifiers[mInputPortMime->index()] =
+            ValidateCString<std::remove_reference<decltype(*mInputPortMime)>::type>(CODEC_MIME_TYPE);
+
+    insertParam(&mInputStreamCount);
+    markReadOnly(&mInputStreamCount);
+    mFieldVerifiers[mInputStreamCount.index()] =
+            ValidateSimpleParam<decltype(mInputStreamCount)>(mSupportedValues);
+
+    insertParam(mOutputPortMime.get());
+    mFieldVerifiers[mOutputPortMime->index()] =
+            ValidateCString<std::remove_reference<decltype(*mOutputPortMime)>::type>(MEDIA_MIMETYPE_VIDEO_RAW);
+
+    insertParam(&mOutputStreamCount);
+    markReadOnly(&mOutputStreamCount);
+    mFieldVerifiers[mOutputStreamCount.index()] =
+            ValidateSimpleParam<decltype(mOutputStreamCount)>(mSupportedValues);
+
+    insertParam(&mInputStreamFormat);
+    markReadOnly(&mInputStreamFormat);
+    mFieldVerifiers[mInputStreamFormat.index()] =
+            ValidateSimpleParam<decltype(mInputStreamFormat)>(mSupportedValues);
+
+    insertParam(&mOutputStreamFormat);
+    markReadOnly(&mOutputStreamFormat);
+    mFieldVerifiers[mOutputStreamFormat.index()] =
+            ValidateSimpleParam<decltype(mOutputStreamFormat)>(mSupportedValues);
+
+    insertParam(&mVideoSize);
+    markReadOnlyVideoSize(&mVideoSize);
+    mFieldVerifiers[mVideoSize.index()] =
+            ValidateVideoSize<decltype(mVideoSize)>(mSupportedValues);
+
+    insertParam(&mMaxVideoSizeHint);
+    mSupportedValues.emplace(
+            C2ParamField(&mMaxVideoSizeHint, &C2MaxVideoSizeHintPortSetting::width),
+            C2FieldSupportedValues(H264_MIN_FRAME_WIDTH, H264_MAX_FRAME_WIDTH, mAlignment.width));
+    mSupportedValues.emplace(
+            C2ParamField(&mMaxVideoSizeHint, &C2MaxVideoSizeHintPortSetting::height),
+            C2FieldSupportedValues(H264_MIN_FRAME_HEIGHT, H264_MAX_FRAME_HEIGHT, mAlignment.height));
+    mFieldVerifiers[mMaxVideoSizeHint.index()] =
+            ValidateVideoSize<decltype(mMaxVideoSizeHint)>(mSupportedValues);
+
+    insertParam(&mProfile);
+    mSupportedValues.emplace(
+            C2ParamField(&mProfile, &C2AvcProfileInfo::value),
+            C2FieldSupportedValues(false /* flags */, {
+                kAvcProfileUnknown,
+                kAvcProfileBaseline,
+                kAvcProfileMain,
+                kAvcProfileHigh,
+            }));
+    mFieldVerifiers[mProfile.index()] =
+            ValidateSimpleParam<decltype(mProfile)>(mSupportedValues);
+
+    insertParam(&mLevel);
+    mSupportedValues.emplace(
+            C2ParamField(&mLevel, &C2AvcLevelInfo::value),
+            C2FieldSupportedValues(false /* flags */, {
+                kAvcLevelUnknown,
+                kAvcLevel10,
+                kAvcLevel1b,
+                kAvcLevel11,
+                kAvcLevel12,
+                kAvcLevel13,
+                kAvcLevel20,
+                kAvcLevel21,
+                kAvcLevel22,
+                kAvcLevel30,
+                kAvcLevel31,
+                kAvcLevel32,
+                kAvcLevel40,
+                kAvcLevel41,
+                kAvcLevel42,
+                kAvcLevel50,
+                kAvcLevel51,
+                kAvcLevel52,
+            }));
+    mFieldVerifiers[mLevel.index()] =
+            ValidateSimpleParam<decltype(mLevel)>(mSupportedValues);
+
+    insertParam(&mBlockSize);
+    markReadOnlyVideoSize(&mBlockSize);
+    mFieldVerifiers[mBlockSize.index()] =
+            ValidateVideoSize<decltype(mBlockSize)>(mSupportedValues);
+
+    insertParam(&mAlignment);
+    markReadOnlyVideoSize(&mAlignment);
+    mFieldVerifiers[mAlignment.index()] =
+            ValidateVideoSize<decltype(mAlignment)>(mSupportedValues);
+
+    insertParam(&mFrameRate);
+    mSupportedValues.emplace(
+            C2ParamField(&mFrameRate, &C2FrameRateInfo::value),
+            C2FieldSupportedValues(0, 240));
+    mFieldVerifiers[mFrameRate.index()] =
+            ValidateSimpleParam<decltype(mFrameRate)>(mSupportedValues);
+
+    insertParam(&mBlocksPerSecond);
+    mSupportedValues.emplace(
+            C2ParamField(&mFrameRate, &C2BlocksPerSecondInfo::value),
+            C2FieldSupportedValues(0, 244800));
+    mFieldVerifiers[mBlocksPerSecond.index()] =
+            ValidateSimpleParam<decltype(mBlocksPerSecond)>(mSupportedValues);
+
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            true, "_domain", &mDomainInfo));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            true, "_input_port_mime", mInputPortMime.get()));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            true, "_input_stream_count", &mInputStreamCount));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            true, "_output_port_mime", mOutputPortMime.get()));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            true, "_output_stream_count", &mOutputStreamCount));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            true, "_input_stream_format", &mInputStreamFormat));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            true, "_output_stream_format", &mOutputStreamFormat));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            false, "_video_size", &mVideoSize));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            false, "_max_video_size_hint", &mMaxVideoSizeHint));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            false, "_output_block_pools", mOutputBlockPools.get()));
+}
+
+C2SoftAvcDecIntf::~C2SoftAvcDecIntf() {
+    ALOGV("in %s", __func__);
+}
+
+C2String C2SoftAvcDecIntf::getName() const {
+    return mName;
+}
+
+c2_node_id_t C2SoftAvcDecIntf::getId() const {
+    return mId;
+}
+
+c2_status_t C2SoftAvcDecIntf::query_vb(
+        const std::vector<C2Param* const> & stackParams,
+        const std::vector<C2Param::Index> & heapParamIndices,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+    (void)mayBlock;
+    for (C2Param* const param : stackParams) {
+        if (!*param) {
+            continue;
+        }
+
+        uint32_t index = param->index();
+        if (!mParams.count(index)) {
+            // TODO: add support for output-block-pools (this will be done when we move all
+            // config to shared ptr)
+            continue;
+        }
+
+        C2Param *myParam = mParams.find(index)->second;
+        if (myParam->size() != param->size()) {
+            param->invalidate();
+            continue;
+        }
+
+        param->updateFrom(*myParam);
+    }
+
+    for (const C2Param::Index index : heapParamIndices) {
+        if (mParams.count(index)) {
+            C2Param *myParam = mParams.find(index)->second;
+            heapParams->emplace_back(C2Param::Copy(*myParam));
+        }
+    }
+
+    return C2_OK;
+}
+
+c2_status_t C2SoftAvcDecIntf::config_vb(
+        const std::vector<C2Param* const> &params,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+    (void)mayBlock;
+    c2_status_t err = C2_OK;
+    for (C2Param *param : params) {
+        uint32_t index = param->index();
+        if (param->index() == mOutputBlockPools.get()->index()) {
+            // setting output block pools
+            mOutputBlockPools.reset(
+                    (C2PortBlockPoolsTuning::output *)C2Param::Copy(*param).release());
+            continue;
+        }
+
+        if (mParams.count(index) == 0) {
+            // We can't create a C2SettingResult with no field, so just skip it in this case.
+            err = C2_BAD_INDEX;
+            continue;
+        }
+        C2Param *myParam = mParams.find(index)->second;
+        std::unique_ptr<C2SettingResult> result;
+        if (!(result = mFieldVerifiers[index](param))) {
+            myParam->updateFrom(*param);
+            updateSupportedValues();
+        } else {
+            failures->push_back(std::move(result));
+            err = C2_BAD_VALUE;
+        }
+    }
+    return err;
+}
+
+c2_status_t C2SoftAvcDecIntf::createTunnel_sm(c2_node_id_t targetComponent) {
+    // Tunneling is not supported
+    (void) targetComponent;
+    return C2_OMITTED;
+}
+
+c2_status_t C2SoftAvcDecIntf::releaseTunnel_sm(c2_node_id_t targetComponent) {
+    // Tunneling is not supported
+    (void) targetComponent;
+    return C2_OMITTED;
+}
+
+std::shared_ptr<C2ParamReflector> C2SoftAvcDecIntf::getParamReflector() const {
+    return mParamReflector;
+}
+
+c2_status_t C2SoftAvcDecIntf::querySupportedParams_nb(
+        std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const {
+    params->insert(params->begin(), mParamDescs.begin(), mParamDescs.end());
+    return C2_OK;
+}
+
+c2_status_t C2SoftAvcDecIntf::querySupportedValues_vb(
+        std::vector<C2FieldSupportedValuesQuery> &fields, c2_blocking_t mayBlock) const {
+    (void)mayBlock;
+    c2_status_t res = C2_OK;
+    for (C2FieldSupportedValuesQuery &query : fields) {
+        if (mSupportedValues.count(query.field) == 0) {
+            query.status = C2_BAD_INDEX;
+            res = C2_BAD_INDEX;
+        } else {
+            query.status = C2_OK;
+            query.values = mSupportedValues.at(query.field).supported;
+        }
+    }
+    return res;
+}
+
+void C2SoftAvcDecIntf::updateSupportedValues() {
+    int32_t maxWidth = H264_MAX_FRAME_WIDTH;
+    int32_t maxHeight = H264_MAX_FRAME_HEIGHT;
+    // cf: Rec. ITU-T H.264 A.3
+    int maxFrameRate = 172;
+    std::vector<C2ParamField> fields;
+    if (mLevel.value != kAvcLevelUnknown) {
+        // cf: Rec. ITU-T H.264 Table A-1
+        constexpr int MaxFS[] = {
+        //  0       1       2       3       4       5       6       7       8       9
+            0,      0,      0,      0,      0,      0,      0,      0,      0,      99,
+            99,     396,    396,    396,    0,      0,      0,      0,      0,      0,
+            396,    792,    1620,   0,      0,      0,      0,      0,      0,      0,
+            1620,   3600,   5120,   0,      0,      0,      0,      0,      0,      0,
+            8192,   8192,   8704,   0,      0,      0,      0,      0,      0,      0,
+            22080,  36864,  36864,
+        };
+        constexpr int MaxMBPS[] = {
+        //  0       1       2       3       4       5       6       7       8       9
+            0,      0,      0,      0,      0,      0,      0,      0,      0,      1485,
+            1485,   3000,   6000,   11880,  0,      0,      0,      0,      0,      0,
+            11880,  19800,  20250,  0,      0,      0,      0,      0,      0,      0,
+            40500,  108000, 216000, 0,      0,      0,      0,      0,      0,      0,
+            245760, 245760, 522240, 0,      0,      0,      0,      0,      0,      0,
+            589824, 983040, 2073600,
+        };
+
+        // cf: Rec. ITU-T H.264 A.3.1
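+        // e.g. for Level 3.1 MaxFS is 3600 macroblocks, so the bound below is
+        // floor(sqrt(8 * 3600)) * MB_SIZE = 169 * 16 = 2704 luma samples.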
+        maxWidth = std::min(maxWidth, floor32(std::sqrt(MaxFS[mLevel.value] * 8)) * MB_SIZE);
+        maxHeight = std::min(maxHeight, floor32(std::sqrt(MaxFS[mLevel.value] * 8)) * MB_SIZE);
+        int32_t MBs = ((mVideoSize.width + 15) / 16) * ((mVideoSize.height + 15) / 16);
+        maxFrameRate = std::min(maxFrameRate, MaxMBPS[mLevel.value] / MBs);
+        fields.push_back(C2ParamField(&mLevel, &C2AvcLevelInfo::value));
+    }
+
+    SupportedValuesWithFields &maxWidthVals = mSupportedValues.at(
+            C2ParamField(&mMaxVideoSizeHint, &C2MaxVideoSizeHintPortSetting::width));
+    maxWidthVals.supported.range.max = maxWidth;
+    maxWidthVals.restrictingFields.clear();
+    maxWidthVals.restrictingFields.insert(fields.begin(), fields.end());
+
+    SupportedValuesWithFields &maxHeightVals = mSupportedValues.at(
+            C2ParamField(&mMaxVideoSizeHint, &C2MaxVideoSizeHintPortSetting::height));
+    maxHeightVals.supported.range.max = maxHeight;
+    maxHeightVals.restrictingFields.clear();
+    maxHeightVals.restrictingFields.insert(fields.begin(), fields.end());
+
+    SupportedValuesWithFields &frameRate = mSupportedValues.at(
+            C2ParamField(&mFrameRate, &C2FrameRateInfo::value));
+    frameRate.supported.range.max = maxFrameRate;
+    frameRate.restrictingFields.clear();
+    frameRate.restrictingFields.insert(fields.begin(), fields.end());
+}
+#endif
+
+///////////////////////////////////////////////////////////////////////////////
+
+C2SoftAvcDec::C2SoftAvcDec(
+        const char *name,
+        c2_node_id_t id)
+    : SimpleC2Component(
+          SimpleC2Interface::Builder(name, id)
+          .inputFormat(C2FormatCompressed)
+          .outputFormat(C2FormatVideo)
+          .build()),
+      mCodecCtx(NULL),
+      mFlushOutBuffer(NULL),
+      mIvColorFormat(IV_YUV_420P),
+      mChangingResolution(false),
+      mSignalledError(false),
+      mWidth(320),
+      mHeight(240),
+      mInputOffset(0) {
+    ALOGV("in %s", __func__);
+    GETTIME(&mTimeStart, NULL);
+
+    // If input dump is enabled, then create an empty file
+    GENERATE_FILE_NAMES();
+    CREATE_DUMP_FILE(mInFile);
+}
+
+C2SoftAvcDec::~C2SoftAvcDec() {
+    ALOGV("in %s", __func__);
+    CHECK_EQ(deInitDecoder(), (status_t)OK);
+}
+
+c2_status_t C2SoftAvcDec::onInit() {
+    // TODO: initialize using intf
+    return C2_OK;
+}
+
+c2_status_t C2SoftAvcDec::onStop() {
+    mSignalledError = false;
+    resetDecoder();
+    resetPlugin();
+
+    return C2_OK;
+}
+
+void C2SoftAvcDec::onReset() {
+    (void)onStop();
+}
+
+void C2SoftAvcDec::onRelease() {
+    (void)deInitDecoder();
+}
+
+c2_status_t C2SoftAvcDec::onFlush_sm() {
+    setFlushMode();
+
+    /* Allocate a picture buffer to hold flushed data */
+    uint32_t displayStride = mWidth;
+    uint32_t displayHeight = mHeight;
+
+    uint32_t bufferSize = displayStride * displayHeight * 3 / 2;
+    mFlushOutBuffer = (uint8_t *)memalign(128, bufferSize);
+    if (NULL == mFlushOutBuffer) {
+        ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);
+        return C2_NO_MEMORY;
+    }
+
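+    // With the decoder in flush mode, keep calling decode until it reports that
+    // no more buffered output pictures are pending.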
+    while (true) {
+        ivd_video_decode_ip_t s_dec_ip;
+        ivd_video_decode_op_t s_dec_op;
+        IV_API_CALL_STATUS_T status;
+        // size_t sizeY, sizeUV;
+
+        setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0, 0u);
+
+        status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+        if (0 == s_dec_op.u4_output_present) {
+            resetPlugin();
+            break;
+        }
+    }
+
+    if (mFlushOutBuffer) {
+        free(mFlushOutBuffer);
+        mFlushOutBuffer = NULL;
+    }
+    return C2_OK;
+}
+
+static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
+    (void) ctxt;
+    return memalign(alignment, size);
+}
+
+static void ivd_aligned_free(void *ctxt, void *buf) {
+    (void) ctxt;
+    free(buf);
+    return;
+}
+
+static size_t GetCPUCoreCount() {
+    long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+    // _SC_NPROC_ONLN must be defined...
+    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+    CHECK(cpuCoreCount >= 1);
+    ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+    return (size_t)cpuCoreCount;
+}
+
+void C2SoftAvcDec::logVersion() {
+    ivd_ctl_getversioninfo_ip_t s_ctl_ip;
+    ivd_ctl_getversioninfo_op_t s_ctl_op;
+    UWORD8 au1_buf[512];
+    IV_API_CALL_STATUS_T status;
+
+    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+    s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+    s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+    s_ctl_ip.pv_version_buffer = au1_buf;
+    s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf);
+
+    status =
+        ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
+
+    if (status != IV_SUCCESS) {
+        ALOGE("Error in getting version number: 0x%x",
+                s_ctl_op.u4_error_code);
+    } else {
+        ALOGV("Ittiam decoder version number: %s",
+                (char *)s_ctl_ip.pv_version_buffer);
+    }
+    return;
+}
+
+status_t C2SoftAvcDec::setParams(size_t stride) {
+    ivd_ctl_set_config_ip_t s_ctl_ip;
+    ivd_ctl_set_config_op_t s_ctl_op;
+    IV_API_CALL_STATUS_T status;
+    s_ctl_ip.u4_disp_wd = (UWORD32)stride;
+    s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+
+    s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+    s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME;
+    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+    s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+    s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+
+    ALOGV("Set the run-time (dynamic) parameters stride = %zu", stride);
+    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
+
+    if (status != IV_SUCCESS) {
+        ALOGE("Error in setting the run-time parameters: 0x%x",
+                s_ctl_op.u4_error_code);
+
+        return UNKNOWN_ERROR;
+    }
+    return OK;
+}
+
+status_t C2SoftAvcDec::resetPlugin() {
+    mInputOffset = 0;
+
+    /* Initialize both start and end times */
+    gettimeofday(&mTimeStart, NULL);
+    gettimeofday(&mTimeEnd, NULL);
+
+    return OK;
+}
+
+status_t C2SoftAvcDec::resetDecoder() {
+    ivd_ctl_reset_ip_t s_ctl_ip;
+    ivd_ctl_reset_op_t s_ctl_op;
+    IV_API_CALL_STATUS_T status;
+
+    s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+    s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+    s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+    s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+
+    status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
+    if (IV_SUCCESS != status) {
+        ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
+        return UNKNOWN_ERROR;
+    }
+    mSignalledError = false;
+
+    /* Set number of cores/threads to be used by the codec */
+    setNumCores();
+
+    mStride = 0;
+    return OK;
+}
+
+status_t C2SoftAvcDec::setNumCores() {
+    ivdext_ctl_set_num_cores_ip_t s_set_cores_ip;
+    ivdext_ctl_set_num_cores_op_t s_set_cores_op;
+    IV_API_CALL_STATUS_T status;
+    s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+    s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+    s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
+    s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+    s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+    status = ivdec_api_function(
+            mCodecCtx, (void *)&s_set_cores_ip, (void *)&s_set_cores_op);
+    if (IV_SUCCESS != status) {
+        ALOGE("Error in setting number of cores: 0x%x",
+                s_set_cores_op.u4_error_code);
+        return UNKNOWN_ERROR;
+    }
+    return OK;
+}
+
+status_t C2SoftAvcDec::setFlushMode() {
+    ALOGV("setFlushMode");
+    IV_API_CALL_STATUS_T status;
+    ivd_ctl_flush_ip_t s_video_flush_ip;
+    ivd_ctl_flush_op_t s_video_flush_op;
+
+    s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+    s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+    s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+    s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+
+    /* Set the decoder in Flush mode, subsequent decode() calls will flush */
+    status = ivdec_api_function(
+            mCodecCtx, (void *)&s_video_flush_ip, (void *)&s_video_flush_op);
+
+    if (status != IV_SUCCESS) {
+        ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status,
+                s_video_flush_op.u4_error_code);
+        return UNKNOWN_ERROR;
+    }
+    return OK;
+}
+
+status_t C2SoftAvcDec::initDecoder() {
+    IV_API_CALL_STATUS_T status;
+
+    mNumCores = GetCPUCoreCount();
+    mCodecCtx = NULL;
+
+    mStride = mWidth;
+
+    /* Initialize the decoder */
+    {
+        ivdext_create_ip_t s_create_ip;
+        ivdext_create_op_t s_create_op;
+
+        void *dec_fxns = (void *)ivdec_api_function;
+
+        s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+        s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
+        s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
+        s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t);
+        s_create_ip.s_ivd_create_ip_t.e_output_format = (IV_COLOR_FORMAT_T)mIvColorFormat;
+        s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
+        s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
+        s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = NULL;
+
+        status = ivdec_api_function(mCodecCtx, (void *)&s_create_ip, (void *)&s_create_op);
+
+        mCodecCtx = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle;
+        mCodecCtx->pv_fxns = dec_fxns;
+        mCodecCtx->u4_size = sizeof(iv_obj_t);
+
+        if (status != IV_SUCCESS) {
+            ALOGE("Error in create: 0x%x",
+                    s_create_op.s_ivd_create_op_t.u4_error_code);
+            deInitDecoder();
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    /* Reset the plugin state */
+    resetPlugin();
+
+    /* Set the run time (dynamic) parameters */
+    setParams(mStride);
+
+    /* Set number of cores/threads to be used by the codec */
+    setNumCores();
+
+    /* Get codec version */
+    logVersion();
+
+    mFlushNeeded = false;
+    return OK;
+}
+
+status_t C2SoftAvcDec::deInitDecoder() {
+    IV_API_CALL_STATUS_T status;
+
+    if (mCodecCtx) {
+        ivdext_delete_ip_t s_delete_ip;
+        ivdext_delete_op_t s_delete_op;
+
+        s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
+        s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
+
+        s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t);
+
+        status = ivdec_api_function(mCodecCtx, (void *)&s_delete_ip, (void *)&s_delete_op);
+        if (status != IV_SUCCESS) {
+            ALOGE("Error in delete: 0x%x",
+                    s_delete_op.s_ivd_delete_op_t.u4_error_code);
+            return UNKNOWN_ERROR;
+        }
+        mCodecCtx = NULL;
+    }
+
+    mChangingResolution = false;
+
+    return OK;
+}
+
+bool C2SoftAvcDec::getVUIParams() {
+    IV_API_CALL_STATUS_T status;
+    ih264d_ctl_get_vui_params_ip_t s_ctl_get_vui_params_ip;
+    ih264d_ctl_get_vui_params_op_t s_ctl_get_vui_params_op;
+
+    s_ctl_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+    s_ctl_get_vui_params_ip.e_sub_cmd =
+        (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_GET_VUI_PARAMS;
+
+    s_ctl_get_vui_params_ip.u4_size =
+        sizeof(ih264d_ctl_get_vui_params_ip_t);
+
+    s_ctl_get_vui_params_op.u4_size = sizeof(ih264d_ctl_get_vui_params_op_t);
+
+    status = ivdec_api_function(
+            (iv_obj_t *)mCodecCtx, (void *)&s_ctl_get_vui_params_ip,
+            (void *)&s_ctl_get_vui_params_op);
+
+    if (status != IV_SUCCESS) {
+        ALOGW("Error in getting VUI params: 0x%x",
+                s_ctl_get_vui_params_op.u4_error_code);
+        return false;
+    }
+
+    int32_t primaries = s_ctl_get_vui_params_op.u1_colour_primaries;
+    int32_t transfer = s_ctl_get_vui_params_op.u1_tfr_chars;
+    int32_t coeffs = s_ctl_get_vui_params_op.u1_matrix_coeffs;
+    bool fullRange = s_ctl_get_vui_params_op.u1_video_full_range_flag;
+
+    ColorAspects colorAspects;
+    ColorUtils::convertIsoColorAspectsToCodecAspects(
+            primaries, transfer, coeffs, fullRange, colorAspects);
+
+    // Update color aspects if necessary.
+    if (colorAspectsDiffer(colorAspects, mBitstreamColorAspects)) {
+        mBitstreamColorAspects = colorAspects;
+        status_t err = handleColorAspectsChange();
+        CHECK(err == OK);
+    }
+    return true;
+}
+
+bool C2SoftAvcDec::setDecodeArgs(
+        ivd_video_decode_ip_t *ps_dec_ip,
+        ivd_video_decode_op_t *ps_dec_op,
+        C2ReadView *inBuffer,
+        C2GraphicView *outBuffer,
+        uint32_t workIndex,
+        size_t inOffset) {
+    size_t width = mWidth;
+    size_t height = mHeight;
+    size_t sizeY = width * height;
+    size_t sizeUV;
+
+    ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+    ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);
+
+    ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
+
+    /* When flushing, and after EOS with zero-byte input,
+     * inBuffer is null, hence the check for non-null */
+    if (inBuffer) {
+        ps_dec_ip->u4_ts = workIndex;
+        ps_dec_ip->pv_stream_buffer = const_cast<uint8_t *>(inBuffer->data()) + inOffset;
+        ps_dec_ip->u4_num_Bytes = inBuffer->capacity() - inOffset;
+    } else {
+        ps_dec_ip->u4_ts = 0;
+        ps_dec_ip->pv_stream_buffer = NULL;
+        ps_dec_ip->u4_num_Bytes = 0;
+    }
+
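+    // Output is planar YUV 4:2:0, so each chroma plane is a quarter of the
+    // luma plane.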
+    sizeUV = sizeY / 4;
+    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
+    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
+    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
+
+    if (outBuffer) {
+        if (outBuffer->width() < width ||
+                outBuffer->height() < height) {
+            ALOGE("Output buffer too small: provided (%dx%d) required (%zux%zu)",
+                  outBuffer->width(), outBuffer->height(), width, height);
+            return false;
+        }
+        ps_dec_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[0];
+        ps_dec_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[1];
+        ps_dec_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[2];
+    } else {
+        // mFlushOutBuffer always has the right size.
+        ps_dec_ip->s_out_buffer.pu1_bufs[0] = mFlushOutBuffer;
+        ps_dec_ip->s_out_buffer.pu1_bufs[1] = mFlushOutBuffer + sizeY;
+        ps_dec_ip->s_out_buffer.pu1_bufs[2] = mFlushOutBuffer + sizeY + sizeUV;
+    }
+
+    ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
+    return true;
+}
+
+c2_status_t C2SoftAvcDec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
+    if (NULL == mCodecCtx) {
+        if (OK != initDecoder()) {
+            ALOGE("Failed to initialize decoder");
+            // TODO: notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+            mSignalledError = true;
+            return C2_CORRUPTED;
+        }
+    }
+    if (mWidth != mStride) {
+        /* Set the run-time (dynamic) parameters */
+        mStride = mWidth;
+        setParams(mStride);
+    }
+
+    if (!mAllocatedBlock) {
+        // TODO: error handling
+        // TODO: format & usage
+        uint32_t format = HAL_PIXEL_FORMAT_YV12;
+        C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+        ALOGV("using allocator %u", pool->getAllocatorId());
+
+        (void)pool->fetchGraphicBlock(
+                mWidth, mHeight, format, usage, &mAllocatedBlock);
+        ALOGV("provided (%dx%d) required (%dx%d)",
+                mAllocatedBlock->width(), mAllocatedBlock->height(), mWidth, mHeight);
+    }
+    return C2_OK;
+}
+
+void C2SoftAvcDec::finishWork(uint64_t index, const std::unique_ptr<C2Work> &work) {
+    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(std::move(mAllocatedBlock));
+    auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
+        uint32_t flags = 0;
+        if (work->input.flags & C2BufferPack::FLAG_END_OF_STREAM) {
+            flags |= C2BufferPack::FLAG_END_OF_STREAM;
+            ALOGV("EOS");
+        }
+        work->worklets.front()->output.flags = (C2BufferPack::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->worklets_processed = 1u;
+    };
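+    // If the decoded picture belongs to the work item currently being processed,
+    // fill it in place; otherwise complete the earlier pending work item that
+    // carries this frame index.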
+    if (work && index == work->input.ordinal.frame_index) {
+        fillWork(work);
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+void C2SoftAvcDec::process(
+        const std::unique_ptr<C2Work> &work,
+        const std::shared_ptr<C2BlockPool> &pool) {
+    bool eos = false;
+
+    work->result = C2_OK;
+    work->worklets_processed = 0u;
+
+    const C2ConstLinearBlock &buffer =
+        work->input.buffers[0]->data().linearBlocks().front();
+    if (buffer.capacity() == 0) {
+        ALOGV("empty input: %llu", (long long)work->input.ordinal.frame_index);
+        // TODO: result?
+        fillEmptyWork(work);
+        if ((work->input.flags & C2BufferPack::FLAG_END_OF_STREAM)) {
+            eos = true;
+        }
+        return;
+    } else if (work->input.flags & C2BufferPack::FLAG_END_OF_STREAM) {
+        ALOGV("input EOS: %llu", (long long)work->input.ordinal.frame_index);
+        eos = true;
+    }
+
+    C2ReadView input = work->input.buffers[0]->data().linearBlocks().front().map().get();
+    uint32_t workIndex = work->input.ordinal.frame_index & 0xFFFFFFFF;
+    size_t inOffset = 0u;
+
+    while (inOffset < input.capacity()) {
+        if (mSignalledError) {
+            break;
+        }
+        (void)ensureDecoderState(pool);
+        C2GraphicView output = mAllocatedBlock->map().get();
+        if (output.error() != OK) {
+            ALOGE("mapped err = %d", output.error());
+        }
+
+        ivd_video_decode_ip_t s_dec_ip;
+        ivd_video_decode_op_t s_dec_op;
+        WORD32 timeDelay, timeTaken;
+        //size_t sizeY, sizeUV;
+
+        if (!setDecodeArgs(&s_dec_ip, &s_dec_op, &input, &output, workIndex, inOffset)) {
+            ALOGE("Decoder arg setup failed");
+            // TODO: notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+            mSignalledError = true;
+            break;
+        }
+        ALOGV("Decoder arg setup succeeded");
+        // If input dump is enabled, then write to file
+        DUMP_TO_FILE(mInFile, s_dec_ip.pv_stream_buffer, s_dec_ip.u4_num_Bytes, mInputOffset);
+
+        GETTIME(&mTimeStart, NULL);
+        /* Compute time elapsed between end of previous decode()
+         * to start of current decode() */
+        TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+
+        IV_API_CALL_STATUS_T status;
+        status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+        ALOGV("status = %d, error_code = %d", status, (s_dec_op.u4_error_code & 0xFF));
+
+        bool unsupportedResolution =
+            (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
+
+        /* Check for unsupported dimensions */
+        if (unsupportedResolution) {
+            ALOGE("Unsupported resolution : %dx%d", mWidth, mHeight);
+            // TODO: notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+            mSignalledError = true;
+            break;
+        }
+
+        bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & 0xFF));
+        if (allocationFailed) {
+            ALOGE("Allocation failure in decoder");
+            // TODO: notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+            mSignalledError = true;
+            break;
+        }
+
+        bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
+
+        getVUIParams();
+
+        GETTIME(&mTimeEnd, NULL);
+        /* Compute time taken for decode() */
+        TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+        PRINT_TIME("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+               s_dec_op.u4_num_bytes_consumed);
+        ALOGV("bytes total=%u", input.capacity());
+        if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
+            mFlushNeeded = true;
+        }
+
+        if (1 != s_dec_op.u4_frame_decoded_flag) {
+            /* If the input did not contain picture data, return work without
+             * buffer */
+            ALOGV("no picture data: %u", workIndex);
+            fillEmptyWork(work);
+        }
+
+        if (resChanged) {
+            ALOGV("res changed");
+            if (mFlushNeeded) {
+                drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
+            }
+            resetDecoder();
+            resetPlugin();
+            mStride = mWidth;
+            setParams(mStride);
+            continue;
+        }
+
+        if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
+            uint32_t width = s_dec_op.u4_pic_wd;
+            uint32_t height = s_dec_op.u4_pic_ht;
+            ALOGV("width = %u height = %u", width, height);
+            if (width != mWidth || height != mHeight) {
+                mAllocatedBlock.reset();
+                mWidth = width;
+                mHeight = height;
+            }
+            // TODO: continue?
+        }
+
+        if (mUpdateColorAspects) {
+            //notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
+            //    kDescribeColorAspectsIndex, NULL);
+            ALOGV("update color aspect");
+            mUpdateColorAspects = false;
+        }
+
+        if (s_dec_op.u4_output_present) {
+            ALOGV("output_present: %d", s_dec_op.u4_ts);
+            finishWork(s_dec_op.u4_ts, work);
+        }
+
+        inOffset += s_dec_op.u4_num_bytes_consumed;
+    }
+    if (inOffset >= input.capacity()) {
+        /* If input EOS is seen, drain the decoder. EOS may be sent along with
+         * the last picture data; in that case the decoder must be put into
+         * flush mode only after that input has been decoded. That case is
+         * handled here. */
+        if (eos) {
+            drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        }
+    }
+}
+
+c2_status_t C2SoftAvcDec::drainInternal(
+        uint32_t drainMode,
+        const std::shared_ptr<C2BlockPool> &pool,
+        const std::unique_ptr<C2Work> &work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+    setFlushMode();
+
+    while (true) {
+        (void)ensureDecoderState(pool);
+        C2GraphicView output = mAllocatedBlock->map().get();
+        if (output.error() != OK) {
+            ALOGE("mapped err = %d", output.error());
+        }
+
+        ivd_video_decode_ip_t s_dec_ip;
+        ivd_video_decode_op_t s_dec_op;
+
+        setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, &output, 0, 0u);
+
+        (void)ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+
+        if (s_dec_op.u4_output_present) {
+            ALOGV("output_present: %d", s_dec_op.u4_ts);
+            finishWork(s_dec_op.u4_ts, work);
+        } else {
+            break;
+        }
+    }
+
+    if (drainMode == DRAIN_COMPONENT_WITH_EOS
+            && work && work->worklets_processed == 0u) {
+        fillEmptyWork(work);
+    }
+
+    return C2_OK;
+}
+
+bool C2SoftAvcDec::colorAspectsDiffer(
+        const ColorAspects &a, const ColorAspects &b) {
+    if (a.mRange != b.mRange
+        || a.mPrimaries != b.mPrimaries
+        || a.mTransfer != b.mTransfer
+        || a.mMatrixCoeffs != b.mMatrixCoeffs) {
+        return true;
+    }
+    return false;
+}
+
+c2_status_t C2SoftAvcDec::drain(
+        uint32_t drainMode,
+        const std::shared_ptr<C2BlockPool> &pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+void C2SoftAvcDec::updateFinalColorAspects(
+        const ColorAspects &otherAspects, const ColorAspects &preferredAspects) {
+    Mutex::Autolock autoLock(mColorAspectsLock);
+    ColorAspects newAspects;
+    newAspects.mRange = preferredAspects.mRange != ColorAspects::RangeUnspecified ?
+        preferredAspects.mRange : otherAspects.mRange;
+    newAspects.mPrimaries = preferredAspects.mPrimaries != ColorAspects::PrimariesUnspecified ?
+        preferredAspects.mPrimaries : otherAspects.mPrimaries;
+    newAspects.mTransfer = preferredAspects.mTransfer != ColorAspects::TransferUnspecified ?
+        preferredAspects.mTransfer : otherAspects.mTransfer;
+    newAspects.mMatrixCoeffs = preferredAspects.mMatrixCoeffs != ColorAspects::MatrixUnspecified ?
+        preferredAspects.mMatrixCoeffs : otherAspects.mMatrixCoeffs;
+
+    // Check whether mFinalColorAspects needs to be updated.
+    if (colorAspectsDiffer(mFinalColorAspects, newAspects)) {
+        mFinalColorAspects = newAspects;
+        mUpdateColorAspects = true;
+    }
+}
+
+status_t C2SoftAvcDec::handleColorAspectsChange() {
+//    int preference = getColorAspectPreference();
+//    ALOGD("Color Aspects preference: %d ", preference);
+//
+//     if (preference == kPreferBitstream) {
+//         updateFinalColorAspects(mDefaultColorAspects, mBitstreamColorAspects);
+//     } else if (preference == kPreferContainer) {
+//         updateFinalColorAspects(mBitstreamColorAspects, mDefaultColorAspects);
+//     } else {
+//         return OMX_ErrorUnsupportedSetting;
+//     }
+    updateFinalColorAspects(mDefaultColorAspects, mBitstreamColorAspects);
+    return C2_OK;
+}
+
+class C2SoftAvcDecFactory : public C2ComponentFactory {
+public:
+    virtual c2_status_t createComponent(
+            c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+            std::function<void(::android::C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(new C2SoftAvcDec("avc", id), deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(::android::C2ComponentInterface*)> deleter) override {
+        *interface =
+              SimpleC2Interface::Builder("avc", id, deleter)
+              .inputFormat(C2FormatCompressed)
+              .outputFormat(C2FormatVideo)
+              .build();
+//            std::shared_ptr<C2ComponentInterface>(new C2SoftAvcDecIntf("avc", id), deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftAvcDecFactory() override = default;
+};
+
+}  // namespace android
+
+extern "C" ::android::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftAvcDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::android::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
diff --git a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h
new file mode 100644
index 0000000..0e8cf77
--- /dev/null
+++ b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h
@@ -0,0 +1,293 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2_SOFT_H264_DEC_H_
+
+#define C2_SOFT_H264_DEC_H_
+
+#include <condition_variable>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+#include <unordered_map>
+
+#include <util/C2ParamUtils.h>
+
+#include <C2Component.h>
+#include <C2Param.h>
+#include <SimpleC2Component.h>
+
+#include "C2AvcConfig.h"
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <sys/time.h>
+
+namespace android {
+
+struct iv_obj_t;
+struct ivd_video_decode_ip_t;
+struct ivd_video_decode_op_t;
+
+/** Maximum number of pending works */
+#define MAX_PENDING_WORKS 64
+
+/** Maximum number of cores supported by the codec */
+#define CODEC_MAX_NUM_CORES 4
+
+#define CODEC_MAX_WIDTH     1920
+
+#define CODEC_MAX_HEIGHT    1088
+
+/** Input buffer size */
+#define INPUT_BUF_SIZE (1024 * 1024)
+
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute the difference between start and end, in microseconds */
+#define TIME_DIFF(start, end, diff) \
+    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+            ((end).tv_usec - (start).tv_usec);
+
+#if 0
+class C2SoftAvcDecIntf : public C2ComponentInterface {
+public:
+    struct SupportedValuesWithFields {
+        C2FieldSupportedValues supported;
+        std::set<C2ParamField> restrictingFields;
+
+        SupportedValuesWithFields(const C2FieldSupportedValues &supported) : supported(supported) {}
+    };
+
+    C2SoftAvcDecIntf(const char *name, c2_node_id_t id);
+    virtual ~C2SoftAvcDecIntf() override;
+
+    // From C2ComponentInterface
+    virtual C2String getName() const override;
+    virtual c2_node_id_t getId() const override;
+    virtual c2_status_t query_vb(
+            const std::vector<C2Param* const> &stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
+    virtual c2_status_t config_vb(
+            const std::vector<C2Param* const> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
+    virtual c2_status_t createTunnel_sm(c2_node_id_t targetComponent) override;
+    virtual c2_status_t releaseTunnel_sm(c2_node_id_t targetComponent) override;
+    // TODO: move this into some common store class
+    std::shared_ptr<C2ParamReflector> getParamReflector() const;
+    virtual c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const override;
+    virtual c2_status_t querySupportedValues_vb(
+            std::vector<C2FieldSupportedValuesQuery> &fields, c2_blocking_t mayBlock) const override;
+
+private:
+    class ParamReflector;
+
+    const C2String mName;
+    const c2_node_id_t mId;
+
+    C2ComponentDomainInfo mDomainInfo;
+    // TODO: config desc
+    std::unique_ptr<C2PortMimeConfig::input> mInputPortMime;
+    C2PortStreamCountConfig::input mInputStreamCount;
+    std::unique_ptr<C2PortMimeConfig::output> mOutputPortMime;
+    C2PortStreamCountConfig::output mOutputStreamCount;
+    // TODO: C2StreamMimeConfig mInputStreamMime;
+    // TODO: C2StreamMimeConfig mOutputStreamMime;
+    C2StreamFormatConfig::input mInputStreamFormat;
+    std::unique_ptr<C2PortBlockPoolsTuning::output> mOutputBlockPools;
+    C2StreamFormatConfig::output mOutputStreamFormat;
+    C2VideoSizeStreamInfo::output mVideoSize;
+    C2MaxVideoSizeHintPortSetting::input mMaxVideoSizeHint;
+    C2AvcProfileInfo::input mProfile;
+    C2AvcLevelInfo::input mLevel;
+    C2BlockSizeInfo::output mBlockSize;
+    C2AlignmentInfo::output mAlignment;
+    C2FrameRateInfo::output mFrameRate;
+    C2BlocksPerSecondInfo::output mBlocksPerSecond;
+
+    std::shared_ptr<C2ParamReflector> mParamReflector;
+
+    std::unordered_map<uint32_t, C2Param *> mParams;
+    // C2ParamField is LessThanComparable
+    std::map<C2ParamField, SupportedValuesWithFields> mSupportedValues;
+    std::unordered_map<
+            uint32_t, std::function<std::unique_ptr<C2SettingResult>(C2Param *)>> mFieldVerifiers;
+    std::vector<std::shared_ptr<C2ParamDescriptor>> mParamDescs;
+
+    void updateSupportedValues();
+    friend class C2SoftAvcDec;
+};
+#endif
+
+class C2SoftAvcDec : public SimpleC2Component {
+public:
+    C2SoftAvcDec(const char *name, c2_node_id_t id);
+    virtual ~C2SoftAvcDec();
+
+    // From SimpleC2Component
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(
+            const std::unique_ptr<C2Work> &work,
+            const std::shared_ptr<C2BlockPool> &pool) override;
+    c2_status_t drain(
+            uint32_t drainMode,
+            const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+    Mutex mColorAspectsLock;
+    // color aspects passed from the framework.
+    ColorAspects mDefaultColorAspects;
+    // color aspects parsed from the bitstream.
+    ColorAspects mBitstreamColorAspects;
+    // final color aspects after combining the above two aspects.
+    ColorAspects mFinalColorAspects;
+    bool mUpdateColorAspects;
+
+    bool colorAspectsDiffer(const ColorAspects &a, const ColorAspects &b);
+
+    // This function takes two color aspects and updates mFinalColorAspects
+    // based on |preferredAspects|.
+    void updateFinalColorAspects(
+            const ColorAspects &otherAspects, const ColorAspects &preferredAspects);
+
+    // This function will update the mFinalColorAspects based on codec preference.
+    status_t handleColorAspectsChange();
+
+    std::shared_ptr<C2GraphicBlock> mAllocatedBlock;
+
+    iv_obj_t *mCodecCtx;         // Codec context
+
+    size_t mNumCores;            // Number of cores to be used by the codec
+
+    struct timeval mTimeStart;   // Time at the start of decode()
+    struct timeval mTimeEnd;     // Time at the end of decode()
+
+    // Internal buffer to be used to flush out the buffers from decoder
+    uint8_t *mFlushOutBuffer;
+
+#ifdef FILE_DUMP_ENABLE
+    char mInFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+    int mIvColorFormat;        // Ittiam Color format
+
+    bool mIsInFlush;        // codec is in flush mode
+    bool mReceivedEOS;      // EOS is received on input port
+
+    // The input stream has changed to a different resolution, which is still supported by the
+    // codec. So the codec is switching to decode the new resolution.
+    bool mChangingResolution;
+    bool mFlushNeeded;
+    bool mSignalledError;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    uint32_t mStride;
+    size_t mInputOffset;
+
+    status_t initDecoder();
+    status_t deInitDecoder();
+    status_t setFlushMode();
+    status_t setParams(size_t stride);
+    void logVersion();
+    status_t setNumCores();
+    status_t resetDecoder();
+    status_t resetPlugin();
+
+    c2_status_t ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool);
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work);
+    c2_status_t drainInternal(
+            uint32_t drainMode,
+            const std::shared_ptr<C2BlockPool> &pool,
+            const std::unique_ptr<C2Work> &work);
+
+    bool setDecodeArgs(
+            ivd_video_decode_ip_t *ps_dec_ip,
+            ivd_video_decode_op_t *ps_dec_op,
+            C2ReadView *inBuffer,
+            C2GraphicView *outBuffer,
+            uint32_t timeStampIx,
+            size_t inOffset);
+
+    bool getVUIParams();
+
+    DISALLOW_EVIL_CONSTRUCTORS(C2SoftAvcDec);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH     "/sdcard/media/avcd_input"
+#define INPUT_DUMP_EXT      "h264"
+
+#define GENERATE_FILE_NAMES() {                         \
+    GETTIME(&mTimeStart, NULL);                         \
+    strcpy(mInFile, "");                                \
+    sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH,  \
+            mTimeStart.tv_sec, mTimeStart.tv_usec,      \
+            INPUT_DUMP_EXT);                            \
+}
+
+#define CREATE_DUMP_FILE(m_filename) {                  \
+    FILE *fp = fopen(m_filename, "wb");                 \
+    if (fp != NULL) {                                   \
+        fclose(fp);                                     \
+    } else {                                            \
+        ALOGD("Could not open file %s", m_filename);    \
+    }                                                   \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset)\
+{                                                       \
+    FILE *fp = fopen(m_filename, "ab");                 \
+    if (fp != NULL && m_buf != NULL && m_offset == 0) { \
+        int i;                                          \
+        i = fwrite(m_buf, 1, m_size, fp);               \
+        ALOGD("fwrite ret %d to write %d", i, m_size);  \
+        if (i != (int) m_size) {                        \
+            ALOGD("Error in fwrite, returned %d", i);   \
+            perror("Error in write to file");           \
+        }                                               \
+    } else if (fp == NULL) {                            \
+        ALOGD("Could not write to file %s", m_filename);\
+    }                                                   \
+    if (fp) {                                           \
+        fclose(fp);                                     \
+    }                                                   \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif  // C2_SOFT_H264_DEC_H_
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
index dae6e79..3924fc2 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -171,7 +171,7 @@
 status_t SoftAVC::resetPlugin() {
     mIsInFlush = false;
     mReceivedEOS = false;
-    mInputOffset = 0;
+
     memset(mTimeStamps, 0, sizeof(mTimeStamps));
     memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
 
@@ -304,7 +304,6 @@
 }
 
 status_t SoftAVC::deInitDecoder() {
-    size_t i;
     IV_API_CALL_STATUS_T status;
 
     if (mCodecCtx) {
@@ -334,6 +333,7 @@
     SoftVideoDecoderOMXComponent::onReset();
 
     mSignalledError = false;
+    mInputOffset = 0;
     resetDecoder();
     resetPlugin();
 }
@@ -450,7 +450,6 @@
             ivd_video_decode_ip_t s_dec_ip;
             ivd_video_decode_op_t s_dec_op;
             IV_API_CALL_STATUS_T status;
-            size_t sizeY, sizeUV;
 
             setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0);
 
@@ -465,7 +464,8 @@
             free(mFlushOutBuffer);
             mFlushOutBuffer = NULL;
         }
-
+    } else {
+        mInputOffset = 0;
     }
 }
 
@@ -561,7 +561,6 @@
             ivd_video_decode_ip_t s_dec_ip;
             ivd_video_decode_op_t s_dec_op;
             nsecs_t timeDelay, timeTaken;
-            size_t sizeY, sizeUV;
 
             if (!setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx)) {
                 ALOGE("Decoder arg setup failed");
diff --git a/media/libstagefright/codecs/avcenc/Android.bp b/media/libstagefright/codecs/avcenc/Android.bp
index 5203126..4a0411e 100644
--- a/media/libstagefright/codecs/avcenc/Android.bp
+++ b/media/libstagefright/codecs/avcenc/Android.bp
@@ -17,7 +17,7 @@
     ],
 
     shared_libs: [
-        "libmedia_omx",
+        "libstagefright_foundation",
         "libstagefright_omx",
         "libutils",
         "liblog",
diff --git a/media/libstagefright/codecs/cmds/Android.bp b/media/libstagefright/codecs/cmds/Android.bp
new file mode 100644
index 0000000..40f1a3d
--- /dev/null
+++ b/media/libstagefright/codecs/cmds/Android.bp
@@ -0,0 +1,40 @@
+cc_binary {
+    name: "codec2",
+
+    srcs: [
+        "codec2.cpp",
+    ],
+
+    include_dirs: [
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libstagefright",
+        "libstagefright_codec2",
+        "libstagefright_codec2_vndk",
+        "libstagefright_foundation",
+        "libui",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-std=c++14",
+    ],
+
+//    sanitize: {
+//        cfi: true,
+//        misc_undefined: [
+//            "unsigned-integer-overflow",
+//            "signed-integer-overflow",
+//        ],
+//        diag: {
+//            cfi: true,
+//        },
+//    },
+}
diff --git a/media/libstagefright/codecs/cmds/codec2.cpp b/media/libstagefright/codecs/cmds/codec2.cpp
new file mode 100644
index 0000000..78fb527
--- /dev/null
+++ b/media/libstagefright/codecs/cmds/codec2.cpp
@@ -0,0 +1,486 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+
+#include <thread>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "codec2"
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <binder/IServiceManager.h>
+#include <binder/ProcessState.h>
+#include <media/DataSource.h>
+#include <media/ICrypto.h>
+#include <media/IMediaHTTPService.h>
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/DataSourceFactory.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <gui/GLConsumer.h>
+#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+
+#include <util/C2ParamUtils.h>
+#include <C2AllocatorGralloc.h>
+#include <C2Buffer.h>
+#include <C2BufferPriv.h>
+#include <C2Component.h>
+#include <C2PlatformSupport.h>
+#include <C2Work.h>
+
+extern "C" ::android::C2ComponentFactory *CreateCodec2Factory();
+extern "C" void DestroyCodec2Factory(::android::C2ComponentFactory *);
+
+#include "../avcdec/C2SoftAvcDec.h"
+
+using namespace android;
+using namespace std::chrono_literals;
+
+namespace {
+
+class LinearBuffer : public C2Buffer {
+public:
+    explicit LinearBuffer(const std::shared_ptr<C2LinearBlock> &block)
+        : C2Buffer({ block->share(block->offset(), block->size(), ::android::C2Fence()) }) {}
+};
+
+class Listener;
+
+class SimplePlayer {
+public:
+    SimplePlayer();
+    ~SimplePlayer();
+
+    void onWorkDone(std::weak_ptr<C2Component> component,
+                    std::vector<std::unique_ptr<C2Work>> workItems);
+    void onTripped(std::weak_ptr<C2Component> component,
+                   std::vector<std::shared_ptr<C2SettingResult>> settingResult);
+    void onError(std::weak_ptr<C2Component> component, uint32_t errorCode);
+
+    void play(const sp<IMediaSource> &source);
+
+private:
+    typedef std::unique_lock<std::mutex> ULock;
+
+    std::shared_ptr<Listener> mListener;
+    std::shared_ptr<C2Component> mComponent;
+
+    sp<IProducerListener> mProducerListener;
+
+    std::shared_ptr<C2Allocator> mAllocIon;
+    std::shared_ptr<C2BlockPool> mLinearPool;
+
+    std::mutex mQueueLock;
+    std::condition_variable mQueueCondition;
+    std::list<std::unique_ptr<C2Work>> mWorkQueue;
+
+    std::mutex mProcessedLock;
+    std::condition_variable mProcessedCondition;
+    std::list<std::unique_ptr<C2Work>> mProcessedWork;
+
+    sp<Surface> mSurface;
+    sp<SurfaceComposerClient> mComposerClient;
+    sp<SurfaceControl> mControl;
+};
+
+class Listener : public C2Component::Listener {
+public:
+    explicit Listener(SimplePlayer *thiz) : mThis(thiz) {}
+    virtual ~Listener() = default;
+
+    virtual void onWorkDone_nb(std::weak_ptr<C2Component> component,
+                            std::vector<std::unique_ptr<C2Work>> workItems) override {
+        mThis->onWorkDone(component, std::move(workItems));
+    }
+
+    virtual void onTripped_nb(std::weak_ptr<C2Component> component,
+                           std::vector<std::shared_ptr<C2SettingResult>> settingResult) override {
+        mThis->onTripped(component, settingResult);
+    }
+
+    virtual void onError_nb(std::weak_ptr<C2Component> component,
+                         uint32_t errorCode) override {
+        mThis->onError(component, errorCode);
+    }
+
+private:
+    SimplePlayer * const mThis;
+};
+
+
+SimplePlayer::SimplePlayer()
+    : mListener(new Listener(this)),
+      mProducerListener(new DummyProducerListener),
+      mComposerClient(new SurfaceComposerClient) {
+    CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
+
+    std::shared_ptr<C2AllocatorStore> store = GetCodec2PlatformAllocatorStore();
+    CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mAllocIon), C2_OK);
+    mLinearPool = std::make_shared<C2BasicLinearBlockPool>(mAllocIon);
+
+    mControl = mComposerClient->createSurface(
+            String8("A Surface"),
+            1280,
+            800,
+            HAL_PIXEL_FORMAT_YV12);
+            //PIXEL_FORMAT_RGB_565);
+
+    CHECK(mControl != NULL);
+    CHECK(mControl->isValid());
+
+    SurfaceComposerClient::Transaction{}
+            .setLayer(mControl, INT_MAX)
+            .show(mControl)
+            .apply();
+
+    mSurface = mControl->getSurface();
+    CHECK(mSurface != NULL);
+    mSurface->connect(NATIVE_WINDOW_API_CPU, mProducerListener);
+}
+
+SimplePlayer::~SimplePlayer() {
+    mComposerClient->dispose();
+}
+
+void SimplePlayer::onWorkDone(
+        std::weak_ptr<C2Component> component, std::vector<std::unique_ptr<C2Work>> workItems) {
+    ALOGV("SimplePlayer::onWorkDone");
+    (void) component;
+    ULock l(mProcessedLock);
+    for (auto & item : workItems) {
+        mProcessedWork.push_back(std::move(item));
+    }
+    mProcessedCondition.notify_all();
+}
+
+void SimplePlayer::onTripped(
+        std::weak_ptr<C2Component> component,
+        std::vector<std::shared_ptr<C2SettingResult>> settingResult) {
+    (void) component;
+    (void) settingResult;
+    // TODO
+}
+
+void SimplePlayer::onError(std::weak_ptr<C2Component> component, uint32_t errorCode) {
+    (void) component;
+    (void) errorCode;
+    // TODO
+}
+
+void SimplePlayer::play(const sp<IMediaSource> &source) {
+    ALOGV("SimplePlayer::play");
+    sp<AMessage> format;
+    (void) convertMetaDataToMessage(source->getFormat(), &format);
+
+    sp<ABuffer> csd0, csd1;
+    format->findBuffer("csd-0", &csd0);
+    format->findBuffer("csd-1", &csd1);
+
+    status_t err = source->start();
+
+    if (err != OK) {
+        fprintf(stderr, "source returned error %d (0x%08x)\n", err, err);
+        return;
+    }
+
+    std::shared_ptr<C2ComponentStore> store = GetCodec2PlatformComponentStore();
+    std::shared_ptr<C2Component> component;
+    (void)store->createComponent("c2.google.avc.decoder", &component);
+
+    (void)component->setListener_vb(mListener, C2_DONT_BLOCK);
+    std::unique_ptr<C2PortBlockPoolsTuning::output> pools =
+        C2PortBlockPoolsTuning::output::alloc_unique({ (uint64_t)C2BlockPool::BASIC_GRAPHIC });
+    std::vector<std::unique_ptr<C2SettingResult>> result;
+    (void)component->intf()->config_vb({pools.get()}, C2_DONT_BLOCK, &result);
+    component->start();
+
+    for (int i = 0; i < 8; ++i) {
+        mWorkQueue.emplace_back(new C2Work);
+    }
+
+    std::atomic_bool running(true);
+    std::thread surfaceThread([this, &running]() {
+        const sp<IGraphicBufferProducer> &igbp = mSurface->getIGraphicBufferProducer();
+        while (running) {
+            std::unique_ptr<C2Work> work;
+            {
+                ULock l(mProcessedLock);
+                if (mProcessedWork.empty()) {
+                    mProcessedCondition.wait_for(l, 100ms);
+                    if (mProcessedWork.empty()) {
+                        continue;
+                    }
+                }
+                work.swap(mProcessedWork.front());
+                mProcessedWork.pop_front();
+            }
+            int slot;
+            sp<Fence> fence;
+            ALOGV("Render: Frame #%" PRId64, work->worklets.front()->output.ordinal.frame_index);
+            const std::shared_ptr<C2Buffer> &output = work->worklets.front()->output.buffers[0];
+            if (output) {
+                const C2ConstGraphicBlock &block = output->data().graphicBlocks().front();
+                native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(block.handle());
+                sp<GraphicBuffer> buffer(new GraphicBuffer(
+                        grallocHandle,
+                        GraphicBuffer::CLONE_HANDLE,
+                        block.width(),
+                        block.height(),
+                        HAL_PIXEL_FORMAT_YV12,
+                        1,
+                        (uint64_t)GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+                        block.width()));
+                native_handle_delete(grallocHandle);
+
+                status_t err = igbp->attachBuffer(&slot, buffer);
+
+                IGraphicBufferProducer::QueueBufferInput qbi(
+                        work->worklets.front()->output.ordinal.timestamp * 1000ll,
+                        false,
+                        HAL_DATASPACE_UNKNOWN,
+                        Rect(block.width(), block.height()),
+                        NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW,
+                        0,
+                        Fence::NO_FENCE,
+                        0);
+                IGraphicBufferProducer::QueueBufferOutput qbo;
+                err = igbp->queueBuffer(slot, qbi, &qbo);
+            }
+
+            work->input.buffers.clear();
+            work->worklets.clear();
+
+            ULock l(mQueueLock);
+            mWorkQueue.push_back(std::move(work));
+            mQueueCondition.notify_all();
+        }
+        ALOGV("render loop finished");
+    });
+
+    long numFrames = 0;
+    mLinearPool.reset(new C2BasicLinearBlockPool(mAllocIon));
+
+    for (;;) {
+        size_t size = 0u;
+        void *data = nullptr;
+        int64_t timestamp = 0u;
+        MediaBuffer *buffer = nullptr;
+        sp<ABuffer> csd;
+        if (csd0 != nullptr) {
+            csd = csd0;
+            csd0 = nullptr;
+        } else if (csd1 != nullptr) {
+            csd = csd1;
+            csd1 = nullptr;
+        } else {
+            status_t err = source->read(&buffer);
+            if (err != OK) {
+                CHECK(buffer == NULL);
+
+                if (err == INFO_FORMAT_CHANGED) {
+                    continue;
+                }
+
+                break;
+            }
+            sp<MetaData> meta = buffer->meta_data();
+            CHECK(meta->findInt64(kKeyTime, &timestamp));
+
+            size = buffer->size();
+            data = buffer->data();
+        }
+
+        if (csd != nullptr) {
+            size = csd->size();
+            data = csd->data();
+        }
+
+        // Prepare C2Work
+
+        std::unique_ptr<C2Work> work;
+        while (!work) {
+            ULock l(mQueueLock);
+            if (!mWorkQueue.empty()) {
+                work.swap(mWorkQueue.front());
+                mWorkQueue.pop_front();
+            } else {
+                mQueueCondition.wait_for(l, 100ms);
+            }
+        }
+        work->input.flags = (C2BufferPack::flags_t)0;
+        work->input.ordinal.timestamp = timestamp;
+        work->input.ordinal.frame_index = numFrames;
+
+        std::shared_ptr<C2LinearBlock> block;
+        mLinearPool->fetchLinearBlock(
+                size,
+                { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+                &block);
+        C2WriteView view = block->map().get();
+        if (view.error() != C2_OK) {
+            fprintf(stderr, "C2LinearBlock::map() failed : %d", view.error());
+            break;
+        }
+        memcpy(view.base(), data, size);
+
+        work->input.buffers.clear();
+        work->input.buffers.emplace_back(new LinearBuffer(block));
+        work->worklets.clear();
+        work->worklets.emplace_back(new C2Worklet);
+
+        std::list<std::unique_ptr<C2Work>> items;
+        items.push_back(std::move(work));
+
+        ALOGV("Frame #%ld size = %zu", numFrames, size);
+        // DO THE DECODING
+        component->queue_nb(&items);
+
+        if (buffer) {
+            buffer->release();
+            buffer = NULL;
+        }
+
+        ++numFrames;
+    }
+    ALOGV("main loop finished");
+    source->stop();
+    running.store(false);
+    surfaceThread.join();
+
+    component->release();
+    printf("\n");
+}
+
+}  // namespace
+
+static void usage(const char *me) {
+    fprintf(stderr, "usage: %s [options] [input_filename]\n", me);
+    fprintf(stderr, "       -h(elp)\n");
+}
+
+int main(int argc, char **argv) {
+    android::ProcessState::self()->startThreadPool();
+
+    int res;
+    while ((res = getopt(argc, argv, "h")) >= 0) {
+        switch (res) {
+            case 'h':
+            default:
+            {
+                usage(argv[0]);
+                exit(1);
+                break;
+            }
+        }
+    }
+
+    argc -= optind;
+    argv += optind;
+
+    if (argc < 1) {
+        fprintf(stderr, "No input file specified\n");
+        return 1;
+    }
+
+    status_t err = OK;
+    SimplePlayer player;
+
+    for (int k = 0; k < argc && err == OK; ++k) {
+        const char *filename = argv[k];
+
+        sp<DataSource> dataSource =
+            DataSourceFactory::CreateFromURI(NULL /* httpService */, filename);
+
+        if (strncasecmp(filename, "sine:", 5) && dataSource == NULL) {
+            fprintf(stderr, "Unable to create data source.\n");
+            return 1;
+        }
+
+        Vector<sp<IMediaSource> > mediaSources;
+        sp<IMediaSource> mediaSource;
+
+        sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(dataSource);
+
+        if (extractor == NULL) {
+            fprintf(stderr, "could not create extractor.\n");
+            return -1;
+        }
+
+        sp<MetaData> meta = extractor->getMetaData();
+
+        if (meta != NULL) {
+            const char *mime;
+            if (!meta->findCString(kKeyMIMEType, &mime)) {
+                fprintf(stderr, "extractor did not provide MIME type.\n");
+                return -1;
+            }
+        }
+
+        size_t numTracks = extractor->countTracks();
+
+        size_t i;
+        for (i = 0; i < numTracks; ++i) {
+            meta = extractor->getTrackMetaData(
+                    i, MediaExtractor::kIncludeExtensiveMetaData);
+
+            if (meta == NULL) {
+                break;
+            }
+            const char *mime;
+            meta->findCString(kKeyMIMEType, &mime);
+
+            // TODO: allowing AVC only for the time being
+            if (!strncasecmp(mime, "video/avc", 9)) {
+                break;
+            }
+
+            meta = NULL;
+        }
+
+        if (meta == NULL) {
+            fprintf(stderr, "No AVC track found.\n");
+            return -1;
+        }
+
+        mediaSource = extractor->getTrack(i);
+        if (mediaSource == nullptr) {
+            fprintf(stderr, "skip NULL track %zu, total tracks %zu.\n", i, numTracks);
+            return -1;
+        }
+
+        player.play(mediaSource);
+    }
+
+    return 0;
+}
diff --git a/media/libstagefright/codecs/flac/enc/Android.bp b/media/libstagefright/codecs/flac/enc/Android.bp
index 066917b..854f7ce 100644
--- a/media/libstagefright/codecs/flac/enc/Android.bp
+++ b/media/libstagefright/codecs/flac/enc/Android.bp
@@ -22,7 +22,6 @@
     },
 
     shared_libs: [
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/g711/dec/Android.bp b/media/libstagefright/codecs/g711/dec/Android.bp
index fff72a8..07e5052 100644
--- a/media/libstagefright/codecs/g711/dec/Android.bp
+++ b/media/libstagefright/codecs/g711/dec/Android.bp
@@ -13,7 +13,7 @@
     ],
 
     shared_libs: [
-        "libmedia_omx",
+        "libstagefright_foundation",
         "libstagefright_omx",
         "libutils",
         "liblog",
diff --git a/media/libstagefright/codecs/gsm/dec/Android.bp b/media/libstagefright/codecs/gsm/dec/Android.bp
index 753eeef..0739ad4 100644
--- a/media/libstagefright/codecs/gsm/dec/Android.bp
+++ b/media/libstagefright/codecs/gsm/dec/Android.bp
@@ -27,7 +27,7 @@
     },
 
     shared_libs: [
-        "libmedia_omx",
+        "libstagefright_foundation",
         "libstagefright_omx",
         "libutils",
         "liblog",
diff --git a/media/libstagefright/codecs/hevcdec/Android.bp b/media/libstagefright/codecs/hevcdec/Android.bp
index d9a5ee3..f19ba00 100644
--- a/media/libstagefright/codecs/hevcdec/Android.bp
+++ b/media/libstagefright/codecs/hevcdec/Android.bp
@@ -32,7 +32,6 @@
     },
 
     shared_libs: [
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
index 1216ae5..e57bb78 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
@@ -93,7 +93,6 @@
     static_libs: ["libstagefright_m4vh263dec"],
 
     shared_libs: [
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
index a95b807..8a3fe34 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
@@ -4,8 +4,7 @@
 
     srcs: [
         "src/bitstream_io.cpp",
-        "src/combined_encode.cpp",
-        "src/datapart_encode.cpp",
+        "src/combined_encode.cpp", "src/datapart_encode.cpp",
         "src/dct.cpp",
         "src/findhalfpel.cpp",
         "src/fastcodemb.cpp",
@@ -80,7 +79,7 @@
     static_libs: ["libstagefright_m4vh263enc"],
 
     shared_libs: [
-        "libmedia_omx",
+        "libstagefright_foundation",
         "libstagefright_omx",
         "libutils",
         "liblog",
diff --git a/media/libstagefright/codecs/mp3dec/Android.bp b/media/libstagefright/codecs/mp3dec/Android.bp
index c554a99..5a0e282 100644
--- a/media/libstagefright/codecs/mp3dec/Android.bp
+++ b/media/libstagefright/codecs/mp3dec/Android.bp
@@ -107,7 +107,6 @@
     },
 
     shared_libs: [
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/mpeg2dec/Android.bp b/media/libstagefright/codecs/mpeg2dec/Android.bp
index 9590e9f..9b8a188 100644
--- a/media/libstagefright/codecs/mpeg2dec/Android.bp
+++ b/media/libstagefright/codecs/mpeg2dec/Android.bp
@@ -22,7 +22,6 @@
     ],
 
     shared_libs: [
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/on2/dec/Android.bp b/media/libstagefright/codecs/on2/dec/Android.bp
index 59c1f5d..a4eed8c 100644
--- a/media/libstagefright/codecs/on2/dec/Android.bp
+++ b/media/libstagefright/codecs/on2/dec/Android.bp
@@ -15,7 +15,6 @@
     static_libs: ["libvpx"],
 
     shared_libs: [
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/on2/enc/Android.bp b/media/libstagefright/codecs/on2/enc/Android.bp
index 5a52225..b21ffa1 100644
--- a/media/libstagefright/codecs/on2/enc/Android.bp
+++ b/media/libstagefright/codecs/on2/enc/Android.bp
@@ -32,7 +32,6 @@
     static_libs: ["libvpx"],
 
     shared_libs: [
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
index 4c7290d..1ea1c85 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
@@ -69,6 +69,13 @@
               codecReturn);
         return codecReturn;
     }
+    codecReturn = vpx_codec_control(mCodecContext, VP9E_SET_ROW_MT, 1);
+    if (codecReturn != VPX_CODEC_OK) {
+        ALOGE("Error setting VP9E_SET_ROW_MT to 1. vpx_codec_control() "
+              "returned %d", codecReturn);
+        return codecReturn;
+    }
+
     // For VP9, we always set CPU_USED to 8 (because the realtime default is 0
     // which is too slow).
     codecReturn = vpx_codec_control(mCodecContext, VP8E_SET_CPUUSED, 8);
diff --git a/media/libstagefright/codecs/opus/dec/Android.bp b/media/libstagefright/codecs/opus/dec/Android.bp
index 88d6ec4..32a4f32 100644
--- a/media/libstagefright/codecs/opus/dec/Android.bp
+++ b/media/libstagefright/codecs/opus/dec/Android.bp
@@ -14,7 +14,6 @@
 
     shared_libs: [
         "libopus",
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.bp b/media/libstagefright/codecs/vorbis/dec/Android.bp
index 628b36c..b7a6c1c 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.bp
+++ b/media/libstagefright/codecs/vorbis/dec/Android.bp
@@ -14,7 +14,6 @@
 
     shared_libs: [
         "libvorbisidec",
-        "libmedia_omx",
         "libstagefright_omx",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 0982006..cbb38fd 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -129,6 +129,12 @@
             dstWidth, dstHeight,
             dstCropLeft, dstCropTop, dstCropRight, dstCropBottom, mDstFormat);
 
+    if (!((src.mCropLeft & 1) == 0
+        && src.cropWidth() == dst.cropWidth()
+        && src.cropHeight() == dst.cropHeight())) {
+        return ERROR_UNSUPPORTED;
+    }
+
     status_t err;
 
     switch (mSrcFormat) {
@@ -172,12 +178,6 @@
 
     uint8_t *kAdjustedClip = initClip();
 
-    if (!((src.mCropLeft & 1) == 0
-        && src.cropWidth() == dst.cropWidth()
-        && src.cropHeight() == dst.cropHeight())) {
-        return ERROR_UNSUPPORTED;
-    }
-
     uint16_t *dst_ptr = (uint16_t *)dst.mBits
         + dst.mCropTop * dst.mWidth + dst.mCropLeft;
 
@@ -232,12 +232,6 @@
 
 status_t ColorConverter::convertYUV420PlanarUseLibYUV(
         const BitmapParams &src, const BitmapParams &dst) {
-    if (!((src.mCropLeft & 1) == 0
-            && src.cropWidth() == dst.cropWidth()
-            && src.cropHeight() == dst.cropHeight())) {
-        return ERROR_UNSUPPORTED;
-    }
-
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
         + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
@@ -338,12 +332,6 @@
 }
 status_t ColorConverter::convertYUV420Planar(
         const BitmapParams &src, const BitmapParams &dst) {
-    if (!((src.mCropLeft & 1) == 0
-            && src.cropWidth() == dst.cropWidth()
-            && src.cropHeight() == dst.cropHeight())) {
-        return ERROR_UNSUPPORTED;
-    }
-
     uint8_t *kAdjustedClip = initClip();
 
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
@@ -422,12 +410,6 @@
         const BitmapParams &src, const BitmapParams &dst) {
     uint8_t *kAdjustedClip = initClip();
 
-    if (!((src.mCropLeft & 1) == 0
-            && src.cropWidth() == dst.cropWidth()
-            && src.cropHeight() == dst.cropHeight())) {
-        return ERROR_UNSUPPORTED;
-    }
-
     uint16_t *dst_ptr = (uint16_t *)dst.mBits
         + dst.mCropTop * dst.mWidth + dst.mCropLeft;
 
@@ -496,12 +478,6 @@
 
     uint8_t *kAdjustedClip = initClip();
 
-    if (!((src.mCropLeft & 1) == 0
-            && src.cropWidth() == dst.cropWidth()
-            && src.cropHeight() == dst.cropHeight())) {
-        return ERROR_UNSUPPORTED;
-    }
-
     uint16_t *dst_ptr = (uint16_t *)dst.mBits
         + dst.mCropTop * dst.mWidth + dst.mCropLeft;
 
@@ -568,12 +544,6 @@
         const BitmapParams &src, const BitmapParams &dst) {
     uint8_t *kAdjustedClip = initClip();
 
-    if (!((src.mCropLeft & 1) == 0
-            && src.cropWidth() == dst.cropWidth()
-            && src.cropHeight() == dst.cropHeight())) {
-        return ERROR_UNSUPPORTED;
-    }
-
     uint16_t *dst_ptr = (uint16_t *)dst.mBits
         + dst.mCropTop * dst.mWidth + dst.mCropLeft;
 
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 1b9fe0f..581e51b 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
     name: "libstagefright_flacdec",
     vendor_available: true,
     vndk: {
@@ -27,10 +27,15 @@
         },
     },
 
-    static_libs: ["libFLAC"],
+    static: {
+        whole_static_libs: ["libFLAC"],
+    },
+
+    shared: {
+        static_libs: ["libFLAC"],
+    },
 
     shared_libs: [
-        "libcutils",
         "liblog",
         "libstagefright_foundation",
         "libutils",
diff --git a/media/libstagefright/foundation/ANetworkSession.cpp b/media/libstagefright/foundation/ANetworkSession.cpp
index f8b7b41..eafdc37 100644
--- a/media/libstagefright/foundation/ANetworkSession.cpp
+++ b/media/libstagefright/foundation/ANetworkSession.cpp
@@ -33,22 +33,11 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
 
 namespace android {
 
-static uint16_t U16_AT(const uint8_t *ptr) {
-    return ptr[0] << 8 | ptr[1];
-}
-
-static uint32_t U32_AT(const uint8_t *ptr) {
-    return ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3];
-}
-
-static uint64_t U64_AT(const uint8_t *ptr) {
-    return ((uint64_t)U32_AT(ptr)) << 32 | U32_AT(ptr + 4);
-}
-
 static const size_t kMaxUDPSize = 1500;
 static const int32_t kMaxUDPRetries = 200;
 
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 221af1d..df3e280 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -13,6 +13,8 @@
     include_dirs: [
         "frameworks/av/include",
         "frameworks/native/include",
+        "frameworks/native/libs/arect/include",
+        "frameworks/native/libs/nativebase/include",
     ],
 
     local_include_dirs: [
@@ -30,7 +32,6 @@
 
     export_shared_lib_headers: [
         "libbinder",
-        "libui",
     ],
 
     cflags: [
@@ -42,10 +43,8 @@
     shared_libs: [
         "libbinder",
         "libutils",
-        "libui",
         "libcutils",
         "liblog",
-        "libpowermanager",
     ],
 
     srcs: [
@@ -61,23 +60,19 @@
         "ANetworkSession.cpp",
         "AString.cpp",
         "AStringUtils.cpp",
-        "AWakeLock.cpp",
+        "ByteUtils.cpp",
         "ColorUtils.cpp",
         "MediaBuffer.cpp",
         "MediaBufferGroup.cpp",
+        "MediaDefs.cpp",
+        "MediaKeys.cpp",
         "MetaData.cpp",
         "ParsedMessage.cpp",
+        "avc_utils.cpp",
         "base64.cpp",
         "hexdump.cpp",
     ],
 
-    target: {
-        vendor: {
-            exclude_shared_libs: ["libpowermanager"],
-            exclude_srcs: ["AWakeLock.cpp"],
-        },
-    },
-
     clang: true,
 
     sanitize: {
diff --git a/media/libstagefright/foundation/ByteUtils.cpp b/media/libstagefright/foundation/ByteUtils.cpp
new file mode 100644
index 0000000..14d40aa
--- /dev/null
+++ b/media/libstagefright/foundation/ByteUtils.cpp
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ByteUtils"
+
+#include <media/stagefright/foundation/ByteUtils.h>
+
+namespace android {
+
+uint16_t U16_AT(const uint8_t *ptr) {
+    return ptr[0] << 8 | ptr[1];
+}
+
+uint32_t U32_AT(const uint8_t *ptr) {
+    return ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3];
+}
+
+uint64_t U64_AT(const uint8_t *ptr) {
+    return ((uint64_t)U32_AT(ptr)) << 32 | U32_AT(ptr + 4);
+}
+
+uint16_t U16LE_AT(const uint8_t *ptr) {
+    return ptr[0] | (ptr[1] << 8);
+}
+
+uint32_t U32LE_AT(const uint8_t *ptr) {
+    return ptr[3] << 24 | ptr[2] << 16 | ptr[1] << 8 | ptr[0];
+}
+
+uint64_t U64LE_AT(const uint8_t *ptr) {
+    return ((uint64_t)U32LE_AT(ptr + 4)) << 32 | U32LE_AT(ptr);
+}
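+
+// For example, given bytes { 0x01, 0x02, 0x03, 0x04 }, U32_AT() reads them
+// big-endian and returns 0x01020304, while U32LE_AT() reads them
+// little-endian and returns 0x04030201.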
+
+// XXX warning: these won't work on a big-endian host.
+uint64_t ntoh64(uint64_t x) {
+    return ((uint64_t)ntohl(x & 0xffffffff) << 32) | ntohl(x >> 32);
+}
+
+uint64_t hton64(uint64_t x) {
+    return ((uint64_t)htonl(x & 0xffffffff) << 32) | htonl(x >> 32);
+}
+
+void MakeFourCCString(uint32_t x, char *s) {
+    s[0] = x >> 24;
+    s[1] = (x >> 16) & 0xff;
+    s[2] = (x >> 8) & 0xff;
+    s[3] = x & 0xff;
+    s[4] = '\0';
+}
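+
+// For example, MakeFourCCString(0x61766331, s) produces the string "avc1"
+// (0x61 = 'a', 0x76 = 'v', 0x63 = 'c', 0x31 = '1').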
+
+}  // namespace android
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 88a8351..c4eaa27 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -398,6 +398,7 @@
 }
 
 // TODO: move this into a Video HAL
+const static
 ALookup<CU::ColorStandard, std::pair<CA::Primaries, CA::MatrixCoeffs>> sStandardFallbacks {
     {
         { CU::kColorStandardBT601_625, { CA::PrimariesBT709_5, CA::MatrixBT470_6M } },
@@ -420,6 +421,7 @@
     }
 };
 
+const static
 ALookup<CU::ColorStandard, CA::Primaries> sStandardPrimariesFallbacks {
     {
         { CU::kColorStandardFilm,                 CA::PrimariesGenericFilm },
@@ -430,7 +432,8 @@
     }
 };
 
-static ALookup<android_dataspace, android_dataspace> sLegacyDataSpaceToV0 {
+const static
+ALookup<android_dataspace, android_dataspace> sLegacyDataSpaceToV0 {
     {
         { HAL_DATASPACE_SRGB, HAL_DATASPACE_V0_SRGB },
         { HAL_DATASPACE_BT709, HAL_DATASPACE_V0_BT709 },
@@ -441,6 +444,73 @@
     }
 };
 
+#define GET_HAL_ENUM(class, name) HAL_DATASPACE_##class##name
+#define GET_HAL_BITFIELD(class, name) (GET_HAL_ENUM(class, _##name) >> GET_HAL_ENUM(class, _SHIFT))
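+// For example, GET_HAL_BITFIELD(STANDARD, BT709) expands to
+// (HAL_DATASPACE_STANDARD_BT709 >> HAL_DATASPACE_STANDARD_SHIFT), i.e. the
+// STANDARD field value shifted down to remove its bit offset.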
+
+const static
+ALookup<CU::ColorStandard, uint32_t> sGfxStandards {
+    {
+        { CU::kColorStandardUnspecified,          GET_HAL_BITFIELD(STANDARD, UNSPECIFIED) },
+        { CU::kColorStandardBT709,                GET_HAL_BITFIELD(STANDARD, BT709) },
+        { CU::kColorStandardBT601_625,            GET_HAL_BITFIELD(STANDARD, BT601_625) },
+        { CU::kColorStandardBT601_625_Unadjusted, GET_HAL_BITFIELD(STANDARD, BT601_625_UNADJUSTED) },
+        { CU::kColorStandardBT601_525,            GET_HAL_BITFIELD(STANDARD, BT601_525) },
+        { CU::kColorStandardBT601_525_Unadjusted, GET_HAL_BITFIELD(STANDARD, BT601_525_UNADJUSTED) },
+        { CU::kColorStandardBT2020,               GET_HAL_BITFIELD(STANDARD, BT2020) },
+        { CU::kColorStandardBT2020Constant,       GET_HAL_BITFIELD(STANDARD, BT2020_CONSTANT_LUMINANCE) },
+        { CU::kColorStandardBT470M,               GET_HAL_BITFIELD(STANDARD, BT470M) },
+        { CU::kColorStandardFilm,                 GET_HAL_BITFIELD(STANDARD, FILM) },
+        { CU::kColorStandardDCI_P3,               GET_HAL_BITFIELD(STANDARD, DCI_P3) },
+    }
+};
+
+// verify public values are stable
+static_assert(CU::kColorStandardUnspecified == 0, "SDK mismatch"); // N
+static_assert(CU::kColorStandardBT709 == 1, "SDK mismatch"); // N
+static_assert(CU::kColorStandardBT601_625 == 2, "SDK mismatch"); // N
+static_assert(CU::kColorStandardBT601_525 == 4, "SDK mismatch"); // N
+static_assert(CU::kColorStandardBT2020 == 6, "SDK mismatch"); // N
+
+const static
+ALookup<CU::ColorTransfer, uint32_t> sGfxTransfers {
+    {
+        { CU::kColorTransferUnspecified, GET_HAL_BITFIELD(TRANSFER, UNSPECIFIED) },
+        { CU::kColorTransferLinear,      GET_HAL_BITFIELD(TRANSFER, LINEAR) },
+        { CU::kColorTransferSRGB,        GET_HAL_BITFIELD(TRANSFER, SRGB) },
+        { CU::kColorTransferSMPTE_170M,  GET_HAL_BITFIELD(TRANSFER, SMPTE_170M) },
+        { CU::kColorTransferGamma22,     GET_HAL_BITFIELD(TRANSFER, GAMMA2_2) },
+        { CU::kColorTransferGamma28,     GET_HAL_BITFIELD(TRANSFER, GAMMA2_8) },
+        { CU::kColorTransferST2084,      GET_HAL_BITFIELD(TRANSFER, ST2084) },
+        { CU::kColorTransferHLG,         GET_HAL_BITFIELD(TRANSFER, HLG) },
+    }
+};
+
+// verify public values are stable
+static_assert(CU::kColorTransferUnspecified == 0, "SDK mismatch"); // N
+static_assert(CU::kColorTransferLinear == 1, "SDK mismatch"); // N
+static_assert(CU::kColorTransferSRGB == 2, "SDK mismatch"); // N
+static_assert(CU::kColorTransferSMPTE_170M == 3, "SDK mismatch"); // N
+static_assert(CU::kColorTransferST2084 == 6, "SDK mismatch"); // N
+static_assert(CU::kColorTransferHLG == 7, "SDK mismatch"); // N
+
+const static
+ALookup<CU::ColorRange, uint32_t> sGfxRanges {
+    {
+        { CU::kColorRangeUnspecified, GET_HAL_BITFIELD(RANGE, UNSPECIFIED) },
+        { CU::kColorRangeFull,        GET_HAL_BITFIELD(RANGE, FULL) },
+        { CU::kColorRangeLimited,     GET_HAL_BITFIELD(RANGE, LIMITED) },
+    }
+};
+
+// verify public values are stable
+static_assert(CU::kColorRangeUnspecified == 0, "SDK mismatch"); // N
+static_assert(CU::kColorRangeFull == 1, "SDK mismatch"); // N
+static_assert(CU::kColorRangeLimited == 2, "SDK mismatch"); // N
+
+#undef GET_HAL_BITFIELD
+#undef GET_HAL_ENUM
+
+
 bool ColorUtils::convertDataSpaceToV0(android_dataspace &dataSpace) {
     (void)sLegacyDataSpaceToV0.lookup(dataSpace, &dataSpace);
     return (dataSpace & 0xC000FFFF) == 0;
@@ -507,9 +577,23 @@
         }
     }
 
+    // assume 1-to-1 mapping to HAL values (to deal with potential vendor extensions)
+    uint32_t gfxRange = range;
+    uint32_t gfxStandard = standard;
+    uint32_t gfxTransfer = transfer;
+    // TRICKY: use & to ensure all three mappings are completed
+    if (!(sGfxRanges.map(range, &gfxRange) & sGfxStandards.map(standard, &gfxStandard)
+            & sGfxTransfers.map(transfer, &gfxTransfer))) {
+        ALOGW("could not safely map platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s) to "
+              "graphics dataspace (R:%u S:%u T:%u)",
+              range, asString(range), standard, asString(standard), transfer, asString(transfer),
+              gfxRange, gfxStandard, gfxTransfer);
+    }
+
     android_dataspace dataSpace = (android_dataspace)(
-            (range << HAL_DATASPACE_RANGE_SHIFT) | (standard << HAL_DATASPACE_STANDARD_SHIFT) |
-            (transfer << HAL_DATASPACE_TRANSFER_SHIFT));
+            (gfxRange << HAL_DATASPACE_RANGE_SHIFT) |
+            (gfxStandard << HAL_DATASPACE_STANDARD_SHIFT) |
+            (gfxTransfer << HAL_DATASPACE_TRANSFER_SHIFT));
     (void)sLegacyDataSpaceToV0.rlookup(dataSpace, &dataSpace);
 
     if (!mayExpand) {
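
The TRICKY comment in the ColorUtils hunk above relies on the non-short-circuiting '&':
with '&&', a failed range lookup would skip the standard and transfer lookups entirely,
leaving their out-parameters at the 1-to-1 fallback values even when exact mappings exist.
A minimal standalone sketch of the difference (the lookup() helper below is hypothetical,
not part of the patch):

    #include <cstdio>

    static bool lookup(int key, int *out) {  // hypothetical stand-in for ALookup::map()
        if (key < 0) return false;           // simulate an unmapped value
        *out = key * 10;
        return true;
    }

    int main() {
        int a = 0, b = 0;
        // '&' evaluates both sides: b is filled in even though the first lookup fails.
        bool allMapped = lookup(-1, &a) & lookup(3, &b);
        printf("allMapped=%d b=%d\n", allMapped, b);             // allMapped=0 b=30

        b = 0;
        // '&&' short-circuits: the second lookup never runs, so b stays 0.
        bool shortCircuited = lookup(-1, &a) && lookup(3, &b);
        printf("shortCircuited=%d b=%d\n", shortCircuited, b);   // shortCircuited=0 b=0
        return 0;
    }
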
diff --git a/media/libstagefright/foundation/MediaBuffer.cpp b/media/libstagefright/foundation/MediaBuffer.cpp
index 16000ef..95951dd 100644
--- a/media/libstagefright/foundation/MediaBuffer.cpp
+++ b/media/libstagefright/foundation/MediaBuffer.cpp
@@ -77,19 +77,6 @@
     }
 }
 
-MediaBuffer::MediaBuffer(const sp<GraphicBuffer>& graphicBuffer)
-    : mObserver(NULL),
-      mRefCount(0),
-      mData(NULL),
-      mSize(1),
-      mRangeOffset(0),
-      mRangeLength(mSize),
-      mGraphicBuffer(graphicBuffer),
-      mOwnsData(false),
-      mMetaData(new MetaData),
-      mOriginal(NULL) {
-}
-
 MediaBuffer::MediaBuffer(const sp<ABuffer> &buffer)
     : mObserver(NULL),
       mRefCount(0),
@@ -135,12 +122,10 @@
 }
 
 void *MediaBuffer::data() const {
-    CHECK(mGraphicBuffer == NULL);
     return mData;
 }
 
 size_t MediaBuffer::size() const {
-    CHECK(mGraphicBuffer == NULL);
     return mSize;
 }
 
@@ -153,19 +138,15 @@
 }
 
 void MediaBuffer::set_range(size_t offset, size_t length) {
-    if ((mGraphicBuffer == NULL) && (offset + length > mSize)) {
+    if (offset + length > mSize) {
         ALOGE("offset = %zu, length = %zu, mSize = %zu", offset, length, mSize);
     }
-    CHECK((mGraphicBuffer != NULL) || (offset + length <= mSize));
+    CHECK(offset + length <= mSize);
 
     mRangeOffset = offset;
     mRangeLength = length;
 }
 
-sp<GraphicBuffer> MediaBuffer::graphicBuffer() const {
-    return mGraphicBuffer;
-}
-
 sp<MetaData> MediaBuffer::meta_data() {
     return mMetaData;
 }
@@ -199,8 +180,6 @@
 }
 
 MediaBuffer *MediaBuffer::clone() {
-    CHECK(mGraphicBuffer == NULL);
-
     MediaBuffer *buffer = new MediaBuffer(mData, mSize);
     buffer->set_range(mRangeOffset, mRangeLength);
     buffer->mMetaData = new MetaData(*mMetaData.get());
diff --git a/media/libmedia/MediaDefs.cpp b/media/libstagefright/foundation/MediaDefs.cpp
similarity index 94%
rename from media/libmedia/MediaDefs.cpp
rename to media/libstagefright/foundation/MediaDefs.cpp
index 544a6ae..1695c75 100644
--- a/media/libmedia/MediaDefs.cpp
+++ b/media/libstagefright/foundation/MediaDefs.cpp
@@ -14,11 +14,12 @@
  * limitations under the License.
  */
 
-#include <media/MediaDefs.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 
 namespace android {
 
 const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg";
+const char *MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC = "image/vnd.android.heic";
 
 const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
 const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
@@ -58,6 +59,7 @@
 const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS = "video/mp2ts";
 const char *MEDIA_MIMETYPE_CONTAINER_AVI = "video/avi";
 const char *MEDIA_MIMETYPE_CONTAINER_MPEG2PS = "video/mp2p";
+const char *MEDIA_MIMETYPE_CONTAINER_HEIF = "image/heif";
 
 const char *MEDIA_MIMETYPE_TEXT_3GPP = "text/3gpp-tt";
 const char *MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";
diff --git a/include/media/AudioClient.h b/media/libstagefright/foundation/MediaKeys.cpp
similarity index 63%
rename from include/media/AudioClient.h
rename to media/libstagefright/foundation/MediaKeys.cpp
index 9efd76d..53920c9 100644
--- a/include/media/AudioClient.h
+++ b/media/libstagefright/foundation/MediaKeys.cpp
@@ -14,25 +14,13 @@
  * limitations under the License.
  */
 
-
-#ifndef ANDROID_AUDIO_CLIENT_H
-#define ANDROID_AUDIO_CLIENT_H
-
-#include <system/audio.h>
-#include <utils/String16.h>
+#include <media/stagefright/foundation/MediaKeys.h>
 
 namespace android {
 
-class AudioClient {
- public:
-    AudioClient() :
-        clientUid(-1), clientPid(-1), packageName("") {}
+const char *const kIStreamListenerKeyDiscontinuityMask = "discontinuity-mask";
+const char *const kATSParserKeyResumeAtPTS = "resume-at-PTS";
+const char *const kATSParserKeyMediaTimeUs = "media-time-us";
+const char *const kATSParserKeyRecentMediaTimeUs = "recent-media-time-us";
 
-    uid_t clientUid;
-    pid_t clientPid;
-    String16 packageName;
-};
-
-}; // namespace android
-
-#endif  // ANDROID_AUDIO_CLIENT_H
+}  // namespace android
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/foundation/avc_utils.cpp
similarity index 98%
rename from media/libstagefright/avc_utils.cpp
rename to media/libstagefright/foundation/avc_utils.cpp
index b75b468..bfaeb21 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/foundation/avc_utils.cpp
@@ -18,10 +18,10 @@
 #define LOG_TAG "avc_utils"
 #include <utils/Log.h>
 
-#include "include/avc_utils.h"
 
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
@@ -468,11 +468,9 @@
     return meta;
 }
 
-template <typename T>
-bool IsIDRInternal(const sp<T> &buffer) {
-    const uint8_t *data = buffer->data();
-    size_t size = buffer->size();
-
+bool IsIDR(const uint8_t *data, size_t size) {
     bool foundIDR = false;
 
     const uint8_t *nalStart;
@@ -494,14 +492,6 @@
     return foundIDR;
 }
 
-bool IsIDR(const sp<ABuffer> &buffer) {
-    return IsIDRInternal(buffer);
-}
-
-bool IsIDR(const sp<MediaCodecBuffer> &buffer) {
-    return IsIDRInternal(buffer);
-}
-
 bool IsAVCReferenceFrame(const sp<ABuffer> &accessUnit) {
     const uint8_t *data = accessUnit->data();
     size_t size = accessUnit->size();
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h
new file mode 100644
index 0000000..dc4125f
--- /dev/null
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BYTE_UTILS_H
+
+#define BYTE_UTILS_H
+
+#include <arpa/inet.h>
+
+namespace android {
+
+#define FOURCC(c1, c2, c3, c4) \
+    ((c1) << 24 | (c2) << 16 | (c3) << 8 | (c4))
+
+uint16_t U16_AT(const uint8_t *ptr);
+uint32_t U32_AT(const uint8_t *ptr);
+uint64_t U64_AT(const uint8_t *ptr);
+
+uint16_t U16LE_AT(const uint8_t *ptr);
+uint32_t U32LE_AT(const uint8_t *ptr);
+uint64_t U64LE_AT(const uint8_t *ptr);
+
+uint64_t ntoh64(uint64_t x);
+uint64_t hton64(uint64_t x);
+
+void MakeFourCCString(uint32_t x, char *s);
+
+}  // namespace android
+
+#endif  // BYTE_UTILS_H
+
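
The FOURCC macro declared above packs four ASCII characters into a 32-bit tag,
big-endian style. A standalone illustration (not part of the patch):

    #include <cstdint>
    #include <cstdio>

    #define FOURCC(c1, c2, c3, c4) \
        ((c1) << 24 | (c2) << 16 | (c3) << 8 | (c4))

    int main() {
        uint32_t tag = FOURCC('m', 'o', 'o', 'v');
        printf("%08x\n", (unsigned)tag);  // prints 6d6f6f76 -- 'm' 'o' 'o' 'v'
        return 0;
    }
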
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index b889a02..d6c768d 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -39,26 +39,28 @@
      * vendor-extension section so they won't collide with future platform values.
      */
 
-#define GET_HAL_ENUM(class, name) HAL_DATASPACE_##class##name
-#define GET_HAL_BITFIELD(class, name) (GET_HAL_ENUM(class, _##name) >> GET_HAL_ENUM(class, _SHIFT))
-
+    /**
+     * graphics.h constants changed in Android 8.0 after ColorStandard values were already public
+     * in Android 7.0. We do not deal with the graphics.h break here; instead we list the public
+     * Android SDK MediaFormat values.
+     */
     enum ColorStandard : uint32_t {
-        kColorStandardUnspecified =          GET_HAL_BITFIELD(STANDARD, UNSPECIFIED),
-        kColorStandardBT709 =                GET_HAL_BITFIELD(STANDARD, BT709),
-        kColorStandardBT601_625 =            GET_HAL_BITFIELD(STANDARD, BT601_625),
-        kColorStandardBT601_625_Unadjusted = GET_HAL_BITFIELD(STANDARD, BT601_625_UNADJUSTED),
-        kColorStandardBT601_525 =            GET_HAL_BITFIELD(STANDARD, BT601_525),
-        kColorStandardBT601_525_Unadjusted = GET_HAL_BITFIELD(STANDARD, BT601_525_UNADJUSTED),
-        kColorStandardBT2020 =               GET_HAL_BITFIELD(STANDARD, BT2020),
-        kColorStandardBT2020Constant =       GET_HAL_BITFIELD(STANDARD, BT2020_CONSTANT_LUMINANCE),
-        kColorStandardBT470M =               GET_HAL_BITFIELD(STANDARD, BT470M),
-        kColorStandardFilm =                 GET_HAL_BITFIELD(STANDARD, FILM),
-        kColorStandardMax =                  GET_HAL_BITFIELD(STANDARD, MASK),
+        kColorStandardUnspecified =          0,
+        kColorStandardBT709 =                1,
+        kColorStandardBT601_625 =            2,
+        kColorStandardBT601_625_Unadjusted = 3, // not in SDK
+        kColorStandardBT601_525 =            4,
+        kColorStandardBT601_525_Unadjusted = 5, // not in SDK
+        kColorStandardBT2020 =               6,
+        kColorStandardBT2020Constant =       7, // not in SDK
+        kColorStandardBT470M =               8, // not in SDK
+        kColorStandardFilm =                 9, // not in SDK
+        kColorStandardDCI_P3 =               10, // not in SDK, new in Android 8.0
 
         /* This marks a section of color-standard values that are not supported by graphics HAL,
            but track defined color primaries-matrix coefficient combinations in media.
            These are stable for a given release. */
-        kColorStandardExtendedStart = kColorStandardMax + 1,
+        kColorStandardExtendedStart = 64,
 
         /* This marks a section of color-standard values that are not supported by graphics HAL
            nor using media defined color primaries or matrix coefficients. These may differ per
@@ -67,19 +69,19 @@
     };
 
     enum ColorTransfer : uint32_t  {
-        kColorTransferUnspecified = GET_HAL_BITFIELD(TRANSFER, UNSPECIFIED),
-        kColorTransferLinear =      GET_HAL_BITFIELD(TRANSFER, LINEAR),
-        kColorTransferSRGB =        GET_HAL_BITFIELD(TRANSFER, SRGB),
-        kColorTransferSMPTE_170M =  GET_HAL_BITFIELD(TRANSFER, SMPTE_170M),
-        kColorTransferGamma22 =     GET_HAL_BITFIELD(TRANSFER, GAMMA2_2),
-        kColorTransferGamma28 =     GET_HAL_BITFIELD(TRANSFER, GAMMA2_8),
-        kColorTransferST2084 =      GET_HAL_BITFIELD(TRANSFER, ST2084),
-        kColorTransferHLG =         GET_HAL_BITFIELD(TRANSFER, HLG),
-        kColorTransferMax =         GET_HAL_BITFIELD(TRANSFER, MASK),
+        kColorTransferUnspecified = 0,
+        kColorTransferLinear =      1,
+        kColorTransferSRGB =        2,
+        kColorTransferSMPTE_170M =  3, // not in SDK
+        kColorTransferGamma22 =     4, // not in SDK
+        kColorTransferGamma28 =     5, // not in SDK
+        kColorTransferST2084 =      6,
+        kColorTransferHLG =         7,
+        kColorTransferGamma26 =     8, // not in SDK, new in Android 8.0
 
         /* This marks a section of color-transfer values that are not supported by graphics HAL,
            but track media-defined color-transfer. These are stable for a given release. */
-        kColorTransferExtendedStart = kColorTransferMax + 1,
+        kColorTransferExtendedStart = 32,
 
         /* This marks a section of color-transfer values that are not supported by graphics HAL
            nor defined by media. These may differ per device. */
@@ -87,23 +89,19 @@
     };
 
     enum ColorRange : uint32_t  {
-        kColorRangeUnspecified = GET_HAL_BITFIELD(RANGE, UNSPECIFIED),
-        kColorRangeFull =        GET_HAL_BITFIELD(RANGE, FULL),
-        kColorRangeLimited =     GET_HAL_BITFIELD(RANGE, LIMITED),
-        kColorRangeMax =         GET_HAL_BITFIELD(RANGE, MASK),
+        kColorRangeUnspecified = 0,
+        kColorRangeFull =        1,
+        kColorRangeLimited =     2,
 
         /* This marks a section of color-range values that are not supported by graphics HAL,
            but track media-defined color ranges. These are stable for a given release. */
-        kColorRangeExtendedStart = kColorRangeMax + 1,
+        kColorRangeExtendedStart = 8,
 
         /* This marks a section of color-range values that are not supported by graphics HAL
            nor defined by media. These may differ per device. */
         kColorRangeVendorStart = 0x10000,
     };
 
-#undef GET_HAL_BITFIELD
-#undef GET_HAL_ENUM
-
     /*
      * Static utilities for codec support
      */
@@ -197,7 +195,8 @@
         case ColorUtils::kColorStandardBT2020Constant:       return "BT2020Constant";
         case ColorUtils::kColorStandardBT470M:               return "BT470M";
         case ColorUtils::kColorStandardFilm:                 return "Film";
-        default:                                            return def;
+        case ColorUtils::kColorStandardDCI_P3:               return "DCI_P3";
+        default:                                             return def;
     }
 }
 
@@ -212,7 +211,8 @@
         case ColorUtils::kColorTransferGamma28:     return "Gamma28";
         case ColorUtils::kColorTransferST2084:      return "ST2084";
         case ColorUtils::kColorTransferHLG:         return "HLG";
-        default:                                   return def;
+        case ColorUtils::kColorTransferGamma26:     return "Gamma26";
+        default:                                    return def;
     }
 }
 
@@ -222,7 +222,7 @@
         case ColorUtils::kColorRangeUnspecified: return "Unspecified";
         case ColorUtils::kColorRangeFull:        return "Full";
         case ColorUtils::kColorRangeLimited:     return "Limited";
-        default:                                return def;
+        default:                                 return def;
     }
 }
 
diff --git a/media/libmedia/include/media/MediaDefs.h b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
similarity index 96%
rename from media/libmedia/include/media/MediaDefs.h
rename to media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
index 7f17013..25be89f 100644
--- a/media/libmedia/include/media/MediaDefs.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -21,6 +21,7 @@
 namespace android {
 
 extern const char *MEDIA_MIMETYPE_IMAGE_JPEG;
+extern const char *MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC;
 
 extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
 extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
@@ -60,6 +61,7 @@
 extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS;
 extern const char *MEDIA_MIMETYPE_CONTAINER_AVI;
 extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2PS;
+extern const char *MEDIA_MIMETYPE_CONTAINER_HEIF;
 
 extern const char *MEDIA_MIMETYPE_TEXT_3GPP;
 extern const char *MEDIA_MIMETYPE_TEXT_SUBRIP;
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaKeys.h b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaKeys.h
new file mode 100644
index 0000000..db924bb
--- /dev/null
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaKeys.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_KEYS_H_
+
+#define MEDIA_KEYS_H_
+
+namespace android {
+
+// When signalling a discontinuity to IStreamListener you can optionally
+// specify the type(s) of discontinuity, i.e. if the audio format has changed,
+// the video format has changed, time has jumped or any combination thereof.
+// To do so, include a non-zero int32_t value under the key
+// "kIStreamListenerKeyDiscontinuityMask" when issuing the
+// IStreamListener::DISCONTINUITY command.
+// If there is a change in audio/video format, the new logical stream
+// must start with proper codec initialization information for playback
+// to continue, e.g. SPS and PPS in the case of AVC video.
+// If this key is not present, only a time discontinuity is assumed.
+// The value should be a bitmask of values from
+// ATSParser::DiscontinuityType.
+extern const char *const kIStreamListenerKeyDiscontinuityMask;
+
+// When signalling a discontinuity to ATSParser you can optionally
+// specify an int64_t PTS timestamp in "extra".
+// If present, rendering of data following the discontinuity
+// will be suppressed until media time reaches this timestamp.
+extern const char *const kATSParserKeyResumeAtPTS;
+
+// When signalling a discontinuity to ATSParser you can optionally
+// specify an int64_t PTS timestamp in "extra".
+// It indicates the media time (in us) to be associated
+// with the next PTS occurring in the stream. The value is of type int64_t.
+extern const char *const kATSParserKeyMediaTimeUs;
+
+// When signalling a discontinuity to ATSParser you can optionally
+// specify an int64_t PTS timestamp in "extra".
+// It indicates the media time (in us) of a recent
+// sample from the same content, and is used as a hint for the parser to
+// handle PTS wraparound. This is required when a new parser is created
+// to continue parsing content from the same timeline.
+extern const char *const kATSParserKeyRecentMediaTimeUs;
+
+}  // namespace android
+
+#endif  // MEDIA_KEYS_H_
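
A hedged sketch of how these keys are meant to travel in the "extra" message when
signalling a discontinuity; it mirrors the PlaylistFetcher change later in this patch,
and 'parser', 'lastMediaTimeUs' and 'resumePtsUs' are illustrative names rather than
identifiers from the patch:

    sp<AMessage> extra = new AMessage;
    extra->setInt64(kATSParserKeyMediaTimeUs, 0);                     // absolute timestamps, no skew
    extra->setInt64(kATSParserKeyRecentMediaTimeUs, lastMediaTimeUs); // hint for PTS wraparound
    extra->setInt64(kATSParserKeyResumeAtPTS, resumePtsUs);           // optionally hold rendering until here
    parser->signalDiscontinuity(ATSParser::DISCONTINUITY_TIME, extra);
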
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/avc_utils.h
similarity index 96%
rename from media/libstagefright/include/avc_utils.h
rename to media/libstagefright/foundation/include/media/stagefright/foundation/avc_utils.h
index d05906a..a939f12 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/avc_utils.h
@@ -18,7 +18,6 @@
 
 #define AVC_UTILS_H_
 
-#include <media/MediaCodecBuffer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <utils/Errors.h>
 
@@ -84,8 +83,7 @@
 class MetaData;
 sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit);
 
-bool IsIDR(const sp<ABuffer> &accessUnit);
-bool IsIDR(const sp<MediaCodecBuffer> &accessUnit);
+bool IsIDR(const uint8_t *data, size_t size);
 bool IsAVCReferenceFrame(const sp<ABuffer> &accessUnit);
 uint32_t FindAVCLayerId(const uint8_t *data, size_t size);
 
diff --git a/media/libstagefright/foundation/tests/Utils_test.cpp b/media/libstagefright/foundation/tests/Utils_test.cpp
index 0439d5c..fc2e044 100644
--- a/media/libstagefright/foundation/tests/Utils_test.cpp
+++ b/media/libstagefright/foundation/tests/Utils_test.cpp
@@ -22,7 +22,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AStringUtils.h>
 #include <media/stagefright/foundation/AUtils.h>
-#include <media/stagefright/Utils.h> // for FOURCC
+#include <media/stagefright/foundation/ByteUtils.h> // for FOURCC
 
 namespace android {
 
diff --git a/media/libstagefright/http/Android.bp b/media/libstagefright/http/Android.bp
index 5d90b0a..2e49fc4 100644
--- a/media/libstagefright/http/Android.bp
+++ b/media/libstagefright/http/Android.bp
@@ -12,7 +12,6 @@
     shared_libs: [
         "liblog",
         "libutils",
-        "libbinder",
         "libandroid_runtime",
         "libmedia",
     ],
diff --git a/media/libstagefright/http/MediaHTTP.cpp b/media/libstagefright/http/MediaHTTP.cpp
index 5b18814..7c9247e 100644
--- a/media/libstagefright/http/MediaHTTP.cpp
+++ b/media/libstagefright/http/MediaHTTP.cpp
@@ -25,11 +25,11 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/Utils.h>
 
-#include <media/IMediaHTTPConnection.h>
+#include <media/MediaHTTPConnection.h>
 
 namespace android {
 
-MediaHTTP::MediaHTTP(const sp<IMediaHTTPConnection> &conn)
+MediaHTTP::MediaHTTP(const sp<MediaHTTPConnection> &conn)
     : mInitCheck((conn != NULL) ? OK : NO_INIT),
       mHTTPConnection(conn),
       mCachedSizeValid(false),
@@ -176,12 +176,6 @@
     return mDecryptHandle;
 }
 
-void MediaHTTP::getDrmInfo(
-        sp<DecryptHandle> &handle, DrmManagerClient **client) {
-    handle = mDecryptHandle;
-    *client = mDrmManagerClient;
-}
-
 String8 MediaHTTP::getUri() {
     if (mInitCheck != OK) {
         return String8::empty();
diff --git a/media/libstagefright/httplive/Android.bp b/media/libstagefright/httplive/Android.bp
index e415334..ac113b8 100644
--- a/media/libstagefright/httplive/Android.bp
+++ b/media/libstagefright/httplive/Android.bp
@@ -36,8 +36,18 @@
         "libcrypto",
         "libcutils",
         "libmedia",
+        "libmediaextractor",
         "libstagefright",
         "libstagefright_foundation",
         "libutils",
+        "libhidlbase",
+        "android.hardware.cas@1.0",
+        "android.hardware.cas.native@1.0",
     ],
+
+    static_libs: [
+        "libstagefright_id3",
+        "libstagefright_mpeg2support",
+    ],
+
 }
diff --git a/media/libstagefright/httplive/HTTPDownloader.cpp b/media/libstagefright/httplive/HTTPDownloader.cpp
index 793695a..72604e3 100644
--- a/media/libstagefright/httplive/HTTPDownloader.cpp
+++ b/media/libstagefright/httplive/HTTPDownloader.cpp
@@ -21,12 +21,12 @@
 #include "HTTPDownloader.h"
 #include "M3UParser.h"
 
-#include <media/IMediaHTTPConnection.h>
-#include <media/IMediaHTTPService.h>
+#include <media/DataSource.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaHTTP.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/FileSource.h>
 #include <openssl/aes.h>
 #include <openssl/md5.h>
@@ -36,7 +36,7 @@
 namespace android {
 
 HTTPDownloader::HTTPDownloader(
-        const sp<IMediaHTTPService> &httpService,
+        const sp<MediaHTTPService> &httpService,
         const KeyedVector<String8, String8> &headers) :
     mHTTPDataSource(new MediaHTTP(httpService->makeHTTPConnection())),
     mExtraHeaders(headers),
diff --git a/media/libstagefright/httplive/HTTPDownloader.h b/media/libstagefright/httplive/HTTPDownloader.h
index 1db4a48..0d4bd31 100644
--- a/media/libstagefright/httplive/HTTPDownloader.h
+++ b/media/libstagefright/httplive/HTTPDownloader.h
@@ -28,12 +28,12 @@
 struct ABuffer;
 class DataSource;
 struct HTTPBase;
-struct IMediaHTTPService;
+struct MediaHTTPService;
 struct M3UParser;
 
 struct HTTPDownloader : public RefBase {
     HTTPDownloader(
-            const sp<IMediaHTTPService> &httpService,
+            const sp<MediaHTTPService> &httpService,
             const KeyedVector<String8, String8> &headers);
 
     void reconnect();
diff --git a/media/libstagefright/httplive/LiveDataSource.h b/media/libstagefright/httplive/LiveDataSource.h
index b7be637..91e9f9f 100644
--- a/media/libstagefright/httplive/LiveDataSource.h
+++ b/media/libstagefright/httplive/LiveDataSource.h
@@ -18,8 +18,8 @@
 
 #define LIVE_DATA_SOURCE_H_
 
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/DataSource.h>
 #include <utils/threads.h>
 #include <utils/List.h>
 
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 143fd59..7eff8eb 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -26,7 +26,7 @@
 #include "mpeg2ts/AnotherPacketSource.h"
 
 #include <cutils/properties.h>
-#include <media/IMediaHTTPService.h>
+#include <media/MediaHTTPService.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -49,6 +49,10 @@
 const int64_t LiveSession::kUpSwitchMarginUs = 5000000ll;
 const int64_t LiveSession::kResumeThresholdUs = 100000ll;
 
+// TODO: pick a more appropriate value for this mark
+// default buffer underflow mark
+static const int kUnderflowMarkMs = 1000;  // 1 second
+
 struct LiveSession::BandwidthEstimator : public RefBase {
     BandwidthEstimator();
 
@@ -270,7 +274,7 @@
 
 LiveSession::LiveSession(
         const sp<AMessage> &notify, uint32_t flags,
-        const sp<IMediaHTTPService> &httpService)
+        const sp<MediaHTTPService> &httpService)
     : mNotify(notify),
       mFlags(flags),
       mHTTPService(httpService),
@@ -840,7 +844,7 @@
                     // (If we don't have that cushion we'd rather cancel and try again.)
                     int64_t delayUs =
                         switchUp ?
-                            (mBufferingSettings.mRebufferingWatermarkLowMs * 1000ll + 1000000ll)
+                            (kUnderflowMarkMs * 1000ll + 1000000ll)
                             : 0;
                     bool needResumeUntil = false;
                     sp<AMessage> stopParams = msg;
@@ -1010,7 +1014,8 @@
         mFetcherLooper = new ALooper();
 
         mFetcherLooper->setName("Fetcher");
-        mFetcherLooper->start(false, false);
+        mFetcherLooper->start(false /* runOnCallingThread */,
+                              true  /* canCallJava */);
     }
 
     // create fetcher to fetch the master playlist
@@ -2202,14 +2207,14 @@
         ++activeCount;
         int64_t readyMarkUs =
             (mInPreparationPhase ?
-                mBufferingSettings.mInitialWatermarkMs :
-                mBufferingSettings.mRebufferingWatermarkHighMs) * 1000ll;
+                mBufferingSettings.mInitialMarkMs :
+                mBufferingSettings.mResumePlaybackMarkMs) * 1000ll;
         if (bufferedDurationUs > readyMarkUs
                 || mPacketSources[i]->isFinished(0)) {
             ++readyCount;
         }
         if (!mPacketSources[i]->isFinished(0)) {
-            if (bufferedDurationUs < mBufferingSettings.mRebufferingWatermarkLowMs * 1000ll) {
+            if (bufferedDurationUs < kUnderflowMarkMs * 1000ll) {
                 ++underflowCount;
             }
             if (bufferedDurationUs > mUpSwitchMark) {
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index abf8cf0..7a6d487 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -33,7 +33,7 @@
 struct AnotherPacketSource;
 class DataSource;
 struct HTTPBase;
-struct IMediaHTTPService;
+struct MediaHTTPService;
 struct LiveDataSource;
 struct M3UParser;
 struct PlaylistFetcher;
@@ -71,7 +71,7 @@
     LiveSession(
             const sp<AMessage> &notify,
             uint32_t flags,
-            const sp<IMediaHTTPService> &httpService);
+            const sp<MediaHTTPService> &httpService);
 
     void setBufferingSettings(const BufferingSettings &buffering);
 
@@ -187,7 +187,7 @@
 
     sp<AMessage> mNotify;
     uint32_t mFlags;
-    sp<IMediaHTTPService> mHTTPService;
+    sp<MediaHTTPService> mHTTPService;
 
     bool mBuffering;
     bool mInPreparationPhase;
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 8a6482b..bc3e57c 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -23,6 +23,7 @@
 #include <cutils/properties.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 00cf142..b46d923 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -23,7 +23,6 @@
 #include "HTTPDownloader.h"
 #include "LiveSession.h"
 #include "M3UParser.h"
-#include "include/avc_utils.h"
 #include "include/ID3.h"
 #include "mpeg2ts/AnotherPacketSource.h"
 #include "mpeg2ts/HlsSampleDecryptor.h"
@@ -31,6 +30,9 @@
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/foundation/MediaKeys.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
@@ -1692,12 +1694,12 @@
         sp<AMessage> extra = new AMessage;
         // Since we are using absolute timestamps, signal an offset of 0 to prevent
         // ATSParser from skewing the timestamps of access units.
-        extra->setInt64(IStreamListener::kKeyMediaTimeUs, 0);
+        extra->setInt64(kATSParserKeyMediaTimeUs, 0);
 
         // When adapting, signal a recent media time to the parser,
         // so that PTS wrap around is handled for the new variant.
         if (mStartTimeUs >= 0 && !mStartTimeUsRelative) {
-            extra->setInt64(IStreamListener::kKeyRecentMediaTimeUs, mStartTimeUs);
+            extra->setInt64(kATSParserKeyRecentMediaTimeUs, mStartTimeUs);
         }
 
         mTSParser->signalDiscontinuity(
@@ -1828,7 +1830,7 @@
                             (long long)timeUs - mStartTimeUs,
                             mIDRFound);
                     if (isAvc) {
-                        if (IsIDR(accessUnit)) {
+                        if (IsIDR(accessUnit->data(), accessUnit->size())) {
                             mVideoBuffer->clear();
                             FSLOGV(stream, "found IDR, clear mVideoBuffer");
                             mIDRFound = true;
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index 58d8b13..61403be 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -20,9 +20,9 @@
 
 #include "../include/ID3.h"
 
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <utils/String8.h>
 #include <byteswap.h>
 
diff --git a/media/libstagefright/include/AVIExtractor.h b/media/libstagefright/include/AVIExtractor.h
index 3be505c..1223c80 100644
--- a/media/libstagefright/include/AVIExtractor.h
+++ b/media/libstagefright/include/AVIExtractor.h
@@ -18,9 +18,9 @@
 
 #define AVI_EXTRACTOR_H_
 
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
 #include <utils/Vector.h>
 
 namespace android {
diff --git a/media/libstagefright/include/CCodecBufferChannel.h b/media/libstagefright/include/CCodecBufferChannel.h
new file mode 100644
index 0000000..c5062d6
--- /dev/null
+++ b/media/libstagefright/include/CCodecBufferChannel.h
@@ -0,0 +1,305 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_BUFFER_CHANNEL_H_
+
+#define A_BUFFER_CHANNEL_H_
+
+#include <map>
+#include <memory>
+#include <mutex>
+#include <vector>
+
+#include <C2Buffer.h>
+#include <C2Component.h>
+
+#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/CodecBase.h>
+#include <media/ICrypto.h>
+
+namespace android {
+
+/**
+ * BufferChannelBase implementation for CCodec.
+ */
+class CCodecBufferChannel : public BufferChannelBase {
+public:
+    /**
+     * Base class for representation of buffers at one port.
+     */
+    class Buffers {
+    public:
+        Buffers() = default;
+        virtual ~Buffers() = default;
+
+        /**
+         * Set format for MediaCodec-facing buffers.
+         */
+        inline void setFormat(const sp<AMessage> &format) { mFormat = format; }
+
+        /**
+         * Returns true if the buffers are operating under array mode.
+         */
+        virtual bool isArrayMode() { return false; }
+
+        /**
+         * Fills the vector with MediaCodecBuffers if in array mode; otherwise,
+         * this is a no-op.
+         */
+        virtual void getArray(Vector<sp<MediaCodecBuffer>> *) {}
+
+    protected:
+        // Format to be used for creating MediaCodec-facing buffers.
+        sp<AMessage> mFormat;
+
+    private:
+        DISALLOW_EVIL_CONSTRUCTORS(Buffers);
+    };
+
+    class InputBuffers : public Buffers {
+    public:
+        using Buffers::Buffers;
+        virtual ~InputBuffers() = default;
+
+        /**
+         * Set a block pool to obtain input memory blocks.
+         */
+        inline void setPool(const std::shared_ptr<C2BlockPool> &pool) { mPool = pool; }
+
+        /**
+         * Get a new MediaCodecBuffer for input and its corresponding index.
+         * Returns false if no new buffer can be obtained at the moment.
+         */
+        virtual bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) = 0;
+
+        /**
+         * Release the buffer obtained from requestNewBuffer() and get the
+         * associated C2Buffer object back. Returns empty shared_ptr if the
+         * buffer is not on file.
+         */
+        virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
+
+        /**
+         * Flush internal state. After this call, no index or buffer previously
+         * returned from requestNewBuffer() is valid.
+         */
+        virtual void flush() = 0;
+
+        /**
+         * Return array-backed version of input buffers. The returned object
+         * shall retain the internal state so that it will honor index and
+         * buffer from previous calls of requestNewBuffer().
+         */
+        virtual std::unique_ptr<InputBuffers> toArrayMode() = 0;
+
+    protected:
+        // Pool to obtain blocks for input buffers.
+        std::shared_ptr<C2BlockPool> mPool;
+
+    private:
+        DISALLOW_EVIL_CONSTRUCTORS(InputBuffers);
+    };
+
+    class OutputBuffers : public Buffers {
+    public:
+        using Buffers::Buffers;
+        virtual ~OutputBuffers() = default;
+
+        /**
+         * Register output C2Buffer from the component and obtain corresponding
+         * index and MediaCodecBuffer object. Returns false if registration
+         * fails.
+         */
+        virtual bool registerBuffer(
+                const std::shared_ptr<C2Buffer> &buffer,
+                size_t *index,
+                sp<MediaCodecBuffer> *codecBuffer) = 0;
+
+        /**
+         * Register codec specific data as a buffer to be consistent with
+         * MediaCodec behavior.
+         */
+        virtual bool registerCsd(
+                const C2StreamCsdInfo::output * /* csd */,
+                size_t * /* index */,
+                sp<MediaCodecBuffer> * /* codecBuffer */) {
+            return false;
+        }
+
+        /**
+         * Release the buffer obtained from registerBuffer() and get the
+         * associated C2Buffer object back. Returns empty shared_ptr if the
+         * buffer is not on file.
+         */
+        virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
+
+        /**
+         * Flush internal state. After this call, no index or buffer previously
+         * returned from registerBuffer() is valid.
+         */
+        virtual void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) = 0;
+
+        /**
+         * Return array-backed version of output buffers. The returned object
+         * shall retain the internal state so that it will honor index and
+         * buffer from previous calls of registerBuffer().
+         */
+        virtual std::unique_ptr<OutputBuffers> toArrayMode() = 0;
+
+    private:
+        DISALLOW_EVIL_CONSTRUCTORS(OutputBuffers);
+    };
+
+    CCodecBufferChannel(const std::function<void(status_t, enum ActionCode)> &onError);
+    virtual ~CCodecBufferChannel();
+
+    // BufferChannelBase interface
+    virtual status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    virtual status_t queueSecureInputBuffer(
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            const uint8_t *key,
+            const uint8_t *iv,
+            CryptoPlugin::Mode mode,
+            CryptoPlugin::Pattern pattern,
+            const CryptoPlugin::SubSample *subSamples,
+            size_t numSubSamples,
+            AString *errorDetailMsg) override;
+    virtual status_t renderOutputBuffer(
+            const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
+    virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+
+    // Methods below are interface for CCodec to use.
+
+    void setComponent(const std::shared_ptr<C2Component> &component);
+    status_t setSurface(const sp<Surface> &surface);
+
+    /**
+     * Set C2BlockPool for input buffers.
+     *
+     * TODO: start timestamp?
+     */
+    void setInputBufferAllocator(const sp<C2BlockPool> &inAlloc);
+
+    /**
+     * Set C2BlockPool for output buffers. This object shall never use the
+     * allocator itself; it is just passed on to the component.
+     *
+     * TODO: start timestamp?
+     */
+    void setOutputBufferAllocator(const sp<C2BlockPool> &outAlloc);
+
+    /**
+     * Start queueing buffers to the component. This object should never queue
+     * buffers before this call.
+     */
+    void start(const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat);
+
+    /**
+     * Stop queueing buffers to the component. This object should never queue
+     * buffers after this call, until start() is called.
+     */
+    void stop();
+
+    void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork);
+
+    /**
+     * Notify MediaCodec about work done.
+     *
+     * @param workItems   finished work items.
+     */
+    void onWorkDone(std::vector<std::unique_ptr<C2Work>> workItems);
+
+private:
+    class QueueGuard;
+
+    /**
+     * Special mutex-like object with the following properties:
+     *
+     * - At STOPPED state (initial, or after stop())
+     *   - A QueueGuard created in this state reports !isRunning(), and the client
+     *     is supposed to return immediately.
+     * - At RUNNING state (after start())
+     *   - Each QueueGuard created in this state is tracked, and stop() waits until
+     *     all of them are destroyed before returning to STOPPED.
+     */
+    class QueueSync {
+    public:
+        /**
+         * At construction the sync object is in STOPPED state.
+         */
+        inline QueueSync() : mCount(-1) {}
+        ~QueueSync() = default;
+
+        /**
+         * Transition to RUNNING state when stopped. No-op if already in RUNNING
+         * state.
+         */
+        void start();
+
+        /**
+         * At RUNNING state, wait until all QueueGuard objects created during the
+         * RUNNING state are destroyed, and then transition to STOPPED state.
+         * No-op if already in STOPPED state.
+         */
+        void stop();
+
+    private:
+        std::mutex mMutex;
+        std::atomic_int32_t mCount;
+
+        friend class CCodecBufferChannel::QueueGuard;
+    };
+
+    class QueueGuard {
+    public:
+        QueueGuard(QueueSync &sync);
+        ~QueueGuard();
+        inline bool isRunning() { return mRunning; }
+
+    private:
+        QueueSync &mSync;
+        bool mRunning;
+    };
+
+    QueueSync mSync;
+    sp<MemoryDealer> mDealer;
+    sp<IMemory> mDecryptDestination;
+    int32_t mHeapSeqNum;
+
+    std::shared_ptr<C2Component> mComponent;
+    std::function<void(status_t, enum ActionCode)> mOnError;
+    std::shared_ptr<C2BlockPool> mInputAllocator;
+    QueueSync mQueueSync;
+
+    Mutexed<std::unique_ptr<InputBuffers>> mInputBuffers;
+    Mutexed<std::unique_ptr<OutputBuffers>> mOutputBuffers;
+
+    std::atomic_uint64_t mFrameIndex;
+    std::atomic_uint64_t mFirstValidFrameIndex;
+
+    sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
+    Mutexed<sp<Surface>> mSurface;
+
+    inline bool hasCryptoOrDescrambler() {
+        return mCrypto != NULL || mDescrambler != NULL;
+    }
+};
+
+}  // namespace android
+
+#endif  // A_BUFFER_CHANNEL_H_
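
A hedged sketch of the ordering contract spelled out in the start()/stop() comments
above, as seen from the CCodec side; 'channel', 'inputFormat', 'outputFormat' and
'buffer' are assumed to exist and this is not a verbatim call site:

    channel->start(inputFormat, outputFormat);         // queueing becomes legal
    status_t err = channel->queueInputBuffer(buffer);  // only valid while started
    if (err != OK) {
        // errors are also reported asynchronously through the onError callback
    }
    channel->stop();                                   // no further queueing until start()
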
diff --git a/media/libstagefright/include/CallbackDataSource.h b/media/libstagefright/include/CallbackDataSource.h
index 0d775e6..32556d6 100644
--- a/media/libstagefright/include/CallbackDataSource.h
+++ b/media/libstagefright/include/CallbackDataSource.h
@@ -17,7 +17,7 @@
 #ifndef ANDROID_CALLBACKDATASOURCE_H
 #define ANDROID_CALLBACKDATASOURCE_H
 
-#include <media/stagefright/DataSource.h>
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 
 namespace android {
@@ -42,7 +42,6 @@
         return mName;
     }
     virtual sp<DecryptHandle> DrmInitialization(const char *mime = NULL);
-    virtual sp<IDataSource> getIDataSource() const;
 
 private:
     sp<IDataSource> mIDataSource;
@@ -71,7 +70,6 @@
         return mName;
     }
     virtual sp<DecryptHandle> DrmInitialization(const char *mime = NULL);
-    virtual sp<IDataSource> getIDataSource() const;
 
 private:
     // 2kb comes from experimenting with the time-to-first-frame from a MediaPlayer
diff --git a/media/libstagefright/include/Codec2Buffer.h b/media/libstagefright/include/Codec2Buffer.h
new file mode 100644
index 0000000..0272cea
--- /dev/null
+++ b/media/libstagefright/include/Codec2Buffer.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_BUFFER_H_
+
+#define CODEC2_BUFFER_H_
+
+#include <C2Buffer.h>
+
+#include <media/MediaCodecBuffer.h>
+
+namespace android {
+
+class C2Buffer;
+
+/**
+ * MediaCodecBuffer implementation wraps around C2LinearBlock.
+ */
+class Codec2Buffer : public MediaCodecBuffer {
+public:
+    static sp<Codec2Buffer> allocate(
+            const sp<AMessage> &format, const std::shared_ptr<C2LinearBlock> &block);
+
+    virtual ~Codec2Buffer() = default;
+
+    C2ConstLinearBlock share();
+
+private:
+    Codec2Buffer(
+            const sp<AMessage> &format,
+            const sp<ABuffer> &buffer,
+            const std::shared_ptr<C2LinearBlock> &block);
+    Codec2Buffer() = delete;
+
+    std::shared_ptr<C2LinearBlock> mBlock;
+};
+
+}  // namespace android
+
+#endif  // CODEC2_BUFFER_H_
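
A hedged sketch of the intended flow for the wrapper declared above; 'format' and
'block' are assumed to come from the codec's buffer format and a C2BlockPool
respectively, and the comments describe expected usage rather than code from this
patch:

    sp<Codec2Buffer> buffer = Codec2Buffer::allocate(format, block);
    if (buffer != nullptr) {
        // The client fills the MediaCodecBuffer view (data()/setRange()), then the
        // buffer channel takes a read-only view of the block to queue to the component.
        C2ConstLinearBlock readView = buffer->share();
    }
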
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
new file mode 100644
index 0000000..d7c074c
--- /dev/null
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -0,0 +1,176 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAME_DECODER_H_
+#define FRAME_DECODER_H_
+
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/MediaSource.h>
+#include <media/openmax/OMX_Video.h>
+#include <system/graphics-base.h>
+
+namespace android {
+
+struct AMessage;
+class MediaCodecBuffer;
+class VideoFrame;
+
+struct FrameDecoder {
+    FrameDecoder(
+            const AString &componentName,
+            const sp<MetaData> &trackMeta,
+            const sp<IMediaSource> &source) :
+                mComponentName(componentName),
+                mTrackMeta(trackMeta),
+                mSource(source),
+                mDstFormat(OMX_COLOR_Format16bitRGB565),
+                mDstBpp(2) {}
+
+    VideoFrame* extractFrame(
+            int64_t frameTimeUs,
+            int option,
+            int colorFormat,
+            bool metaOnly);
+
+    status_t extractFrames(
+            int64_t frameTimeUs,
+            size_t numFrames,
+            int option,
+            int colorFormat,
+            std::vector<VideoFrame*>* frames);
+
+protected:
+    virtual ~FrameDecoder() {}
+
+    virtual sp<AMessage> onGetFormatAndSeekOptions(
+            int64_t frameTimeUs,
+            size_t numFrames,
+            int seekMode,
+            MediaSource::ReadOptions *options) = 0;
+
+    virtual status_t onInputReceived(
+            const sp<MediaCodecBuffer> &codecBuffer,
+            const sp<MetaData> &sampleMeta,
+            bool firstSample,
+            uint32_t *flags) = 0;
+
+    virtual status_t onOutputReceived(
+            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            const sp<AMessage> &outputFormat,
+            int64_t timeUs,
+            bool *done) = 0;
+
+    VideoFrame *allocVideoFrame(int32_t width, int32_t height, bool metaOnly);
+
+    sp<MetaData> trackMeta()     const      { return mTrackMeta; }
+    OMX_COLOR_FORMATTYPE dstFormat() const  { return mDstFormat; }
+    int32_t dstBpp()             const      { return mDstBpp; }
+
+    void addFrame(VideoFrame *frame) {
+        mFrames.push_back(std::unique_ptr<VideoFrame>(frame));
+    }
+
+private:
+    AString mComponentName;
+    sp<MetaData> mTrackMeta;
+    sp<IMediaSource> mSource;
+    OMX_COLOR_FORMATTYPE mDstFormat;
+    int32_t mDstBpp;
+    std::vector<std::unique_ptr<VideoFrame> > mFrames;
+
+    bool setDstColorFormat(android_pixel_format_t colorFormat);
+    status_t extractInternal(int64_t frameTimeUs, size_t numFrames, int option);
+
+    DISALLOW_EVIL_CONSTRUCTORS(FrameDecoder);
+};
+
+struct VideoFrameDecoder : public FrameDecoder {
+    VideoFrameDecoder(
+            const AString &componentName,
+            const sp<MetaData> &trackMeta,
+            const sp<IMediaSource> &source) :
+                FrameDecoder(componentName, trackMeta, source),
+                mIsAvcOrHevc(false),
+                mSeekMode(MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC),
+                mTargetTimeUs(-1ll),
+                mNumFrames(0),
+                mNumFramesDecoded(0) {}
+
+protected:
+    virtual sp<AMessage> onGetFormatAndSeekOptions(
+            int64_t frameTimeUs,
+            size_t numFrames,
+            int seekMode,
+            MediaSource::ReadOptions *options) override;
+
+    virtual status_t onInputReceived(
+            const sp<MediaCodecBuffer> &codecBuffer,
+            const sp<MetaData> &sampleMeta,
+            bool firstSample,
+            uint32_t *flags) override;
+
+    virtual status_t onOutputReceived(
+            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            const sp<AMessage> &outputFormat,
+            int64_t timeUs,
+            bool *done) override;
+
+private:
+    bool mIsAvcOrHevc;
+    MediaSource::ReadOptions::SeekMode mSeekMode;
+    int64_t mTargetTimeUs;
+    size_t mNumFrames;
+    size_t mNumFramesDecoded;
+};
+
+struct ImageDecoder : public FrameDecoder {
+    ImageDecoder(
+            const AString &componentName,
+            const sp<MetaData> &trackMeta,
+            const sp<IMediaSource> &source) :
+                FrameDecoder(componentName, trackMeta, source),
+                mFrame(NULL), mGridRows(1), mGridCols(1), mTilesDecoded(0) {}
+
+protected:
+    virtual sp<AMessage> onGetFormatAndSeekOptions(
+            int64_t frameTimeUs,
+            size_t numFrames,
+            int seekMode,
+            MediaSource::ReadOptions *options) override;
+
+    virtual status_t onInputReceived(
+            const sp<MediaCodecBuffer> &codecBuffer __unused,
+            const sp<MetaData> &sampleMeta __unused,
+            bool firstSample __unused,
+            uint32_t *flags __unused) override { return OK; }
+
+    virtual status_t onOutputReceived(
+            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            const sp<AMessage> &outputFormat,
+            int64_t timeUs,
+            bool *done) override;
+
+private:
+    VideoFrame *mFrame;
+    int32_t mGridRows;
+    int32_t mGridCols;
+    int32_t mTilesDecoded;
+};
+
+}  // namespace android
+
+#endif  // FRAME_DECODER_H_
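
A hedged usage sketch for the new decoders; 'trackMeta' and 'source' are assumed to
be available, the component name, time and option values are illustrative, and the
real wiring lives in StagefrightMetadataRetriever.cpp, which this patch only touches
at the header level:

    VideoFrameDecoder decoder(
            AString("OMX.google.h264.decoder") /* illustrative */, trackMeta, source);
    // Closest preceding sync frame to ~1s, decoded to RGB565.
    VideoFrame *frame = decoder.extractFrame(
            1000000LL /* frameTimeUs */, 0 /* option, illustrative */,
            HAL_PIXEL_FORMAT_RGB_565, false /* metaOnly */);
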
diff --git a/media/libstagefright/include/HTTPBase.h b/media/libstagefright/include/HTTPBase.h
index d325e30..26d7e8a 100644
--- a/media/libstagefright/include/HTTPBase.h
+++ b/media/libstagefright/include/HTTPBase.h
@@ -18,8 +18,8 @@
 
 #define HTTP_BASE_H_
 
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaErrors.h>
 #include <utils/threads.h>
 
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index 2639280..f439a1c 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -18,9 +18,9 @@
 
 #define NU_CACHED_SOURCE_2_H_
 
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/foundation/AHandlerReflector.h>
-#include <media/stagefright/DataSource.h>
 
 namespace android {
 
@@ -43,7 +43,6 @@
     virtual uint32_t flags();
 
     virtual sp<DecryptHandle> DrmInitialization(const char* mime);
-    virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client);
     virtual String8 getUri();
 
     virtual String8 getMIMEType() const;
diff --git a/media/libstagefright/include/OmxNodeOwner.h b/media/libstagefright/include/OmxNodeOwner.h
deleted file mode 100644
index 64ec7f7..0000000
--- a/media/libstagefright/include/OmxNodeOwner.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OMX_NODE_OWNER_H_
-
-#define OMX_NODE_OWNER_H_
-
-namespace android {
-
-struct OMXNodeInstance;
-
-/**
- * This struct is needed to separate OMX from OMXNodeInstance.
- *
- * TODO: This might not be needed after Treble transition is complete.
- */
-struct OmxNodeOwner {
-    virtual status_t freeNode(const sp<OMXNodeInstance> &instance) = 0;
-    virtual ~OmxNodeOwner() {}
-};
-
-}
-
-#endif  // OMX_NODE_OWNER_H_
diff --git a/media/libstagefright/include/SDPLoader.h b/media/libstagefright/include/SDPLoader.h
index 2c4f543..b901c97 100644
--- a/media/libstagefright/include/SDPLoader.h
+++ b/media/libstagefright/include/SDPLoader.h
@@ -25,7 +25,7 @@
 namespace android {
 
 struct HTTPBase;
-struct IMediaHTTPService;
+struct MediaHTTPService;
 
 struct SDPLoader : public AHandler {
     enum Flags {
@@ -38,7 +38,7 @@
     SDPLoader(
             const sp<AMessage> &notify,
             uint32_t flags,
-            const sp<IMediaHTTPService> &httpService);
+            const sp<MediaHTTPService> &httpService);
 
     void load(const char* url, const KeyedVector<String8, String8> *headers);
 
diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h
index 277eb3e..58442fe 100644
--- a/media/libstagefright/include/StagefrightMetadataRetriever.h
+++ b/media/libstagefright/include/StagefrightMetadataRetriever.h
@@ -40,7 +40,14 @@
     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
     virtual status_t setDataSource(const sp<DataSource>& source, const char *mime);
 
-    virtual VideoFrame *getFrameAtTime(int64_t timeUs, int option, int colorFormat, bool metaOnly);
+    virtual VideoFrame* getFrameAtTime(
+            int64_t timeUs, int option, int colorFormat, bool metaOnly);
+    virtual VideoFrame* getImageAtIndex(
+            int index, int colorFormat, bool metaOnly);
+    virtual status_t getFrameAtIndex(
+            std::vector<VideoFrame*>* frames,
+            int frameIndex, int numFrames, int colorFormat, bool metaOnly);
+
     virtual MediaAlbumArt *extractAlbumArt();
     virtual const char *extractMetadata(int keyCode);
 
@@ -56,6 +63,10 @@
     // Delete album art and clear metadata.
     void clearMetadata();
 
+    status_t getFrameInternal(
+            int64_t timeUs, int numFrames, int option, int colorFormat, bool metaOnly,
+            VideoFrame **outFrame, std::vector<VideoFrame*>* outFrames);
+
     StagefrightMetadataRetriever(const StagefrightMetadataRetriever &);
 
     StagefrightMetadataRetriever &operator=(
diff --git a/media/libstagefright/include/ThrottledSource.h b/media/libstagefright/include/ThrottledSource.h
index 673268b..71e62f7 100644
--- a/media/libstagefright/include/ThrottledSource.h
+++ b/media/libstagefright/include/ThrottledSource.h
@@ -18,7 +18,7 @@
 
 #define THROTTLED_SOURCE_H_
 
-#include <media/stagefright/DataSource.h>
+#include <media/DataSource.h>
 #include <utils/threads.h>
 
 namespace android {
@@ -58,10 +58,6 @@
         return mSource->DrmInitialization(mime);
     }
 
-    virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client) {
-        mSource->getDrmInfo(handle, client);
-    };
-
     virtual String8 getMIMEType() const {
         return mSource->getMIMEType();
     }
diff --git a/media/libstagefright/include/media/stagefright/AACWriter.h b/media/libstagefright/include/media/stagefright/AACWriter.h
index a1f63d7..aa60a19 100644
--- a/media/libstagefright/include/media/stagefright/AACWriter.h
+++ b/media/libstagefright/include/media/stagefright/AACWriter.h
@@ -31,7 +31,7 @@
 
     status_t initCheck() const;
 
-    virtual status_t addSource(const sp<IMediaSource> &source);
+    virtual status_t addSource(const sp<MediaSource> &source);
     virtual bool reachedEOS();
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop() { return reset(); }
@@ -48,7 +48,7 @@
 
     int   mFd;
     status_t mInitCheck;
-    sp<IMediaSource> mSource;
+    sp<MediaSource> mSource;
     bool mStarted;
     volatile bool mPaused;
     volatile bool mResumed;
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 424246d..d1a9d25 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -95,11 +95,6 @@
 
     static status_t getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]);
 
-    // Save the flag.
-    void setTrebleFlag(bool trebleFlag);
-    // Return the saved flag.
-    bool getTrebleFlag() const;
-
 protected:
     virtual ~ACodec();
 
@@ -233,9 +228,7 @@
     sp<IOMX> mOMX;
     sp<IOMXNode> mOMXNode;
     int32_t mNodeGeneration;
-    bool mTrebleFlag;
     sp<TAllocator> mAllocator[2];
-    sp<MemoryDealer> mDealer[2];
 
     bool mUsingNativeWindow;
     sp<ANativeWindow> mNativeWindow;
diff --git a/media/libstagefright/include/media/stagefright/AMRWriter.h b/media/libstagefright/include/media/stagefright/AMRWriter.h
index fbbdf2e..7d2c879 100644
--- a/media/libstagefright/include/media/stagefright/AMRWriter.h
+++ b/media/libstagefright/include/media/stagefright/AMRWriter.h
@@ -20,7 +20,6 @@
 
 #include <stdio.h>
 
-#include <media/IMediaSource.h>
 #include <media/stagefright/MediaWriter.h>
 #include <utils/threads.h>
 
@@ -33,7 +32,7 @@
 
     status_t initCheck() const;
 
-    virtual status_t addSource(const sp<IMediaSource> &source);
+    virtual status_t addSource(const sp<MediaSource> &source);
     virtual bool reachedEOS();
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop() { return reset(); }
@@ -45,7 +44,7 @@
 private:
     int   mFd;
     status_t mInitCheck;
-    sp<IMediaSource> mSource;
+    sp<MediaSource> mSource;
     bool mStarted;
     volatile bool mPaused;
     volatile bool mResumed;
diff --git a/media/libstagefright/include/media/stagefright/AudioPlayer.h b/media/libstagefright/include/media/stagefright/AudioPlayer.h
index f7499b6..e971762 100644
--- a/media/libstagefright/include/media/stagefright/AudioPlayer.h
+++ b/media/libstagefright/include/media/stagefright/AudioPlayer.h
@@ -18,7 +18,7 @@
 
 #define AUDIO_PLAYER_H_
 
-#include <media/IMediaSource.h>
+#include <media/MediaSource.h>
 #include <media/MediaPlayerInterface.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <utils/threads.h>
@@ -50,7 +50,7 @@
     virtual ~AudioPlayer();
 
     // Caller retains ownership of "source".
-    void setSource(const sp<IMediaSource> &source);
+    void setSource(const sp<MediaSource> &source);
 
     status_t start(bool sourceAlreadyStarted = false);
 
@@ -66,7 +66,7 @@
     status_t getPlaybackRate(AudioPlaybackRate *rate /* nonnull */);
 
 private:
-    sp<IMediaSource> mSource;
+    sp<MediaSource> mSource;
     sp<AudioTrack> mAudioTrack;
 
     MediaBuffer *mInputBuffer;
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index 1595be4..9414aab 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -20,7 +20,7 @@
 
 #include <media/AudioRecord.h>
 #include <media/AudioSystem.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <utils/List.h>
 
@@ -40,7 +40,8 @@
             uint32_t channels,
             uint32_t outSampleRate = 0,
             uid_t uid = -1,
-            pid_t pid = -1);
+            pid_t pid = -1,
+            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     status_t initCheck() const;
 
@@ -58,6 +59,11 @@
     status_t dataCallback(const AudioRecord::Buffer& buffer);
     virtual void signalBufferReturned(MediaBuffer *buffer);
 
+    status_t setInputDevice(audio_port_handle_t deviceId);
+    status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+    status_t addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
+    status_t removeAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
+
 protected:
     virtual ~AudioSource();
 
diff --git a/media/libstagefright/include/media/stagefright/CCodec.h b/media/libstagefright/include/media/stagefright/CCodec.h
new file mode 100644
index 0000000..3e24bbe
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/CCodec.h
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C_CODEC_H_
+#define C_CODEC_H_
+
+#include <chrono>
+
+#include <C2Component.h>
+
+#include <android/native_window.h>
+#include <media/hardware/MetadataBufferType.h>
+#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/CodecBase.h>
+#include <media/stagefright/FrameRenderTracker.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/SkipCutBuffer.h>
+#include <utils/NativeHandle.h>
+#include <hardware/gralloc.h>
+#include <nativebase/nativebase.h>
+
+namespace android {
+
+class CCodecBufferChannel;
+
+class CCodec : public CodecBase {
+public:
+    CCodec();
+
+    virtual std::shared_ptr<BufferChannelBase> getBufferChannel() override;
+    virtual void initiateAllocateComponent(const sp<AMessage> &msg) override;
+    virtual void initiateConfigureComponent(const sp<AMessage> &msg) override;
+    virtual void initiateCreateInputSurface() override;
+    virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface) override;
+    virtual void initiateStart() override;
+    virtual void initiateShutdown(bool keepComponentAllocated = false) override;
+
+    virtual status_t setSurface(const sp<Surface> &surface) override;
+
+    virtual void signalFlush() override;
+    virtual void signalResume() override;
+
+    virtual void signalSetParameters(const sp<AMessage> &msg) override;
+    virtual void signalEndOfInputStream() override;
+    virtual void signalRequestIDRFrame() override;
+
+    void initiateReleaseIfStuck();
+
+protected:
+    virtual ~CCodec();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg) override;
+
+private:
+    typedef std::chrono::time_point<std::chrono::steady_clock> TimePoint;
+
+    void initiateStop();
+    void initiateRelease(bool sendCallback = true);
+
+    void allocate(const AString &componentName);
+    void configure(const sp<AMessage> &msg);
+    void start();
+    void stop();
+    void flush();
+    void release(bool sendCallback);
+
+    void setDeadline(const TimePoint &deadline);
+
+    enum {
+        kWhatAllocate,
+        kWhatConfigure,
+        kWhatStart,
+        kWhatFlush,
+        kWhatStop,
+        kWhatRelease,
+    };
+
+    enum {
+        RELEASED,
+        ALLOCATED,
+        FLUSHED,
+        RUNNING,
+
+        ALLOCATING,  // RELEASED -> ALLOCATED
+        STARTING,    // ALLOCATED -> RUNNING
+        STOPPING,    // RUNNING -> ALLOCATED
+        FLUSHING,    // RUNNING -> FLUSHED
+        RESUMING,    // FLUSHED -> RUNNING
+        RELEASING,   // {ANY EXCEPT RELEASED} -> RELEASED
+    };
+
+    struct State {
+        inline State() : mState(RELEASED) {}
+
+        int mState;
+        std::shared_ptr<C2Component> mComp;
+    };
+
+    struct Formats {
+        sp<AMessage> mInputFormat;
+        sp<AMessage> mOutputFormat;
+    };
+
+    Mutexed<State> mState;
+    std::shared_ptr<CCodecBufferChannel> mChannel;
+    std::shared_ptr<C2Component::Listener> mListener;
+    Mutexed<TimePoint> mDeadline;
+    Mutexed<Formats> mFormats;
+
+    DISALLOW_EVIL_CONSTRUCTORS(CCodec);
+};
+
+}  // namespace android
+
+#endif  // C_CODEC_H_
diff --git a/media/libstagefright/include/media/stagefright/CallbackMediaSource.h b/media/libstagefright/include/media/stagefright/CallbackMediaSource.h
new file mode 100644
index 0000000..3459de1
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/CallbackMediaSource.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CALLBACK_MEDIA_SOURCE_H_
+#define CALLBACK_MEDIA_SOURCE_H_
+
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+// A stagefright MediaSource that wraps a binder IMediaSource.
+class CallbackMediaSource : public MediaSource {
+public:
+    explicit CallbackMediaSource(const sp<IMediaSource> &source);
+    virtual ~CallbackMediaSource();
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options = NULL);
+    virtual status_t pause();
+
+private:
+    sp<IMediaSource> mSource;
+
+    DISALLOW_EVIL_CONSTRUCTORS(CallbackMediaSource);
+};
+
+}  // namespace android
+
+#endif  // CALLBACK_MEDIA_SOURCE_H_
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index d6149c0..945e1be 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -19,8 +19,8 @@
 #define CAMERA_SOURCE_H_
 
 #include <deque>
+#include <media/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaSource.h>
 #include <camera/android/hardware/ICamera.h>
 #include <camera/ICameraRecordingProxy.h>
 #include <camera/ICameraRecordingProxyListener.h>
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 9197f7b..268662f 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -18,6 +18,7 @@
 
 #define CODEC_BASE_H_
 
+#include <list>
 #include <memory>
 
 #include <stdint.h>
@@ -26,7 +27,6 @@
 
 #include <media/hardware/CryptoAPI.h>
 #include <media/hardware/HardwareAPI.h>
-#include <media/IOMX.h>
 #include <media/MediaCodecInfo.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/ColorUtils.h>
diff --git a/media/libstagefright/include/media/stagefright/DataSourceFactory.h b/media/libstagefright/include/media/stagefright/DataSourceFactory.h
new file mode 100644
index 0000000..2a1d491
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/DataSourceFactory.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DATA_SOURCE_FACTORY_H_
+
+#define DATA_SOURCE_FACTORY_H_
+
+#include <sys/types.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct MediaHTTPService;
+class String8;
+struct HTTPBase;
+
+class DataSourceFactory {
+public:
+    static sp<DataSource> CreateFromURI(
+            const sp<MediaHTTPService> &httpService,
+            const char *uri,
+            const KeyedVector<String8, String8> *headers = NULL,
+            String8 *contentType = NULL,
+            HTTPBase *httpSource = NULL);
+
+    static sp<DataSource> CreateMediaHTTP(const sp<MediaHTTPService> &httpService);
+    static sp<DataSource> CreateFromFd(int fd, int64_t offset, int64_t length);
+};
+
+}  // namespace android
+
+#endif  // DATA_SOURCE_FACTORY_H_
diff --git a/media/libstagefright/include/media/stagefright/DataURISource.h b/media/libstagefright/include/media/stagefright/DataURISource.h
index 693562e..cf8d68e 100644
--- a/media/libstagefright/include/media/stagefright/DataURISource.h
+++ b/media/libstagefright/include/media/stagefright/DataURISource.h
@@ -18,7 +18,7 @@
 
 #define DATA_URI_SOURCE_H_
 
-#include <media/stagefright/DataSource.h>
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/ABase.h>
 
 namespace android {
diff --git a/media/libstagefright/include/media/stagefright/FileSource.h b/media/libstagefright/include/media/stagefright/FileSource.h
index 7267e9a..8604890 100644
--- a/media/libstagefright/include/media/stagefright/FileSource.h
+++ b/media/libstagefright/include/media/stagefright/FileSource.h
@@ -20,7 +20,7 @@
 
 #include <stdio.h>
 
-#include <media/stagefright/DataSource.h>
+#include <media/DataSource.h>
 #include <media/stagefright/MediaErrors.h>
 #include <utils/threads.h>
 #include <drm/DrmManagerClient.h>
@@ -45,8 +45,6 @@
 
     virtual sp<DecryptHandle> DrmInitialization(const char *mime);
 
-    virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client);
-
     virtual String8 toString() {
         return mName;
     }
diff --git a/media/libstagefright/include/media/stagefright/InterfaceUtils.h b/media/libstagefright/include/media/stagefright/InterfaceUtils.h
new file mode 100644
index 0000000..783f109
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/InterfaceUtils.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef INTERFACE_UTILS_H_
+#define INTERFACE_UTILS_H_
+
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
+#include <media/IMediaExtractor.h>
+#include <media/IMediaSource.h>
+
+namespace android {
+
+// Creates a DataSource which wraps the given IDataSource object.
+sp<DataSource> CreateDataSourceFromIDataSource(const sp<IDataSource> &source);
+
+// Creates an IDataSource wrapper to the given DataSource.
+sp<IDataSource> CreateIDataSourceFromDataSource(const sp<DataSource> &source);
+
+// Creates an IMediaExtractor wrapper to the given MediaExtractor.
+sp<IMediaExtractor> CreateIMediaExtractorFromMediaExtractor(const sp<MediaExtractor> &extractor);
+
+// Creates a MediaSource which wraps the given IMediaSource object.
+sp<MediaSource> CreateMediaSourceFromIMediaSource(const sp<IMediaSource> &source);
+
+// Creates an IMediaSource wrapper to the given MediaSource.
+sp<IMediaSource> CreateIMediaSourceFromMediaSource(const sp<MediaSource> &source);
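+
+// Illustrative usage (a sketch, not part of this change; |localSource| is an
+// assumed caller-provided MediaSource): a process-local source can be sent
+// across binder and unwrapped again by pairing these helpers, e.g.
+//     sp<IMediaSource> binderSource =
+//             CreateIMediaSourceFromMediaSource(localSource);
+//     sp<MediaSource> unwrapped =
+//             CreateMediaSourceFromIMediaSource(binderSource);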
+
+}  // namespace android
+
+#endif  // INTERFACE_UTILS_H_
diff --git a/media/libstagefright/include/media/stagefright/JPEGSource.h b/media/libstagefright/include/media/stagefright/JPEGSource.h
index 1b7e91b..9fcbfc2 100644
--- a/media/libstagefright/include/media/stagefright/JPEGSource.h
+++ b/media/libstagefright/include/media/stagefright/JPEGSource.h
@@ -18,7 +18,7 @@
 
 #define JPEG_SOURCE_H_
 
-#include <media/stagefright/MediaSource.h>
+#include <media/MediaSource.h>
 
 namespace android {
 
diff --git a/media/libstagefright/include/media/stagefright/MPEG2TSWriter.h b/media/libstagefright/include/media/stagefright/MPEG2TSWriter.h
index 4516fb6..3d7960b 100644
--- a/media/libstagefright/include/media/stagefright/MPEG2TSWriter.h
+++ b/media/libstagefright/include/media/stagefright/MPEG2TSWriter.h
@@ -34,7 +34,7 @@
             void *cookie,
             ssize_t (*write)(void *cookie, const void *data, size_t size));
 
-    virtual status_t addSource(const sp<IMediaSource> &source);
+    virtual status_t addSource(const sp<MediaSource> &source);
     virtual status_t start(MetaData *param = NULL);
     virtual status_t stop() { return reset(); }
     virtual status_t pause();
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 1c7b4a6..5d2c120 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -20,7 +20,6 @@
 
 #include <stdio.h>
 
-#include <media/IMediaSource.h>
 #include <media/stagefright/MediaWriter.h>
 #include <utils/List.h>
 #include <utils/threads.h>
@@ -32,6 +31,7 @@
 struct AMessage;
 class MediaBuffer;
 class MetaData;
+struct ABuffer;
 
 class MPEG4Writer : public MediaWriter {
 public:
@@ -40,7 +40,7 @@
     // Limitations
     // No more than one video and/or one audio source can be added, but
     // multiple metadata sources can be added.
-    virtual status_t addSource(const sp<IMediaSource> &source);
+    virtual status_t addSource(const sp<MediaSource> &source);
 
     // Returns INVALID_OPERATION if there is no source or track.
     virtual status_t start(MetaData *param = NULL);
@@ -101,12 +101,12 @@
     bool mSendNotify;
     off64_t mOffset;
     off_t mMdatOffset;
-    uint8_t *mMoovBoxBuffer;
-    off64_t mMoovBoxBufferOffset;
-    bool  mWriteMoovBoxToMemory;
+    uint8_t *mInMemoryCache;
+    off64_t mInMemoryCacheOffset;
+    off64_t mInMemoryCacheSize;
+    bool  mWriteBoxToMemory;
     off64_t mFreeBoxOffset;
     bool mStreamableFile;
-    off64_t mEstimatedMoovBoxSize;
     off64_t mMoovExtraSize;
     uint32_t mInterleaveDurationUs;
     int32_t mTimeScale;
@@ -133,6 +133,8 @@
     status_t startTracks(MetaData *params);
     size_t numTracks();
     int64_t estimateMoovBoxSize(int32_t bitRate);
+    int64_t estimateFileLevelMetaSize();
+    void writeCachedBoxToFile(const char *type);
 
     struct Chunk {
         Track               *mTrack;        // Owner
@@ -165,6 +167,46 @@
     List<ChunkInfo> mChunkInfos;            // Chunk infos
     Condition       mChunkReadyCondition;   // Signal that chunks are available
 
+    // HEIF writing
+    typedef struct _ItemInfo {
+        bool isGrid() const { return !strcmp("grid", itemType); }
+        const char *itemType;
+        uint16_t itemId;
+        bool isPrimary;
+        bool isHidden;
+        union {
+            // image item
+            struct {
+                uint32_t offset;
+                uint32_t size;
+            };
+            // grid item
+            struct {
+                uint32_t rows;
+                uint32_t cols;
+                uint32_t width;
+                uint32_t height;
+            };
+        };
+        Vector<uint16_t> properties;
+        Vector<uint16_t> dimgRefs;
+    } ItemInfo;
+
+    typedef struct _ItemProperty {
+        uint32_t type;
+        int32_t width;
+        int32_t height;
+        sp<ABuffer> hvcc;
+    } ItemProperty;
+
+    bool mHasFileLevelMeta;
+    bool mHasMoovBox;
+    uint32_t mPrimaryItemId;
+    uint32_t mAssociationEntryCount;
+    uint32_t mNumGrids;
+    Vector<ItemInfo> mItems;
+    Vector<ItemProperty> mProperties;
+
     // Writer thread handling
     status_t startWriterThread();
     void stopWriterThread();
@@ -210,9 +252,11 @@
     void initInternal(int fd, bool isFirstSession);
 
     // Acquire lock before calling these methods
-    off64_t addSample_l(MediaBuffer *buffer);
-    off64_t addLengthPrefixedSample_l(MediaBuffer *buffer);
-    off64_t addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer);
+    off64_t addSample_l(MediaBuffer *buffer, bool usePrefix, size_t *bytesWritten);
+    void addLengthPrefixedSample_l(MediaBuffer *buffer);
+    void addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer);
+    uint16_t addProperty_l(const ItemProperty &);
+    uint16_t addItem_l(const ItemInfo &);
 
     bool exceedsFileSizeLimit();
     bool use32BitFileOffset() const;
@@ -231,10 +275,23 @@
     void finishCurrentSession();
 
     void addDeviceMeta();
-    void writeHdlr();
+    void writeHdlr(const char *handlerType);
     void writeKeys();
     void writeIlst();
-    void writeMetaBox();
+    void writeMoovLevelMetaBox();
+
+    // HEIF writing
+    void writeIlocBox();
+    void writeInfeBox(uint16_t itemId, const char *type, uint32_t flags);
+    void writeIinfBox();
+    void writeIpcoBox();
+    void writeIpmaBox();
+    void writeIprpBox();
+    void writeIdatBox();
+    void writeIrefBox();
+    void writePitmBox();
+    void writeFileLevelMetaBox();
+
     void sendSessionSummary();
     void release();
     status_t switchFd();
diff --git a/media/libstagefright/include/media/stagefright/MediaAdapter.h b/media/libstagefright/include/media/stagefright/MediaAdapter.h
index 369fce6..4b47160 100644
--- a/media/libstagefright/include/media/stagefright/MediaAdapter.h
+++ b/media/libstagefright/include/media/stagefright/MediaAdapter.h
@@ -17,8 +17,8 @@
 #ifndef MEDIA_ADAPTER_H
 #define MEDIA_ADAPTER_H
 
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MetaData.h>
 #include <utils/threads.h>
diff --git a/media/libstagefright/include/media/stagefright/MediaBuffer.h b/media/libstagefright/include/media/stagefright/MediaBuffer.h
index e74410d..367a467 100644
--- a/media/libstagefright/include/media/stagefright/MediaBuffer.h
+++ b/media/libstagefright/include/media/stagefright/MediaBuffer.h
@@ -58,8 +58,6 @@
 
     explicit MediaBuffer(size_t size);
 
-    explicit MediaBuffer(const sp<GraphicBuffer>& graphicBuffer);
-
     explicit MediaBuffer(const sp<ABuffer> &buffer);
 
     MediaBuffer(const sp<IMemory> &mem) :
@@ -88,8 +86,6 @@
 
     void set_range(size_t offset, size_t length);
 
-    sp<GraphicBuffer> graphicBuffer() const;
-
     sp<MetaData> meta_data();
 
     // Clears meta data and resets the range to the full extent.
@@ -167,7 +163,6 @@
 
     void *mData;
     size_t mSize, mRangeOffset, mRangeLength;
-    sp<GraphicBuffer> mGraphicBuffer;
     sp<ABuffer> mBuffer;
 
     bool mOwnsData;
diff --git a/media/libstagefright/include/media/stagefright/MediaClock.h b/media/libstagefright/include/media/stagefright/MediaClock.h
index dd1a809..7511913 100644
--- a/media/libstagefright/include/media/stagefright/MediaClock.h
+++ b/media/libstagefright/include/media/stagefright/MediaClock.h
@@ -18,7 +18,8 @@
 
 #define MEDIA_CLOCK_H_
 
-#include <media/stagefright/foundation/ABase.h>
+#include <list>
+#include <media/stagefright/foundation/AHandler.h>
 #include <utils/Mutex.h>
 #include <utils/RefBase.h>
 
@@ -26,8 +27,14 @@
 
 struct AMessage;
 
-struct MediaClock : public RefBase {
+struct MediaClock : public AHandler {
+    enum {
+        TIMER_REASON_REACHED = 0,
+        TIMER_REASON_RESET = 1,
+    };
+
     MediaClock();
+    void init();
 
     void setStartingTimeMedia(int64_t startingTimeMediaUs);
 
@@ -54,15 +61,38 @@
     // The result is saved in |outRealUs|.
     status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs) const;
 
+    // Request to set up a timer. The target time is |mediaTimeUs|, adjusted by
+    // |adjustRealUs| of real (system) time. In other words, the wake-up time is
+    // mediaTimeUs + (adjustRealUs / playbackRate).
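+    // For example (illustrative only, not part of this change): at
+    // playbackRate 1.0, a caller that wants to wake up 100ms of real time
+    // before media time 5s could call
+    //     addTimer(notify, 5000000 /* mediaTimeUs */, -100000 /* adjustRealUs */);
+    // which fires at media time 5000000 + (-100000 / 1.0) = 4900000us.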
+    void addTimer(const sp<AMessage> &notify, int64_t mediaTimeUs, int64_t adjustRealUs = 0);
+
+    void reset();
+
 protected:
     virtual ~MediaClock();
 
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
 private:
+    enum {
+        kWhatTimeIsUp = 'tIsU',
+    };
+
+    struct Timer {
+        Timer(const sp<AMessage> &notify, int64_t mediaTimeUs, int64_t adjustRealUs);
+        const sp<AMessage> mNotify;
+        int64_t mMediaTimeUs;
+        int64_t mAdjustRealUs;
+    };
+
     status_t getMediaTime_l(
             int64_t realUs,
             int64_t *outMediaUs,
             bool allowPastMaxTime) const;
 
+    void processTimers_l();
+
+    sp<ALooper> mLooper;
     mutable Mutex mLock;
 
     int64_t mAnchorTimeMediaUs;
@@ -72,6 +102,9 @@
 
     float mPlaybackRate;
 
+    int32_t mGeneration;
+    std::list<Timer> mTimers;
+
     DISALLOW_EVIL_CONSTRUCTORS(MediaClock);
 };
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 1030407..0bc02af 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -217,6 +217,7 @@
         STOPPING,
         RELEASING,
     };
+    std::string stateString(State state);
 
     enum {
         kPortIndexInput         = 0,
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecSource.h b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
index 3ac539e..bc0653d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecSource.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
@@ -17,10 +17,10 @@
 #ifndef MediaCodecSource_H_
 #define MediaCodecSource_H_
 
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/foundation/AHandlerReflector.h>
 #include <media/stagefright/foundation/Mutexed.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/PersistentSurface.h>
 
 namespace android {
diff --git a/media/libstagefright/include/media/stagefright/MediaDefs.h b/media/libstagefright/include/media/stagefright/MediaDefs.h
index 359fb69..d20c5da 100644
--- a/media/libstagefright/include/media/stagefright/MediaDefs.h
+++ b/media/libstagefright/include/media/stagefright/MediaDefs.h
@@ -26,6 +26,6 @@
  *
  */
 
-#include <media/MediaDefs.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 
 #endif  // STAGEFRIGHT_MEDIA_DEFS_H_
diff --git a/media/libstagefright/include/media/stagefright/MediaExtractor.h b/media/libstagefright/include/media/stagefright/MediaExtractor.h
deleted file mode 100644
index 6ec7eaf..0000000
--- a/media/libstagefright/include/media/stagefright/MediaExtractor.h
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MEDIA_EXTRACTOR_H_
-
-#define MEDIA_EXTRACTOR_H_
-
-#include <media/IMediaExtractor.h>
-#include <media/IMediaSource.h>
-#include <media/MediaAnalyticsItem.h>
-
-namespace android {
-class DataSource;
-struct MediaSource;
-class MetaData;
-
-class MediaExtractor : public BnMediaExtractor {
-public:
-    static sp<IMediaExtractor> Create(
-            const sp<DataSource> &source, const char *mime = NULL);
-    static sp<MediaExtractor> CreateFromService(
-            const sp<DataSource> &source, const char *mime = NULL);
-
-    virtual size_t countTracks() = 0;
-    virtual sp<IMediaSource> getTrack(size_t index) = 0;
-
-    enum GetTrackMetaDataFlags {
-        kIncludeExtensiveMetaData = 1
-    };
-    virtual sp<MetaData> getTrackMetaData(
-            size_t index, uint32_t flags = 0) = 0;
-
-    // Return container specific meta-data. The default implementation
-    // returns an empty metadata object.
-    virtual sp<MetaData> getMetaData();
-
-    status_t getMetrics(Parcel *reply);
-
-    enum Flags {
-        CAN_SEEK_BACKWARD  = 1,  // the "seek 10secs back button"
-        CAN_SEEK_FORWARD   = 2,  // the "seek 10secs forward button"
-        CAN_PAUSE          = 4,
-        CAN_SEEK           = 8,  // the "seek bar"
-    };
-
-    // If subclasses do _not_ override this, the default is
-    // CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK | CAN_PAUSE
-    virtual uint32_t flags() const;
-
-    // for DRM
-    virtual char* getDrmTrackInfo(size_t /*trackID*/, int * /*len*/) {
-        return NULL;
-    }
-    virtual void setUID(uid_t /*uid*/) {
-    }
-    virtual status_t setMediaCas(const HInterfaceToken &/*casToken*/) override {
-        return INVALID_OPERATION;
-    }
-
-    virtual const char * name() { return "<unspecified>"; }
-
-    virtual void release() {}
-
-protected:
-    MediaExtractor();
-    virtual ~MediaExtractor();
-
-    MediaAnalyticsItem *mAnalyticsItem;
-
-    virtual void populateMetrics();
-
-private:
-
-    typedef bool (*SnifferFunc)(
-            const sp<DataSource> &source, String8 *mimeType,
-            float *confidence, sp<AMessage> *meta);
-
-    static Mutex gSnifferMutex;
-    static List<SnifferFunc> gSniffers;
-    static bool gSniffersRegistered;
-
-    // The sniffer can optionally fill in "meta" with an AMessage containing
-    // a dictionary of values that helps the corresponding extractor initialize
-    // its state without duplicating effort already exerted by the sniffer.
-    static void RegisterSniffer_l(SnifferFunc func);
-
-    static bool sniff(const sp<DataSource> &source,
-            String8 *mimeType, float *confidence, sp<AMessage> *meta);
-
-    static void RegisterDefaultSniffers();
-
-    MediaExtractor(const MediaExtractor &);
-    MediaExtractor &operator=(const MediaExtractor &);
-};
-
-}  // namespace android
-
-#endif  // MEDIA_EXTRACTOR_H_
diff --git a/media/libstagefright/include/media/stagefright/MediaExtractorFactory.h b/media/libstagefright/include/media/stagefright/MediaExtractorFactory.h
new file mode 100644
index 0000000..f216ff8
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaExtractorFactory.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_EXTRACTOR_FACTORY_H_
+
+#define MEDIA_EXTRACTOR_FACTORY_H_
+
+#include <stdio.h>
+
+#include <media/IMediaExtractor.h>
+#include <media/MediaExtractor.h>
+
+namespace android {
+
+class DataSource;
+
+class MediaExtractorFactory {
+public:
+    static sp<IMediaExtractor> Create(
+            const sp<DataSource> &source, const char *mime = NULL);
+    // Creates a media extractor from the given file descriptor. To avoid binder calls for
+    // reading file data, this tries to create a remote file source in the extractor service.
+    // If that fails, it falls back to a local file source. The data source used for the
+    // extractor is also returned via |out|.
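+    // Example (an illustrative sketch; |fd| and |fileLength| are assumed to be
+    // supplied by the caller and are not part of this change):
+    //     sp<DataSource> source;
+    //     sp<IMediaExtractor> extractor = MediaExtractorFactory::CreateFromFd(
+    //             fd, 0 /* offset */, fileLength, NULL /* mime */, &source);
+    //     // |source| is the remote or local file source actually used.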
+    static sp<IMediaExtractor> CreateFromFd(
+            int fd, int64_t offset, int64_t length, const char *mime, sp<DataSource> *out);
+    static sp<MediaExtractor> CreateFromService(
+            const sp<DataSource> &source, const char *mime = NULL);
+
+private:
+    static Mutex gSnifferMutex;
+    static List<MediaExtractor::ExtractorDef> gSniffers;
+    static bool gSniffersRegistered;
+
+    static void RegisterSniffer_l(const MediaExtractor::ExtractorDef &def);
+
+    static MediaExtractor::CreatorFunc sniff(const sp<DataSource> &source,
+            String8 *mimeType, float *confidence, sp<AMessage> *meta);
+
+    static void RegisterDefaultSniffers();
+};
+
+}  // namespace android
+
+#endif  // MEDIA_EXTRACTOR_FACTORY_H_
diff --git a/media/libstagefright/include/media/stagefright/MediaFilter.h b/media/libstagefright/include/media/stagefright/MediaFilter.h
index 0c10d11..a28c49d 100644
--- a/media/libstagefright/include/media/stagefright/MediaFilter.h
+++ b/media/libstagefright/include/media/stagefright/MediaFilter.h
@@ -57,7 +57,7 @@
             OWNED_BY_UPSTREAM,
         };
 
-        IOMX::buffer_id mBufferID;
+        uint32_t mBufferID;
         int32_t mGeneration;
         int32_t mOutputFlags;
         Status mStatus;
@@ -121,7 +121,7 @@
 
     status_t allocateBuffersOnPort(OMX_U32 portIndex);
     BufferInfo *findBufferByID(
-            uint32_t portIndex, IOMX::buffer_id bufferID,
+            uint32_t portIndex, uint32_t bufferID,
             ssize_t *index = NULL);
     void postFillThisBuffer(BufferInfo *info);
     void postDrainThisBuffer(BufferInfo *info);
diff --git a/media/libstagefright/include/media/stagefright/MediaHTTP.h b/media/libstagefright/include/media/stagefright/MediaHTTP.h
index 006d8d8..fe0e613 100644
--- a/media/libstagefright/include/media/stagefright/MediaHTTP.h
+++ b/media/libstagefright/include/media/stagefright/MediaHTTP.h
@@ -24,10 +24,10 @@
 
 namespace android {
 
-struct IMediaHTTPConnection;
+struct MediaHTTPConnection;
 
 struct MediaHTTP : public HTTPBase {
-    MediaHTTP(const sp<IMediaHTTPConnection> &conn);
+    MediaHTTP(const sp<MediaHTTPConnection> &conn);
 
     virtual status_t connect(
             const char *uri,
@@ -50,13 +50,12 @@
     virtual ~MediaHTTP();
 
     virtual sp<DecryptHandle> DrmInitialization(const char* mime);
-    virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client);
     virtual String8 getUri();
     virtual String8 getMIMEType() const;
 
 private:
     status_t mInitCheck;
-    sp<IMediaHTTPConnection> mHTTPConnection;
+    sp<MediaHTTPConnection> mHTTPConnection;
 
     KeyedVector<String8, String8> mLastHeaders;
     AString mLastURI;
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index 63c3ca5..66f4d72 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -48,6 +48,7 @@
         OUTPUT_FORMAT_MPEG_4      = 0,
         OUTPUT_FORMAT_WEBM        = 1,
         OUTPUT_FORMAT_THREE_GPP   = 2,
+        OUTPUT_FORMAT_HEIF        = 3,
         OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
     };
 
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index cd4af4d..c4bba0e 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -19,8 +19,8 @@
 #define MEDIA_WRITER_H_
 
 #include <utils/RefBase.h>
+#include <media/MediaSource.h>
 #include <media/IMediaRecorderClient.h>
-#include <media/IMediaSource.h>
 
 namespace android {
 
@@ -32,7 +32,7 @@
           mMaxFileDurationLimitUs(0) {
     }
 
-    virtual status_t addSource(const sp<IMediaSource> &source) = 0;
+    virtual status_t addSource(const sp<MediaSource> &source) = 0;
     virtual bool reachedEOS() = 0;
     virtual status_t start(MetaData *params = NULL) = 0;
     virtual status_t stop() = 0;
diff --git a/media/libstagefright/include/media/stagefright/MetaData.h b/media/libstagefright/include/media/stagefright/MetaData.h
index 6cfde9c..3438c56 100644
--- a/media/libstagefright/include/media/stagefright/MetaData.h
+++ b/media/libstagefright/include/media/stagefright/MetaData.h
@@ -215,7 +215,11 @@
 
     kKeyGridWidth        = 'grdW', // int32_t, HEIF grid width
     kKeyGridHeight       = 'grdH', // int32_t, HEIF grid height
+    kKeyGridRows         = 'grdR', // int32_t, HEIF grid rows
+    kKeyGridCols         = 'grdC', // int32_t, HEIF grid columns
     kKeyIccProfile       = 'prof', // raw data, ICC profile data
+    kKeyIsPrimaryImage   = 'prim', // bool (int32_t), image track is the primary image
+    kKeyFrameCount       = 'nfrm', // int32_t, total number of frames in the video track
 };
 
 enum {
diff --git a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
index 6a93bd5..eed0f05 100644
--- a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
@@ -17,9 +17,11 @@
 #ifndef NU_MEDIA_EXTRACTOR_H_
 #define NU_MEDIA_EXTRACTOR_H_
 
+#include <list>
+#include <media/mediaplayer.h>
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/IMediaExtractor.h>
+#include <media/MediaSource.h>
 #include <utils/Errors.h>
 #include <utils/KeyedVector.h>
 #include <utils/RefBase.h>
@@ -32,7 +34,7 @@
 struct ABuffer;
 struct AMessage;
 class DataSource;
-struct IMediaHTTPService;
+struct MediaHTTPService;
 class MediaBuffer;
 class MediaExtractor;
 struct MediaSource;
@@ -52,7 +54,7 @@
     NuMediaExtractor();
 
     status_t setDataSource(
-            const sp<IMediaHTTPService> &httpService,
+            const sp<MediaHTTPService> &httpService,
             const char *path,
             const KeyedVector<String8, String8> *headers = NULL);
 
@@ -67,7 +69,9 @@
 
     status_t getFileFormat(sp<AMessage> *format) const;
 
-    status_t selectTrack(size_t index);
+    status_t selectTrack(size_t index, int64_t startTimeUs = -1ll,
+            MediaSource::ReadOptions::SeekMode mode =
+                MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
     status_t unselectTrack(size_t index);
 
     status_t seekTo(
@@ -75,8 +79,12 @@
             MediaSource::ReadOptions::SeekMode mode =
                 MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
 
+    // Each selected track has a read pointer.
+    // advance() advances the read pointer with the lowest timestamp.
     status_t advance();
+    // readSampleData() reads the sample with the lowest timestamp.
     status_t readSampleData(const sp<ABuffer> &buffer);
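+    // A typical read loop combining the two calls above (an illustrative
+    // sketch only, assuming OK is returned while samples remain; |ex| and
+    // |buffer| are caller-provided):
+    //     while (ex->readSampleData(buffer) == OK) {
+    //         // consume the lowest-timestamp sample, then move on
+    //         ex->advance();
+    //     }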
+
     status_t getSampleTrackIndex(size_t *trackIndex);
     status_t getSampleTime(int64_t *sampleTimeUs);
     status_t getSampleMeta(sp<MetaData> *sampleMeta);
@@ -96,12 +104,20 @@
         kMaxTrackCount = 16384,
     };
 
+    struct Sample {
+        Sample();
+        Sample(MediaBuffer *buffer, int64_t timeUs);
+        MediaBuffer *mBuffer;
+        int64_t mSampleTimeUs;
+    };
+
     struct TrackInfo {
         sp<IMediaSource> mSource;
         size_t mTrackIndex;
+        media_track_type mTrackType;
+        size_t mMaxFetchCount;
         status_t mFinalResult;
-        MediaBuffer *mSample;
-        int64_t mSampleTimeUs;
+        std::list<Sample> mSamples;
 
         uint32_t mTrackFlags;  // bitmask of "TrackFlags"
     };
@@ -117,16 +133,23 @@
     int64_t mTotalBitrate;  // in bits/sec
     int64_t mDurationUs;
 
-    ssize_t fetchTrackSamples(
+    ssize_t fetchAllTrackSamples(
+            int64_t seekTimeUs = -1ll,
+            MediaSource::ReadOptions::SeekMode mode =
+                MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
+    void fetchTrackSamples(
+            TrackInfo *info,
             int64_t seekTimeUs = -1ll,
             MediaSource::ReadOptions::SeekMode mode =
                 MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
 
-    void releaseTrackSamples();
+    void releaseOneSample(TrackInfo *info);
+    void releaseTrackSamples(TrackInfo *info);
+    void releaseAllTrackSamples();
 
     bool getTotalBitrate(int64_t *bitRate) const;
     status_t updateDurationAndBitrate();
-    status_t appendVorbisNumPageSamples(TrackInfo *info, const sp<ABuffer> &buffer);
+    status_t appendVorbisNumPageSamples(MediaBuffer *mbuf, const sp<ABuffer> &buffer);
 
     DISALLOW_EVIL_CONSTRUCTORS(NuMediaExtractor);
 };
diff --git a/media/libstagefright/include/media/stagefright/OMXClient.h b/media/libstagefright/include/media/stagefright/OMXClient.h
index 2f159b0..bb133d3 100644
--- a/media/libstagefright/include/media/stagefright/OMXClient.h
+++ b/media/libstagefright/include/media/stagefright/OMXClient.h
@@ -27,16 +27,10 @@
     OMXClient();
 
     status_t connect();
-    status_t connect(bool* trebleFlag);
-    status_t connect(const char* name, bool* trebleFlag = nullptr);
-
-    status_t connectLegacy();
-    status_t connectTreble(const char* name = "default");
+    status_t connect(const char* name);
     void disconnect();
 
-    sp<IOMX> interface() {
-        return mOMX;
-    }
+    sp<IOMX> interface();
 
 private:
     sp<IOMX> mOMX;
diff --git a/media/libstagefright/include/media/stagefright/RemoteDataSource.h b/media/libstagefright/include/media/stagefright/RemoteDataSource.h
index c91ddfc..4ddc5e3 100644
--- a/media/libstagefright/include/media/stagefright/RemoteDataSource.h
+++ b/media/libstagefright/include/media/stagefright/RemoteDataSource.h
@@ -19,8 +19,8 @@
 
 #include <binder/IMemory.h>
 #include <binder/MemoryDealer.h>
+#include <media/DataSource.h>
 #include <media/IDataSource.h>
-#include <media/stagefright/DataSource.h>
 
 namespace android {
 
@@ -31,9 +31,6 @@
         if (source.get() == nullptr) {
             return nullptr;
         }
-        if (source->getIDataSource().get() != nullptr) {
-            return source->getIDataSource();
-        }
         return new RemoteDataSource(source);
     }
 
diff --git a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
new file mode 100644
index 0000000..98b8b4d
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef REMOTE_MEDIA_EXTRACTOR_H_
+#define REMOTE_MEDIA_EXTRACTOR_H_
+
+#include <media/IMediaExtractor.h>
+#include <media/MediaExtractor.h>
+
+namespace android {
+
+class MediaAnalyticsItem;
+
+// IMediaExtractor wrapper to the MediaExtractor.
+class RemoteMediaExtractor : public BnMediaExtractor {
+public:
+    static sp<IMediaExtractor> wrap(const sp<MediaExtractor> &extractor);
+
+    virtual ~RemoteMediaExtractor();
+    virtual size_t countTracks();
+    virtual sp<IMediaSource> getTrack(size_t index);
+    virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags = 0);
+    virtual sp<MetaData> getMetaData();
+    virtual status_t getMetrics(Parcel *reply);
+    virtual uint32_t flags() const;
+    virtual char* getDrmTrackInfo(size_t trackID, int * len);
+    virtual void setUID(uid_t uid);
+    virtual status_t setMediaCas(const HInterfaceToken &casToken);
+    virtual const char * name();
+    virtual void release();
+
+private:
+    sp<MediaExtractor> mExtractor;
+
+    MediaAnalyticsItem *mAnalyticsItem;
+
+    explicit RemoteMediaExtractor(const sp<MediaExtractor> &extractor);
+
+    DISALLOW_EVIL_CONSTRUCTORS(RemoteMediaExtractor);
+};
+
+}  // namespace android
+
+#endif  // REMOTE_MEDIA_EXTRACTOR_H_
diff --git a/media/libstagefright/include/media/stagefright/RemoteMediaSource.h b/media/libstagefright/include/media/stagefright/RemoteMediaSource.h
new file mode 100644
index 0000000..0a446a5
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/RemoteMediaSource.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef REMOTE_MEDIA_SOURCE_H_
+#define REMOTE_MEDIA_SOURCE_H_
+
+#include <media/IMediaSource.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+// IMediaSource wrapper to the MediaSource.
+class RemoteMediaSource : public BnMediaSource {
+public:
+    static sp<IMediaSource> wrap(const sp<MediaSource> &source);
+    virtual ~RemoteMediaSource();
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+    virtual status_t read(
+            MediaBuffer **buffer,
+            const MediaSource::ReadOptions *options = NULL);
+    virtual status_t pause();
+    virtual status_t setStopTimeUs(int64_t stopTimeUs);
+
+private:
+    sp<MediaSource> mSource;
+
+    explicit RemoteMediaSource(const sp<MediaSource> &source);
+
+    DISALLOW_EVIL_CONSTRUCTORS(RemoteMediaSource);
+};
+
+}  // namespace android
+
+#endif  // REMOTE_MEDIA_SOURCE_H_
diff --git a/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h b/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
index a000fde..3006b45 100644
--- a/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
+++ b/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
@@ -17,7 +17,7 @@
 #ifndef SIMPLE_DECODING_SOURCE_H_
 #define SIMPLE_DECODING_SOURCE_H_
 
-#include <media/stagefright/MediaSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/Mutexed.h>
 
@@ -45,12 +45,13 @@
     // does not support secure input or pausing.
     // if |desiredCodec| is given, use this specific codec.
     static sp<SimpleDecodingSource> Create(
-            const sp<IMediaSource> &source, uint32_t flags,
+            const sp<MediaSource> &source, uint32_t flags,
             const sp<ANativeWindow> &nativeWindow,
-            const char *desiredCodec = NULL);
+            const char *desiredCodec = NULL,
+            bool skipMediaCodecList = false);
 
     static sp<SimpleDecodingSource> Create(
-            const sp<IMediaSource> &source, uint32_t flags = 0);
+            const sp<MediaSource> &source, uint32_t flags = 0);
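+
+    // For instance (illustrative only; |track| is an assumed caller-provided
+    // MediaSource, e.g. a track obtained from an extractor):
+    //     sp<SimpleDecodingSource> decoded = SimpleDecodingSource::Create(track);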
 
     virtual ~SimpleDecodingSource();
 
@@ -68,16 +69,15 @@
 
     // unsupported methods
     virtual status_t pause() { return INVALID_OPERATION; }
-    virtual status_t setBuffers(const Vector<MediaBuffer *> &) { return INVALID_OPERATION; }
 
 private:
     // Construct this using a codec, source and looper.
     SimpleDecodingSource(
-            const sp<MediaCodec> &codec, const sp<IMediaSource> &source, const sp<ALooper> &looper,
+            const sp<MediaCodec> &codec, const sp<MediaSource> &source, const sp<ALooper> &looper,
             bool usingSurface, bool isVorbis, const sp<AMessage> &format);
 
     sp<MediaCodec> mCodec;
-    sp<IMediaSource> mSource;
+    sp<MediaSource> mSource;
     sp<ALooper> mLooper;
     bool mUsingSurface;
     bool mIsVorbis;
diff --git a/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h b/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
index d1677fa..2e495f9 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
@@ -22,7 +22,7 @@
 
 #include <utils/threads.h>
 #include <utils/Vector.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
 
 #include <media/hardware/MetadataBufferType.h>
diff --git a/media/libstagefright/include/media/stagefright/Utils.h b/media/libstagefright/include/media/stagefright/Utils.h
index 77cbd4c..7d4a611 100644
--- a/media/libstagefright/include/media/stagefright/Utils.h
+++ b/media/libstagefright/include/media/stagefright/Utils.h
@@ -28,20 +28,6 @@
 
 namespace android {
 
-#define FOURCC(c1, c2, c3, c4) \
-    ((c1) << 24 | (c2) << 16 | (c3) << 8 | (c4))
-
-uint16_t U16_AT(const uint8_t *ptr);
-uint32_t U32_AT(const uint8_t *ptr);
-uint64_t U64_AT(const uint8_t *ptr);
-
-uint16_t U16LE_AT(const uint8_t *ptr);
-uint32_t U32LE_AT(const uint8_t *ptr);
-uint64_t U64LE_AT(const uint8_t *ptr);
-
-uint64_t ntoh64(uint64_t x);
-uint64_t hton64(uint64_t x);
-
 class MetaData;
 struct AMessage;
 status_t convertMetaDataToMessage(
@@ -95,7 +81,6 @@
 void readFromAMessage(const sp<AMessage> &msg, BufferingSettings *buffering /* nonnull */);
 
 AString nameForFd(int fd);
-void MakeFourCCString(uint32_t x, char *s);
 }  // namespace android
 
 #endif  // UTILS_H_
diff --git a/media/libstagefright/matroska/Android.bp b/media/libstagefright/matroska/Android.bp
deleted file mode 100644
index ec2fb4b..0000000
--- a/media/libstagefright/matroska/Android.bp
+++ /dev/null
@@ -1,35 +0,0 @@
-cc_library_static {
-    name: "libstagefright_matroska",
-
-    srcs: ["MatroskaExtractor.cpp"],
-
-    include_dirs: [
-        "external/flac/include",
-        "external/libvpx/libwebm",
-        "frameworks/native/include/media/openmax",
-        "frameworks/av/media/libstagefright/flac/dec",
-        "frameworks/av/media/libstagefright/include",
-    ],
-
-    cflags: [
-        "-Wno-multichar",
-        "-Werror",
-        "-Wall",
-    ],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-        diag: {
-            cfi: true,
-        },
-    },
-
-    shared_libs: [
-        "libmedia",
-        "libstagefright_flacdec"
-    ],
-}
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index a256a4d..464ee90 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -21,7 +21,6 @@
 #include "AnotherPacketSource.h"
 #include "CasManager.h"
 #include "ESQueue.h"
-#include "include/avc_utils.h"
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <cutils/native_handle.h>
@@ -29,11 +28,13 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/foundation/MediaKeys.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 #include <media/IStreamSource.h>
 #include <utils/KeyedVector.h>
 #include <utils/Vector.h>
@@ -341,7 +342,7 @@
     if ((type & DISCONTINUITY_TIME)
             && extra != NULL
             && extra->findInt64(
-                IStreamListener::kKeyMediaTimeUs, &mediaTimeUs)) {
+                kATSParserKeyMediaTimeUs, &mediaTimeUs)) {
         mFirstPTSValid = false;
     }
 
@@ -1032,7 +1033,7 @@
         uint64_t resumeAtPTS;
         if (extra != NULL
                 && extra->findInt64(
-                    IStreamListener::kKeyResumeAtPTS,
+                    kATSParserKeyResumeAtPTS,
                     (int64_t *)&resumeAtPTS)) {
             int64_t resumeAtMediaTimeUs =
                 mProgram->convertPTSToTimestamp(resumeAtPTS);
@@ -1694,12 +1695,12 @@
         DiscontinuityType type, const sp<AMessage> &extra) {
     int64_t mediaTimeUs;
     if ((type & DISCONTINUITY_TIME) && extra != NULL) {
-        if (extra->findInt64(IStreamListener::kKeyMediaTimeUs, &mediaTimeUs)) {
+        if (extra->findInt64(kATSParserKeyMediaTimeUs, &mediaTimeUs)) {
             mAbsoluteTimeAnchorUs = mediaTimeUs;
         }
         if ((mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE)
                 && extra->findInt64(
-                    IStreamListener::kKeyRecentMediaTimeUs, &mediaTimeUs)) {
+                    kATSParserKeyRecentMediaTimeUs, &mediaTimeUs)) {
             if (mAbsoluteTimeAnchorUs >= 0ll) {
                 mediaTimeUs -= mAbsoluteTimeAnchorUs;
             }
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 41c19cd..6079afc 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -20,9 +20,9 @@
 
 #include <sys/types.h>
 
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaSource.h>
 #include <utils/KeyedVector.h>
 #include <utils/Vector.h>
 #include <utils/RefBase.h>
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/libstagefright/mpeg2ts/Android.bp
index 21259c4..7654eb3 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/libstagefright/mpeg2ts/Android.bp
@@ -1,5 +1,5 @@
 cc_library_static {
-    name: "libstagefright_mpeg2ts",
+    name: "libstagefright_mpeg2support",
 
     srcs: [
         "AnotherPacketSource.cpp",
@@ -7,8 +7,6 @@
         "CasManager.cpp",
         "ESQueue.cpp",
         "HlsSampleDecryptor.cpp",
-        "MPEG2PSExtractor.cpp",
-        "MPEG2TSExtractor.cpp",
     ],
 
     include_dirs: [
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index 433b1fc..1dac171 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -19,13 +19,12 @@
 
 #include "AnotherPacketSource.h"
 
-#include "include/avc_utils.h"
-
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
@@ -663,7 +662,7 @@
                         && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
             }
         }
-        if (isAvc && !IsIDR(buffer)) {
+        if (isAvc && !IsIDR(buffer->data(), buffer->size())) {
             continue;
         }
 
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index b0890d7..3abd573 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -18,8 +18,8 @@
 
 #define ANOTHER_PACKET_SOURCE_H_
 
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaSource.h>
 #include <utils/threads.h>
 #include <utils/List.h>
 
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index 1cf9744..b621fd0 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -24,15 +24,14 @@
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 #include <media/cas/DescramblerAPI.h>
 #include <media/hardware/CryptoAPI.h>
 
-#include "include/avc_utils.h"
-
 #include <inttypes.h>
 #include <netinet/in.h>
 
diff --git a/media/libstagefright/omx/1.0/WOmxNode.cpp b/media/libstagefright/omx/1.0/WOmxNode.cpp
index 9f82283..1dc7c7b 100644
--- a/media/libstagefright/omx/1.0/WOmxNode.cpp
+++ b/media/libstagefright/omx/1.0/WOmxNode.cpp
@@ -154,7 +154,8 @@
                     hidl_handle const& outNativeHandle) {
                 fnStatus = toStatusT(status);
                 *buffer = outBuffer;
-                *native_handle = NativeHandle::create(
+                *native_handle = outNativeHandle.getNativeHandle() == nullptr ?
+                        nullptr : NativeHandle::create(
                         native_handle_clone(outNativeHandle), true);
             }));
     return transStatus == NO_ERROR ? fnStatus : transStatus;
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index bd3c1c6..8539864 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -9,8 +9,6 @@
         "FrameDropper.cpp",
         "GraphicBufferSource.cpp",
         "BWGraphicBufferSource.cpp",
-        "OMX.cpp",
-        "OMXStore.cpp",
         "OMXMaster.cpp",
         "OMXNodeInstance.cpp",
         "OMXUtils.cpp",
@@ -57,16 +55,12 @@
         "libhidlmemory",
         "libhidltransport",
         "libnativewindow", // TODO(b/62923479): use header library
-        "android.hidl.memory@1.0",
-        "android.hidl.token@1.0-utils",
-        "android.hardware.media@1.0",
+        "libvndksupport",
         "android.hardware.media.omx@1.0",
-        "android.hardware.graphics.common@1.0",
         "android.hardware.graphics.bufferqueue@1.0",
     ],
 
     export_shared_lib_headers: [
-        "android.hidl.memory@1.0",
         "libmedia_omx",
         "libstagefright_foundation",
         "libstagefright_xmlparser",
@@ -109,6 +103,7 @@
     ],
     shared_libs: [
         "libmedia_omx",
+        "libstagefright_foundation",
         "liblog",
     ],
     export_shared_lib_headers: [
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 1917d2a..f331dbb 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -46,6 +46,36 @@
 
 namespace android {
 
+namespace {
+// kTimestampFluctuation is an upper bound of timestamp fluctuation from the
+// source that GraphicBufferSource allows. The unit of kTimestampFluctuation is
+// frames. More specifically, GraphicBufferSource will drop a frame if
+//
+// expectedNewFrameTimestamp - actualNewFrameTimestamp <
+//     (0.5 - kTimestampFluctuation) * expectedTimePeriodBetweenFrames
+//
+// where
+// - expectedNewFrameTimestamp is the calculated ideal timestamp of the new
+//   incoming frame
+// - actualNewFrameTimestamp is the timestamp received from the source
+// - expectedTimePeriodBetweenFrames is the ideal difference of the timestamps
+//   of two adjacent frames
+//
+// See GraphicBufferSource::calculateCodecTimestamp_l() for more detail about
+// how kTimestampFluctuation is used.
+//
+// kTimestampFluctuation should be non-negative. A higher value causes a smaller
+// chance of dropping frames, but at the same time a higher bound on the
+// difference between the source timestamp and the interpreted (snapped)
+// timestamp.
+//
+// The value of 0.05 means that GraphicBufferSource expects the input timestamps
+// to fluctuate no more than 5% from the regular time period.
+//
+// TODO: Justify the choice of this value, or make it configurable.
+constexpr double kTimestampFluctuation = 0.05;
+}
+
 /**
  * A copiable object managing a buffer in the buffer cache managed by the producer. This object
  * holds a reference to the buffer, and maintains which buffer slot it belongs to (if any), and
@@ -732,14 +762,16 @@
             mFrameCount = 0;
         } else {
             // snap to nearest capture point
-            int64_t nFrames = std::llround(
-                    (timeUs - mPrevCaptureUs) * mCaptureFps / 1000000);
-            if (nFrames <= 0) {
+            double nFrames = (timeUs - mPrevCaptureUs) * mCaptureFps / 1000000;
+            if (nFrames < 0.5 - kTimestampFluctuation) {
                 // skip this frame as it's too close to previous capture
                 ALOGV("skipping frame, timeUs %lld", static_cast<long long>(timeUs));
                 return false;
             }
-            mFrameCount += nFrames;
+            if (nFrames <= 1.0) {
+                nFrames = 1.0;
+            }
+            mFrameCount += std::llround(nFrames);
             mPrevCaptureUs = mBaseCaptureUs + std::llround(
                     mFrameCount * 1000000 / mCaptureFps);
             mPrevFrameUs = mBaseFrameUs + std::llround(
@@ -1199,7 +1231,8 @@
     Mutex::Autolock autoLock(mMutex);
 
     mSkipFramesBeforeNs =
-            (skipFramesBeforeUs > 0) ? (skipFramesBeforeUs * 1000) : -1ll;
+            (skipFramesBeforeUs > 0 && skipFramesBeforeUs <= INT64_MAX / 1000) ?
+            (skipFramesBeforeUs * 1000) : -1ll;
 
     return OK;
 }
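
The two GraphicBufferSource.cpp hunks above change calculateCodecTimestamp_l() to snap each incoming buffer onto a capture grid of period 1/mCaptureFps: a frame is dropped only when it arrives less than (0.5 - kTimestampFluctuation) of a period after the previous snapped capture point, and otherwise advances the grid by at least one frame before snapping. A minimal standalone sketch of that rule follows; the helper name, the pointer-based state, and the numbers in main() are illustrative and not part of the patch, while the 0.05 constant and the arithmetic mirror the diff.

#include <cmath>
#include <cstdint>
#include <cstdio>

constexpr double kTimestampFluctuation = 0.05;

// Returns false when the frame should be dropped; otherwise advances the
// frame count and the snapped capture time (both passed by pointer).
bool snapToCaptureGrid(int64_t timeUs, double captureFps,
                       int64_t baseCaptureUs, int64_t* prevCaptureUs,
                       int64_t* frameCount) {
    double nFrames = (timeUs - *prevCaptureUs) * captureFps / 1000000;
    if (nFrames < 0.5 - kTimestampFluctuation) {
        return false;  // too close to the previous capture point
    }
    if (nFrames <= 1.0) {
        nFrames = 1.0;  // never snap back onto the previous point
    }
    *frameCount += std::llround(nFrames);
    *prevCaptureUs = baseCaptureUs +
            std::llround(*frameCount * 1000000 / captureFps);
    return true;
}

int main() {
    int64_t prevCaptureUs = 0, frameCount = 0;
    // At 30 fps the period is ~33333 us; a frame arriving 14000 us after the
    // previous capture is within 0.45 of a period, so it is dropped (prints 0).
    std::printf("%d\n", snapToCaptureGrid(14000, 30.0, 0, &prevCaptureUs, &frameCount));
    // A frame at 34000 us snaps to the next grid point, 33333 us (prints 1 33333).
    std::printf("%d %lld\n",
            snapToCaptureGrid(34000, 30.0, 0, &prevCaptureUs, &frameCount),
            (long long)prevCaptureUs);
    return 0;
}
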
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
deleted file mode 100644
index 09c4019..0000000
--- a/media/libstagefright/omx/OMX.cpp
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <inttypes.h>
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "OMX"
-#include <utils/Log.h>
-
-#include <dlfcn.h>
-
-#include <media/stagefright/omx/OMX.h>
-#include <media/stagefright/omx/OMXNodeInstance.h>
-#include <media/stagefright/omx/BWGraphicBufferSource.h>
-#include <media/stagefright/omx/OMXMaster.h>
-#include <media/stagefright/omx/OMXUtils.h>
-#include <media/stagefright/foundation/ADebug.h>
-
-namespace android {
-
-// node ids are created by concatenating the pid with a 16-bit counter
-static size_t kMaxNodeInstances = (1 << 16);
-
-OMX::OMX() : mMaster(new OMXMaster), mParser() {
-}
-
-OMX::~OMX() {
-    delete mMaster;
-    mMaster = NULL;
-}
-
-void OMX::binderDied(const wp<IBinder> &the_late_who) {
-    sp<OMXNodeInstance> instance;
-
-    {
-        Mutex::Autolock autoLock(mLock);
-
-        ssize_t index = mLiveNodes.indexOfKey(the_late_who);
-
-        if (index < 0) {
-            ALOGE("b/27597103, nonexistent observer on binderDied");
-            android_errorWriteLog(0x534e4554, "27597103");
-            return;
-        }
-
-        instance = mLiveNodes.editValueAt(index);
-        mLiveNodes.removeItemsAt(index);
-    }
-
-    instance->onObserverDied();
-}
-
-status_t OMX::listNodes(List<ComponentInfo> *list) {
-    list->clear();
-
-    OMX_U32 index = 0;
-    char componentName[256];
-    while (mMaster->enumerateComponents(
-                componentName, sizeof(componentName), index) == OMX_ErrorNone) {
-        list->push_back(ComponentInfo());
-        ComponentInfo &info = *--list->end();
-
-        info.mName = componentName;
-
-        Vector<String8> roles;
-        OMX_ERRORTYPE err =
-            mMaster->getRolesOfComponent(componentName, &roles);
-
-        if (err == OMX_ErrorNone) {
-            for (OMX_U32 i = 0; i < roles.size(); ++i) {
-                info.mRoles.push_back(roles[i]);
-            }
-        }
-
-        ++index;
-    }
-
-    return OK;
-}
-
-status_t OMX::allocateNode(
-        const char *name, const sp<IOMXObserver> &observer,
-        sp<IOMXNode> *omxNode) {
-    Mutex::Autolock autoLock(mLock);
-
-    omxNode->clear();
-
-    if (mLiveNodes.size() == kMaxNodeInstances) {
-        return NO_MEMORY;
-    }
-
-    sp<OMXNodeInstance> instance = new OMXNodeInstance(this, observer, name);
-
-    OMX_COMPONENTTYPE *handle;
-    OMX_ERRORTYPE err = mMaster->makeComponentInstance(
-            name, &OMXNodeInstance::kCallbacks,
-            instance.get(), &handle);
-
-    if (err != OMX_ErrorNone) {
-        ALOGE("FAILED to allocate omx component '%s' err=%s(%#x)", name, asString(err), err);
-
-        return StatusFromOMXError(err);
-    }
-    instance->setHandle(handle);
-
-    // Find quirks from mParser
-    const auto& codec = mParser.getCodecMap().find(name);
-    if (codec == mParser.getCodecMap().cend()) {
-        ALOGW("Failed to obtain quirks for omx component '%s' from XML files",
-                name);
-    } else {
-        uint32_t quirks = 0;
-        for (const auto& quirk : codec->second.quirkSet) {
-            if (quirk == "requires-allocate-on-input-ports") {
-                quirks |= OMXNodeInstance::
-                        kRequiresAllocateBufferOnInputPorts;
-            }
-            if (quirk == "requires-allocate-on-output-ports") {
-                quirks |= OMXNodeInstance::
-                        kRequiresAllocateBufferOnOutputPorts;
-            }
-        }
-        instance->setQuirks(quirks);
-    }
-
-    mLiveNodes.add(IInterface::asBinder(observer), instance);
-    IInterface::asBinder(observer)->linkToDeath(this);
-
-    *omxNode = instance;
-
-    return OK;
-}
-
-status_t OMX::freeNode(const sp<OMXNodeInstance> &instance) {
-    if (instance == NULL) {
-        return OK;
-    }
-
-    {
-        Mutex::Autolock autoLock(mLock);
-        ssize_t index = mLiveNodes.indexOfKey(IInterface::asBinder(instance->observer()));
-        if (index < 0) {
-            // This could conceivably happen if the observer dies at roughly the
-            // same time that a client attempts to free the node explicitly.
-
-            // NOTE: it's guaranteed that this method is called at most once per
-            //       instance.
-            ALOGV("freeNode: instance already removed from book-keeping.");
-        } else {
-            mLiveNodes.removeItemsAt(index);
-            IInterface::asBinder(instance->observer())->unlinkToDeath(this);
-        }
-    }
-
-    CHECK(instance->handle() != NULL);
-    OMX_ERRORTYPE err = mMaster->destroyComponentInstance(
-            static_cast<OMX_COMPONENTTYPE *>(instance->handle()));
-    ALOGV("freeNode: handle destroyed: %p", instance->handle());
-
-    return StatusFromOMXError(err);
-}
-
-status_t OMX::createInputSurface(
-        sp<IGraphicBufferProducer> *bufferProducer,
-        sp<IGraphicBufferSource> *bufferSource) {
-    if (bufferProducer == NULL || bufferSource == NULL) {
-        ALOGE("b/25884056");
-        return BAD_VALUE;
-    }
-
-    sp<GraphicBufferSource> graphicBufferSource = new GraphicBufferSource();
-    status_t err = graphicBufferSource->initCheck();
-    if (err != OK) {
-        ALOGE("Failed to create persistent input surface: %s (%d)",
-                strerror(-err), err);
-        return err;
-    }
-
-    *bufferProducer = graphicBufferSource->getIGraphicBufferProducer();
-    *bufferSource = new BWGraphicBufferSource(graphicBufferSource);
-
-    return OK;
-}
-
-}  // namespace android
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
index fd97fdc..0967b5f 100644
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ b/media/libstagefright/omx/OMXMaster.cpp
@@ -22,6 +22,8 @@
 #include <media/stagefright/omx/SoftOMXPlugin.h>
 #include <media/stagefright/foundation/ADebug.h>
 
+#include <vndksupport/linker.h>
+
 #include <dlfcn.h>
 #include <fcntl.h>
 
@@ -67,7 +69,7 @@
 }
 
 void OMXMaster::addPlugin(const char *libname) {
-    mVendorLibHandle = dlopen(libname, RTLD_NOW);
+    mVendorLibHandle = android_load_sphal_library(libname, RTLD_NOW);
 
     if (mVendorLibHandle == NULL) {
         return;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 015a148..ff58eb6 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -344,7 +344,7 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 OMXNodeInstance::OMXNodeInstance(
-        OmxNodeOwner *owner, const sp<IOMXObserver> &observer, const char *name)
+        Omx *owner, const sp<IOMXObserver> &observer, const char *name)
     : mOwner(owner),
       mHandle(NULL),
       mObserver(observer),
diff --git a/media/libstagefright/omx/OMXStore.cpp b/media/libstagefright/omx/OMXStore.cpp
deleted file mode 100644
index 345336d..0000000
--- a/media/libstagefright/omx/OMXStore.cpp
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "OMXStore"
-#include <utils/Log.h>
-
-#include <media/stagefright/omx/OMXUtils.h>
-#include <media/stagefright/omx/OMX.h>
-#include <media/stagefright/omx/OMXStore.h>
-#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
-
-#include <map>
-#include <string>
-
-namespace android {
-
-namespace {
-    struct RoleProperties {
-        std::string type;
-        bool isEncoder;
-        bool preferPlatformNodes;
-        std::multimap<size_t, IOMXStore::NodeInfo> nodeList;
-    };
-}  // Unnamed namespace
-
-OMXStore::OMXStore(
-        const char* owner,
-        const char* const* searchDirs,
-        const char* mainXmlName,
-        const char* performanceXmlName,
-        const char* profilingResultsXmlPath) {
-    MediaCodecsXmlParser parser(
-            searchDirs,
-            mainXmlName,
-            performanceXmlName,
-            profilingResultsXmlPath);
-    mParsingStatus = parser.getParsingStatus();
-
-    const auto& serviceAttributeMap = parser.getServiceAttributeMap();
-    mServiceAttributeList.reserve(serviceAttributeMap.size());
-    for (const auto& attributePair : serviceAttributeMap) {
-        Attribute attribute;
-        attribute.key = attributePair.first;
-        attribute.value = attributePair.second;
-        mServiceAttributeList.push_back(std::move(attribute));
-    }
-
-    const auto& roleMap = parser.getRoleMap();
-    mRoleList.reserve(roleMap.size());
-    for (const auto& rolePair : roleMap) {
-        RoleInfo role;
-        role.role = rolePair.first;
-        role.type = rolePair.second.type;
-        role.isEncoder = rolePair.second.isEncoder;
-        // TODO: Currently, preferPlatformNodes information is not available in
-        // the xml file. Once we have a way to provide this information, it
-        // should be parsed properly.
-        role.preferPlatformNodes = rolePair.first.compare(0, 5, "audio") == 0;
-        std::vector<NodeInfo>& nodeList = role.nodes;
-        nodeList.reserve(rolePair.second.nodeList.size());
-        for (const auto& nodePair : rolePair.second.nodeList) {
-            NodeInfo node;
-            node.name = nodePair.second.name;
-            node.owner = owner;
-            std::vector<Attribute>& attributeList = node.attributes;
-            attributeList.reserve(nodePair.second.attributeList.size());
-            for (const auto& attributePair : nodePair.second.attributeList) {
-                Attribute attribute;
-                attribute.key = attributePair.first;
-                attribute.value = attributePair.second;
-                attributeList.push_back(std::move(attribute));
-            }
-            nodeList.push_back(std::move(node));
-        }
-        mRoleList.push_back(std::move(role));
-    }
-
-    mPrefix = parser.getCommonPrefix();
-}
-
-status_t OMXStore::listServiceAttributes(std::vector<Attribute>* attributes) {
-    *attributes = mServiceAttributeList;
-    return mParsingStatus;
-}
-
-status_t OMXStore::getNodePrefix(std::string* prefix) {
-    *prefix = mPrefix;
-    return mParsingStatus;
-}
-
-status_t OMXStore::listRoles(std::vector<RoleInfo>* roleList) {
-    *roleList = mRoleList;
-    return mParsingStatus;
-}
-
-status_t OMXStore::getOmx(const std::string& name, sp<IOMX>* omx) {
-    *omx = new OMX();
-    return NO_ERROR;
-}
-
-OMXStore::~OMXStore() {
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index 5894837..e032985 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -22,9 +22,9 @@
 #include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/hardware/HardwareAPI.h>
-#include <media/MediaDefs.h>
 #include <system/graphics-base.h>
 
 namespace android {
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index cb811a0..8e92539 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -26,8 +26,8 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 #include <media/hardware/HardwareAPI.h>
-#include <media/MediaDefs.h>
 
 namespace android {
 
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
index 6e563b7..2fbbb44 100644
--- a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -26,9 +26,9 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/openmax/OMX_IndexExt.h>
-#include <media/MediaDefs.h>
 
 #include <ui/Fence.h>
 #include <ui/GraphicBufferMapper.h>
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
index 8d8a2d9..a79d403 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
@@ -1862,7 +1862,8 @@
 inline size_t getFlattenedSize(HGraphicBufferProducer::QueueBufferInput const& t) {
     return minFlattenedSize(t) +
             getFenceFlattenedSize(t.fence) +
-            getFlattenedSize(t.surfaceDamage);
+            getFlattenedSize(t.surfaceDamage) +
+            sizeof(HdrMetadata::validTypes);
 }
 
 /**
@@ -1916,7 +1917,12 @@
     if (status != NO_ERROR) {
         return status;
     }
-    return flatten(t.surfaceDamage, buffer, size);
+    status = flatten(t.surfaceDamage, buffer, size);
+    if (status != NO_ERROR) {
+        return status;
+    }
+    FlattenableUtils::write(buffer, size, decltype(HdrMetadata::validTypes)(0));
+    return NO_ERROR;
 }
 
 /**
@@ -1968,6 +1974,7 @@
     if (status != NO_ERROR) {
         return status;
     }
+    // HdrMetadata ignored
     return unflatten(&(t->surfaceDamage), buffer, size);
 }
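
The Conversion.h hunks above keep the HIDL conversion's flattened size and layout of QueueBufferInput in step with a framework QueueBufferInput that now carries an HdrMetadata field: getFlattenedSize() accounts for HdrMetadata::validTypes, flatten() emits a zero placeholder for it, and unflatten() skips it. A generic sketch of that pattern, writing a zeroed placeholder so a reader that expects the newer layout still finds later fields at the right offsets (the struct names and field types are made up for illustration; this is not the Android Flattenable API):

#include <cstdint>
#include <cstring>
#include <vector>

struct PayloadV1 { int32_t x; int32_t y; };                  // what this writer has
struct PayloadV2 { int32_t x; int32_t y; uint32_t extra; };  // layout the reader expects

// Serialize a V1 payload in the V2 wire layout by appending a zeroed
// placeholder where the missing field would live.
std::vector<uint8_t> flattenAsV2(const PayloadV1& in) {
    std::vector<uint8_t> out(sizeof(int32_t) * 2 + sizeof(uint32_t));
    uint8_t* p = out.data();
    std::memcpy(p, &in.x, sizeof(in.x)); p += sizeof(in.x);
    std::memcpy(p, &in.y, sizeof(in.y)); p += sizeof(in.y);
    const uint32_t placeholder = 0;      // stands in for PayloadV2::extra
    std::memcpy(p, &placeholder, sizeof(placeholder));
    return out;
}
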
 
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
index a6a9d3e..baa7b81 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
@@ -20,13 +20,13 @@
 #include <hidl/MQDescriptor.h>
 #include <hidl/Status.h>
 
-#include <media/stagefright/omx/OMXNodeInstance.h>
 #include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
 #include <android/hardware/media/omx/1.0/IOmx.h>
 
 namespace android {
 
 struct OMXMaster;
+struct OMXNodeInstance;
 
 namespace hardware {
 namespace media {
@@ -50,10 +50,9 @@
 using ::android::wp;
 
 using ::android::OMXMaster;
-using ::android::OmxNodeOwner;
 using ::android::OMXNodeInstance;
 
-struct Omx : public IOmx, public hidl_death_recipient, public OmxNodeOwner {
+struct Omx : public IOmx, public hidl_death_recipient {
     Omx();
     virtual ~Omx();
 
@@ -68,8 +67,8 @@
     // Method from hidl_death_recipient
     void serviceDied(uint64_t cookie, const wp<IBase>& who) override;
 
-    // Method from OmxNodeOwner
-    virtual status_t freeNode(sp<OMXNodeInstance> const& instance) override;
+    // Method for OMXNodeInstance
+    status_t freeNode(sp<OMXNodeInstance> const& instance);
 
 protected:
     OMXMaster* mMaster;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMX.h b/media/libstagefright/omx/include/media/stagefright/omx/OMX.h
deleted file mode 100644
index 594b4c0..0000000
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMX.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_OMX_H_
-#define ANDROID_OMX_H_
-
-#include <media/IOMX.h>
-#include <utils/threads.h>
-#include <utils/KeyedVector.h>
-#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
-#include "OmxNodeOwner.h"
-
-namespace android {
-
-struct OMXMaster;
-struct OMXNodeInstance;
-
-class OMX : public BnOMX,
-            public OmxNodeOwner,
-            public IBinder::DeathRecipient {
-public:
-    OMX();
-
-    virtual status_t listNodes(List<ComponentInfo> *list);
-
-    virtual status_t allocateNode(
-            const char *name, const sp<IOMXObserver> &observer,
-            sp<IOMXNode> *omxNode);
-
-    virtual status_t createInputSurface(
-            sp<IGraphicBufferProducer> *bufferProducer,
-            sp<IGraphicBufferSource> *bufferSource);
-
-    virtual void binderDied(const wp<IBinder> &the_late_who);
-
-    virtual status_t freeNode(const sp<OMXNodeInstance>& instance);
-
-protected:
-    virtual ~OMX();
-
-private:
-    Mutex mLock;
-    OMXMaster *mMaster;
-    MediaCodecsXmlParser mParser;
-
-    KeyedVector<wp<IBinder>, sp<OMXNodeInstance> > mLiveNodes;
-
-    OMX(const OMX &);
-    OMX &operator=(const OMX &);
-};
-
-}  // namespace android
-
-#endif  // ANDROID_OMX_H_
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
index 1065ca5..c436121 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
@@ -25,9 +25,9 @@
 #include <utils/threads.h>
 #include <utils/KeyedVector.h>
 #include <utils/SortedVector.h>
-#include "OmxNodeOwner.h"
 
 #include <android/hidl/memory/1.0/IMemory.h>
+#include <media/stagefright/omx/1.0/Omx.h>
 
 namespace android {
 class GraphicBuffer;
@@ -35,11 +35,12 @@
 class IOMXObserver;
 struct OMXMaster;
 class OMXBuffer;
-typedef hidl::memory::V1_0::IMemory IHidlMemory;
+using IHidlMemory = hidl::memory::V1_0::IMemory;
+using hardware::media::omx::V1_0::implementation::Omx;
 
 struct OMXNodeInstance : public BnOMXNode {
     OMXNodeInstance(
-            OmxNodeOwner *owner, const sp<IOMXObserver> &observer, const char *name);
+            Omx *owner, const sp<IOMXObserver> &observer, const char *name);
 
     void setHandle(OMX_HANDLETYPE handle);
 
@@ -122,7 +123,7 @@
 
     Mutex mLock;
 
-    OmxNodeOwner *mOwner;
+    Omx *mOwner;
     OMX_HANDLETYPE mHandle;
     sp<IOMXObserver> mObserver;
     sp<CallbackDispatcher> mDispatcher;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h
deleted file mode 100644
index e00d713..0000000
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_OMXSTORE_H_
-#define ANDROID_OMXSTORE_H_
-
-#include <media/IOMXStore.h>
-#include <media/IOMX.h>
-#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
-
-#include <vector>
-#include <string>
-
-namespace android {
-
-class OMXStore : public BnOMXStore {
-public:
-    OMXStore(
-            const char* owner = "default",
-            const char* const* searchDirs
-                = MediaCodecsXmlParser::defaultSearchDirs,
-            const char* mainXmlName
-                = MediaCodecsXmlParser::defaultMainXmlName,
-            const char* performanceXmlName
-                = MediaCodecsXmlParser::defaultPerformanceXmlName,
-            const char* profilingResultsXmlPath
-                = MediaCodecsXmlParser::defaultProfilingResultsXmlPath);
-
-    status_t listServiceAttributes(
-            std::vector<Attribute>* attributes) override;
-
-    status_t getNodePrefix(std::string* prefix) override;
-
-    status_t listRoles(std::vector<RoleInfo>* roleList) override;
-
-    status_t getOmx(const std::string& name, sp<IOMX>* omx) override;
-
-    ~OMXStore() override;
-
-protected:
-    status_t mParsingStatus;
-    std::string mPrefix;
-    std::vector<Attribute> mServiceAttributeList;
-    std::vector<RoleInfo> mRoleList;
-};
-
-}  // namespace android
-
-#endif  // ANDROID_OMXSTORE_H_
diff --git a/media/libstagefright/omx/tests/Android.bp b/media/libstagefright/omx/tests/Android.bp
index 8bcb99e..999d9d4 100644
--- a/media/libstagefright/omx/tests/Android.bp
+++ b/media/libstagefright/omx/tests/Android.bp
@@ -8,6 +8,8 @@
         "libstagefright",
         "libbinder",
         "libmedia",
+        "libmedia_omx",
+        "libmediaextractor",
         "libutils",
         "liblog",
         "libstagefright_foundation",
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 3266439..86c7211 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -25,20 +25,22 @@
 
 #include <binder/ProcessState.h>
 #include <binder/IServiceManager.h>
-#include <binder/MemoryDealer.h>
+#include <cutils/properties.h>
+#include <media/DataSource.h>
 #include <media/IMediaHTTPService.h>
-#include <media/IMediaCodecService.h>
+#include <media/MediaExtractor.h>
+#include <media/MediaSource.h>
+#include <media/OMXBuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/DataSource.h>
+#include <media/stagefright/DataSourceFactory.h>
+#include <media/stagefright/InterfaceUtils.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/SimpleDecodingSource.h>
-#include <media/OMXBuffer.h>
 #include <android/hardware/media/omx/1.0/IOmx.h>
 #include <media/omx/1.0/WOmx.h>
 #include <system/window.h>
@@ -67,7 +69,7 @@
 /////////////////////////////////////////////////////////////////////
 
 Harness::Harness()
-    : mInitCheck(NO_INIT), mUseTreble(false) {
+    : mInitCheck(NO_INIT) {
     mInitCheck = initOMX();
 }
 
@@ -79,21 +81,12 @@
 }
 
 status_t Harness::initOMX() {
-    if (property_get_bool("persist.media.treble_omx", true)) {
-        using namespace ::android::hardware::media::omx::V1_0;
-        sp<IOmx> tOmx = IOmx::getService();
-        if (tOmx == nullptr) {
-            return NO_INIT;
-        }
-        mOMX = new utils::LWOmx(tOmx);
-        mUseTreble = true;
-    } else {
-        sp<IServiceManager> sm = defaultServiceManager();
-        sp<IBinder> binder = sm->getService(String16("media.codec"));
-        sp<IMediaCodecService> service = interface_cast<IMediaCodecService>(binder);
-        mOMX = service->getOMX();
-        mUseTreble = false;
+    using namespace ::android::hardware::media::omx::V1_0;
+    sp<IOmx> tOmx = IOmx::getService();
+    if (tOmx == nullptr) {
+        return NO_INIT;
     }
+    mOMX = new utils::LWOmx(tOmx);
 
     return mOMX != 0 ? OK : NO_INIT;
 }
@@ -221,25 +214,19 @@
     for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
         Buffer buffer;
         buffer.mFlags = 0;
-        if (mUseTreble) {
-            bool success;
-            auto transStatus = mAllocator->allocate(def.nBufferSize,
-                    [&success, &buffer](
-                            bool s,
-                            hidl_memory const& m) {
-                        success = s;
-                        buffer.mHidlMemory = m;
-                    });
-            EXPECT(transStatus.isOk(),
-                    "Cannot call allocator");
-            EXPECT(success,
-                    "Cannot allocate memory");
-            err = mOMXNode->useBuffer(portIndex, buffer.mHidlMemory, &buffer.mID);
-        } else {
-            buffer.mMemory = mDealer->allocate(def.nBufferSize);
-            CHECK(buffer.mMemory != NULL);
-            err = mOMXNode->useBuffer(portIndex, buffer.mMemory, &buffer.mID);
-        }
+        bool success;
+        auto transStatus = mAllocator->allocate(def.nBufferSize,
+                [&success, &buffer](
+                        bool s,
+                        hidl_memory const& m) {
+                    success = s;
+                    buffer.mHidlMemory = m;
+                });
+        EXPECT(transStatus.isOk(),
+                "Cannot call allocator");
+        EXPECT(success,
+                "Cannot allocate memory");
+        err = mOMXNode->useBuffer(portIndex, buffer.mHidlMemory, &buffer.mID);
 
         EXPECT_SUCCESS(err, "useBuffer");
 
@@ -291,13 +278,13 @@
 
 static sp<IMediaExtractor> CreateExtractorFromURI(const char *uri) {
     sp<DataSource> source =
-        DataSource::CreateFromURI(NULL /* httpService */, uri);
+        DataSourceFactory::CreateFromURI(NULL /* httpService */, uri);
 
     if (source == NULL) {
         return NULL;
     }
 
-    return MediaExtractor::Create(source);
+    return MediaExtractorFactory::Create(source);
 }
 
 status_t Harness::testStateTransitions(
@@ -308,13 +295,11 @@
         return OK;
     }
 
-    if (mUseTreble) {
-        mAllocator = IAllocator::getService("ashmem");
-        EXPECT(mAllocator != nullptr,
-                "Cannot obtain hidl AshmemAllocator");
-    } else {
-        mDealer = new MemoryDealer(16 * 1024 * 1024, "OMXHarness");
-    }
+    mAllocator = IAllocator::getService("ashmem");
+    EXPECT(mAllocator != nullptr,
+            "Cannot obtain hidl AshmemAllocator");
+    // TODO: When Treble has MemoryHeap/MemoryDealer, we should specify the heap
+    // size to be 16 * 1024 * 1024.
 
     sp<CodecObserver> observer = new CodecObserver(this, ++mCurGeneration);
 
@@ -543,7 +528,7 @@
     return NULL;
 }
 
-static sp<IMediaSource> CreateSourceForMime(const char *mime) {
+static sp<MediaSource> CreateSourceForMime(const char *mime) {
     const char *url = GetURLForMime(mime);
 
     if (url == NULL) {
@@ -564,7 +549,7 @@
         CHECK(meta->findCString(kKeyMIMEType, &trackMime));
 
         if (!strcasecmp(mime, trackMime)) {
-            return extractor->getTrack(i);
+            return CreateMediaSourceFromIMediaSource(extractor->getTrack(i));
         }
     }
 
@@ -610,7 +595,7 @@
         return OK;
     }
 
-    sp<IMediaSource> source = CreateSourceForMime(mime);
+    sp<MediaSource> source = CreateSourceForMime(mime);
 
     if (source == NULL) {
         printf("  * Unable to open test content for type '%s', "
@@ -620,14 +605,14 @@
         return OK;
     }
 
-    sp<IMediaSource> seekSource = CreateSourceForMime(mime);
+    sp<MediaSource> seekSource = CreateSourceForMime(mime);
     if (source == NULL || seekSource == NULL) {
         return UNKNOWN_ERROR;
     }
 
     CHECK_EQ(seekSource->start(), (status_t)OK);
 
-    sp<IMediaSource> codec = SimpleDecodingSource::Create(
+    sp<MediaSource> codec = SimpleDecodingSource::Create(
             source, 0 /* flags */, NULL /* nativeWindow */, componentName);
 
     CHECK(codec != NULL);
diff --git a/media/libstagefright/omx/tests/OMXHarness.h b/media/libstagefright/omx/tests/OMXHarness.h
index 4fc0f79..dca787c 100644
--- a/media/libstagefright/omx/tests/OMXHarness.h
+++ b/media/libstagefright/omx/tests/OMXHarness.h
@@ -93,8 +93,6 @@
     Condition mMessageAddedCondition;
     int32_t mLastMsgGeneration;
     int32_t mCurGeneration;
-    bool mUseTreble;
-    sp<MemoryDealer> mDealer;
     sp<IAllocator> mAllocator;
 
     status_t initOMX();
diff --git a/media/libstagefright/rtsp/AH263Assembler.cpp b/media/libstagefright/rtsp/AH263Assembler.cpp
index 75cd911..3436e95 100644
--- a/media/libstagefright/rtsp/AH263Assembler.cpp
+++ b/media/libstagefright/rtsp/AH263Assembler.cpp
@@ -25,7 +25,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 
 namespace android {
 
diff --git a/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp b/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp
index dca5c89..0988774 100644
--- a/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp
@@ -26,10 +26,10 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
 
 namespace android {
 
diff --git a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
index 156004c..1e434cb 100644
--- a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
@@ -27,8 +27,8 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/Utils.h>
 
 #include <ctype.h>
 #include <stdint.h>
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 8ba9e02..68f8bdd 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -23,8 +23,6 @@
 #include "ARawAudioAssembler.h"
 #include "ASessionDescription.h"
 
-#include "include/avc_utils.h"
-
 #include <ctype.h>
 
 #include <media/stagefright/foundation/ABitReader.h>
@@ -32,6 +30,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/base64.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaDefs.h>
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 1f6b6f7..8604b69 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -22,13 +22,13 @@
 
 #include <fcntl.h>
 
+#include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <utils/ByteOrder.h>
 
@@ -104,7 +104,7 @@
     mFd = -1;
 }
 
-status_t ARTPWriter::addSource(const sp<IMediaSource> &source) {
+status_t ARTPWriter::addSource(const sp<MediaSource> &source) {
     mSource = source;
     return OK;
 }
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 3c7042e..92a64f2 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -37,7 +37,7 @@
 struct ARTPWriter : public MediaWriter {
     explicit ARTPWriter(int fd);
 
-    virtual status_t addSource(const sp<IMediaSource> &source);
+    virtual status_t addSource(const sp<MediaSource> &source);
     virtual bool reachedEOS();
     virtual status_t start(MetaData *params);
     virtual status_t stop();
@@ -72,7 +72,7 @@
     int mRTCPFd;
 #endif
 
-    sp<IMediaSource> mSource;
+    sp<MediaSource> mSource;
     sp<ALooper> mLooper;
     sp<AHandlerReflector<ARTPWriter> > mReflector;
 
diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp
index 0f46c83..d459cbd 100644
--- a/media/libstagefright/rtsp/SDPLoader.cpp
+++ b/media/libstagefright/rtsp/SDPLoader.cpp
@@ -22,8 +22,8 @@
 
 #include "ASessionDescription.h"
 
-#include <media/IMediaHTTPConnection.h>
-#include <media/IMediaHTTPService.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
 #include <media/stagefright/MediaHTTP.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -36,7 +36,7 @@
 SDPLoader::SDPLoader(
         const sp<AMessage> &notify,
         uint32_t flags,
-        const sp<IMediaHTTPService> &httpService)
+        const sp<MediaHTTPService> &httpService)
     : mNotify(notify),
       mFlags(flags),
       mNetLooper(new ALooper),
diff --git a/media/libstagefright/rtsp/VideoSource.h b/media/libstagefright/rtsp/VideoSource.h
index ae0c85b..4be9bf6 100644
--- a/media/libstagefright/rtsp/VideoSource.h
+++ b/media/libstagefright/rtsp/VideoSource.h
@@ -18,9 +18,9 @@
 
 #define VIDEO_SOURCE_H_
 
+#include <media/MediaSource.h>
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 
 namespace android {
diff --git a/media/libstagefright/rtsp/rtp_test.cpp b/media/libstagefright/rtsp/rtp_test.cpp
index e612a8d..98a8fb4 100644
--- a/media/libstagefright/rtsp/rtp_test.cpp
+++ b/media/libstagefright/rtsp/rtp_test.cpp
@@ -20,10 +20,10 @@
 
 #include <binder/ProcessState.h>
 
+#include <media/DataSource.h>
 #include <media/stagefright/foundation/base64.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/SimpleDecodingSource.h>
diff --git a/media/libstagefright/tests/DummyRecorder.cpp b/media/libstagefright/tests/DummyRecorder.cpp
index 8f17088..4f560cb 100644
--- a/media/libstagefright/tests/DummyRecorder.cpp
+++ b/media/libstagefright/tests/DummyRecorder.cpp
@@ -17,8 +17,8 @@
 #define LOG_TAG "DummyRecorder"
 // #define LOG_NDEBUG 0
 
+#include <media/MediaSource.h>
 #include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaSource.h>
 #include "DummyRecorder.h"
 
 #include <utils/Log.h>
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
index 7c464ff..051108f 100644
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -94,10 +94,10 @@
             ASSERT_TRUE(mSurfaceControl != NULL);
             ASSERT_TRUE(mSurfaceControl->isValid());
 
-            SurfaceComposerClient::openGlobalTransaction();
-            ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF));
-            ASSERT_EQ(NO_ERROR, mSurfaceControl->show());
-            SurfaceComposerClient::closeGlobalTransaction();
+            SurfaceComposerClient::Transaction{}
+                    .setLayer(mSurfaceControl, 0x7FFFFFFF)
+                    .show(mSurfaceControl)
+                    .apply();
 
             sp<ANativeWindow> window = mSurfaceControl->getSurface();
             mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
diff --git a/media/libstagefright/timedtext/TextDescriptions.cpp b/media/libstagefright/timedtext/TextDescriptions.cpp
index c762a74..088eaae 100644
--- a/media/libstagefright/timedtext/TextDescriptions.cpp
+++ b/media/libstagefright/timedtext/TextDescriptions.cpp
@@ -15,7 +15,7 @@
  */
 
 #include "TextDescriptions.h"
-#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/MediaErrors.h>
 
 namespace android {
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index 71bfbc9..420890b 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -252,7 +252,7 @@
 }
 
 WebmFrameMediaSourceThread::WebmFrameMediaSourceThread(
-        const sp<IMediaSource>& source,
+        const sp<MediaSource>& source,
         int type,
         LinkedBlockingQueue<const sp<WebmFrame> >& sink,
         uint64_t timeCodeScale,
diff --git a/media/libstagefright/webm/WebmFrameThread.h b/media/libstagefright/webm/WebmFrameThread.h
index 528984f..76c91f1 100644
--- a/media/libstagefright/webm/WebmFrameThread.h
+++ b/media/libstagefright/webm/WebmFrameThread.h
@@ -20,8 +20,8 @@
 #include "WebmFrame.h"
 #include "LinkedBlockingQueue.h"
 
+#include <media/MediaSource.h>
 #include <media/stagefright/FileSource.h>
-#include <media/stagefright/MediaSource.h>
 
 #include <utils/List.h>
 #include <utils/Errors.h>
@@ -123,7 +123,7 @@
 class WebmFrameMediaSourceThread: public WebmFrameSourceThread {
 public:
     WebmFrameMediaSourceThread(
-            const sp<IMediaSource>& source,
+            const sp<MediaSource>& source,
             int type,
             LinkedBlockingQueue<const sp<WebmFrame> >& sink,
             uint64_t timeCodeScale,
@@ -142,7 +142,7 @@
     }
 
 private:
-    const sp<IMediaSource> mSource;
+    const sp<MediaSource> mSource;
     const uint64_t mTimeCodeScale;
     uint64_t mStartTimeUs;
 
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index d6c6930..4d73eb8 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -360,7 +360,7 @@
     return err;
 }
 
-status_t WebmWriter::addSource(const sp<IMediaSource> &source) {
+status_t WebmWriter::addSource(const sp<MediaSource> &source) {
     Mutex::Autolock l(mLock);
     if (mStarted) {
         ALOGE("Attempt to add source AFTER recording is started");
diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h
index 9f3b19f..ffe4c79 100644
--- a/media/libstagefright/webm/WebmWriter.h
+++ b/media/libstagefright/webm/WebmWriter.h
@@ -21,7 +21,7 @@
 #include "WebmFrameThread.h"
 #include "LinkedBlockingQueue.h"
 
-#include <media/stagefright/MediaSource.h>
+#include <media/MediaSource.h>
 #include <media/stagefright/MediaWriter.h>
 
 #include <utils/Errors.h>
@@ -40,7 +40,7 @@
     ~WebmWriter() { reset(); }
 
 
-    virtual status_t addSource(const sp<IMediaSource> &source);
+    virtual status_t addSource(const sp<MediaSource> &source);
     virtual status_t start(MetaData *param = NULL);
     virtual status_t stop();
     virtual status_t pause();
@@ -85,7 +85,7 @@
         const char *mName;
         sp<WebmElement> (*mMakeTrack)(const sp<MetaData>&);
 
-        sp<IMediaSource> mSource;
+        sp<MediaSource> mSource;
         sp<WebmElement> mTrackEntry;
         sp<WebmFrameSourceThread> mThread;
         LinkedBlockingQueue<const sp<WebmFrame> > mSink;
diff --git a/media/libstagefright/wifi-display/Android.bp b/media/libstagefright/wifi-display/Android.bp
deleted file mode 100644
index fb08c5b..0000000
--- a/media/libstagefright/wifi-display/Android.bp
+++ /dev/null
@@ -1,51 +0,0 @@
-cc_library_shared {
-    name: "libstagefright_wfd",
-
-    srcs: [
-        "MediaSender.cpp",
-        "Parameters.cpp",
-        "rtp/RTPSender.cpp",
-        "source/Converter.cpp",
-        "source/MediaPuller.cpp",
-        "source/PlaybackSession.cpp",
-        "source/RepeaterSource.cpp",
-        "source/TSPacketizer.cpp",
-        "source/WifiDisplaySource.cpp",
-        "VideoFormats.cpp",
-    ],
-
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/native/include/media/openmax",
-        "frameworks/native/include/media/hardware",
-        "frameworks/av/media/libstagefright/mpeg2ts",
-    ],
-
-    shared_libs: [
-        "libbinder",
-        "libcutils",
-        "liblog",
-        "libmedia",
-        "libstagefright",
-        "libstagefright_foundation",
-        "libui",
-        "libgui",
-        "libutils",
-    ],
-
-    cflags: [
-        "-Wno-multichar",
-        "-Werror",
-        "-Wall",
-    ],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-        diag: {
-            cfi: true,
-        },
-    },
-}
diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp
deleted file mode 100644
index cc412f5..0000000
--- a/media/libstagefright/wifi-display/MediaSender.cpp
+++ /dev/null
@@ -1,519 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaSender"
-#include <utils/Log.h>
-
-#include "MediaSender.h"
-
-#include "rtp/RTPSender.h"
-#include "source/TSPacketizer.h"
-
-#include "include/avc_utils.h"
-
-#include <media/IHDCP.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <ui/GraphicBuffer.h>
-
-namespace android {
-
-MediaSender::MediaSender(
-        const sp<ANetworkSession> &netSession,
-        const sp<AMessage> &notify)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mMode(MODE_UNDEFINED),
-      mGeneration(0),
-      mPrevTimeUs(-1ll),
-      mInitDoneCount(0),
-      mLogFile(NULL) {
-    // mLogFile = fopen("/data/misc/log.ts", "wb");
-}
-
-MediaSender::~MediaSender() {
-    if (mLogFile != NULL) {
-        fclose(mLogFile);
-        mLogFile = NULL;
-    }
-}
-
-status_t MediaSender::setHDCP(const sp<IHDCP> &hdcp) {
-    if (mMode != MODE_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    mHDCP = hdcp;
-
-    return OK;
-}
-
-ssize_t MediaSender::addTrack(const sp<AMessage> &format, uint32_t flags) {
-    if (mMode != MODE_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    TrackInfo info;
-    info.mFormat = format;
-    info.mFlags = flags;
-    info.mPacketizerTrackIndex = -1;
-
-    AString mime;
-    CHECK(format->findString("mime", &mime));
-    info.mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);
-
-    size_t index = mTrackInfos.size();
-    mTrackInfos.push_back(info);
-
-    return index;
-}
-
-status_t MediaSender::initAsync(
-        ssize_t trackIndex,
-        const char *remoteHost,
-        int32_t remoteRTPPort,
-        RTPSender::TransportMode rtpMode,
-        int32_t remoteRTCPPort,
-        RTPSender::TransportMode rtcpMode,
-        int32_t *localRTPPort) {
-    if (trackIndex < 0) {
-        if (mMode != MODE_UNDEFINED) {
-            return INVALID_OPERATION;
-        }
-
-        uint32_t flags = 0;
-        if (mHDCP != NULL) {
-            // XXX Determine proper HDCP version.
-            flags |= TSPacketizer::EMIT_HDCP20_DESCRIPTOR;
-        }
-        mTSPacketizer = new TSPacketizer(flags);
-
-        status_t err = OK;
-        for (size_t i = 0; i < mTrackInfos.size(); ++i) {
-            TrackInfo *info = &mTrackInfos.editItemAt(i);
-
-            ssize_t packetizerTrackIndex =
-                mTSPacketizer->addTrack(info->mFormat);
-
-            if (packetizerTrackIndex < 0) {
-                err = packetizerTrackIndex;
-                break;
-            }
-
-            info->mPacketizerTrackIndex = packetizerTrackIndex;
-        }
-
-        if (err == OK) {
-            sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);
-            notify->setInt32("generation", mGeneration);
-            mTSSender = new RTPSender(mNetSession, notify);
-            looper()->registerHandler(mTSSender);
-
-            err = mTSSender->initAsync(
-                    remoteHost,
-                    remoteRTPPort,
-                    rtpMode,
-                    remoteRTCPPort,
-                    rtcpMode,
-                    localRTPPort);
-
-            if (err != OK) {
-                looper()->unregisterHandler(mTSSender->id());
-                mTSSender.clear();
-            }
-        }
-
-        if (err != OK) {
-            for (size_t i = 0; i < mTrackInfos.size(); ++i) {
-                TrackInfo *info = &mTrackInfos.editItemAt(i);
-                info->mPacketizerTrackIndex = -1;
-            }
-
-            mTSPacketizer.clear();
-            return err;
-        }
-
-        mMode = MODE_TRANSPORT_STREAM;
-        mInitDoneCount = 1;
-
-        return OK;
-    }
-
-    if (mMode == MODE_TRANSPORT_STREAM) {
-        return INVALID_OPERATION;
-    }
-
-    if ((size_t)trackIndex >= mTrackInfos.size()) {
-        return -ERANGE;
-    }
-
-    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
-
-    if (info->mSender != NULL) {
-        return INVALID_OPERATION;
-    }
-
-    sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);
-    notify->setInt32("generation", mGeneration);
-    notify->setSize("trackIndex", trackIndex);
-
-    info->mSender = new RTPSender(mNetSession, notify);
-    looper()->registerHandler(info->mSender);
-
-    status_t err = info->mSender->initAsync(
-            remoteHost,
-            remoteRTPPort,
-            rtpMode,
-            remoteRTCPPort,
-            rtcpMode,
-            localRTPPort);
-
-    if (err != OK) {
-        looper()->unregisterHandler(info->mSender->id());
-        info->mSender.clear();
-
-        return err;
-    }
-
-    if (mMode == MODE_UNDEFINED) {
-        mInitDoneCount = mTrackInfos.size();
-    }
-
-    mMode = MODE_ELEMENTARY_STREAMS;
-
-    return OK;
-}
-
-status_t MediaSender::queueAccessUnit(
-        size_t trackIndex, const sp<ABuffer> &accessUnit) {
-    if (mMode == MODE_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    if (trackIndex >= mTrackInfos.size()) {
-        return -ERANGE;
-    }
-
-    if (mMode == MODE_TRANSPORT_STREAM) {
-        TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
-        info->mAccessUnits.push_back(accessUnit);
-
-        mTSPacketizer->extractCSDIfNecessary(info->mPacketizerTrackIndex);
-
-        for (;;) {
-            ssize_t minTrackIndex = -1;
-            int64_t minTimeUs = -1ll;
-
-            for (size_t i = 0; i < mTrackInfos.size(); ++i) {
-                const TrackInfo &info = mTrackInfos.itemAt(i);
-
-                if (info.mAccessUnits.empty()) {
-                    minTrackIndex = -1;
-                    minTimeUs = -1ll;
-                    break;
-                }
-
-                int64_t timeUs;
-                const sp<ABuffer> &accessUnit = *info.mAccessUnits.begin();
-                CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-                if (minTrackIndex < 0 || timeUs < minTimeUs) {
-                    minTrackIndex = i;
-                    minTimeUs = timeUs;
-                }
-            }
-
-            if (minTrackIndex < 0) {
-                return OK;
-            }
-
-            TrackInfo *info = &mTrackInfos.editItemAt(minTrackIndex);
-            sp<ABuffer> accessUnit = *info->mAccessUnits.begin();
-            info->mAccessUnits.erase(info->mAccessUnits.begin());
-
-            sp<ABuffer> tsPackets;
-            status_t err = packetizeAccessUnit(
-                    minTrackIndex, accessUnit, &tsPackets);
-
-            if (err == OK) {
-                if (mLogFile != NULL) {
-                    fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile);
-                }
-
-                int64_t timeUs;
-                CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-                tsPackets->meta()->setInt64("timeUs", timeUs);
-
-                err = mTSSender->queueBuffer(
-                        tsPackets,
-                        33 /* packetType */,
-                        RTPSender::PACKETIZATION_TRANSPORT_STREAM);
-            }
-
-            if (err != OK) {
-                return err;
-            }
-        }
-    }
-
-    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
-
-    return info->mSender->queueBuffer(
-            accessUnit,
-            info->mIsAudio ? 96 : 97 /* packetType */,
-            info->mIsAudio
-                ? RTPSender::PACKETIZATION_AAC : RTPSender::PACKETIZATION_H264);
-}
-
-void MediaSender::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatSenderNotify:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-            if (generation != mGeneration) {
-                break;
-            }
-
-            onSenderNotify(msg);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void MediaSender::onSenderNotify(const sp<AMessage> &msg) {
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case RTPSender::kWhatInitDone:
-        {
-            --mInitDoneCount;
-
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            if (err != OK) {
-                notifyInitDone(err);
-                ++mGeneration;
-                break;
-            }
-
-            if (mInitDoneCount == 0) {
-                notifyInitDone(OK);
-            }
-            break;
-        }
-
-        case RTPSender::kWhatError:
-        {
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            notifyError(err);
-            break;
-        }
-
-        case kWhatNetworkStall:
-        {
-            size_t numBytesQueued;
-            CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
-
-            notifyNetworkStall(numBytesQueued);
-            break;
-        }
-
-        case kWhatInformSender:
-        {
-            int64_t avgLatencyUs;
-            CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs));
-
-            int64_t maxLatencyUs;
-            CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs));
-
-            sp<AMessage> notify = mNotify->dup();
-            notify->setInt32("what", kWhatInformSender);
-            notify->setInt64("avgLatencyUs", avgLatencyUs);
-            notify->setInt64("maxLatencyUs", maxLatencyUs);
-            notify->post();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void MediaSender::notifyInitDone(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatInitDone);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void MediaSender::notifyError(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void MediaSender::notifyNetworkStall(size_t numBytesQueued) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatNetworkStall);
-    notify->setSize("numBytesQueued", numBytesQueued);
-    notify->post();
-}
-
-status_t MediaSender::packetizeAccessUnit(
-        size_t trackIndex,
-        sp<ABuffer> accessUnit,
-        sp<ABuffer> *tsPackets) {
-    const TrackInfo &info = mTrackInfos.itemAt(trackIndex);
-
-    uint32_t flags = 0;
-
-    bool isHDCPEncrypted = false;
-    uint64_t inputCTR;
-    uint8_t HDCP_private_data[16];
-
-    bool manuallyPrependSPSPPS =
-        !info.mIsAudio
-        && (info.mFlags & FLAG_MANUALLY_PREPEND_SPS_PPS)
-        && IsIDR(accessUnit);
-
-    if (mHDCP != NULL && !info.mIsAudio) {
-        isHDCPEncrypted = true;
-
-        if (manuallyPrependSPSPPS) {
-            accessUnit = mTSPacketizer->prependCSD(
-                    info.mPacketizerTrackIndex, accessUnit);
-        }
-
-        status_t err;
-        native_handle_t* handle;
-        if (accessUnit->meta()->findPointer("handle", (void**)&handle)
-                && handle != NULL) {
-            int32_t rangeLength, rangeOffset;
-            sp<AMessage> notify;
-            CHECK(accessUnit->meta()->findInt32("rangeOffset", &rangeOffset));
-            CHECK(accessUnit->meta()->findInt32("rangeLength", &rangeLength));
-            CHECK(accessUnit->meta()->findMessage("notify", &notify)
-                    && notify != NULL);
-            CHECK_GE((int32_t)accessUnit->size(), rangeLength);
-
-            sp<GraphicBuffer> grbuf(new GraphicBuffer(
-                    rangeOffset + rangeLength /* width */, 1 /* height */,
-                    HAL_PIXEL_FORMAT_Y8, 1 /* layerCount */,
-                    GRALLOC_USAGE_HW_VIDEO_ENCODER,
-                    rangeOffset + rangeLength /* stride */, handle,
-                    false /* keepOwnership */));
-
-            err = mHDCP->encryptNative(
-                    grbuf, rangeOffset, rangeLength,
-                    trackIndex  /* streamCTR */,
-                    &inputCTR,
-                    accessUnit->data());
-            notify->post();
-        } else {
-            err = mHDCP->encrypt(
-                    accessUnit->data(), accessUnit->size(),
-                    trackIndex  /* streamCTR */,
-                    &inputCTR,
-                    accessUnit->data());
-        }
-
-        if (err != OK) {
-            ALOGE("Failed to HDCP-encrypt media data (err %d)",
-                  err);
-
-            return err;
-        }
-
-        HDCP_private_data[0] = 0x00;
-
-        HDCP_private_data[1] =
-            (((trackIndex >> 30) & 3) << 1) | 1;
-
-        HDCP_private_data[2] = (trackIndex >> 22) & 0xff;
-
-        HDCP_private_data[3] =
-            (((trackIndex >> 15) & 0x7f) << 1) | 1;
-
-        HDCP_private_data[4] = (trackIndex >> 7) & 0xff;
-
-        HDCP_private_data[5] =
-            ((trackIndex & 0x7f) << 1) | 1;
-
-        HDCP_private_data[6] = 0x00;
-
-        HDCP_private_data[7] =
-            (((inputCTR >> 60) & 0x0f) << 1) | 1;
-
-        HDCP_private_data[8] = (inputCTR >> 52) & 0xff;
-
-        HDCP_private_data[9] =
-            (((inputCTR >> 45) & 0x7f) << 1) | 1;
-
-        HDCP_private_data[10] = (inputCTR >> 37) & 0xff;
-
-        HDCP_private_data[11] =
-            (((inputCTR >> 30) & 0x7f) << 1) | 1;
-
-        HDCP_private_data[12] = (inputCTR >> 22) & 0xff;
-
-        HDCP_private_data[13] =
-            (((inputCTR >> 15) & 0x7f) << 1) | 1;
-
-        HDCP_private_data[14] = (inputCTR >> 7) & 0xff;
-
-        HDCP_private_data[15] =
-            ((inputCTR & 0x7f) << 1) | 1;
-
-        flags |= TSPacketizer::IS_ENCRYPTED;
-    } else if (manuallyPrependSPSPPS) {
-        flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES;
-    }
-
-    int64_t timeUs = ALooper::GetNowUs();
-    if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) {
-        flags |= TSPacketizer::EMIT_PCR;
-        flags |= TSPacketizer::EMIT_PAT_AND_PMT;
-
-        mPrevTimeUs = timeUs;
-    }
-
-    mTSPacketizer->packetize(
-            info.mPacketizerTrackIndex,
-            accessUnit,
-            tsPackets,
-            flags,
-            !isHDCPEncrypted ? NULL : HDCP_private_data,
-            !isHDCPEncrypted ? 0 : sizeof(HDCP_private_data),
-            info.mIsAudio ? 2 : 0 /* numStuffingBytes */);
-
-    return OK;
-}
-
-}  // namespace android
-
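
The 16 bytes assembled in packetizeAccessUnit() above carry the HDCP streamCTR (MediaSender uses the track index for this) and the 64-bit inputCTR, interleaved with '1' marker bits. The following standalone sketch reproduces that exact byte layout so it can be inspected in isolation; the helper name packHdcpPrivateData and the sample counter values are mine, not AOSP API.

#include <cstdint>
#include <cstdio>

// Same byte-by-byte assembly as packetizeAccessUnit() above: the counters are
// split across the 16 bytes, with '1' marker bits appended where the code does.
static void packHdcpPrivateData(uint32_t streamCTR, uint64_t inputCTR,
                                uint8_t out[16]) {
    out[0]  = 0x00;
    out[1]  = (((streamCTR >> 30) & 3) << 1) | 1;
    out[2]  = (streamCTR >> 22) & 0xff;
    out[3]  = (((streamCTR >> 15) & 0x7f) << 1) | 1;
    out[4]  = (streamCTR >> 7) & 0xff;
    out[5]  = ((streamCTR & 0x7f) << 1) | 1;
    out[6]  = 0x00;
    out[7]  = (((inputCTR >> 60) & 0x0f) << 1) | 1;
    out[8]  = (inputCTR >> 52) & 0xff;
    out[9]  = (((inputCTR >> 45) & 0x7f) << 1) | 1;
    out[10] = (inputCTR >> 37) & 0xff;
    out[11] = (((inputCTR >> 30) & 0x7f) << 1) | 1;
    out[12] = (inputCTR >> 22) & 0xff;
    out[13] = (((inputCTR >> 15) & 0x7f) << 1) | 1;
    out[14] = (inputCTR >> 7) & 0xff;
    out[15] = ((inputCTR & 0x7f) << 1) | 1;
}

int main() {
    uint8_t buf[16];
    packHdcpPrivateData(1 /* video track */, 0x0123456789abcdefULL, buf);
    for (int i = 0; i < 16; ++i) printf("%02x ", buf[i]);
    printf("\n");
    return 0;
}
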
diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h
deleted file mode 100644
index 04538ea..0000000
--- a/media/libstagefright/wifi-display/MediaSender.h
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MEDIA_SENDER_H_
-
-#define MEDIA_SENDER_H_
-
-#include "rtp/RTPSender.h"
-
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <utils/Errors.h>
-#include <utils/Vector.h>
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-struct AMessage;
-struct IHDCP;
-struct TSPacketizer;
-
-// This class facilitates sending of data from one or more media tracks
-// through one or more RTP channels, either providing a 1:1 mapping from
-// track to RTP channel or muxing all tracks into a single RTP channel and
-// using transport stream encapsulation.
-// Optionally the (video) data is encrypted using the provided hdcp object.
-struct MediaSender : public AHandler {
-    enum {
-        kWhatInitDone,
-        kWhatError,
-        kWhatNetworkStall,
-        kWhatInformSender,
-    };
-
-    MediaSender(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify);
-
-    status_t setHDCP(const sp<IHDCP> &hdcp);
-
-    enum FlagBits {
-        FLAG_MANUALLY_PREPEND_SPS_PPS = 1,
-    };
-    ssize_t addTrack(const sp<AMessage> &format, uint32_t flags);
-
-    // If trackIndex == -1, initialize for transport stream muxing.
-    status_t initAsync(
-            ssize_t trackIndex,
-            const char *remoteHost,
-            int32_t remoteRTPPort,
-            RTPSender::TransportMode rtpMode,
-            int32_t remoteRTCPPort,
-            RTPSender::TransportMode rtcpMode,
-            int32_t *localRTPPort);
-
-    status_t queueAccessUnit(
-            size_t trackIndex, const sp<ABuffer> &accessUnit);
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~MediaSender();
-
-private:
-    enum {
-        kWhatSenderNotify,
-    };
-
-    enum Mode {
-        MODE_UNDEFINED,
-        MODE_TRANSPORT_STREAM,
-        MODE_ELEMENTARY_STREAMS,
-    };
-
-    struct TrackInfo {
-        sp<AMessage> mFormat;
-        uint32_t mFlags;
-        sp<RTPSender> mSender;
-        List<sp<ABuffer> > mAccessUnits;
-        ssize_t mPacketizerTrackIndex;
-        bool mIsAudio;
-    };
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-
-    sp<IHDCP> mHDCP;
-
-    Mode mMode;
-    int32_t mGeneration;
-
-    Vector<TrackInfo> mTrackInfos;
-
-    sp<TSPacketizer> mTSPacketizer;
-    sp<RTPSender> mTSSender;
-    int64_t mPrevTimeUs;
-
-    size_t mInitDoneCount;
-
-    FILE *mLogFile;
-
-    void onSenderNotify(const sp<AMessage> &msg);
-
-    void notifyInitDone(status_t err);
-    void notifyError(status_t err);
-    void notifyNetworkStall(size_t numBytesQueued);
-
-    status_t packetizeAccessUnit(
-            size_t trackIndex,
-            sp<ABuffer> accessUnit,
-            sp<ABuffer> *tsPackets);
-
-    DISALLOW_EVIL_CONSTRUCTORS(MediaSender);
-};
-
-}  // namespace android
-
-#endif  // MEDIA_SENDER_H_
-
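
For reference, a minimal standalone sketch of how queueAccessUnit() in the deleted MediaSender.cpp picks RTP payload types: PT 33 for the muxed transport stream, PT 96/97 for AAC/H.264 elementary streams. The helper and struct names here are mine, not part of the deleted class.

#include <cstdio>

struct Payload { int packetType; const char *packetization; };

// Mirrors the payload-type choice in queueAccessUnit() above.
static Payload payloadFor(bool transportStreamMode, bool isAudio) {
    if (transportStreamMode) {
        return {33, "PACKETIZATION_TRANSPORT_STREAM"};   // muxed MPEG-TS
    }
    return isAudio ? Payload{96, "PACKETIZATION_AAC"}
                   : Payload{97, "PACKETIZATION_H264"};  // elementary streams
}

int main() {
    Payload p = payloadFor(false /* elementary */, true /* audio */);
    printf("PT %d via %s\n", p.packetType, p.packetization);
    return 0;
}
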
diff --git a/media/libstagefright/wifi-display/Parameters.cpp b/media/libstagefright/wifi-display/Parameters.cpp
deleted file mode 100644
index d2a61ea..0000000
--- a/media/libstagefright/wifi-display/Parameters.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Parameters.h"
-
-#include <media/stagefright/MediaErrors.h>
-
-namespace android {
-
-// static
-sp<Parameters> Parameters::Parse(const char *data, size_t size) {
-    sp<Parameters> params = new Parameters;
-    status_t err = params->parse(data, size);
-
-    if (err != OK) {
-        return NULL;
-    }
-
-    return params;
-}
-
-Parameters::Parameters() {}
-
-Parameters::~Parameters() {}
-
-status_t Parameters::parse(const char *data, size_t size) {
-    size_t i = 0;
-    while (i < size) {
-        size_t nameStart = i;
-        while (i < size && data[i] != ':') {
-            ++i;
-        }
-
-        if (i == size || i == nameStart) {
-            return ERROR_MALFORMED;
-        }
-
-        AString name(&data[nameStart], i - nameStart);
-        name.trim();
-        name.tolower();
-
-        ++i;
-
-        size_t valueStart = i;
-
-        while (i + 1 < size && (data[i] != '\r' || data[i + 1] != '\n')) {
-            ++i;
-        }
-
-        AString value(&data[valueStart], i - valueStart);
-        value.trim();
-
-        mDict.add(name, value);
-
-        while (i + 1 < size && data[i] == '\r' && data[i + 1] == '\n') {
-            i += 2;
-        }
-    }
-
-    return OK;
-}
-
-bool Parameters::findParameter(const char *name, AString *value) const {
-    AString key = name;
-    key.tolower();
-
-    ssize_t index = mDict.indexOfKey(key);
-
-    if (index < 0) {
-        value->clear();
-
-        return false;
-    }
-
-    *value = mDict.valueAt(index);
-    return true;
-}
-
-}  // namespace android
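
Parameters::parse() above splits "name: value" pairs on ':' and CRLF, lower-casing and trimming the names. Below is a loose standalone equivalent using std::map instead of KeyedVector<AString, AString>; it is a sketch, not byte-for-byte identical to the deleted code (it does not collapse repeated CRLFs), and the sample wfd_client_rtp_ports line is mine.

#include <algorithm>
#include <cctype>
#include <iostream>
#include <map>
#include <string>

static std::string trim(std::string s) {
    auto notSpace = [](unsigned char c) { return !std::isspace(c); };
    s.erase(s.begin(), std::find_if(s.begin(), s.end(), notSpace));
    s.erase(std::find_if(s.rbegin(), s.rend(), notSpace).base(), s.end());
    return s;
}

// Rough equivalent of Parameters::parse(): "name: value\r\n" into a dictionary.
static bool parseParams(const std::string &data,
                        std::map<std::string, std::string> *dict) {
    size_t i = 0;
    while (i < data.size()) {
        size_t colon = data.find(':', i);
        if (colon == std::string::npos || colon == i) {
            return false;  // corresponds to ERROR_MALFORMED
        }
        std::string name = trim(data.substr(i, colon - i));
        std::transform(name.begin(), name.end(), name.begin(),
                       [](unsigned char c) { return (char)std::tolower(c); });
        size_t eol = data.find("\r\n", colon + 1);
        if (eol == std::string::npos) eol = data.size();
        (*dict)[name] = trim(data.substr(colon + 1, eol - colon - 1));
        i = (eol == data.size()) ? eol : eol + 2;
    }
    return true;
}

int main() {
    std::map<std::string, std::string> params;
    parseParams("wfd_client_rtp_ports: RTP/AVP/UDP;unicast 19000 0 mode=play\r\n",
                &params);
    std::cout << params["wfd_client_rtp_ports"] << "\n";
    return 0;
}
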
diff --git a/media/libstagefright/wifi-display/Parameters.h b/media/libstagefright/wifi-display/Parameters.h
deleted file mode 100644
index a5e787e..0000000
--- a/media/libstagefright/wifi-display/Parameters.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/foundation/AString.h>
-#include <utils/KeyedVector.h>
-#include <utils/RefBase.h>
-
-namespace android {
-
-struct Parameters : public RefBase {
-    static sp<Parameters> Parse(const char *data, size_t size);
-
-    bool findParameter(const char *name, AString *value) const;
-
-protected:
-    virtual ~Parameters();
-
-private:
-    KeyedVector<AString, AString> mDict;
-
-    Parameters();
-    status_t parse(const char *data, size_t size);
-
-    DISALLOW_EVIL_CONSTRUCTORS(Parameters);
-};
-
-}  // namespace android
diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp
deleted file mode 100644
index dbc511c..0000000
--- a/media/libstagefright/wifi-display/VideoFormats.cpp
+++ /dev/null
@@ -1,550 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "VideoFormats"
-#include <utils/Log.h>
-
-#include "VideoFormats.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-
-namespace android {
-
-// static
-const VideoFormats::config_t VideoFormats::mResolutionTable[][32] = {
-    {
-        // CEA Resolutions
-        { 640, 480, 60, false, 0, 0},
-        { 720, 480, 60, false, 0, 0},
-        { 720, 480, 60, true, 0, 0},
-        { 720, 576, 50, false, 0, 0},
-        { 720, 576, 50, true, 0, 0},
-        { 1280, 720, 30, false, 0, 0},
-        { 1280, 720, 60, false, 0, 0},
-        { 1920, 1080, 30, false, 0, 0},
-        { 1920, 1080, 60, false, 0, 0},
-        { 1920, 1080, 60, true, 0, 0},
-        { 1280, 720, 25, false, 0, 0},
-        { 1280, 720, 50, false, 0, 0},
-        { 1920, 1080, 25, false, 0, 0},
-        { 1920, 1080, 50, false, 0, 0},
-        { 1920, 1080, 50, true, 0, 0},
-        { 1280, 720, 24, false, 0, 0},
-        { 1920, 1080, 24, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-    },
-    {
-        // VESA Resolutions
-        { 800, 600, 30, false, 0, 0},
-        { 800, 600, 60, false, 0, 0},
-        { 1024, 768, 30, false, 0, 0},
-        { 1024, 768, 60, false, 0, 0},
-        { 1152, 864, 30, false, 0, 0},
-        { 1152, 864, 60, false, 0, 0},
-        { 1280, 768, 30, false, 0, 0},
-        { 1280, 768, 60, false, 0, 0},
-        { 1280, 800, 30, false, 0, 0},
-        { 1280, 800, 60, false, 0, 0},
-        { 1360, 768, 30, false, 0, 0},
-        { 1360, 768, 60, false, 0, 0},
-        { 1366, 768, 30, false, 0, 0},
-        { 1366, 768, 60, false, 0, 0},
-        { 1280, 1024, 30, false, 0, 0},
-        { 1280, 1024, 60, false, 0, 0},
-        { 1400, 1050, 30, false, 0, 0},
-        { 1400, 1050, 60, false, 0, 0},
-        { 1440, 900, 30, false, 0, 0},
-        { 1440, 900, 60, false, 0, 0},
-        { 1600, 900, 30, false, 0, 0},
-        { 1600, 900, 60, false, 0, 0},
-        { 1600, 1200, 30, false, 0, 0},
-        { 1600, 1200, 60, false, 0, 0},
-        { 1680, 1024, 30, false, 0, 0},
-        { 1680, 1024, 60, false, 0, 0},
-        { 1680, 1050, 30, false, 0, 0},
-        { 1680, 1050, 60, false, 0, 0},
-        { 1920, 1200, 30, false, 0, 0},
-        { 1920, 1200, 60, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-    },
-    {
-        // HH Resolutions
-        { 800, 480, 30, false, 0, 0},
-        { 800, 480, 60, false, 0, 0},
-        { 854, 480, 30, false, 0, 0},
-        { 854, 480, 60, false, 0, 0},
-        { 864, 480, 30, false, 0, 0},
-        { 864, 480, 60, false, 0, 0},
-        { 640, 360, 30, false, 0, 0},
-        { 640, 360, 60, false, 0, 0},
-        { 960, 540, 30, false, 0, 0},
-        { 960, 540, 60, false, 0, 0},
-        { 848, 480, 30, false, 0, 0},
-        { 848, 480, 60, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-        { 0, 0, 0, false, 0, 0},
-    }
-};
-
-VideoFormats::VideoFormats() {
-    memcpy(mConfigs, mResolutionTable, sizeof(mConfigs));
-
-    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
-        mResolutionEnabled[i] = 0;
-    }
-
-    setNativeResolution(RESOLUTION_CEA, 0);  // default to 640x480 p60
-}
-
-void VideoFormats::setNativeResolution(ResolutionType type, size_t index) {
-    CHECK_LT(type, kNumResolutionTypes);
-    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
-
-    mNativeType = type;
-    mNativeIndex = index;
-
-    setResolutionEnabled(type, index);
-}
-
-void VideoFormats::getNativeResolution(
-        ResolutionType *type, size_t *index) const {
-    *type = mNativeType;
-    *index = mNativeIndex;
-}
-
-void VideoFormats::disableAll() {
-    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
-        mResolutionEnabled[i] = 0;
-        for (size_t j = 0; j < 32; j++) {
-            mConfigs[i][j].profile = mConfigs[i][j].level = 0;
-        }
-    }
-}
-
-void VideoFormats::enableAll() {
-    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
-        mResolutionEnabled[i] = 0xffffffff;
-        for (size_t j = 0; j < 32; j++) {
-            mConfigs[i][j].profile = (1ul << PROFILE_CBP);
-            mConfigs[i][j].level = (1ul << LEVEL_31);
-        }
-    }
-}
-
-void VideoFormats::enableResolutionUpto(
-        ResolutionType type, size_t index,
-        ProfileType profile, LevelType level) {
-    size_t width, height, fps, score;
-    bool interlaced;
-    if (!GetConfiguration(type, index, &width, &height,
-            &fps, &interlaced)) {
-        ALOGE("Maximum resolution not found!");
-        return;
-    }
-    score = width * height * fps * (!interlaced + 1);
-    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
-        for (size_t j = 0; j < 32; j++) {
-            if (GetConfiguration((ResolutionType)i, j,
-                    &width, &height, &fps, &interlaced)
-                    && score >= width * height * fps * (!interlaced + 1)) {
-                setResolutionEnabled((ResolutionType)i, j);
-                setProfileLevel((ResolutionType)i, j, profile, level);
-            }
-        }
-    }
-}
-
-void VideoFormats::setResolutionEnabled(
-        ResolutionType type, size_t index, bool enabled) {
-    CHECK_LT(type, kNumResolutionTypes);
-    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
-
-    if (enabled) {
-        mResolutionEnabled[type] |= (1ul << index);
-        mConfigs[type][index].profile = (1ul << PROFILE_CBP);
-        mConfigs[type][index].level = (1ul << LEVEL_31);
-    } else {
-        mResolutionEnabled[type] &= ~(1ul << index);
-        mConfigs[type][index].profile = 0;
-        mConfigs[type][index].level = 0;
-    }
-}
-
-void VideoFormats::setProfileLevel(
-        ResolutionType type, size_t index,
-        ProfileType profile, LevelType level) {
-    CHECK_LT(type, kNumResolutionTypes);
-    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
-
-    mConfigs[type][index].profile = (1ul << profile);
-    mConfigs[type][index].level = (1ul << level);
-}
-
-void VideoFormats::getProfileLevel(
-        ResolutionType type, size_t index,
-        ProfileType *profile, LevelType *level) const{
-    CHECK_LT(type, kNumResolutionTypes);
-    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
-
-    int i, bestProfile = -1, bestLevel = -1;
-
-    for (i = 0; i < kNumProfileTypes; ++i) {
-        if (mConfigs[type][index].profile & (1ul << i)) {
-            bestProfile = i;
-        }
-    }
-
-    for (i = 0; i < kNumLevelTypes; ++i) {
-        if (mConfigs[type][index].level & (1ul << i)) {
-            bestLevel = i;
-        }
-    }
-
-    if (bestProfile == -1 || bestLevel == -1) {
-        ALOGE("Profile or level not set for resolution type %d, index %zu",
-                type, index);
-        bestProfile = PROFILE_CBP;
-        bestLevel = LEVEL_31;
-    }
-
-    *profile = (ProfileType) bestProfile;
-    *level = (LevelType) bestLevel;
-}
-
-bool VideoFormats::isResolutionEnabled(
-        ResolutionType type, size_t index) const {
-    CHECK_LT(type, kNumResolutionTypes);
-    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
-
-    return mResolutionEnabled[type] & (1ul << index);
-}
-
-// static
-bool VideoFormats::GetConfiguration(
-        ResolutionType type,
-        size_t index,
-        size_t *width, size_t *height, size_t *framesPerSecond,
-        bool *interlaced) {
-    CHECK_LT(type, kNumResolutionTypes);
-
-    if (index >= 32) {
-        return false;
-    }
-
-    const config_t *config = &mResolutionTable[type][index];
-
-    if (config->width == 0) {
-        return false;
-    }
-
-    if (width) {
-        *width = config->width;
-    }
-
-    if (height) {
-        *height = config->height;
-    }
-
-    if (framesPerSecond) {
-        *framesPerSecond = config->framesPerSecond;
-    }
-
-    if (interlaced) {
-        *interlaced = config->interlaced;
-    }
-
-    return true;
-}
-
-bool VideoFormats::parseH264Codec(const char *spec) {
-    unsigned profile, level, res[3];
-
-    if (sscanf(
-            spec,
-            "%02x %02x %08X %08X %08X",
-            &profile,
-            &level,
-            &res[0],
-            &res[1],
-            &res[2]) != 5) {
-        return false;
-    }
-
-    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
-        for (size_t j = 0; j < 32; ++j) {
-            if (res[i] & (1ul << j)){
-                mResolutionEnabled[i] |= (1ul << j);
-                if (profile > mConfigs[i][j].profile) {
-                    // prefer higher profile (even if level is lower)
-                    mConfigs[i][j].profile = profile;
-                    mConfigs[i][j].level = level;
-                } else if (profile == mConfigs[i][j].profile &&
-                           level > mConfigs[i][j].level) {
-                    mConfigs[i][j].level = level;
-                }
-            }
-        }
-    }
-
-    return true;
-}
-
-// static
-bool VideoFormats::GetProfileLevel(
-        ProfileType profile, LevelType level, unsigned *profileIdc,
-        unsigned *levelIdc, unsigned *constraintSet) {
-    CHECK_LT(profile, kNumProfileTypes);
-    CHECK_LT(level, kNumLevelTypes);
-
-    static const unsigned kProfileIDC[kNumProfileTypes] = {
-        66,     // PROFILE_CBP
-        100,    // PROFILE_CHP
-    };
-
-    static const unsigned kLevelIDC[kNumLevelTypes] = {
-        31,     // LEVEL_31
-        32,     // LEVEL_32
-        40,     // LEVEL_40
-        41,     // LEVEL_41
-        42,     // LEVEL_42
-    };
-
-    static const unsigned kConstraintSet[kNumProfileTypes] = {
-        0xc0,   // PROFILE_CBP
-        0x0c,   // PROFILE_CHP
-    };
-
-    if (profileIdc) {
-        *profileIdc = kProfileIDC[profile];
-    }
-
-    if (levelIdc) {
-        *levelIdc = kLevelIDC[level];
-    }
-
-    if (constraintSet) {
-        *constraintSet = kConstraintSet[profile];
-    }
-
-    return true;
-}
-
-bool VideoFormats::parseFormatSpec(const char *spec) {
-    CHECK_EQ(kNumResolutionTypes, 3);
-
-    disableAll();
-
-    unsigned native, dummy;
-    size_t size = strlen(spec);
-    size_t offset = 0;
-
-    if (sscanf(spec, "%02x %02x ", &native, &dummy) != 2) {
-        return false;
-    }
-
-    offset += 6; // skip native and preferred-display-mode-supported
-    CHECK_LE(offset + 58, size);
-    while (offset < size) {
-        parseH264Codec(spec + offset);
-        offset += 60; // skip H.264-codec + ", "
-    }
-
-    mNativeIndex = native >> 3;
-    mNativeType = (ResolutionType)(native & 7);
-
-    bool success;
-    if (mNativeType >= kNumResolutionTypes) {
-        success = false;
-    } else {
-        success = GetConfiguration(
-                mNativeType, mNativeIndex, NULL, NULL, NULL, NULL);
-    }
-
-    if (!success) {
-        ALOGW("sink advertised an illegal native resolution, fortunately "
-              "this value is ignored for the time being...");
-    }
-
-    return true;
-}
-
-AString VideoFormats::getFormatSpec(bool forM4Message) const {
-    CHECK_EQ(kNumResolutionTypes, 3);
-
-    // wfd_video_formats:
-    // 1 byte "native"
-    // 1 byte "preferred-display-mode-supported" 0 or 1
-    // one or more avc codec structures
-    //   1 byte profile
-    //   1 byte level
-    //   4 byte CEA mask
-    //   4 byte VESA mask
-    //   4 byte HH mask
-    //   1 byte latency
-    //   2 byte min-slice-size
-    //   2 byte slice-enc-params
-    //   1 byte framerate-control-support
-    //   max-hres (none or 2 byte)
-    //   max-vres (none or 2 byte)
-
-    return AStringPrintf(
-            "%02x 00 %02x %02x %08x %08x %08x 00 0000 0000 00 none none",
-            forM4Message ? 0x00 : ((mNativeIndex << 3) | mNativeType),
-            mConfigs[mNativeType][mNativeIndex].profile,
-            mConfigs[mNativeType][mNativeIndex].level,
-            mResolutionEnabled[0],
-            mResolutionEnabled[1],
-            mResolutionEnabled[2]);
-}
-
-// static
-bool VideoFormats::PickBestFormat(
-        const VideoFormats &sinkSupported,
-        const VideoFormats &sourceSupported,
-        ResolutionType *chosenType,
-        size_t *chosenIndex,
-        ProfileType *chosenProfile,
-        LevelType *chosenLevel) {
-#if 0
-    // Support for the native format is a great idea; the spec includes
-    // these features, but nobody supports it and the tests don't validate it.
-
-    ResolutionType nativeType;
-    size_t nativeIndex;
-    sinkSupported.getNativeResolution(&nativeType, &nativeIndex);
-    if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
-        if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
-            ALOGI("Choosing sink's native resolution");
-            *chosenType = nativeType;
-            *chosenIndex = nativeIndex;
-            return true;
-        }
-    } else {
-        ALOGW("Sink advertised native resolution that it doesn't "
-              "actually support... ignoring");
-    }
-
-    sourceSupported.getNativeResolution(&nativeType, &nativeIndex);
-    if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
-        if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
-            ALOGI("Choosing source's native resolution");
-            *chosenType = nativeType;
-            *chosenIndex = nativeIndex;
-            return true;
-        }
-    } else {
-        ALOGW("Source advertised native resolution that it doesn't "
-              "actually support... ignoring");
-    }
-#endif
-
-    bool first = true;
-    uint32_t bestScore = 0;
-    size_t bestType = 0;
-    size_t bestIndex = 0;
-    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
-        for (size_t j = 0; j < 32; ++j) {
-            size_t width, height, framesPerSecond;
-            bool interlaced;
-            if (!GetConfiguration(
-                        (ResolutionType)i,
-                        j,
-                        &width, &height, &framesPerSecond, &interlaced)) {
-                break;
-            }
-
-            if (!sinkSupported.isResolutionEnabled((ResolutionType)i, j)
-                    || !sourceSupported.isResolutionEnabled(
-                        (ResolutionType)i, j)) {
-                continue;
-            }
-
-            ALOGV("type %zu, index %zu, %zu x %zu %c%zu supported",
-                  i, j, width, height, interlaced ? 'i' : 'p', framesPerSecond);
-
-            uint32_t score = width * height * framesPerSecond;
-            if (!interlaced) {
-                score *= 2;
-            }
-
-            if (first || score > bestScore) {
-                bestScore = score;
-                bestType = i;
-                bestIndex = j;
-
-                first = false;
-            }
-        }
-    }
-
-    if (first) {
-        return false;
-    }
-
-    *chosenType = (ResolutionType)bestType;
-    *chosenIndex = bestIndex;
-
-    // Pick the best profile/level supported by both sink and source.
-    ProfileType srcProfile, sinkProfile;
-    LevelType srcLevel, sinkLevel;
-    sourceSupported.getProfileLevel(
-                        (ResolutionType)bestType, bestIndex,
-                        &srcProfile, &srcLevel);
-    sinkSupported.getProfileLevel(
-                        (ResolutionType)bestType, bestIndex,
-                        &sinkProfile, &sinkLevel);
-    *chosenProfile = srcProfile < sinkProfile ? srcProfile : sinkProfile;
-    *chosenLevel = srcLevel < sinkLevel ? srcLevel : sinkLevel;
-
-    return true;
-}
-
-}  // namespace android
-
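
Two worked numbers from the deleted VideoFormats logic: how parseFormatSpec() splits the wfd_video_formats "native" byte into a table selector and index, and how PickBestFormat() ranks modes (width * height * fps, doubled for progressive). The sample native value 0x30 is mine, not taken from the source.

#include <cstdio>

int main() {
    // "native" byte decode, as in parseFormatSpec() above.
    unsigned native = 0x30;                 // example value
    unsigned index = native >> 3;           // 6
    unsigned type  = native & 7;            // 0 -> CEA table
    printf("table %u, index %u\n", type, index);  // CEA index 6 = 1280x720 p60

    // PickBestFormat() scoring: width * height * fps, doubled if progressive.
    unsigned score720p60  = 1280 *  720 * 60 * 2;   // 110592000
    unsigned score1080i60 = 1920 * 1080 * 60;       // 124416000, no doubling
    printf("%u vs %u -> %s wins\n", score720p60, score1080i60,
           score720p60 > score1080i60 ? "720p60" : "1080i60");
    return 0;
}
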
diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h
deleted file mode 100644
index fd38fd1..0000000
--- a/media/libstagefright/wifi-display/VideoFormats.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef VIDEO_FORMATS_H_
-
-#define VIDEO_FORMATS_H_
-
-#include <media/stagefright/foundation/ABase.h>
-
-#include <stdint.h>
-
-namespace android {
-
-struct AString;
-
-// This class encapsulates the video resolution capabilities of a wfd source
-// or sink as outlined in the wfd specs. Currently three sets of resolutions
-// are specified, each of which supports up to 32 resolutions.
-// In addition to its capabilities each sink/source also publishes its
-// "native" resolution, presumably one that is preferred among all others
-// because it wouldn't require any scaling and directly corresponds to the
-// display capabilities/pixels.
-struct VideoFormats {
-    VideoFormats();
-
-    struct config_t {
-        size_t width, height, framesPerSecond;
-        bool interlaced;
-        unsigned char profile, level;
-    };
-
-    enum ProfileType {
-        PROFILE_CBP = 0,
-        PROFILE_CHP,
-        kNumProfileTypes,
-    };
-
-    enum LevelType {
-        LEVEL_31 = 0,
-        LEVEL_32,
-        LEVEL_40,
-        LEVEL_41,
-        LEVEL_42,
-        kNumLevelTypes,
-    };
-
-    enum ResolutionType {
-        RESOLUTION_CEA,
-        RESOLUTION_VESA,
-        RESOLUTION_HH,
-        kNumResolutionTypes,
-    };
-
-    void setNativeResolution(ResolutionType type, size_t index);
-    void getNativeResolution(ResolutionType *type, size_t *index) const;
-
-    void disableAll();
-    void enableAll();
-    void enableResolutionUpto(
-            ResolutionType type, size_t index,
-            ProfileType profile, LevelType level);
-
-    void setResolutionEnabled(
-            ResolutionType type, size_t index, bool enabled = true);
-
-    bool isResolutionEnabled(ResolutionType type, size_t index) const;
-
-    void setProfileLevel(
-            ResolutionType type, size_t index,
-            ProfileType profile, LevelType level);
-
-    void getProfileLevel(
-            ResolutionType type, size_t index,
-            ProfileType *profile, LevelType *level) const;
-
-    static bool GetConfiguration(
-            ResolutionType type, size_t index,
-            size_t *width, size_t *height, size_t *framesPerSecond,
-            bool *interlaced);
-
-    static bool GetProfileLevel(
-            ProfileType profile, LevelType level,
-            unsigned *profileIdc, unsigned *levelIdc,
-            unsigned *constraintSet);
-
-    bool parseFormatSpec(const char *spec);
-    AString getFormatSpec(bool forM4Message = false) const;
-
-    static bool PickBestFormat(
-            const VideoFormats &sinkSupported,
-            const VideoFormats &sourceSupported,
-            ResolutionType *chosenType,
-            size_t *chosenIndex,
-            ProfileType *chosenProfile,
-            LevelType *chosenLevel);
-
-private:
-    bool parseH264Codec(const char *spec);
-    ResolutionType mNativeType;
-    size_t mNativeIndex;
-
-    uint32_t mResolutionEnabled[kNumResolutionTypes];
-    static const config_t mResolutionTable[kNumResolutionTypes][32];
-    config_t mConfigs[kNumResolutionTypes][32];
-
-    DISALLOW_EVIL_CONSTRUCTORS(VideoFormats);
-};
-
-}  // namespace android
-
-#endif  // VIDEO_FORMATS_H_
-
diff --git a/media/libstagefright/wifi-display/rtp/RTPBase.h b/media/libstagefright/wifi-display/rtp/RTPBase.h
deleted file mode 100644
index 194f1ee..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPBase.h
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_BASE_H_
-
-#define RTP_BASE_H_
-
-namespace android {
-
-struct RTPBase {
-    enum PacketizationMode {
-        PACKETIZATION_TRANSPORT_STREAM,
-        PACKETIZATION_H264,
-        PACKETIZATION_AAC,
-        PACKETIZATION_NONE,
-    };
-
-    enum TransportMode {
-        TRANSPORT_UNDEFINED,
-        TRANSPORT_NONE,
-        TRANSPORT_UDP,
-        TRANSPORT_TCP,
-        TRANSPORT_TCP_INTERLEAVED,
-    };
-
-    // Really UDP _payload_ size
-    const unsigned int kMaxUDPPacketSize = 1472;   // 1472 good, 1473 bad on Android@Home
-
-    static int32_t PickRandomRTPPort();
-};
-
-}  // namespace android
-
-#endif  // RTP_BASE_H_
-
-
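
The 1472-byte limit above is the usual Ethernet-MTU arithmetic (1500 minus 20 bytes of IPv4 header and 8 bytes of UDP header). Combined with the 12-byte RTP header and 188-byte TS packets used by RTPSender, it presumably also yields the cap behind kMaxNumTSPacketsPerRTPPacket, whose actual value is not visible in this diff; the arithmetic below is mine.

#include <cstdio>

int main() {
    const int kEthernetMTU = 1500;               // typical Ethernet MTU
    const int kIPv4Header  = 20, kUDPHeader = 8;
    const int maxUdpPayload = kEthernetMTU - kIPv4Header - kUDPHeader;  // 1472
    const int kRTPHeader = 12, kTSPacket = 188;
    printf("UDP payload budget: %d bytes\n", maxUdpPayload);
    printf("TS packets per RTP packet: %d\n",
           (maxUdpPayload - kRTPHeader) / kTSPacket);  // 7
    return 0;
}
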
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
deleted file mode 100644
index ca9fdd2..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp
+++ /dev/null
@@ -1,808 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RTPSender"
-#include <utils/Log.h>
-
-#include "RTPSender.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-#include "include/avc_utils.h"
-
-namespace android {
-
-RTPSender::RTPSender(
-        const sp<ANetworkSession> &netSession,
-        const sp<AMessage> &notify)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mRTPMode(TRANSPORT_UNDEFINED),
-      mRTCPMode(TRANSPORT_UNDEFINED),
-      mRTPSessionID(0),
-      mRTCPSessionID(0),
-      mRTPConnected(false),
-      mRTCPConnected(false),
-      mLastNTPTime(0),
-      mLastRTPTime(0),
-      mNumRTPSent(0),
-      mNumRTPOctetsSent(0),
-      mNumSRsSent(0),
-      mRTPSeqNo(0),
-      mHistorySize(0) {
-}
-
-RTPSender::~RTPSender() {
-    if (mRTCPSessionID != 0) {
-        mNetSession->destroySession(mRTCPSessionID);
-        mRTCPSessionID = 0;
-    }
-
-    if (mRTPSessionID != 0) {
-        mNetSession->destroySession(mRTPSessionID);
-        mRTPSessionID = 0;
-    }
-}
-
-// static
-int32_t RTPBase::PickRandomRTPPort() {
-    // Pick an even integer in range [1024, 65534)
-
-    static const size_t kRange = (65534 - 1024) / 2;
-
-    return (int32_t)(((float)(kRange + 1) * rand()) / RAND_MAX) * 2 + 1024;
-}
-
-status_t RTPSender::initAsync(
-        const char *remoteHost,
-        int32_t remoteRTPPort,
-        TransportMode rtpMode,
-        int32_t remoteRTCPPort,
-        TransportMode rtcpMode,
-        int32_t *outLocalRTPPort) {
-    if (mRTPMode != TRANSPORT_UNDEFINED
-            || rtpMode == TRANSPORT_UNDEFINED
-            || rtpMode == TRANSPORT_NONE
-            || rtcpMode == TRANSPORT_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED);
-    CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED);
-
-    if ((rtcpMode == TRANSPORT_NONE && remoteRTCPPort >= 0)
-            || (rtcpMode != TRANSPORT_NONE && remoteRTCPPort < 0)) {
-        return INVALID_OPERATION;
-    }
-
-    sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, this);
-
-    sp<AMessage> rtcpNotify;
-    if (remoteRTCPPort >= 0) {
-        rtcpNotify = new AMessage(kWhatRTCPNotify, this);
-    }
-
-    CHECK_EQ(mRTPSessionID, 0);
-    CHECK_EQ(mRTCPSessionID, 0);
-
-    int32_t localRTPPort;
-
-    for (;;) {
-        localRTPPort = PickRandomRTPPort();
-
-        status_t err;
-        if (rtpMode == TRANSPORT_UDP) {
-            err = mNetSession->createUDPSession(
-                    localRTPPort,
-                    remoteHost,
-                    remoteRTPPort,
-                    rtpNotify,
-                    &mRTPSessionID);
-        } else {
-            CHECK_EQ(rtpMode, TRANSPORT_TCP);
-            err = mNetSession->createTCPDatagramSession(
-                    localRTPPort,
-                    remoteHost,
-                    remoteRTPPort,
-                    rtpNotify,
-                    &mRTPSessionID);
-        }
-
-        if (err != OK) {
-            continue;
-        }
-
-        if (remoteRTCPPort < 0) {
-            break;
-        }
-
-        if (rtcpMode == TRANSPORT_UDP) {
-            err = mNetSession->createUDPSession(
-                    localRTPPort + 1,
-                    remoteHost,
-                    remoteRTCPPort,
-                    rtcpNotify,
-                    &mRTCPSessionID);
-        } else {
-            CHECK_EQ(rtcpMode, TRANSPORT_TCP);
-            err = mNetSession->createTCPDatagramSession(
-                    localRTPPort + 1,
-                    remoteHost,
-                    remoteRTCPPort,
-                    rtcpNotify,
-                    &mRTCPSessionID);
-        }
-
-        if (err == OK) {
-            break;
-        }
-
-        mNetSession->destroySession(mRTPSessionID);
-        mRTPSessionID = 0;
-    }
-
-    if (rtpMode == TRANSPORT_UDP) {
-        mRTPConnected = true;
-    }
-
-    if (rtcpMode == TRANSPORT_UDP) {
-        mRTCPConnected = true;
-    }
-
-    mRTPMode = rtpMode;
-    mRTCPMode = rtcpMode;
-    *outLocalRTPPort = localRTPPort;
-
-    if (mRTPMode == TRANSPORT_UDP
-            && (mRTCPMode == TRANSPORT_UDP || mRTCPMode == TRANSPORT_NONE)) {
-        notifyInitDone(OK);
-    }
-
-    return OK;
-}
-
-status_t RTPSender::queueBuffer(
-        const sp<ABuffer> &buffer, uint8_t packetType, PacketizationMode mode) {
-    status_t err;
-
-    switch (mode) {
-        case PACKETIZATION_NONE:
-            err = queueRawPacket(buffer, packetType);
-            break;
-
-        case PACKETIZATION_TRANSPORT_STREAM:
-            err = queueTSPackets(buffer, packetType);
-            break;
-
-        case PACKETIZATION_H264:
-            err  = queueAVCBuffer(buffer, packetType);
-            break;
-
-        default:
-            TRESPASS();
-    }
-
-    return err;
-}
-
-status_t RTPSender::queueRawPacket(
-        const sp<ABuffer> &packet, uint8_t packetType) {
-    CHECK_LE(packet->size(), kMaxUDPPacketSize - 12);
-
-    int64_t timeUs;
-    CHECK(packet->meta()->findInt64("timeUs", &timeUs));
-
-    sp<ABuffer> udpPacket = new ABuffer(12 + packet->size());
-
-    udpPacket->setInt32Data(mRTPSeqNo);
-
-    uint8_t *rtp = udpPacket->data();
-    rtp[0] = 0x80;
-    rtp[1] = packetType;
-
-    rtp[2] = (mRTPSeqNo >> 8) & 0xff;
-    rtp[3] = mRTPSeqNo & 0xff;
-    ++mRTPSeqNo;
-
-    uint32_t rtpTime = (timeUs * 9) / 100ll;
-
-    rtp[4] = rtpTime >> 24;
-    rtp[5] = (rtpTime >> 16) & 0xff;
-    rtp[6] = (rtpTime >> 8) & 0xff;
-    rtp[7] = rtpTime & 0xff;
-
-    rtp[8] = kSourceID >> 24;
-    rtp[9] = (kSourceID >> 16) & 0xff;
-    rtp[10] = (kSourceID >> 8) & 0xff;
-    rtp[11] = kSourceID & 0xff;
-
-    memcpy(&rtp[12], packet->data(), packet->size());
-
-    return sendRTPPacket(
-            udpPacket,
-            true /* storeInHistory */,
-            true /* timeValid */,
-            ALooper::GetNowUs());
-}
-
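
queueRawPacket(), queueTSPackets() and queueAVCBuffer() above all hand-assemble the same 12-byte RTP header. Here is a standalone version of that packing; writeRtpHeader and the sample SSRC are mine, while the real kSourceID constant lives in RTPSender.h, which is not part of this hunk.

#include <cstdint>
#include <cstdio>

// Builds the header exactly as the deleted code does: V=2, no padding,
// extension or CSRC, payload type, sequence number, a 90 kHz timestamp
// derived from microseconds (timeUs * 9 / 100), and the SSRC.
static void writeRtpHeader(uint8_t rtp[12], uint8_t packetType,
                           uint16_t seqNo, int64_t timeUs, uint32_t ssrc) {
    uint32_t rtpTime = (uint32_t)((timeUs * 9) / 100);  // us -> 90 kHz ticks
    rtp[0] = 0x80;                      // V=2, P=0, X=0, CC=0
    rtp[1] = packetType;                // M=0 here; queueAVCBuffer sets M on the last packet
    rtp[2] = (seqNo >> 8) & 0xff;
    rtp[3] = seqNo & 0xff;
    rtp[4] = rtpTime >> 24;
    rtp[5] = (rtpTime >> 16) & 0xff;
    rtp[6] = (rtpTime >> 8) & 0xff;
    rtp[7] = rtpTime & 0xff;
    rtp[8] = ssrc >> 24;
    rtp[9] = (ssrc >> 16) & 0xff;
    rtp[10] = (ssrc >> 8) & 0xff;
    rtp[11] = ssrc & 0xff;
}

int main() {
    uint8_t hdr[12];
    writeRtpHeader(hdr, 33 /* MPEG-TS */, 0, 1000000 /* 1 s */, 0xdeadbeef);
    for (int i = 0; i < 12; ++i) printf("%02x ", hdr[i]);
    printf("\n");  // timestamp bytes encode 90000 (1 s at 90 kHz)
    return 0;
}
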
-status_t RTPSender::queueTSPackets(
-        const sp<ABuffer> &tsPackets, uint8_t packetType) {
-    CHECK_EQ(0u, tsPackets->size() % 188);
-
-    int64_t timeUs;
-    CHECK(tsPackets->meta()->findInt64("timeUs", &timeUs));
-
-    size_t srcOffset = 0;
-    while (srcOffset < tsPackets->size()) {
-        sp<ABuffer> udpPacket =
-            new ABuffer(12 + kMaxNumTSPacketsPerRTPPacket * 188);
-
-        udpPacket->setInt32Data(mRTPSeqNo);
-
-        uint8_t *rtp = udpPacket->data();
-        rtp[0] = 0x80;
-        rtp[1] = packetType;
-
-        rtp[2] = (mRTPSeqNo >> 8) & 0xff;
-        rtp[3] = mRTPSeqNo & 0xff;
-        ++mRTPSeqNo;
-
-        int64_t nowUs = ALooper::GetNowUs();
-        uint32_t rtpTime = (nowUs * 9) / 100ll;
-
-        rtp[4] = rtpTime >> 24;
-        rtp[5] = (rtpTime >> 16) & 0xff;
-        rtp[6] = (rtpTime >> 8) & 0xff;
-        rtp[7] = rtpTime & 0xff;
-
-        rtp[8] = kSourceID >> 24;
-        rtp[9] = (kSourceID >> 16) & 0xff;
-        rtp[10] = (kSourceID >> 8) & 0xff;
-        rtp[11] = kSourceID & 0xff;
-
-        size_t numTSPackets = (tsPackets->size() - srcOffset) / 188;
-        if (numTSPackets > kMaxNumTSPacketsPerRTPPacket) {
-            numTSPackets = kMaxNumTSPacketsPerRTPPacket;
-        }
-
-        memcpy(&rtp[12], tsPackets->data() + srcOffset, numTSPackets * 188);
-
-        udpPacket->setRange(0, 12 + numTSPackets * 188);
-
-        srcOffset += numTSPackets * 188;
-        bool isLastPacket = (srcOffset == tsPackets->size());
-
-        status_t err = sendRTPPacket(
-                udpPacket,
-                true /* storeInHistory */,
-                isLastPacket /* timeValid */,
-                timeUs);
-
-        if (err != OK) {
-            return err;
-        }
-    }
-
-    return OK;
-}
-
-status_t RTPSender::queueAVCBuffer(
-        const sp<ABuffer> &accessUnit, uint8_t packetType) {
-    int64_t timeUs;
-    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-    uint32_t rtpTime = (timeUs * 9 / 100ll);
-
-    List<sp<ABuffer> > packets;
-
-    sp<ABuffer> out = new ABuffer(kMaxUDPPacketSize);
-    size_t outBytesUsed = 12;  // Placeholder for RTP header.
-
-    const uint8_t *data = accessUnit->data();
-    size_t size = accessUnit->size();
-    const uint8_t *nalStart;
-    size_t nalSize;
-    while (getNextNALUnit(
-                &data, &size, &nalStart, &nalSize,
-                true /* startCodeFollows */) == OK) {
-        size_t bytesNeeded = nalSize + 2;
-        if (outBytesUsed == 12) {
-            ++bytesNeeded;
-        }
-
-        if (outBytesUsed + bytesNeeded > out->capacity()) {
-            bool emitSingleNALPacket = false;
-
-            if (outBytesUsed == 12
-                    && outBytesUsed + nalSize <= out->capacity()) {
-                // We haven't emitted anything into the current packet yet and
-                // this NAL unit fits into a single-NAL-unit-packet while
-                // it wouldn't have fit as part of a STAP-A packet.
-
-                memcpy(out->data() + outBytesUsed, nalStart, nalSize);
-                outBytesUsed += nalSize;
-
-                emitSingleNALPacket = true;
-            }
-
-            if (outBytesUsed > 12) {
-                out->setRange(0, outBytesUsed);
-                packets.push_back(out);
-                out = new ABuffer(kMaxUDPPacketSize);
-                outBytesUsed = 12;  // Placeholder for RTP header
-            }
-
-            if (emitSingleNALPacket) {
-                continue;
-            }
-        }
-
-        if (outBytesUsed + bytesNeeded <= out->capacity()) {
-            uint8_t *dst = out->data() + outBytesUsed;
-
-            if (outBytesUsed == 12) {
-                *dst++ = 24;  // STAP-A header
-            }
-
-            *dst++ = (nalSize >> 8) & 0xff;
-            *dst++ = nalSize & 0xff;
-            memcpy(dst, nalStart, nalSize);
-
-            outBytesUsed += bytesNeeded;
-            continue;
-        }
-
-        // This single NAL unit does not fit into a single RTP packet,
-        // we need to emit an FU-A.
-
-        CHECK_EQ(outBytesUsed, 12u);
-
-        uint8_t nalType = nalStart[0] & 0x1f;
-        uint8_t nri = (nalStart[0] >> 5) & 3;
-
-        size_t srcOffset = 1;
-        while (srcOffset < nalSize) {
-            size_t copy = out->capacity() - outBytesUsed - 2;
-            if (copy > nalSize - srcOffset) {
-                copy = nalSize - srcOffset;
-            }
-
-            uint8_t *dst = out->data() + outBytesUsed;
-            dst[0] = (nri << 5) | 28;
-
-            dst[1] = nalType;
-
-            if (srcOffset == 1) {
-                dst[1] |= 0x80;
-            }
-
-            if (srcOffset + copy == nalSize) {
-                dst[1] |= 0x40;
-            }
-
-            memcpy(&dst[2], nalStart + srcOffset, copy);
-            srcOffset += copy;
-
-            out->setRange(0, outBytesUsed + copy + 2);
-
-            packets.push_back(out);
-            out = new ABuffer(kMaxUDPPacketSize);
-            outBytesUsed = 12;  // Placeholder for RTP header
-        }
-    }
-
-    if (outBytesUsed > 12) {
-        out->setRange(0, outBytesUsed);
-        packets.push_back(out);
-    }
-
-    while (!packets.empty()) {
-        sp<ABuffer> out = *packets.begin();
-        packets.erase(packets.begin());
-
-        out->setInt32Data(mRTPSeqNo);
-
-        bool last = packets.empty();
-
-        uint8_t *dst = out->data();
-
-        dst[0] = 0x80;
-
-        dst[1] = packetType;
-        if (last) {
-            dst[1] |= 1 << 7;  // M-bit
-        }
-
-        dst[2] = (mRTPSeqNo >> 8) & 0xff;
-        dst[3] = mRTPSeqNo & 0xff;
-        ++mRTPSeqNo;
-
-        dst[4] = rtpTime >> 24;
-        dst[5] = (rtpTime >> 16) & 0xff;
-        dst[6] = (rtpTime >> 8) & 0xff;
-        dst[7] = rtpTime & 0xff;
-        dst[8] = kSourceID >> 24;
-        dst[9] = (kSourceID >> 16) & 0xff;
-        dst[10] = (kSourceID >> 8) & 0xff;
-        dst[11] = kSourceID & 0xff;
-
-        status_t err = sendRTPPacket(out, true /* storeInHistory */);
-
-        if (err != OK) {
-            return err;
-        }
-    }
-
-    return OK;
-}
-
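
queueAVCBuffer() above emits three packet shapes: single NAL unit, aggregates with a type-24 header and 16-bit NAL sizes, and type-28 fragments with start/end bits. Calling these STAP-A and FU-A (RFC 6184 terms) is my gloss; the byte values in the sketch come straight from the code, and the sample NAL header is mine.

#include <cstdint>
#include <cstdio>

int main() {
    uint8_t nalHeader = 0x65;                   // example: IDR slice, NRI = 3
    uint8_t nri = (nalHeader >> 5) & 3;
    uint8_t nalType = nalHeader & 0x1f;

    uint8_t stapA = 24;                         // aggregation header byte
    uint8_t fuIndicator = (nri << 5) | 28;      // fragmentation indicator
    uint8_t fuHeaderFirst = nalType | 0x80;     // S bit on the first fragment
    uint8_t fuHeaderLast  = nalType | 0x40;     // E bit on the last fragment

    printf("STAP-A %02x, FU-A %02x %02x ... %02x %02x\n",
           stapA, fuIndicator, fuHeaderFirst, fuIndicator, fuHeaderLast);
    return 0;
}
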
-status_t RTPSender::sendRTPPacket(
-        const sp<ABuffer> &buffer, bool storeInHistory,
-        bool timeValid, int64_t timeUs) {
-    CHECK(mRTPConnected);
-
-    status_t err = mNetSession->sendRequest(
-            mRTPSessionID, buffer->data(), buffer->size(),
-            timeValid, timeUs);
-
-    if (err != OK) {
-        return err;
-    }
-
-    mLastNTPTime = GetNowNTP();
-    mLastRTPTime = U32_AT(buffer->data() + 4);
-
-    ++mNumRTPSent;
-    mNumRTPOctetsSent += buffer->size() - 12;
-
-    if (storeInHistory) {
-        if (mHistorySize == kMaxHistorySize) {
-            mHistory.erase(mHistory.begin());
-        } else {
-            ++mHistorySize;
-        }
-        mHistory.push_back(buffer);
-    }
-
-    return OK;
-}
-
-// static
-uint64_t RTPSender::GetNowNTP() {
-    struct timeval tv;
-    gettimeofday(&tv, NULL /* timezone */);
-
-    uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec;
-
-    nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
-
-    uint64_t hi = nowUs / 1000000ll;
-    uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll;
-
-    return (hi << 32) | lo;
-}
-
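
GetNowNTP() above converts a Unix-epoch microsecond count into a 64-bit NTP timestamp by adding the 1900-to-1970 offset (70 years including 17 leap days) and splitting into 32.32 fixed point. The same arithmetic for a fixed input instead of gettimeofday():

#include <cstdint>
#include <cstdio>

int main() {
    uint64_t unixUs = 0;  // 1970-01-01T00:00:00Z
    uint64_t ntpUs = unixUs + ((70ull * 365 + 17) * 24) * 60 * 60 * 1000000ull;
    uint64_t hi = ntpUs / 1000000ull;                                // whole seconds
    uint64_t lo = ((1ull << 32) * (ntpUs % 1000000ull)) / 1000000ull;  // fraction
    uint64_t ntp = (hi << 32) | lo;
    printf("NTP seconds: %llu (= 2208988800, the 1900..1970 offset)\n",
           (unsigned long long)hi);
    printf("64-bit NTP timestamp: 0x%016llx\n", (unsigned long long)ntp);
    return 0;
}
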
-void RTPSender::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatRTPNotify:
-        case kWhatRTCPNotify:
-            onNetNotify(msg->what() == kWhatRTPNotify, msg);
-            break;
-
-        default:
-            TRESPASS();
-    }
-}
-
-void RTPSender::onNetNotify(bool isRTP, const sp<AMessage> &msg) {
-    int32_t reason;
-    CHECK(msg->findInt32("reason", &reason));
-
-    switch (reason) {
-        case ANetworkSession::kWhatError:
-        {
-            int32_t sessionID;
-            CHECK(msg->findInt32("sessionID", &sessionID));
-
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            int32_t errorOccuredDuringSend;
-            CHECK(msg->findInt32("send", &errorOccuredDuringSend));
-
-            AString detail;
-            CHECK(msg->findString("detail", &detail));
-
-            ALOGE("An error occurred during %s in session %d "
-                  "(%d, '%s' (%s)).",
-                  errorOccuredDuringSend ? "send" : "receive",
-                  sessionID,
-                  err,
-                  detail.c_str(),
-                  strerror(-err));
-
-            mNetSession->destroySession(sessionID);
-
-            if (sessionID == mRTPSessionID) {
-                mRTPSessionID = 0;
-            } else if (sessionID == mRTCPSessionID) {
-                mRTCPSessionID = 0;
-            }
-
-            if (!mRTPConnected
-                    || (mRTPMode != TRANSPORT_NONE && !mRTCPConnected)) {
-                // We haven't completed initialization, attach the error
-                // to the notification instead.
-                notifyInitDone(err);
-                break;
-            }
-
-            notifyError(err);
-            break;
-        }
-
-        case ANetworkSession::kWhatDatagram:
-        {
-            sp<ABuffer> data;
-            CHECK(msg->findBuffer("data", &data));
-
-            if (isRTP) {
-                ALOGW("Huh? Received data on RTP connection...");
-            } else {
-                onRTCPData(data);
-            }
-            break;
-        }
-
-        case ANetworkSession::kWhatConnected:
-        {
-            int32_t sessionID;
-            CHECK(msg->findInt32("sessionID", &sessionID));
-
-            if  (isRTP) {
-                CHECK_EQ(mRTPMode, TRANSPORT_TCP);
-                CHECK_EQ(sessionID, mRTPSessionID);
-                mRTPConnected = true;
-            } else {
-                CHECK_EQ(mRTCPMode, TRANSPORT_TCP);
-                CHECK_EQ(sessionID, mRTCPSessionID);
-                mRTCPConnected = true;
-            }
-
-            if (mRTPConnected
-                    && (mRTCPMode == TRANSPORT_NONE || mRTCPConnected)) {
-                notifyInitDone(OK);
-            }
-            break;
-        }
-
-        case ANetworkSession::kWhatNetworkStall:
-        {
-            size_t numBytesQueued;
-            CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
-
-            notifyNetworkStall(numBytesQueued);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-status_t RTPSender::onRTCPData(const sp<ABuffer> &buffer) {
-    const uint8_t *data = buffer->data();
-    size_t size = buffer->size();
-
-    while (size > 0) {
-        if (size < 8) {
-            // Too short to be a valid RTCP header
-            return ERROR_MALFORMED;
-        }
-
-        if ((data[0] >> 6) != 2) {
-            // Unsupported version.
-            return ERROR_UNSUPPORTED;
-        }
-
-        if (data[0] & 0x20) {
-            // Padding present.
-
-            size_t paddingLength = data[size - 1];
-
-            if (paddingLength + 12 > size) {
-                // If we removed this much padding we'd end up with something
-                // that's too short to be a valid RTCP header.
-                return ERROR_MALFORMED;
-            }
-
-            size -= paddingLength;
-        }
-
-        size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4;
-
-        if (size < headerLength) {
-            // Only received a partial packet?
-            return ERROR_MALFORMED;
-        }
-
-        switch (data[1]) {
-            case 200:
-            case 201:  // RR
-                parseReceiverReport(data, headerLength);
-                break;
-
-            case 202:  // SDES
-            case 203:
-                break;
-
-            case 204:  // APP
-                parseAPP(data, headerLength);
-                break;
-
-            case 205:  // TSFB (transport layer specific feedback)
-                parseTSFB(data, headerLength);
-                break;
-
-            case 206:  // PSFB (payload specific feedback)
-                // hexdump(data, headerLength);
-                break;
-
-            default:
-            {
-                ALOGW("Unknown RTCP packet type %u of size %zu",
-                        (unsigned)data[1], headerLength);
-                break;
-            }
-        }
-
-        data += headerLength;
-        size -= headerLength;
-    }
-
-    return OK;
-}
-
-status_t RTPSender::parseReceiverReport(
-        const uint8_t *data, size_t /* size */) {
-    float fractionLost = data[12] / 256.0f;
-
-    ALOGI("lost %.2f %% of packets during report interval.",
-          100.0f * fractionLost);
-
-    return OK;
-}
-
-status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) {
-    if ((data[0] & 0x1f) != 1) {
-        return ERROR_UNSUPPORTED;  // We only support NACK for now.
-    }
-
-    uint32_t srcId = U32_AT(&data[8]);
-    if (srcId != kSourceID) {
-        return ERROR_MALFORMED;
-    }
-
-    for (size_t i = 12; i < size; i += 4) {
-        uint16_t seqNo = U16_AT(&data[i]);
-        uint16_t blp = U16_AT(&data[i + 2]);
-
-        List<sp<ABuffer> >::iterator it = mHistory.begin();
-        bool foundSeqNo = false;
-        while (it != mHistory.end()) {
-            const sp<ABuffer> &buffer = *it;
-
-            uint16_t bufferSeqNo = buffer->int32Data() & 0xffff;
-
-            bool retransmit = false;
-            if (bufferSeqNo == seqNo) {
-                retransmit = true;
-            } else if (blp != 0) {
-                for (size_t j = 0; j < 16; ++j) {
-                    if ((blp & (1 << j))
-                        && (bufferSeqNo == ((seqNo + j + 1) & 0xffff))) {
-                        blp &= ~(1 << j);
-                        retransmit = true;
-                    }
-                }
-            }
-
-            if (retransmit) {
-                ALOGV("retransmitting seqNo %d", bufferSeqNo);
-
-                CHECK_EQ((status_t)OK,
-                         sendRTPPacket(buffer, false /* storeInHistory */));
-
-                if (bufferSeqNo == seqNo) {
-                    foundSeqNo = true;
-                }
-
-                if (foundSeqNo && blp == 0) {
-                    break;
-                }
-            }
-
-            ++it;
-        }
-
-        if (!foundSeqNo || blp != 0) {
-            ALOGI("Some sequence numbers were no longer available for "
-                  "retransmission (seqNo = %d, foundSeqNo = %d, blp = 0x%04x)",
-                  seqNo, foundSeqNo, blp);
-
-            if (!mHistory.empty()) {
-                int32_t earliest = (*mHistory.begin())->int32Data() & 0xffff;
-                int32_t latest = (*--mHistory.end())->int32Data() & 0xffff;
-
-                ALOGI("have seq numbers from %d - %d", earliest, latest);
-            }
-        }
-    }
-
-    return OK;
-}
-
-status_t RTPSender::parseAPP(const uint8_t *data, size_t size) {
-    static const size_t late_offset = 8;
-    static const char late_string[] = "late";
-    static const size_t avgLatencyUs_offset = late_offset + sizeof(late_string) - 1;
-    static const size_t maxLatencyUs_offset = avgLatencyUs_offset + sizeof(int64_t);
-
-    if ((size >= (maxLatencyUs_offset + sizeof(int64_t)))
-            && !memcmp(late_string, &data[late_offset], sizeof(late_string) - 1)) {
-        int64_t avgLatencyUs = (int64_t)U64_AT(&data[avgLatencyUs_offset]);
-        int64_t maxLatencyUs = (int64_t)U64_AT(&data[maxLatencyUs_offset]);
-
-        sp<AMessage> notify = mNotify->dup();
-        notify->setInt32("what", kWhatInformSender);
-        notify->setInt64("avgLatencyUs", avgLatencyUs);
-        notify->setInt64("maxLatencyUs", maxLatencyUs);
-        notify->post();
-    }
-
-    return OK;
-}
-
-void RTPSender::notifyInitDone(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatInitDone);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void RTPSender::notifyError(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void RTPSender::notifyNetworkStall(size_t numBytesQueued) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatNetworkStall);
-    notify->setSize("numBytesQueued", numBytesQueued);
-    notify->post();
-}
-
-}  // namespace android
-
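The NACK handling in RTPSender::parseTSFB() above treats each feedback entry as a packet ID plus a 16-bit bitmask of following lost packets (BLP), where a set bit j means sequence number PID + j + 1 was also reported lost. A minimal standalone sketch of that expansion, using a hypothetical helper name and plain std::vector rather than the in-tree types:

    #include <cstdint>
    #include <vector>

    // Expand one generic-NACK entry into the lost RTP sequence numbers,
    // mirroring the bit test used in parseTSFB() above.
    static std::vector<uint16_t> expandNack(uint16_t pid, uint16_t blp) {
        std::vector<uint16_t> lost;
        lost.push_back(pid);
        for (size_t i = 0; i < 16; ++i) {
            if (blp & (1 << i)) {
                lost.push_back((pid + i + 1) & 0xffff);  // wraps modulo 2^16
            }
        }
        return lost;
    }

    // e.g. expandNack(1000, 0x0005) -> {1000, 1001, 1003}
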
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h
deleted file mode 100644
index bedfd01..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPSender.h
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_SENDER_H_
-
-#define RTP_SENDER_H_
-
-#include "RTPBase.h"
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-
-// An object of this class facilitates sending of media data over an RTP
-// channel. The channel is established over a UDP or TCP connection depending
-// on which "TransportMode" was chosen. In addition, different RTP packetization
-// schemes are supported, such as "Transport Stream Packets over RTP" or
-// "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)".
-struct RTPSender : public RTPBase, public AHandler {
-    enum {
-        kWhatInitDone,
-        kWhatError,
-        kWhatNetworkStall,
-        kWhatInformSender,
-    };
-    RTPSender(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify);
-
-    status_t initAsync(
-              const char *remoteHost,
-              int32_t remoteRTPPort,
-              TransportMode rtpMode,
-              int32_t remoteRTCPPort,
-              TransportMode rtcpMode,
-              int32_t *outLocalRTPPort);
-
-    status_t queueBuffer(
-            const sp<ABuffer> &buffer,
-            uint8_t packetType,
-            PacketizationMode mode);
-
-protected:
-    virtual ~RTPSender();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatRTPNotify,
-        kWhatRTCPNotify,
-    };
-
-    const unsigned int kMaxNumTSPacketsPerRTPPacket = (kMaxUDPPacketSize - 12) / 188;
-    const unsigned int kMaxHistorySize              = 1024;
-    const unsigned int kSourceID                    = 0xdeadbeef;
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-    TransportMode mRTPMode;
-    TransportMode mRTCPMode;
-    int32_t mRTPSessionID;
-    int32_t mRTCPSessionID;
-    bool mRTPConnected;
-    bool mRTCPConnected;
-
-    uint64_t mLastNTPTime;
-    uint32_t mLastRTPTime;
-    uint32_t mNumRTPSent;
-    uint32_t mNumRTPOctetsSent;
-    uint32_t mNumSRsSent;
-
-    uint32_t mRTPSeqNo;
-
-    List<sp<ABuffer> > mHistory;
-    size_t mHistorySize;
-
-    static uint64_t GetNowNTP();
-
-    status_t queueRawPacket(const sp<ABuffer> &tsPackets, uint8_t packetType);
-    status_t queueTSPackets(const sp<ABuffer> &tsPackets, uint8_t packetType);
-    status_t queueAVCBuffer(const sp<ABuffer> &accessUnit, uint8_t packetType);
-
-    status_t sendRTPPacket(
-            const sp<ABuffer> &packet, bool storeInHistory,
-            bool timeValid = false, int64_t timeUs = -1ll);
-
-    void onNetNotify(bool isRTP, const sp<AMessage> &msg);
-
-    status_t onRTCPData(const sp<ABuffer> &data);
-    status_t parseReceiverReport(const uint8_t *data, size_t size);
-    status_t parseTSFB(const uint8_t *data, size_t size);
-    status_t parseAPP(const uint8_t *data, size_t size);
-
-    void notifyInitDone(status_t err);
-    void notifyError(status_t err);
-    void notifyNetworkStall(size_t numBytesQueued);
-
-    DISALLOW_EVIL_CONSTRUCTORS(RTPSender);
-};
-
-}  // namespace android
-
-#endif  // RTP_SENDER_H_
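For scale, kMaxNumTSPacketsPerRTPPacket above reserves 12 bytes for the fixed RTP header and fills the rest of a datagram with whole 188-byte TS packets. A worked instance, assuming kMaxUDPPacketSize in RTPBase.h is 1472 (a 1500-byte Ethernet MTU minus 20 bytes of IPv4 and 8 bytes of UDP headers):

    // Assumed value; the real constant lives in RTPBase.h.
    static const unsigned kAssumedMaxUDPPacketSize = 1472;
    static const unsigned kTSPacketsPerRTPPacket =
            (kAssumedMaxUDPPacketSize - 12) / 188;        // = 7
    static const unsigned kBytesPerDatagram = 12 + 7 * 188;  // = 1328
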
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
deleted file mode 100644
index 273af18..0000000
--- a/media/libstagefright/wifi-display/source/Converter.cpp
+++ /dev/null
@@ -1,821 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Converter"
-#include <utils/Log.h>
-
-#include "Converter.h"
-
-#include "MediaPuller.h"
-#include "include/avc_utils.h"
-
-#include <cutils/properties.h>
-#include <gui/Surface.h>
-#include <media/ICrypto.h>
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaCodec.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-
-#include <arpa/inet.h>
-
-#include <OMX_Video.h>
-
-namespace android {
-
-Converter::Converter(
-        const sp<AMessage> &notify,
-        const sp<ALooper> &codecLooper,
-        const sp<AMessage> &outputFormat,
-        uint32_t flags)
-    : mNotify(notify),
-      mCodecLooper(codecLooper),
-      mOutputFormat(outputFormat),
-      mFlags(flags),
-      mIsVideo(false),
-      mIsH264(false),
-      mIsPCMAudio(false),
-      mNeedToManuallyPrependSPSPPS(false),
-      mDoMoreWorkPending(false)
-#if ENABLE_SILENCE_DETECTION
-      ,mFirstSilentFrameUs(-1ll)
-      ,mInSilentMode(false)
-#endif
-      ,mPrevVideoBitrate(-1)
-      ,mNumFramesToDrop(0)
-      ,mEncodingSuspended(false)
-    {
-    AString mime;
-    CHECK(mOutputFormat->findString("mime", &mime));
-
-    if (!strncasecmp("video/", mime.c_str(), 6)) {
-        mIsVideo = true;
-
-        mIsH264 = !strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC);
-    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime.c_str())) {
-        mIsPCMAudio = true;
-    }
-}
-
-void Converter::releaseEncoder() {
-    if (mEncoder == NULL) {
-        return;
-    }
-
-    mEncoder->release();
-    mEncoder.clear();
-
-    mInputBufferQueue.clear();
-    mEncoderInputBuffers.clear();
-    mEncoderOutputBuffers.clear();
-}
-
-Converter::~Converter() {
-    CHECK(mEncoder == NULL);
-}
-
-void Converter::shutdownAsync() {
-    ALOGV("shutdown");
-    (new AMessage(kWhatShutdown, this))->post();
-}
-
-status_t Converter::init() {
-    status_t err = initEncoder();
-
-    if (err != OK) {
-        releaseEncoder();
-    }
-
-    return err;
-}
-
-sp<IGraphicBufferProducer> Converter::getGraphicBufferProducer() {
-    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
-    return mGraphicBufferProducer;
-}
-
-size_t Converter::getInputBufferCount() const {
-    return mEncoderInputBuffers.size();
-}
-
-sp<AMessage> Converter::getOutputFormat() const {
-    return mOutputFormat;
-}
-
-bool Converter::needToManuallyPrependSPSPPS() const {
-    return mNeedToManuallyPrependSPSPPS;
-}
-
-// static
-int32_t Converter::GetInt32Property(
-        const char *propName, int32_t defaultValue) {
-    char val[PROPERTY_VALUE_MAX];
-    if (property_get(propName, val, NULL)) {
-        char *end;
-        unsigned long x = strtoul(val, &end, 10);
-
-        if (*end == '\0' && end > val && x > 0) {
-            return x;
-        }
-    }
-
-    return defaultValue;
-}
-
-status_t Converter::initEncoder() {
-    AString outputMIME;
-    CHECK(mOutputFormat->findString("mime", &outputMIME));
-
-    bool isAudio = !strncasecmp(outputMIME.c_str(), "audio/", 6);
-
-    if (!mIsPCMAudio) {
-        mEncoder = MediaCodec::CreateByType(
-                mCodecLooper, outputMIME.c_str(), true /* encoder */);
-
-        if (mEncoder == NULL) {
-            return ERROR_UNSUPPORTED;
-        }
-    }
-
-    if (mIsPCMAudio) {
-        return OK;
-    }
-
-    int32_t audioBitrate = GetInt32Property("media.wfd.audio-bitrate", 128000);
-    int32_t videoBitrate = GetInt32Property("media.wfd.video-bitrate", 5000000);
-    mPrevVideoBitrate = videoBitrate;
-
-    ALOGI("using audio bitrate of %d bps, video bitrate of %d bps",
-          audioBitrate, videoBitrate);
-
-    if (isAudio) {
-        mOutputFormat->setInt32("bitrate", audioBitrate);
-    } else {
-        mOutputFormat->setInt32("bitrate", videoBitrate);
-        mOutputFormat->setInt32("bitrate-mode", OMX_Video_ControlRateConstant);
-        mOutputFormat->setInt32("frame-rate", 30);
-        mOutputFormat->setInt32("i-frame-interval", 15);  // I-frames every 15 secs
-
-        // Configure encoder to use intra macroblock refresh mode
-        mOutputFormat->setInt32("intra-refresh-mode", OMX_VIDEO_IntraRefreshCyclic);
-
-        int width, height, mbs;
-        if (!mOutputFormat->findInt32("width", &width)
-                || !mOutputFormat->findInt32("height", &height)) {
-            return ERROR_UNSUPPORTED;
-        }
-
-        // Update macroblocks in a cyclic fashion, refreshing 10% of all MBs
-        // in the frame at a time; it takes about 10 frames to completely
-        // update a whole video frame. At a frame rate of 30 fps, that means
-        // roughly 333 ms in the best case (if the next frame is not an IDR)
-        // to recover from a lost/corrupted packet.
-        mbs = (((width + 15) / 16) * ((height + 15) / 16) * 10) / 100;
-        mOutputFormat->setInt32("intra-refresh-CIR-mbs", mbs);
-    }
-
-    ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());
-
-    mNeedToManuallyPrependSPSPPS = false;
-
-    status_t err = NO_INIT;
-
-    if (!isAudio) {
-        sp<AMessage> tmp = mOutputFormat->dup();
-        tmp->setInt32("prepend-sps-pps-to-idr-frames", 1);
-
-        err = mEncoder->configure(
-                tmp,
-                NULL /* nativeWindow */,
-                NULL /* crypto */,
-                MediaCodec::CONFIGURE_FLAG_ENCODE);
-
-        if (err == OK) {
-            // The encoder supports prepending SPS/PPS, so we don't need to
-            // emulate it.
-            mOutputFormat = tmp;
-        } else {
-            mNeedToManuallyPrependSPSPPS = true;
-
-            ALOGI("We're going to manually prepend SPS and PPS to IDR frames.");
-        }
-    }
-
-    if (err != OK) {
-        // We'll get here for audio or if we failed to configure the encoder
-        // to automatically prepend SPS/PPS in the case of video.
-
-        err = mEncoder->configure(
-                    mOutputFormat,
-                    NULL /* nativeWindow */,
-                    NULL /* crypto */,
-                    MediaCodec::CONFIGURE_FLAG_ENCODE);
-    }
-
-    if (err != OK) {
-        return err;
-    }
-
-    if (mFlags & FLAG_USE_SURFACE_INPUT) {
-        CHECK(mIsVideo);
-
-        err = mEncoder->createInputSurface(&mGraphicBufferProducer);
-
-        if (err != OK) {
-            return err;
-        }
-    }
-
-    err = mEncoder->start();
-
-    if (err != OK) {
-        return err;
-    }
-
-    err = mEncoder->getInputBuffers(&mEncoderInputBuffers);
-
-    if (err != OK) {
-        return err;
-    }
-
-    err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
-
-    if (err != OK) {
-        return err;
-    }
-
-    if (mFlags & FLAG_USE_SURFACE_INPUT) {
-        scheduleDoMoreWork();
-    }
-
-    return OK;
-}
-
-void Converter::notifyError(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-// static
-bool Converter::IsSilence(const sp<ABuffer> &accessUnit) {
-    const uint8_t *ptr = accessUnit->data();
-    const uint8_t *end = ptr + accessUnit->size();
-    while (ptr < end) {
-        if (*ptr != 0) {
-            return false;
-        }
-        ++ptr;
-    }
-
-    return true;
-}
-
-void Converter::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatMediaPullerNotify:
-        {
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            if (!mIsPCMAudio && mEncoder == NULL) {
-                ALOGV("got msg '%s' after encoder shutdown.",
-                      msg->debugString().c_str());
-
-                if (what == MediaPuller::kWhatAccessUnit) {
-                    sp<ABuffer> accessUnit;
-                    CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-                    accessUnit->setMediaBufferBase(NULL);
-                }
-                break;
-            }
-
-            if (what == MediaPuller::kWhatEOS) {
-                mInputBufferQueue.push_back(NULL);
-
-                feedEncoderInputBuffers();
-
-                scheduleDoMoreWork();
-            } else {
-                CHECK_EQ(what, MediaPuller::kWhatAccessUnit);
-
-                sp<ABuffer> accessUnit;
-                CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-                if (mNumFramesToDrop > 0 || mEncodingSuspended) {
-                    if (mNumFramesToDrop > 0) {
-                        --mNumFramesToDrop;
-                        ALOGI("dropping frame.");
-                    }
-
-                    accessUnit->setMediaBufferBase(NULL);
-                    break;
-                }
-
-#if 0
-                MediaBuffer *mbuf =
-                    (MediaBuffer *)(accessUnit->getMediaBufferBase());
-                if (mbuf != NULL) {
-                    ALOGI("queueing mbuf %p", mbuf);
-                    mbuf->release();
-                }
-#endif
-
-#if ENABLE_SILENCE_DETECTION
-                if (!mIsVideo) {
-                    if (IsSilence(accessUnit)) {
-                        if (mInSilentMode) {
-                            break;
-                        }
-
-                        int64_t nowUs = ALooper::GetNowUs();
-
-                        if (mFirstSilentFrameUs < 0ll) {
-                            mFirstSilentFrameUs = nowUs;
-                        } else if (nowUs >= mFirstSilentFrameUs + 10000000ll) {
-                            mInSilentMode = true;
-                            ALOGI("audio in silent mode now.");
-                            break;
-                        }
-                    } else {
-                        if (mInSilentMode) {
-                            ALOGI("audio no longer in silent mode.");
-                        }
-                        mInSilentMode = false;
-                        mFirstSilentFrameUs = -1ll;
-                    }
-                }
-#endif
-
-                mInputBufferQueue.push_back(accessUnit);
-
-                feedEncoderInputBuffers();
-
-                scheduleDoMoreWork();
-            }
-            break;
-        }
-
-        case kWhatEncoderActivity:
-        {
-#if 0
-            int64_t whenUs;
-            if (msg->findInt64("whenUs", &whenUs)) {
-                int64_t nowUs = ALooper::GetNowUs();
-                ALOGI("[%s] kWhatEncoderActivity after %lld us",
-                      mIsVideo ? "video" : "audio", nowUs - whenUs);
-            }
-#endif
-
-            mDoMoreWorkPending = false;
-
-            if (mEncoder == NULL) {
-                break;
-            }
-
-            status_t err = doMoreWork();
-
-            if (err != OK) {
-                notifyError(err);
-            } else {
-                scheduleDoMoreWork();
-            }
-            break;
-        }
-
-        case kWhatRequestIDRFrame:
-        {
-            if (mEncoder == NULL) {
-                break;
-            }
-
-            if (mIsVideo) {
-                ALOGV("requesting IDR frame");
-                mEncoder->requestIDRFrame();
-            }
-            break;
-        }
-
-        case kWhatShutdown:
-        {
-            ALOGI("shutting down %s encoder", mIsVideo ? "video" : "audio");
-
-            releaseEncoder();
-
-            AString mime;
-            CHECK(mOutputFormat->findString("mime", &mime));
-            ALOGI("encoder (%s) shut down.", mime.c_str());
-
-            sp<AMessage> notify = mNotify->dup();
-            notify->setInt32("what", kWhatShutdownCompleted);
-            notify->post();
-            break;
-        }
-
-        case kWhatDropAFrame:
-        {
-            ++mNumFramesToDrop;
-            break;
-        }
-
-        case kWhatReleaseOutputBuffer:
-        {
-            if (mEncoder != NULL) {
-                int32_t bufferIndex;
-                CHECK(msg->findInt32("bufferIndex", &bufferIndex));
-                CHECK((size_t)bufferIndex < mEncoderOutputBuffers.size());
-                mEncoder->releaseOutputBuffer(bufferIndex);
-            }
-            break;
-        }
-
-        case kWhatSuspendEncoding:
-        {
-            int32_t suspend;
-            CHECK(msg->findInt32("suspend", &suspend));
-
-            mEncodingSuspended = suspend;
-
-            if (mFlags & FLAG_USE_SURFACE_INPUT) {
-                sp<AMessage> params = new AMessage;
-                params->setInt32("drop-input-frames", suspend);
-                mEncoder->setParameters(params);
-            }
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void Converter::scheduleDoMoreWork() {
-    if (mIsPCMAudio) {
-        // There's no encoder involved in this case.
-        return;
-    }
-
-    if (mDoMoreWorkPending) {
-        return;
-    }
-
-    mDoMoreWorkPending = true;
-
-#if 1
-    if (mEncoderActivityNotify == NULL) {
-        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, this);
-    }
-    mEncoder->requestActivityNotification(mEncoderActivityNotify->dup());
-#else
-    sp<AMessage> notify = new AMessage(kWhatEncoderActivity, this);
-    notify->setInt64("whenUs", ALooper::GetNowUs());
-    mEncoder->requestActivityNotification(notify);
-#endif
-}
-
-status_t Converter::feedRawAudioInputBuffers() {
-    // Split incoming PCM audio into buffers of 6 AUs of 80 audio frames each
-    // and add a 4-byte header according to the Wi-Fi Display spec.
-
-    while (!mInputBufferQueue.empty()) {
-        sp<ABuffer> buffer = *mInputBufferQueue.begin();
-        mInputBufferQueue.erase(mInputBufferQueue.begin());
-
-        int16_t *ptr = (int16_t *)buffer->data();
-        int16_t *stop = (int16_t *)(buffer->data() + buffer->size());
-        while (ptr < stop) {
-            *ptr = htons(*ptr);
-            ++ptr;
-        }
-
-        static const size_t kFrameSize = 2 * sizeof(int16_t);  // stereo
-        static const size_t kFramesPerAU = 80;
-        static const size_t kNumAUsPerPESPacket = 6;
-
-        if (mPartialAudioAU != NULL) {
-            size_t bytesMissingForFullAU =
-                kNumAUsPerPESPacket * kFramesPerAU * kFrameSize
-                - mPartialAudioAU->size() + 4;
-
-            size_t copy = buffer->size();
-            if (copy > bytesMissingForFullAU) {
-                copy = bytesMissingForFullAU;
-            }
-
-            memcpy(mPartialAudioAU->data() + mPartialAudioAU->size(),
-                   buffer->data(),
-                   copy);
-
-            mPartialAudioAU->setRange(0, mPartialAudioAU->size() + copy);
-
-            buffer->setRange(buffer->offset() + copy, buffer->size() - copy);
-
-            int64_t timeUs;
-            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
-
-            int64_t copyUs = (int64_t)((copy / kFrameSize) * 1E6 / 48000.0);
-            timeUs += copyUs;
-            buffer->meta()->setInt64("timeUs", timeUs);
-
-            if (bytesMissingForFullAU == copy) {
-                sp<AMessage> notify = mNotify->dup();
-                notify->setInt32("what", kWhatAccessUnit);
-                notify->setBuffer("accessUnit", mPartialAudioAU);
-                notify->post();
-
-                mPartialAudioAU.clear();
-            }
-        }
-
-        while (buffer->size() > 0) {
-            sp<ABuffer> partialAudioAU =
-                new ABuffer(
-                        4
-                        + kNumAUsPerPESPacket * kFrameSize * kFramesPerAU);
-
-            uint8_t *ptr = partialAudioAU->data();
-            ptr[0] = 0xa0;  // 10100000b
-            ptr[1] = kNumAUsPerPESPacket;
-            ptr[2] = 0;  // reserved, audio _emphasis_flag = 0
-
-            static const unsigned kQuantizationWordLength = 0;  // 16-bit
-            static const unsigned kAudioSamplingFrequency = 2;  // 48 kHz
-            static const unsigned kNumberOfAudioChannels = 1;  // stereo
-
-            ptr[3] = (kQuantizationWordLength << 6)
-                    | (kAudioSamplingFrequency << 3)
-                    | kNumberOfAudioChannels;
-
-            size_t copy = buffer->size();
-            if (copy > partialAudioAU->size() - 4) {
-                copy = partialAudioAU->size() - 4;
-            }
-
-            memcpy(&ptr[4], buffer->data(), copy);
-
-            partialAudioAU->setRange(0, 4 + copy);
-            buffer->setRange(buffer->offset() + copy, buffer->size() - copy);
-
-            int64_t timeUs;
-            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
-
-            partialAudioAU->meta()->setInt64("timeUs", timeUs);
-
-            int64_t copyUs = (int64_t)((copy / kFrameSize) * 1E6 / 48000.0);
-            timeUs += copyUs;
-            buffer->meta()->setInt64("timeUs", timeUs);
-
-            if (copy == partialAudioAU->capacity() - 4) {
-                sp<AMessage> notify = mNotify->dup();
-                notify->setInt32("what", kWhatAccessUnit);
-                notify->setBuffer("accessUnit", partialAudioAU);
-                notify->post();
-
-                partialAudioAU.clear();
-                continue;
-            }
-
-            mPartialAudioAU = partialAudioAU;
-        }
-    }
-
-    return OK;
-}
-
-status_t Converter::feedEncoderInputBuffers() {
-    if (mIsPCMAudio) {
-        return feedRawAudioInputBuffers();
-    }
-
-    while (!mInputBufferQueue.empty()
-            && !mAvailEncoderInputIndices.empty()) {
-        sp<ABuffer> buffer = *mInputBufferQueue.begin();
-        mInputBufferQueue.erase(mInputBufferQueue.begin());
-
-        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
-        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
-
-        int64_t timeUs = 0ll;
-        uint32_t flags = 0;
-
-        if (buffer != NULL) {
-            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
-
-            memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(),
-                   buffer->data(),
-                   buffer->size());
-
-            MediaBuffer *mediaBuffer =
-                (MediaBuffer *)(buffer->getMediaBufferBase());
-            if (mediaBuffer != NULL) {
-                mEncoderInputBuffers.itemAt(bufferIndex)->setMediaBufferBase(
-                        mediaBuffer);
-
-                buffer->setMediaBufferBase(NULL);
-            }
-        } else {
-            flags = MediaCodec::BUFFER_FLAG_EOS;
-        }
-
-        status_t err = mEncoder->queueInputBuffer(
-                bufferIndex, 0, (buffer == NULL) ? 0 : buffer->size(),
-                timeUs, flags);
-
-        if (err != OK) {
-            return err;
-        }
-    }
-
-    return OK;
-}
-
-sp<ABuffer> Converter::prependCSD(const sp<ABuffer> &accessUnit) const {
-    CHECK(mCSD0 != NULL);
-
-    sp<ABuffer> dup = new ABuffer(accessUnit->size() + mCSD0->size());
-    memcpy(dup->data(), mCSD0->data(), mCSD0->size());
-    memcpy(dup->data() + mCSD0->size(), accessUnit->data(), accessUnit->size());
-
-    int64_t timeUs;
-    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-    dup->meta()->setInt64("timeUs", timeUs);
-
-    return dup;
-}
-
-status_t Converter::doMoreWork() {
-    status_t err;
-
-    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
-        for (;;) {
-            size_t bufferIndex;
-            err = mEncoder->dequeueInputBuffer(&bufferIndex);
-
-            if (err != OK) {
-                break;
-            }
-
-            mAvailEncoderInputIndices.push_back(bufferIndex);
-        }
-
-        feedEncoderInputBuffers();
-    }
-
-    for (;;) {
-        size_t bufferIndex;
-        size_t offset;
-        size_t size;
-        int64_t timeUs;
-        uint32_t flags;
-        native_handle_t* handle = NULL;
-        err = mEncoder->dequeueOutputBuffer(
-                &bufferIndex, &offset, &size, &timeUs, &flags);
-
-        if (err != OK) {
-            if (err == INFO_FORMAT_CHANGED) {
-                continue;
-            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
-                mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
-                continue;
-            }
-
-            if (err == -EAGAIN) {
-                err = OK;
-            }
-            break;
-        }
-
-        if (flags & MediaCodec::BUFFER_FLAG_EOS) {
-            sp<AMessage> notify = mNotify->dup();
-            notify->setInt32("what", kWhatEOS);
-            notify->post();
-        } else {
-#if 0
-            if (mIsVideo) {
-                int32_t videoBitrate = GetInt32Property(
-                        "media.wfd.video-bitrate", 5000000);
-
-                setVideoBitrate(videoBitrate);
-            }
-#endif
-
-            sp<ABuffer> buffer;
-            sp<MediaCodecBuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);
-
-            if (outbuf->meta()->findPointer("handle", (void**)&handle) &&
-                    handle != NULL) {
-                int32_t rangeLength, rangeOffset;
-                CHECK(outbuf->meta()->findInt32("rangeOffset", &rangeOffset));
-                CHECK(outbuf->meta()->findInt32("rangeLength", &rangeLength));
-                outbuf->meta()->setPointer("handle", NULL);
-
-                // MediaSender will post the following message when HDCP
-                // is done, to release the output buffer back to encoder.
-                sp<AMessage> notify(new AMessage(kWhatReleaseOutputBuffer, this));
-                notify->setInt32("bufferIndex", bufferIndex);
-
-                buffer = new ABuffer(
-                        rangeLength > (int32_t)size ? rangeLength : size);
-                buffer->meta()->setPointer("handle", handle);
-                buffer->meta()->setInt32("rangeOffset", rangeOffset);
-                buffer->meta()->setInt32("rangeLength", rangeLength);
-                buffer->meta()->setMessage("notify", notify);
-            } else {
-                buffer = new ABuffer(size);
-            }
-
-            buffer->meta()->setInt64("timeUs", timeUs);
-
-            ALOGV("[%s] time %lld us (%.2f secs)",
-                    mIsVideo ? "video" : "audio", (long long)timeUs, timeUs / 1E6);
-
-            memcpy(buffer->data(), outbuf->base() + offset, size);
-
-            if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
-                if (!handle) {
-                    if (mIsH264) {
-                        mCSD0 = buffer;
-                    }
-                    mOutputFormat->setBuffer("csd-0", buffer);
-                }
-            } else {
-                if (mNeedToManuallyPrependSPSPPS
-                        && mIsH264
-                        && (mFlags & FLAG_PREPEND_CSD_IF_NECESSARY)
-                        && IsIDR(buffer)) {
-                    buffer = prependCSD(buffer);
-                }
-
-                sp<AMessage> notify = mNotify->dup();
-                notify->setInt32("what", kWhatAccessUnit);
-                notify->setBuffer("accessUnit", buffer);
-                notify->post();
-            }
-        }
-
-        if (!handle) {
-            mEncoder->releaseOutputBuffer(bufferIndex);
-        }
-
-        if (flags & MediaCodec::BUFFER_FLAG_EOS) {
-            break;
-        }
-    }
-
-    return err;
-}
-
-void Converter::requestIDRFrame() {
-    (new AMessage(kWhatRequestIDRFrame, this))->post();
-}
-
-void Converter::dropAFrame() {
-    // Unsupported in surface input mode.
-    CHECK(!(mFlags & FLAG_USE_SURFACE_INPUT));
-
-    (new AMessage(kWhatDropAFrame, this))->post();
-}
-
-void Converter::suspendEncoding(bool suspend) {
-    sp<AMessage> msg = new AMessage(kWhatSuspendEncoding, this);
-    msg->setInt32("suspend", suspend);
-    msg->post();
-}
-
-int32_t Converter::getVideoBitrate() const {
-    return mPrevVideoBitrate;
-}
-
-void Converter::setVideoBitrate(int32_t bitRate) {
-    if (mIsVideo && mEncoder != NULL && bitRate != mPrevVideoBitrate) {
-        sp<AMessage> params = new AMessage;
-        params->setInt32("video-bitrate", bitRate);
-
-        mEncoder->setParameters(params);
-
-        mPrevVideoBitrate = bitRate;
-    }
-}
-
-}  // namespace android
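The intra-refresh setup in Converter::initEncoder() above sizes the cyclic refresh wave at 10% of the frame's 16x16 macroblocks. A small worked example with an illustrative 1280x720 stream (resolution chosen for the example, not taken from the code):

    int32_t width = 1280, height = 720;                             // illustrative
    int32_t totalMbs = ((width + 15) / 16) * ((height + 15) / 16);  // 80 * 45 = 3600
    int32_t cirMbs   = (totalMbs * 10) / 100;                       // 360 MBs refreshed per frame
    // The whole frame is therefore refreshed over ~10 frames,
    // i.e. roughly 333 ms at 30 fps, matching the comment in the code above.
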
diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h
deleted file mode 100644
index ad95ab5..0000000
--- a/media/libstagefright/wifi-display/source/Converter.h
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CONVERTER_H_
-
-#define CONVERTER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-class IGraphicBufferProducer;
-struct MediaCodec;
-class MediaCodecBuffer;
-
-#define ENABLE_SILENCE_DETECTION        0
-
-// Utility class that receives media access units and converts them into
-// media access units of a different format.
-// Right now this'll convert raw video into H.264 and raw audio into AAC.
-struct Converter : public AHandler {
-    enum {
-        kWhatAccessUnit,
-        kWhatEOS,
-        kWhatError,
-        kWhatShutdownCompleted,
-    };
-
-    enum FlagBits {
-        FLAG_USE_SURFACE_INPUT          = 1,
-        FLAG_PREPEND_CSD_IF_NECESSARY   = 2,
-    };
-    Converter(const sp<AMessage> &notify,
-              const sp<ALooper> &codecLooper,
-              const sp<AMessage> &outputFormat,
-              uint32_t flags = 0);
-
-    status_t init();
-
-    sp<IGraphicBufferProducer> getGraphicBufferProducer();
-
-    size_t getInputBufferCount() const;
-
-    sp<AMessage> getOutputFormat() const;
-    bool needToManuallyPrependSPSPPS() const;
-
-    void feedAccessUnit(const sp<ABuffer> &accessUnit);
-    void signalEOS();
-
-    void requestIDRFrame();
-
-    void dropAFrame();
-    void suspendEncoding(bool suspend);
-
-    void shutdownAsync();
-
-    int32_t getVideoBitrate() const;
-    void setVideoBitrate(int32_t bitrate);
-
-    static int32_t GetInt32Property(const char *propName, int32_t defaultValue);
-
-    enum {
-        // MUST not conflict with private enums below.
-        kWhatMediaPullerNotify = 'pulN',
-    };
-
-protected:
-    virtual ~Converter();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatDoMoreWork,
-        kWhatRequestIDRFrame,
-        kWhatSuspendEncoding,
-        kWhatShutdown,
-        kWhatEncoderActivity,
-        kWhatDropAFrame,
-        kWhatReleaseOutputBuffer,
-    };
-
-    sp<AMessage> mNotify;
-    sp<ALooper> mCodecLooper;
-    sp<AMessage> mOutputFormat;
-    uint32_t mFlags;
-    bool mIsVideo;
-    bool mIsH264;
-    bool mIsPCMAudio;
-    bool mNeedToManuallyPrependSPSPPS;
-
-    sp<MediaCodec> mEncoder;
-    sp<AMessage> mEncoderActivityNotify;
-
-    sp<IGraphicBufferProducer> mGraphicBufferProducer;
-
-    Vector<sp<MediaCodecBuffer> > mEncoderInputBuffers;
-    Vector<sp<MediaCodecBuffer> > mEncoderOutputBuffers;
-
-    List<size_t> mAvailEncoderInputIndices;
-
-    List<sp<ABuffer> > mInputBufferQueue;
-
-    sp<ABuffer> mCSD0;
-
-    bool mDoMoreWorkPending;
-
-#if ENABLE_SILENCE_DETECTION
-    int64_t mFirstSilentFrameUs;
-    bool mInSilentMode;
-#endif
-
-    sp<ABuffer> mPartialAudioAU;
-
-    int32_t mPrevVideoBitrate;
-
-    int32_t mNumFramesToDrop;
-    bool mEncodingSuspended;
-
-    status_t initEncoder();
-    void releaseEncoder();
-
-    status_t feedEncoderInputBuffers();
-
-    void scheduleDoMoreWork();
-    status_t doMoreWork();
-
-    void notifyError(status_t err);
-
-    // Packetizes raw PCM audio data available in mInputBufferQueue
-    // into a format suitable for transport stream inclusion and
-    // notifies the observer.
-    status_t feedRawAudioInputBuffers();
-
-    static bool IsSilence(const sp<ABuffer> &accessUnit);
-
-    sp<ABuffer> prependCSD(const sp<ABuffer> &accessUnit) const;
-
-    DISALLOW_EVIL_CONSTRUCTORS(Converter);
-};
-
-}  // namespace android
-
-#endif  // CONVERTER_H_
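Converter::feedRawAudioInputBuffers() above prefixes each PCM access unit with a 4-byte LPCM header before it reaches the packetizer. A minimal sketch of that packing, reusing the field values from the deleted code (0 = 16-bit samples, 2 = 48 kHz, 1 = stereo); the helper name is hypothetical:

    #include <cstdint>

    // Build the 4-byte LPCM header used for WFD PCM audio access units.
    static void writeLpcmHeader(uint8_t header[4], uint8_t numAUs) {
        header[0] = 0xa0;    // 10100000b, as in feedRawAudioInputBuffers()
        header[1] = numAUs;  // number of audio access units in this PES packet
        header[2] = 0;       // reserved, audio_emphasis_flag = 0
        header[3] = (0 /* 16-bit */ << 6)
                  | (2 /* 48 kHz */ << 3)
                  | 1 /* stereo */;
    }
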
diff --git a/media/libstagefright/wifi-display/source/MediaPuller.cpp b/media/libstagefright/wifi-display/source/MediaPuller.cpp
deleted file mode 100644
index ce07a4e..0000000
--- a/media/libstagefright/wifi-display/source/MediaPuller.cpp
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaPuller"
-#include <utils/Log.h>
-
-#include "MediaPuller.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-
-namespace android {
-
-MediaPuller::MediaPuller(
-        const sp<MediaSource> &source, const sp<AMessage> &notify)
-    : mSource(source),
-      mNotify(notify),
-      mPullGeneration(0),
-      mIsAudio(false),
-      mPaused(false) {
-    sp<MetaData> meta = source->getFormat();
-    const char *mime;
-    CHECK(meta->findCString(kKeyMIMEType, &mime));
-
-    mIsAudio = !strncasecmp(mime, "audio/", 6);
-}
-
-MediaPuller::~MediaPuller() {
-}
-
-status_t MediaPuller::postSynchronouslyAndReturnError(
-        const sp<AMessage> &msg) {
-    sp<AMessage> response;
-    status_t err = msg->postAndAwaitResponse(&response);
-
-    if (err != OK) {
-        return err;
-    }
-
-    if (!response->findInt32("err", &err)) {
-        err = OK;
-    }
-
-    return err;
-}
-
-status_t MediaPuller::start() {
-    return postSynchronouslyAndReturnError(new AMessage(kWhatStart, this));
-}
-
-void MediaPuller::stopAsync(const sp<AMessage> &notify) {
-    sp<AMessage> msg = new AMessage(kWhatStop, this);
-    msg->setMessage("notify", notify);
-    msg->post();
-}
-
-void MediaPuller::pause() {
-    (new AMessage(kWhatPause, this))->post();
-}
-
-void MediaPuller::resume() {
-    (new AMessage(kWhatResume, this))->post();
-}
-
-void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatStart:
-        {
-            status_t err;
-            if (mIsAudio) {
-                // This atrocity causes AudioSource to deliver absolute
-                // systemTime() based timestamps (off by 1 us).
-                sp<MetaData> params = new MetaData;
-                params->setInt64(kKeyTime, 1ll);
-                err = mSource->start(params.get());
-            } else {
-                err = mSource->start();
-                if (err != OK) {
-                    ALOGE("source failed to start w/ err %d", err);
-                }
-            }
-
-            if (err == OK) {
-                schedulePull();
-            }
-
-            sp<AMessage> response = new AMessage;
-            response->setInt32("err", err);
-
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-            response->postReply(replyID);
-            break;
-        }
-
-        case kWhatStop:
-        {
-            sp<MetaData> meta = mSource->getFormat();
-            const char *tmp;
-            CHECK(meta->findCString(kKeyMIMEType, &tmp));
-            AString mime = tmp;
-
-            ALOGI("MediaPuller(%s) stopping.", mime.c_str());
-            mSource->stop();
-            ALOGI("MediaPuller(%s) stopped.", mime.c_str());
-            ++mPullGeneration;
-
-            sp<AMessage> notify;
-            CHECK(msg->findMessage("notify", &notify));
-            notify->post();
-            break;
-        }
-
-        case kWhatPull:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mPullGeneration) {
-                break;
-            }
-
-            MediaBuffer *mbuf;
-            status_t err = mSource->read(&mbuf);
-
-            if (mPaused) {
-                if (err == OK) {
-                    mbuf->release();
-                    mbuf = NULL;
-                }
-
-                schedulePull();
-                break;
-            }
-
-            if (err != OK) {
-                if (err == ERROR_END_OF_STREAM) {
-                    ALOGI("stream ended.");
-                } else {
-                    ALOGE("error %d reading stream.", err);
-                }
-
-                sp<AMessage> notify = mNotify->dup();
-                notify->setInt32("what", kWhatEOS);
-                notify->post();
-            } else {
-                int64_t timeUs;
-                CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
-
-                sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length());
-
-                memcpy(accessUnit->data(),
-                       (const uint8_t *)mbuf->data() + mbuf->range_offset(),
-                       mbuf->range_length());
-
-                accessUnit->meta()->setInt64("timeUs", timeUs);
-
-                if (mIsAudio) {
-                    mbuf->release();
-                    mbuf = NULL;
-                } else {
-                    // video encoder will release MediaBuffer when done
-                    // with underlying data.
-                    accessUnit->setMediaBufferBase(mbuf);
-                }
-
-                sp<AMessage> notify = mNotify->dup();
-
-                notify->setInt32("what", kWhatAccessUnit);
-                notify->setBuffer("accessUnit", accessUnit);
-                notify->post();
-
-                if (mbuf != NULL) {
-                    ALOGV("posted mbuf %p", mbuf);
-                }
-
-                schedulePull();
-            }
-            break;
-        }
-
-        case kWhatPause:
-        {
-            mPaused = true;
-            break;
-        }
-
-        case kWhatResume:
-        {
-            mPaused = false;
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void MediaPuller::schedulePull() {
-    sp<AMessage> msg = new AMessage(kWhatPull, this);
-    msg->setInt32("generation", mPullGeneration);
-    msg->post();
-}
-
-}  // namespace android
-
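MediaPuller above keeps its pull loop cancelable with a generation counter: every kWhatPull message carries the generation it was posted with, and kWhatStop bumps mPullGeneration so pulls already in flight are ignored when they arrive. A stripped-down, hypothetical sketch of that idiom outside the AHandler machinery:

    #include <cstdint>

    struct PullLoop {
        int32_t mPullGeneration = 0;

        // What schedulePull() stamps into the kWhatPull message.
        int32_t currentGeneration() const { return mPullGeneration; }

        // What the kWhatStop handler does before acknowledging the stop.
        void stop() { ++mPullGeneration; }

        // The first check in the kWhatPull handler.
        bool shouldHandle(int32_t generationFromMsg) const {
            return generationFromMsg == mPullGeneration;
        }
    };

    // A pull posted before stop() carries the old generation, so
    // shouldHandle() returns false and the stale read is skipped.
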
diff --git a/media/libstagefright/wifi-display/source/MediaPuller.h b/media/libstagefright/wifi-display/source/MediaPuller.h
deleted file mode 100644
index 1291bb3..0000000
--- a/media/libstagefright/wifi-display/source/MediaPuller.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MEDIA_PULLER_H_
-
-#define MEDIA_PULLER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct MediaSource;
-
-struct MediaPuller : public AHandler {
-    enum {
-        kWhatEOS,
-        kWhatAccessUnit
-    };
-
-    MediaPuller(const sp<MediaSource> &source, const sp<AMessage> &notify);
-
-    status_t start();
-    void stopAsync(const sp<AMessage> &notify);
-
-    void pause();
-    void resume();
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~MediaPuller();
-
-private:
-    enum {
-        kWhatStart,
-        kWhatStop,
-        kWhatPull,
-        kWhatPause,
-        kWhatResume,
-    };
-
-    sp<MediaSource> mSource;
-    sp<AMessage> mNotify;
-    int32_t mPullGeneration;
-    bool mIsAudio;
-    bool mPaused;
-
-    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
-    void schedulePull();
-
-    DISALLOW_EVIL_CONSTRUCTORS(MediaPuller);
-};
-
-}  // namespace android
-
-#endif  // MEDIA_PULLER_H_
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
deleted file mode 100644
index f1ecca0..0000000
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ /dev/null
@@ -1,1112 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "PlaybackSession"
-#include <utils/Log.h>
-
-#include "PlaybackSession.h"
-
-#include "Converter.h"
-#include "MediaPuller.h"
-#include "RepeaterSource.h"
-#include "include/avc_utils.h"
-#include "WifiDisplaySource.h"
-
-#include <binder/IServiceManager.h>
-#include <cutils/properties.h>
-#include <media/IHDCP.h>
-#include <media/IMediaHTTPService.h>
-#include <media/stagefright/foundation/ABitReader.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/AudioSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/NuMediaExtractor.h>
-#include <media/stagefright/SurfaceMediaSource.h>
-#include <media/stagefright/Utils.h>
-
-#include <OMX_IVCommon.h>
-
-namespace android {
-
-struct WifiDisplaySource::PlaybackSession::Track : public AHandler {
-    enum {
-        kWhatStopped,
-    };
-
-    Track(const sp<AMessage> &notify,
-          const sp<ALooper> &pullLooper,
-          const sp<ALooper> &codecLooper,
-          const sp<MediaPuller> &mediaPuller,
-          const sp<Converter> &converter);
-
-    Track(const sp<AMessage> &notify, const sp<AMessage> &format);
-
-    void setRepeaterSource(const sp<RepeaterSource> &source);
-
-    sp<AMessage> getFormat();
-    bool isAudio() const;
-
-    const sp<Converter> &converter() const;
-    const sp<RepeaterSource> &repeaterSource() const;
-
-    ssize_t mediaSenderTrackIndex() const;
-    void setMediaSenderTrackIndex(size_t index);
-
-    status_t start();
-    void stopAsync();
-
-    void pause();
-    void resume();
-
-    void queueAccessUnit(const sp<ABuffer> &accessUnit);
-    sp<ABuffer> dequeueAccessUnit();
-
-    bool hasOutputBuffer(int64_t *timeUs) const;
-    void queueOutputBuffer(const sp<ABuffer> &accessUnit);
-    sp<ABuffer> dequeueOutputBuffer();
-
-#if SUSPEND_VIDEO_IF_IDLE
-    bool isSuspended() const;
-#endif
-
-    size_t countQueuedOutputBuffers() const {
-        return mQueuedOutputBuffers.size();
-    }
-
-    void requestIDRFrame();
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~Track();
-
-private:
-    enum {
-        kWhatMediaPullerStopped,
-    };
-
-    sp<AMessage> mNotify;
-    sp<ALooper> mPullLooper;
-    sp<ALooper> mCodecLooper;
-    sp<MediaPuller> mMediaPuller;
-    sp<Converter> mConverter;
-    sp<AMessage> mFormat;
-    bool mStarted;
-    ssize_t mMediaSenderTrackIndex;
-    bool mIsAudio;
-    List<sp<ABuffer> > mQueuedAccessUnits;
-    sp<RepeaterSource> mRepeaterSource;
-    List<sp<ABuffer> > mQueuedOutputBuffers;
-    int64_t mLastOutputBufferQueuedTimeUs;
-
-    static bool IsAudioFormat(const sp<AMessage> &format);
-
-    DISALLOW_EVIL_CONSTRUCTORS(Track);
-};
-
-WifiDisplaySource::PlaybackSession::Track::Track(
-        const sp<AMessage> &notify,
-        const sp<ALooper> &pullLooper,
-        const sp<ALooper> &codecLooper,
-        const sp<MediaPuller> &mediaPuller,
-        const sp<Converter> &converter)
-    : mNotify(notify),
-      mPullLooper(pullLooper),
-      mCodecLooper(codecLooper),
-      mMediaPuller(mediaPuller),
-      mConverter(converter),
-      mStarted(false),
-      mIsAudio(IsAudioFormat(mConverter->getOutputFormat())),
-      mLastOutputBufferQueuedTimeUs(-1ll) {
-}
-
-WifiDisplaySource::PlaybackSession::Track::Track(
-        const sp<AMessage> &notify, const sp<AMessage> &format)
-    : mNotify(notify),
-      mFormat(format),
-      mStarted(false),
-      mIsAudio(IsAudioFormat(format)),
-      mLastOutputBufferQueuedTimeUs(-1ll) {
-}
-
-WifiDisplaySource::PlaybackSession::Track::~Track() {
-    CHECK(!mStarted);
-}
-
-// static
-bool WifiDisplaySource::PlaybackSession::Track::IsAudioFormat(
-        const sp<AMessage> &format) {
-    AString mime;
-    CHECK(format->findString("mime", &mime));
-
-    return !strncasecmp(mime.c_str(), "audio/", 6);
-}
-
-sp<AMessage> WifiDisplaySource::PlaybackSession::Track::getFormat() {
-    return mFormat != NULL ? mFormat : mConverter->getOutputFormat();
-}
-
-bool WifiDisplaySource::PlaybackSession::Track::isAudio() const {
-    return mIsAudio;
-}
-
-const sp<Converter> &WifiDisplaySource::PlaybackSession::Track::converter() const {
-    return mConverter;
-}
-
-const sp<RepeaterSource> &
-WifiDisplaySource::PlaybackSession::Track::repeaterSource() const {
-    return mRepeaterSource;
-}
-
-ssize_t WifiDisplaySource::PlaybackSession::Track::mediaSenderTrackIndex() const {
-    CHECK_GE(mMediaSenderTrackIndex, 0);
-    return mMediaSenderTrackIndex;
-}
-
-void WifiDisplaySource::PlaybackSession::Track::setMediaSenderTrackIndex(
-        size_t index) {
-    mMediaSenderTrackIndex = index;
-}
-
-status_t WifiDisplaySource::PlaybackSession::Track::start() {
-    ALOGV("Track::start isAudio=%d", mIsAudio);
-
-    CHECK(!mStarted);
-
-    status_t err = OK;
-
-    if (mMediaPuller != NULL) {
-        err = mMediaPuller->start();
-    }
-
-    if (err == OK) {
-        mStarted = true;
-    }
-
-    return err;
-}
-
-void WifiDisplaySource::PlaybackSession::Track::stopAsync() {
-    ALOGV("Track::stopAsync isAudio=%d", mIsAudio);
-
-    if (mConverter != NULL) {
-        mConverter->shutdownAsync();
-    }
-
-    sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, this);
-
-    if (mStarted && mMediaPuller != NULL) {
-        if (mRepeaterSource != NULL) {
-            // Let's unblock MediaPuller's MediaSource::read().
-            mRepeaterSource->wakeUp();
-        }
-
-        mMediaPuller->stopAsync(msg);
-    } else {
-        mStarted = false;
-        msg->post();
-    }
-}
-
-void WifiDisplaySource::PlaybackSession::Track::pause() {
-    mMediaPuller->pause();
-}
-
-void WifiDisplaySource::PlaybackSession::Track::resume() {
-    mMediaPuller->resume();
-}
-
-void WifiDisplaySource::PlaybackSession::Track::onMessageReceived(
-        const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatMediaPullerStopped:
-        {
-            mConverter.clear();
-
-            mStarted = false;
-
-            sp<AMessage> notify = mNotify->dup();
-            notify->setInt32("what", kWhatStopped);
-            notify->post();
-
-            ALOGI("kWhatStopped %s posted", mIsAudio ? "audio" : "video");
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void WifiDisplaySource::PlaybackSession::Track::queueAccessUnit(
-        const sp<ABuffer> &accessUnit) {
-    mQueuedAccessUnits.push_back(accessUnit);
-}
-
-sp<ABuffer> WifiDisplaySource::PlaybackSession::Track::dequeueAccessUnit() {
-    if (mQueuedAccessUnits.empty()) {
-        return NULL;
-    }
-
-    sp<ABuffer> accessUnit = *mQueuedAccessUnits.begin();
-    CHECK(accessUnit != NULL);
-
-    mQueuedAccessUnits.erase(mQueuedAccessUnits.begin());
-
-    return accessUnit;
-}
-
-void WifiDisplaySource::PlaybackSession::Track::setRepeaterSource(
-        const sp<RepeaterSource> &source) {
-    mRepeaterSource = source;
-}
-
-void WifiDisplaySource::PlaybackSession::Track::requestIDRFrame() {
-    if (mIsAudio) {
-        return;
-    }
-
-    if (mRepeaterSource != NULL) {
-        mRepeaterSource->wakeUp();
-    }
-
-    mConverter->requestIDRFrame();
-}
-
-bool WifiDisplaySource::PlaybackSession::Track::hasOutputBuffer(
-        int64_t *timeUs) const {
-    *timeUs = 0ll;
-
-    if (mQueuedOutputBuffers.empty()) {
-        return false;
-    }
-
-    const sp<ABuffer> &outputBuffer = *mQueuedOutputBuffers.begin();
-
-    CHECK(outputBuffer->meta()->findInt64("timeUs", timeUs));
-
-    return true;
-}
-
-void WifiDisplaySource::PlaybackSession::Track::queueOutputBuffer(
-        const sp<ABuffer> &accessUnit) {
-    mQueuedOutputBuffers.push_back(accessUnit);
-    mLastOutputBufferQueuedTimeUs = ALooper::GetNowUs();
-}
-
-sp<ABuffer> WifiDisplaySource::PlaybackSession::Track::dequeueOutputBuffer() {
-    CHECK(!mQueuedOutputBuffers.empty());
-
-    sp<ABuffer> outputBuffer = *mQueuedOutputBuffers.begin();
-    mQueuedOutputBuffers.erase(mQueuedOutputBuffers.begin());
-
-    return outputBuffer;
-}
-
-#if SUSPEND_VIDEO_IF_IDLE
-bool WifiDisplaySource::PlaybackSession::Track::isSuspended() const {
-    if (!mQueuedOutputBuffers.empty()) {
-        return false;
-    }
-
-    if (mLastOutputBufferQueuedTimeUs < 0ll) {
-        // We've never seen an output buffer queued, but tracks start
-        // out live, not suspended.
-        return false;
-    }
-
-    // If we've not seen new output data for 60ms or more, we consider
-    // this track suspended for the time being.
-    return (ALooper::GetNowUs() - mLastOutputBufferQueuedTimeUs) > 60000ll;
-}
-#endif
-
-////////////////////////////////////////////////////////////////////////////////
-
-WifiDisplaySource::PlaybackSession::PlaybackSession(
-        const String16 &opPackageName,
-        const sp<ANetworkSession> &netSession,
-        const sp<AMessage> &notify,
-        const in_addr &interfaceAddr,
-        const sp<IHDCP> &hdcp,
-        const char *path)
-    : mOpPackageName(opPackageName),
-      mNetSession(netSession),
-      mNotify(notify),
-      mInterfaceAddr(interfaceAddr),
-      mHDCP(hdcp),
-      mLocalRTPPort(-1),
-      mWeAreDead(false),
-      mPaused(false),
-      mLastLifesignUs(),
-      mVideoTrackIndex(-1),
-      mPrevTimeUs(-1ll),
-      mPullExtractorPending(false),
-      mPullExtractorGeneration(0),
-      mFirstSampleTimeRealUs(-1ll),
-      mFirstSampleTimeUs(-1ll) {
-    if (path != NULL) {
-        mMediaPath.setTo(path);
-    }
-}
-
-status_t WifiDisplaySource::PlaybackSession::init(
-        const char *clientIP,
-        int32_t clientRtp,
-        RTPSender::TransportMode rtpMode,
-        int32_t clientRtcp,
-        RTPSender::TransportMode rtcpMode,
-        bool enableAudio,
-        bool usePCMAudio,
-        bool enableVideo,
-        VideoFormats::ResolutionType videoResolutionType,
-        size_t videoResolutionIndex,
-        VideoFormats::ProfileType videoProfileType,
-        VideoFormats::LevelType videoLevelType) {
-    sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, this);
-    mMediaSender = new MediaSender(mNetSession, notify);
-    looper()->registerHandler(mMediaSender);
-
-    mMediaSender->setHDCP(mHDCP);
-
-    status_t err = setupPacketizer(
-            enableAudio,
-            usePCMAudio,
-            enableVideo,
-            videoResolutionType,
-            videoResolutionIndex,
-            videoProfileType,
-            videoLevelType);
-
-    if (err == OK) {
-        err = mMediaSender->initAsync(
-                -1 /* trackIndex */,
-                clientIP,
-                clientRtp,
-                rtpMode,
-                clientRtcp,
-                rtcpMode,
-                &mLocalRTPPort);
-    }
-
-    if (err != OK) {
-        mLocalRTPPort = -1;
-
-        looper()->unregisterHandler(mMediaSender->id());
-        mMediaSender.clear();
-
-        return err;
-    }
-
-    updateLiveness();
-
-    return OK;
-}
-
-WifiDisplaySource::PlaybackSession::~PlaybackSession() {
-}
-
-int32_t WifiDisplaySource::PlaybackSession::getRTPPort() const {
-    return mLocalRTPPort;
-}
-
-int64_t WifiDisplaySource::PlaybackSession::getLastLifesignUs() const {
-    return mLastLifesignUs;
-}
-
-void WifiDisplaySource::PlaybackSession::updateLiveness() {
-    mLastLifesignUs = ALooper::GetNowUs();
-}
-
-status_t WifiDisplaySource::PlaybackSession::play() {
-    updateLiveness();
-
-    (new AMessage(kWhatResume, this))->post();
-
-    return OK;
-}
-
-status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() {
-    for (size_t i = 0; i < mTracks.size(); ++i) {
-        CHECK_EQ((status_t)OK, mTracks.editValueAt(i)->start());
-    }
-
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatSessionEstablished);
-    notify->post();
-
-    return OK;
-}
-
-status_t WifiDisplaySource::PlaybackSession::pause() {
-    updateLiveness();
-
-    (new AMessage(kWhatPause, this))->post();
-
-    return OK;
-}
-
-void WifiDisplaySource::PlaybackSession::destroyAsync() {
-    ALOGI("destroyAsync");
-
-    for (size_t i = 0; i < mTracks.size(); ++i) {
-        mTracks.valueAt(i)->stopAsync();
-    }
-}
-
-void WifiDisplaySource::PlaybackSession::onMessageReceived(
-        const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatConverterNotify:
-        {
-            if (mWeAreDead) {
-                ALOGV("dropping msg '%s' because we're dead",
-                      msg->debugString().c_str());
-
-                break;
-            }
-
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            size_t trackIndex;
-            CHECK(msg->findSize("trackIndex", &trackIndex));
-
-            if (what == Converter::kWhatAccessUnit) {
-                sp<ABuffer> accessUnit;
-                CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-                const sp<Track> &track = mTracks.valueFor(trackIndex);
-
-                status_t err = mMediaSender->queueAccessUnit(
-                        track->mediaSenderTrackIndex(),
-                        accessUnit);
-
-                if (err != OK) {
-                    notifySessionDead();
-                }
-                break;
-            } else if (what == Converter::kWhatEOS) {
-                CHECK_EQ(what, Converter::kWhatEOS);
-
-                ALOGI("output EOS on track %zu", trackIndex);
-
-                ssize_t index = mTracks.indexOfKey(trackIndex);
-                CHECK_GE(index, 0);
-
-                const sp<Converter> &converter =
-                    mTracks.valueAt(index)->converter();
-                looper()->unregisterHandler(converter->id());
-
-                mTracks.removeItemsAt(index);
-
-                if (mTracks.isEmpty()) {
-                    ALOGI("Reached EOS");
-                }
-            } else if (what != Converter::kWhatShutdownCompleted) {
-                CHECK_EQ(what, Converter::kWhatError);
-
-                status_t err;
-                CHECK(msg->findInt32("err", &err));
-
-                ALOGE("converter signaled error %d", err);
-
-                notifySessionDead();
-            }
-            break;
-        }
-
-        case kWhatMediaSenderNotify:
-        {
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            if (what == MediaSender::kWhatInitDone) {
-                status_t err;
-                CHECK(msg->findInt32("err", &err));
-
-                if (err == OK) {
-                    onMediaSenderInitialized();
-                } else {
-                    notifySessionDead();
-                }
-            } else if (what == MediaSender::kWhatError) {
-                notifySessionDead();
-            } else if (what == MediaSender::kWhatNetworkStall) {
-                size_t numBytesQueued;
-                CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
-
-                if (mVideoTrackIndex >= 0) {
-                    const sp<Track> &videoTrack =
-                        mTracks.valueFor(mVideoTrackIndex);
-
-                    sp<Converter> converter = videoTrack->converter();
-                    if (converter != NULL) {
-                        converter->dropAFrame();
-                    }
-                }
-            } else if (what == MediaSender::kWhatInformSender) {
-                onSinkFeedback(msg);
-            } else {
-                TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatTrackNotify:
-        {
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            size_t trackIndex;
-            CHECK(msg->findSize("trackIndex", &trackIndex));
-
-            if (what == Track::kWhatStopped) {
-                ALOGI("Track %zu stopped", trackIndex);
-
-                sp<Track> track = mTracks.valueFor(trackIndex);
-                looper()->unregisterHandler(track->id());
-                mTracks.removeItem(trackIndex);
-                track.clear();
-
-                if (!mTracks.isEmpty()) {
-                    ALOGI("not all tracks are stopped yet");
-                    break;
-                }
-
-                looper()->unregisterHandler(mMediaSender->id());
-                mMediaSender.clear();
-
-                sp<AMessage> notify = mNotify->dup();
-                notify->setInt32("what", kWhatSessionDestroyed);
-                notify->post();
-            }
-            break;
-        }
-
-        case kWhatPause:
-        {
-            if (mExtractor != NULL) {
-                ++mPullExtractorGeneration;
-                mFirstSampleTimeRealUs = -1ll;
-                mFirstSampleTimeUs = -1ll;
-            }
-
-            if (mPaused) {
-                break;
-            }
-
-            for (size_t i = 0; i < mTracks.size(); ++i) {
-                mTracks.editValueAt(i)->pause();
-            }
-
-            mPaused = true;
-            break;
-        }
-
-        case kWhatResume:
-        {
-            if (mExtractor != NULL) {
-                schedulePullExtractor();
-            }
-
-            if (!mPaused) {
-                break;
-            }
-
-            for (size_t i = 0; i < mTracks.size(); ++i) {
-                mTracks.editValueAt(i)->resume();
-            }
-
-            mPaused = false;
-            break;
-        }
-
-        case kWhatPullExtractorSample:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mPullExtractorGeneration) {
-                break;
-            }
-
-            mPullExtractorPending = false;
-
-            onPullExtractor();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void WifiDisplaySource::PlaybackSession::onSinkFeedback(const sp<AMessage> &msg) {
-    int64_t avgLatencyUs;
-    CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs));
-
-    int64_t maxLatencyUs;
-    CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs));
-
-    ALOGI("sink reports avg. latency of %lld ms (max %lld ms)",
-          avgLatencyUs / 1000ll,
-          maxLatencyUs / 1000ll);
-
-    if (mVideoTrackIndex >= 0) {
-        const sp<Track> &videoTrack = mTracks.valueFor(mVideoTrackIndex);
-        sp<Converter> converter = videoTrack->converter();
-
-        if (converter != NULL) {
-            int32_t videoBitrate =
-                Converter::GetInt32Property("media.wfd.video-bitrate", -1);
-
-            char val[PROPERTY_VALUE_MAX];
-            if (videoBitrate < 0
-                    && property_get("media.wfd.video-bitrate", val, NULL)
-                    && !strcasecmp("adaptive", val)) {
-                videoBitrate = converter->getVideoBitrate();
-
-                if (avgLatencyUs > 300000ll) {
-                    videoBitrate *= 0.6;
-                } else if (avgLatencyUs < 100000ll) {
-                    videoBitrate *= 1.1;
-                }
-            }
-
-            if (videoBitrate > 0) {
-                if (videoBitrate < 500000) {
-                    videoBitrate = 500000;
-                } else if (videoBitrate > 10000000) {
-                    videoBitrate = 10000000;
-                }
-
-                if (videoBitrate != converter->getVideoBitrate()) {
-                    ALOGI("setting video bitrate to %d bps", videoBitrate);
-
-                    converter->setVideoBitrate(videoBitrate);
-                }
-            }
-        }
-
-        sp<RepeaterSource> repeaterSource = videoTrack->repeaterSource();
-        if (repeaterSource != NULL) {
-            double rateHz =
-                Converter::GetInt32Property(
-                        "media.wfd.video-framerate", -1);
-
-            char val[PROPERTY_VALUE_MAX];
-            if (rateHz < 0.0
-                    && property_get("media.wfd.video-framerate", val, NULL)
-                    && !strcasecmp("adaptive", val)) {
-                rateHz = repeaterSource->getFrameRate();
-
-                if (avgLatencyUs > 300000ll) {
-                    rateHz *= 0.9;
-                } else if (avgLatencyUs < 200000ll) {
-                    rateHz *= 1.1;
-                }
-            }
-
-            if (rateHz > 0) {
-                if (rateHz < 5.0) {
-                    rateHz = 5.0;
-                } else if (rateHz > 30.0) {
-                    rateHz = 30.0;
-                }
-
-                if (rateHz != repeaterSource->getFrameRate()) {
-                    ALOGI("setting frame rate to %.2f Hz", rateHz);
-
-                    repeaterSource->setFrameRate(rateHz);
-                }
-            }
-        }
-    }
-}
-
-status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer(
-        bool enableAudio, bool enableVideo) {
-    mExtractor = new NuMediaExtractor;
-
-    status_t err = mExtractor->setDataSource(
-            NULL /* httpService */, mMediaPath.c_str());
-
-    if (err != OK) {
-        return err;
-    }
-
-    size_t n = mExtractor->countTracks();
-    bool haveAudio = false;
-    bool haveVideo = false;
-    for (size_t i = 0; i < n; ++i) {
-        sp<AMessage> format;
-        err = mExtractor->getTrackFormat(i, &format);
-
-        if (err != OK) {
-            continue;
-        }
-
-        AString mime;
-        CHECK(format->findString("mime", &mime));
-
-        bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6);
-        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
-
-        if (isAudio && enableAudio && !haveAudio) {
-            haveAudio = true;
-        } else if (isVideo && enableVideo && !haveVideo) {
-            haveVideo = true;
-        } else {
-            continue;
-        }
-
-        err = mExtractor->selectTrack(i);
-
-        size_t trackIndex = mTracks.size();
-
-        sp<AMessage> notify = new AMessage(kWhatTrackNotify, this);
-        notify->setSize("trackIndex", trackIndex);
-
-        sp<Track> track = new Track(notify, format);
-        looper()->registerHandler(track);
-
-        mTracks.add(trackIndex, track);
-
-        mExtractorTrackToInternalTrack.add(i, trackIndex);
-
-        if (isVideo) {
-            mVideoTrackIndex = trackIndex;
-        }
-
-        uint32_t flags = MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS;
-
-        ssize_t mediaSenderTrackIndex =
-            mMediaSender->addTrack(format, flags);
-        CHECK_GE(mediaSenderTrackIndex, 0);
-
-        track->setMediaSenderTrackIndex(mediaSenderTrackIndex);
-
-        if ((haveAudio || !enableAudio) && (haveVideo || !enableVideo)) {
-            break;
-        }
-    }
-
-    return OK;
-}
-
-void WifiDisplaySource::PlaybackSession::schedulePullExtractor() {
-    if (mPullExtractorPending) {
-        return;
-    }
-
-    int64_t delayUs = 1000000; // default delay is 1 sec
-    int64_t sampleTimeUs;
-    status_t err = mExtractor->getSampleTime(&sampleTimeUs);
-
-    if (err == OK) {
-        int64_t nowUs = ALooper::GetNowUs();
-
-        if (mFirstSampleTimeRealUs < 0ll) {
-            mFirstSampleTimeRealUs = nowUs;
-            mFirstSampleTimeUs = sampleTimeUs;
-        }
-
-        int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs;
-        delayUs = whenUs - nowUs;
-    } else {
-        ALOGW("could not get sample time (%d)", err);
-    }
-
-    sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, this);
-    msg->setInt32("generation", mPullExtractorGeneration);
-    msg->post(delayUs);
-
-    mPullExtractorPending = true;
-}
-
-void WifiDisplaySource::PlaybackSession::onPullExtractor() {
-    sp<ABuffer> accessUnit = new ABuffer(1024 * 1024);
-    status_t err = mExtractor->readSampleData(accessUnit);
-    if (err != OK) {
-        // EOS.
-        return;
-    }
-
-    int64_t timeUs;
-    CHECK_EQ((status_t)OK, mExtractor->getSampleTime(&timeUs));
-
-    accessUnit->meta()->setInt64(
-            "timeUs", mFirstSampleTimeRealUs + timeUs - mFirstSampleTimeUs);
-
-    size_t trackIndex;
-    CHECK_EQ((status_t)OK, mExtractor->getSampleTrackIndex(&trackIndex));
-
-    sp<AMessage> msg = new AMessage(kWhatConverterNotify, this);
-
-    msg->setSize(
-            "trackIndex", mExtractorTrackToInternalTrack.valueFor(trackIndex));
-
-    msg->setInt32("what", Converter::kWhatAccessUnit);
-    msg->setBuffer("accessUnit", accessUnit);
-    msg->post();
-
-    mExtractor->advance();
-
-    schedulePullExtractor();
-}
-
-status_t WifiDisplaySource::PlaybackSession::setupPacketizer(
-        bool enableAudio,
-        bool usePCMAudio,
-        bool enableVideo,
-        VideoFormats::ResolutionType videoResolutionType,
-        size_t videoResolutionIndex,
-        VideoFormats::ProfileType videoProfileType,
-        VideoFormats::LevelType videoLevelType) {
-    CHECK(enableAudio || enableVideo);
-
-    if (!mMediaPath.empty()) {
-        return setupMediaPacketizer(enableAudio, enableVideo);
-    }
-
-    if (enableVideo) {
-        status_t err = addVideoSource(
-                videoResolutionType, videoResolutionIndex, videoProfileType,
-                videoLevelType);
-
-        if (err != OK) {
-            return err;
-        }
-    }
-
-    if (!enableAudio) {
-        return OK;
-    }
-
-    return addAudioSource(usePCMAudio);
-}
-
-status_t WifiDisplaySource::PlaybackSession::addSource(
-        bool isVideo, const sp<MediaSource> &source, bool isRepeaterSource,
-        bool usePCMAudio, unsigned profileIdc, unsigned levelIdc,
-        unsigned constraintSet, size_t *numInputBuffers) {
-    CHECK(!usePCMAudio || !isVideo);
-    CHECK(!isRepeaterSource || isVideo);
-    CHECK(!profileIdc || isVideo);
-    CHECK(!levelIdc || isVideo);
-    CHECK(!constraintSet || isVideo);
-
-    sp<ALooper> pullLooper = new ALooper;
-    pullLooper->setName("pull_looper");
-
-    pullLooper->start(
-            false /* runOnCallingThread */,
-            false /* canCallJava */,
-            PRIORITY_AUDIO);
-
-    sp<ALooper> codecLooper = new ALooper;
-    codecLooper->setName("codec_looper");
-
-    codecLooper->start(
-            false /* runOnCallingThread */,
-            false /* canCallJava */,
-            PRIORITY_AUDIO);
-
-    size_t trackIndex;
-
-    sp<AMessage> notify;
-
-    trackIndex = mTracks.size();
-
-    sp<AMessage> format;
-    status_t err = convertMetaDataToMessage(source->getFormat(), &format);
-    CHECK_EQ(err, (status_t)OK);
-
-    if (isVideo) {
-        format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
-        format->setInt32(
-                "android._input-metadata-buffer-type", kMetadataBufferTypeANWBuffer);
-        format->setInt32("android._store-metadata-in-buffers-output", (mHDCP != NULL)
-                && (mHDCP->getCaps() & HDCPModule::HDCP_CAPS_ENCRYPT_NATIVE));
-        format->setInt32(
-                "color-format", OMX_COLOR_FormatAndroidOpaque);
-        format->setInt32("profile-idc", profileIdc);
-        format->setInt32("level-idc", levelIdc);
-        format->setInt32("constraint-set", constraintSet);
-    } else {
-        if (usePCMAudio) {
-            format->setInt32("pcm-encoding", kAudioEncodingPcm16bit);
-            format->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
-        } else {
-            format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
-        }
-    }
-
-    notify = new AMessage(kWhatConverterNotify, this);
-    notify->setSize("trackIndex", trackIndex);
-
-    sp<Converter> converter = new Converter(notify, codecLooper, format);
-
-    looper()->registerHandler(converter);
-
-    err = converter->init();
-    if (err != OK) {
-        ALOGE("%s converter returned err %d", isVideo ? "video" : "audio", err);
-
-        looper()->unregisterHandler(converter->id());
-        return err;
-    }
-
-    notify = new AMessage(Converter::kWhatMediaPullerNotify, converter);
-    notify->setSize("trackIndex", trackIndex);
-
-    sp<MediaPuller> puller = new MediaPuller(source, notify);
-    pullLooper->registerHandler(puller);
-
-    if (numInputBuffers != NULL) {
-        *numInputBuffers = converter->getInputBufferCount();
-    }
-
-    notify = new AMessage(kWhatTrackNotify, this);
-    notify->setSize("trackIndex", trackIndex);
-
-    sp<Track> track = new Track(
-            notify, pullLooper, codecLooper, puller, converter);
-
-    if (isRepeaterSource) {
-        track->setRepeaterSource(static_cast<RepeaterSource *>(source.get()));
-    }
-
-    looper()->registerHandler(track);
-
-    mTracks.add(trackIndex, track);
-
-    if (isVideo) {
-        mVideoTrackIndex = trackIndex;
-    }
-
-    uint32_t flags = 0;
-    if (converter->needToManuallyPrependSPSPPS()) {
-        flags |= MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS;
-    }
-
-    ssize_t mediaSenderTrackIndex =
-        mMediaSender->addTrack(converter->getOutputFormat(), flags);
-    CHECK_GE(mediaSenderTrackIndex, 0);
-
-    track->setMediaSenderTrackIndex(mediaSenderTrackIndex);
-
-    return OK;
-}
-
-status_t WifiDisplaySource::PlaybackSession::addVideoSource(
-        VideoFormats::ResolutionType videoResolutionType,
-        size_t videoResolutionIndex,
-        VideoFormats::ProfileType videoProfileType,
-        VideoFormats::LevelType videoLevelType) {
-    size_t width, height, framesPerSecond;
-    bool interlaced;
-    CHECK(VideoFormats::GetConfiguration(
-                videoResolutionType,
-                videoResolutionIndex,
-                &width,
-                &height,
-                &framesPerSecond,
-                &interlaced));
-
-    unsigned profileIdc, levelIdc, constraintSet;
-    CHECK(VideoFormats::GetProfileLevel(
-                videoProfileType,
-                videoLevelType,
-                &profileIdc,
-                &levelIdc,
-                &constraintSet));
-
-    sp<SurfaceMediaSource> source = new SurfaceMediaSource(width, height);
-
-    source->setUseAbsoluteTimestamps();
-
-    sp<RepeaterSource> videoSource =
-        new RepeaterSource(source, framesPerSecond);
-
-    size_t numInputBuffers;
-    status_t err = addSource(
-            true /* isVideo */, videoSource, true /* isRepeaterSource */,
-            false /* usePCMAudio */, profileIdc, levelIdc, constraintSet,
-            &numInputBuffers);
-
-    if (err != OK) {
-        return err;
-    }
-
-    err = source->setMaxAcquiredBufferCount(numInputBuffers);
-    CHECK_EQ(err, (status_t)OK);
-
-    mProducer = source->getProducer();
-
-    return OK;
-}
-
-status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) {
-    sp<AudioSource> audioSource = new AudioSource(
-            AUDIO_SOURCE_REMOTE_SUBMIX,
-            mOpPackageName,
-            48000 /* sampleRate */,
-            2 /* channelCount */);
-
-    if (audioSource->initCheck() == OK) {
-        return addSource(
-                false /* isVideo */, audioSource, false /* isRepeaterSource */,
-                usePCMAudio, 0 /* profileIdc */, 0 /* levelIdc */,
-                0 /* constraintSet */, NULL /* numInputBuffers */);
-    }
-
-    ALOGW("Unable to instantiate audio source");
-
-    return OK;
-}
-
-sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture() {
-    return mProducer;
-}
-
-void WifiDisplaySource::PlaybackSession::requestIDRFrame() {
-    for (size_t i = 0; i < mTracks.size(); ++i) {
-        const sp<Track> &track = mTracks.valueAt(i);
-
-        track->requestIDRFrame();
-    }
-}
-
-void WifiDisplaySource::PlaybackSession::notifySessionDead() {
-    // Inform WifiDisplaySource of our premature death (wish).
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatSessionDead);
-    notify->post();
-
-    mWeAreDead = true;
-}
-
-}  // namespace android
-
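The latency-driven rate control buried in onSinkFeedback() above is the one piece of real policy in this file: when the "media.wfd.video-bitrate" / "media.wfd.video-framerate" properties are set to "adaptive", the reported sink latency scales the encoder bitrate and repeater frame rate, with hard clamps. Below is a minimal standalone sketch of the bitrate half of that arithmetic, ignoring the property lookup; the function name adaptVideoBitrate and the standalone framing are illustrative, not part of the removed file.

    // Sketch only: mirrors the thresholds and clamps of the removed
    // onSinkFeedback() bitrate path, assuming the "adaptive" property is set.
    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    static int32_t adaptVideoBitrate(int32_t currentBps, int64_t avgLatencyUs) {
        double scaled = currentBps;
        if (avgLatencyUs > 300000ll) {
            scaled *= 0.6;      // back off when the sink reports high latency
        } else if (avgLatencyUs < 100000ll) {
            scaled *= 1.1;      // probe upward when latency is comfortably low
        }
        // Clamp to the same [500 kbps, 10 Mbps] window as the removed code.
        return std::min<int32_t>(10000000,
                std::max<int32_t>(500000, static_cast<int32_t>(scaled)));
    }

    int main() {
        printf("%d\n", adaptVideoBitrate(5000000, 400000ll));  // prints 3000000
        printf("%d\n", adaptVideoBitrate(5000000, 50000ll));   // prints 5500000
        return 0;
    }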
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h
deleted file mode 100644
index f6673df..0000000
--- a/media/libstagefright/wifi-display/source/PlaybackSession.h
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PLAYBACK_SESSION_H_
-
-#define PLAYBACK_SESSION_H_
-
-#include "MediaSender.h"
-#include "VideoFormats.h"
-#include "WifiDisplaySource.h"
-
-#include <utils/String16.h>
-
-namespace android {
-
-struct ABuffer;
-struct IHDCP;
-class IGraphicBufferProducer;
-struct MediaPuller;
-struct MediaSource;
-struct MediaSender;
-struct NuMediaExtractor;
-
-// Encapsulates the state of an RTP/RTCP session in the context of wifi
-// display.
-struct WifiDisplaySource::PlaybackSession : public AHandler {
-    PlaybackSession(
-            const String16 &opPackageName,
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify,
-            const struct in_addr &interfaceAddr,
-            const sp<IHDCP> &hdcp,
-            const char *path = NULL);
-
-    status_t init(
-            const char *clientIP,
-            int32_t clientRtp,
-            RTPSender::TransportMode rtpMode,
-            int32_t clientRtcp,
-            RTPSender::TransportMode rtcpMode,
-            bool enableAudio,
-            bool usePCMAudio,
-            bool enableVideo,
-            VideoFormats::ResolutionType videoResolutionType,
-            size_t videoResolutionIndex,
-            VideoFormats::ProfileType videoProfileType,
-            VideoFormats::LevelType videoLevelType);
-
-    void destroyAsync();
-
-    int32_t getRTPPort() const;
-
-    int64_t getLastLifesignUs() const;
-    void updateLiveness();
-
-    status_t play();
-    status_t finishPlay();
-    status_t pause();
-
-    sp<IGraphicBufferProducer> getSurfaceTexture();
-
-    void requestIDRFrame();
-
-    enum {
-        kWhatSessionDead,
-        kWhatBinaryData,
-        kWhatSessionEstablished,
-        kWhatSessionDestroyed,
-    };
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~PlaybackSession();
-
-private:
-    struct Track;
-
-    enum {
-        kWhatMediaPullerNotify,
-        kWhatConverterNotify,
-        kWhatTrackNotify,
-        kWhatUpdateSurface,
-        kWhatPause,
-        kWhatResume,
-        kWhatMediaSenderNotify,
-        kWhatPullExtractorSample,
-    };
-
-    String16 mOpPackageName;
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-    in_addr mInterfaceAddr;
-    sp<IHDCP> mHDCP;
-    AString mMediaPath;
-
-    sp<MediaSender> mMediaSender;
-    int32_t mLocalRTPPort;
-
-    bool mWeAreDead;
-    bool mPaused;
-
-    int64_t mLastLifesignUs;
-
-    sp<IGraphicBufferProducer> mProducer;
-
-    KeyedVector<size_t, sp<Track> > mTracks;
-    ssize_t mVideoTrackIndex;
-
-    int64_t mPrevTimeUs;
-
-    sp<NuMediaExtractor> mExtractor;
-    KeyedVector<size_t, size_t> mExtractorTrackToInternalTrack;
-    bool mPullExtractorPending;
-    int32_t mPullExtractorGeneration;
-    int64_t mFirstSampleTimeRealUs;
-    int64_t mFirstSampleTimeUs;
-
-    status_t setupMediaPacketizer(bool enableAudio, bool enableVideo);
-
-    status_t setupPacketizer(
-            bool enableAudio,
-            bool usePCMAudio,
-            bool enableVideo,
-            VideoFormats::ResolutionType videoResolutionType,
-            size_t videoResolutionIndex,
-            VideoFormats::ProfileType videoProfileType,
-            VideoFormats::LevelType videoLevelType);
-
-    status_t addSource(
-            bool isVideo,
-            const sp<MediaSource> &source,
-            bool isRepeaterSource,
-            bool usePCMAudio,
-            unsigned profileIdc,
-            unsigned levelIdc,
-            unsigned constraintSet,
-            size_t *numInputBuffers);
-
-    status_t addVideoSource(
-            VideoFormats::ResolutionType videoResolutionType,
-            size_t videoResolutionIndex,
-            VideoFormats::ProfileType videoProfileType,
-            VideoFormats::LevelType videoLevelType);
-
-    status_t addAudioSource(bool usePCMAudio);
-
-    status_t onMediaSenderInitialized();
-
-    void notifySessionDead();
-
-    void schedulePullExtractor();
-    void onPullExtractor();
-
-    void onSinkFeedback(const sp<AMessage> &msg);
-
-    DISALLOW_EVIL_CONSTRUCTORS(PlaybackSession);
-};
-
-}  // namespace android
-
-#endif  // PLAYBACK_SESSION_H_
-
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
deleted file mode 100644
index af6b663..0000000
--- a/media/libstagefright/wifi-display/source/RepeaterSource.cpp
+++ /dev/null
@@ -1,219 +0,0 @@
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RepeaterSource"
-#include <utils/Log.h>
-
-#include "RepeaterSource.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MetaData.h>
-
-namespace android {
-
-RepeaterSource::RepeaterSource(const sp<MediaSource> &source, double rateHz)
-    : mStarted(false),
-      mSource(source),
-      mRateHz(rateHz),
-      mBuffer(NULL),
-      mResult(OK),
-      mLastBufferUpdateUs(-1ll),
-      mStartTimeUs(-1ll),
-      mFrameCount(0) {
-}
-
-RepeaterSource::~RepeaterSource() {
-    CHECK(!mStarted);
-}
-
-double RepeaterSource::getFrameRate() const {
-    return mRateHz;
-}
-
-void RepeaterSource::setFrameRate(double rateHz) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (rateHz == mRateHz) {
-        return;
-    }
-
-    if (mStartTimeUs >= 0ll) {
-        int64_t nextTimeUs = mStartTimeUs + (mFrameCount * 1000000ll) / mRateHz;
-        mStartTimeUs = nextTimeUs;
-        mFrameCount = 0;
-    }
-    mRateHz = rateHz;
-}
-
-status_t RepeaterSource::start(MetaData *params) {
-    CHECK(!mStarted);
-
-    status_t err = mSource->start(params);
-
-    if (err != OK) {
-        return err;
-    }
-
-    mBuffer = NULL;
-    mResult = OK;
-    mStartTimeUs = -1ll;
-    mFrameCount = 0;
-
-    mLooper = new ALooper;
-    mLooper->setName("repeater_looper");
-    mLooper->start();
-
-    mReflector = new AHandlerReflector<RepeaterSource>(this);
-    mLooper->registerHandler(mReflector);
-
-    postRead();
-
-    mStarted = true;
-
-    return OK;
-}
-
-status_t RepeaterSource::stop() {
-    CHECK(mStarted);
-
-    ALOGV("stopping");
-
-    status_t err = mSource->stop();
-
-    if (mLooper != NULL) {
-        mLooper->stop();
-        mLooper.clear();
-
-        mReflector.clear();
-    }
-
-    if (mBuffer != NULL) {
-        ALOGV("releasing mbuf %p", mBuffer);
-        mBuffer->release();
-        mBuffer = NULL;
-    }
-
-
-    ALOGV("stopped");
-
-    mStarted = false;
-
-    return err;
-}
-
-sp<MetaData> RepeaterSource::getFormat() {
-    return mSource->getFormat();
-}
-
-status_t RepeaterSource::read(
-        MediaBuffer **buffer, const ReadOptions *options) {
-    int64_t seekTimeUs;
-    ReadOptions::SeekMode seekMode;
-    CHECK(options == NULL || !options->getSeekTo(&seekTimeUs, &seekMode));
-
-    for (;;) {
-        int64_t bufferTimeUs = -1ll;
-
-        if (mStartTimeUs < 0ll) {
-            Mutex::Autolock autoLock(mLock);
-            while ((mLastBufferUpdateUs < 0ll || mBuffer == NULL)
-                    && mResult == OK) {
-                mCondition.wait(mLock);
-            }
-
-            ALOGV("now resuming.");
-            mStartTimeUs = ALooper::GetNowUs();
-            bufferTimeUs = mStartTimeUs;
-        } else {
-            bufferTimeUs = mStartTimeUs + (mFrameCount * 1000000ll) / mRateHz;
-
-            int64_t nowUs = ALooper::GetNowUs();
-            int64_t delayUs = bufferTimeUs - nowUs;
-
-            if (delayUs > 0ll) {
-                usleep(delayUs);
-            }
-        }
-
-        bool stale = false;
-
-        {
-            Mutex::Autolock autoLock(mLock);
-            if (mResult != OK) {
-                CHECK(mBuffer == NULL);
-                return mResult;
-            }
-
-#if SUSPEND_VIDEO_IF_IDLE
-            int64_t nowUs = ALooper::GetNowUs();
-            if (nowUs - mLastBufferUpdateUs > 1000000ll) {
-                mLastBufferUpdateUs = -1ll;
-                stale = true;
-            } else
-#endif
-            {
-                mBuffer->add_ref();
-                *buffer = mBuffer;
-                (*buffer)->meta_data()->setInt64(kKeyTime, bufferTimeUs);
-                ++mFrameCount;
-            }
-        }
-
-        if (!stale) {
-            break;
-        }
-
-        mStartTimeUs = -1ll;
-        mFrameCount = 0;
-        ALOGV("now dormant");
-    }
-
-    return OK;
-}
-
-void RepeaterSource::postRead() {
-    (new AMessage(kWhatRead, mReflector))->post();
-}
-
-void RepeaterSource::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatRead:
-        {
-            MediaBuffer *buffer;
-            status_t err = mSource->read(&buffer);
-
-            ALOGV("read mbuf %p", buffer);
-
-            Mutex::Autolock autoLock(mLock);
-            if (mBuffer != NULL) {
-                mBuffer->release();
-                mBuffer = NULL;
-            }
-            mBuffer = buffer;
-            mResult = err;
-            mLastBufferUpdateUs = ALooper::GetNowUs();
-
-            mCondition.broadcast();
-
-            if (err == OK) {
-                postRead();
-            }
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void RepeaterSource::wakeUp() {
-    ALOGV("wakeUp");
-    Mutex::Autolock autoLock(mLock);
-    if (mLastBufferUpdateUs < 0ll && mBuffer != NULL) {
-        mLastBufferUpdateUs = ALooper::GetNowUs();
-        mCondition.broadcast();
-    }
-}
-
-}  // namespace android
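The timing model in the RepeaterSource implementation removed above is compact but easy to misread: read() stamps frame N with startTimeUs + N * 1e6 / rateHz, and setFrameRate() rebases the start time so that a rate change does not disturb the timestamp of the next frame. A small sketch of just that arithmetic follows; RepeatClock and its member names are illustrative stand-ins, not the removed API.

    // Sketch only: the repeat-timing arithmetic of RepeaterSource::read() and
    // setFrameRate(), with the locking and MediaBuffer handling stripped out.
    #include <cstdint>
    #include <cstdio>

    struct RepeatClock {
        int64_t startTimeUs;
        int64_t frameCount;
        double rateHz;

        int64_t nextFrameTimeUs() const {
            return startTimeUs
                    + static_cast<int64_t>((frameCount * 1000000ll) / rateHz);
        }

        void setFrameRate(double newRateHz) {
            // Same rebase as the removed setFrameRate(): fold the frames already
            // emitted into the start time before switching rates.
            startTimeUs = nextFrameTimeUs();
            frameCount = 0;
            rateHz = newRateHz;
        }
    };

    int main() {
        RepeatClock clock{0 /* startTimeUs */, 0 /* frameCount */, 30.0};
        clock.frameCount = 30;   // one second's worth of frames at 30 Hz
        printf("%lld\n", (long long)clock.nextFrameTimeUs());  // prints 1000000
        clock.setFrameRate(15.0);
        clock.frameCount = 15;   // one more second at 15 Hz
        printf("%lld\n", (long long)clock.nextFrameTimeUs());  // prints 2000000
        return 0;
    }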
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.h b/media/libstagefright/wifi-display/source/RepeaterSource.h
deleted file mode 100644
index 8d414fd..0000000
--- a/media/libstagefright/wifi-display/source/RepeaterSource.h
+++ /dev/null
@@ -1,67 +0,0 @@
-#ifndef REPEATER_SOURCE_H_
-
-#define REPEATER_SOURCE_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/foundation/AHandlerReflector.h>
-#include <media/stagefright/MediaSource.h>
-
-#define SUSPEND_VIDEO_IF_IDLE   0
-
-namespace android {
-
-// This MediaSource delivers frames at a constant rate by repeating buffers
-// if necessary.
-struct RepeaterSource : public MediaSource {
-    RepeaterSource(const sp<MediaSource> &source, double rateHz);
-
-    virtual status_t start(MetaData *params);
-    virtual status_t stop();
-    virtual sp<MetaData> getFormat();
-
-    virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options);
-
-    void onMessageReceived(const sp<AMessage> &msg);
-
-    // If RepeaterSource is currently dormant because SurfaceFlinger hasn't
-    // sent updates in a while, this is its wakeup call.
-    void wakeUp();
-
-    double getFrameRate() const;
-    void setFrameRate(double rateHz);
-
-protected:
-    virtual ~RepeaterSource();
-
-private:
-    enum {
-        kWhatRead,
-    };
-
-    Mutex mLock;
-    Condition mCondition;
-
-    bool mStarted;
-
-    sp<MediaSource> mSource;
-    double mRateHz;
-
-    sp<ALooper> mLooper;
-    sp<AHandlerReflector<RepeaterSource> > mReflector;
-
-    MediaBuffer *mBuffer;
-    status_t mResult;
-    int64_t mLastBufferUpdateUs;
-
-    int64_t mStartTimeUs;
-    int32_t mFrameCount;
-
-    void postRead();
-
-    DISALLOW_EVIL_CONSTRUCTORS(RepeaterSource);
-};
-
-}  // namespace android
-
-#endif // REPEATER_SOURCE_H_
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
deleted file mode 100644
index 865ba94..0000000
--- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp
+++ /dev/null
@@ -1,1055 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "TSPacketizer"
-#include <utils/Log.h>
-
-#include "TSPacketizer.h"
-#include "include/avc_utils.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-
-#include <arpa/inet.h>
-
-namespace android {
-
-struct TSPacketizer::Track : public RefBase {
-    Track(const sp<AMessage> &format,
-          unsigned PID, unsigned streamType, unsigned streamID);
-
-    unsigned PID() const;
-    unsigned streamType() const;
-    unsigned streamID() const;
-
-    // Returns the previous value.
-    unsigned incrementContinuityCounter();
-
-    bool isAudio() const;
-    bool isVideo() const;
-
-    bool isH264() const;
-    bool isAAC() const;
-    bool lacksADTSHeader() const;
-    bool isPCMAudio() const;
-
-    sp<ABuffer> prependCSD(const sp<ABuffer> &accessUnit) const;
-    sp<ABuffer> prependADTSHeader(const sp<ABuffer> &accessUnit) const;
-
-    size_t countDescriptors() const;
-    sp<ABuffer> descriptorAt(size_t index) const;
-
-    void finalize();
-    void extractCSDIfNecessary();
-
-protected:
-    virtual ~Track();
-
-private:
-    sp<AMessage> mFormat;
-
-    unsigned mPID;
-    unsigned mStreamType;
-    unsigned mStreamID;
-    unsigned mContinuityCounter;
-
-    AString mMIME;
-    Vector<sp<ABuffer> > mCSD;
-
-    Vector<sp<ABuffer> > mDescriptors;
-
-    bool mAudioLacksADTSHeaders;
-    bool mFinalized;
-    bool mExtractedCSD;
-
-    DISALLOW_EVIL_CONSTRUCTORS(Track);
-};
-
-TSPacketizer::Track::Track(
-        const sp<AMessage> &format,
-        unsigned PID, unsigned streamType, unsigned streamID)
-    : mFormat(format),
-      mPID(PID),
-      mStreamType(streamType),
-      mStreamID(streamID),
-      mContinuityCounter(0),
-      mAudioLacksADTSHeaders(false),
-      mFinalized(false),
-      mExtractedCSD(false) {
-    CHECK(format->findString("mime", &mMIME));
-}
-
-void TSPacketizer::Track::extractCSDIfNecessary() {
-    if (mExtractedCSD) {
-        return;
-    }
-
-    if (!strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)
-            || !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
-        for (size_t i = 0;; ++i) {
-            sp<ABuffer> csd;
-            if (!mFormat->findBuffer(AStringPrintf("csd-%d", i).c_str(), &csd)) {
-                break;
-            }
-
-            mCSD.push(csd);
-        }
-
-        if (!strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
-            int32_t isADTS;
-            if (!mFormat->findInt32("is-adts", &isADTS) || isADTS == 0) {
-                mAudioLacksADTSHeaders = true;
-            }
-        }
-    }
-
-    mExtractedCSD = true;
-}
-
-TSPacketizer::Track::~Track() {
-}
-
-unsigned TSPacketizer::Track::PID() const {
-    return mPID;
-}
-
-unsigned TSPacketizer::Track::streamType() const {
-    return mStreamType;
-}
-
-unsigned TSPacketizer::Track::streamID() const {
-    return mStreamID;
-}
-
-unsigned TSPacketizer::Track::incrementContinuityCounter() {
-    unsigned prevCounter = mContinuityCounter;
-
-    if (++mContinuityCounter == 16) {
-        mContinuityCounter = 0;
-    }
-
-    return prevCounter;
-}
-
-bool TSPacketizer::Track::isAudio() const {
-    return !strncasecmp("audio/", mMIME.c_str(), 6);
-}
-
-bool TSPacketizer::Track::isVideo() const {
-    return !strncasecmp("video/", mMIME.c_str(), 6);
-}
-
-bool TSPacketizer::Track::isH264() const {
-    return !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_VIDEO_AVC);
-}
-
-bool TSPacketizer::Track::isAAC() const {
-    return !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC);
-}
-
-bool TSPacketizer::Track::isPCMAudio() const {
-    return !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_RAW);
-}
-
-bool TSPacketizer::Track::lacksADTSHeader() const {
-    return mAudioLacksADTSHeaders;
-}
-
-sp<ABuffer> TSPacketizer::Track::prependCSD(
-        const sp<ABuffer> &accessUnit) const {
-    size_t size = 0;
-    for (size_t i = 0; i < mCSD.size(); ++i) {
-        size += mCSD.itemAt(i)->size();
-    }
-
-    sp<ABuffer> dup = new ABuffer(accessUnit->size() + size);
-    size_t offset = 0;
-    for (size_t i = 0; i < mCSD.size(); ++i) {
-        const sp<ABuffer> &csd = mCSD.itemAt(i);
-
-        memcpy(dup->data() + offset, csd->data(), csd->size());
-        offset += csd->size();
-    }
-
-    memcpy(dup->data() + offset, accessUnit->data(), accessUnit->size());
-
-    return dup;
-}
-
-sp<ABuffer> TSPacketizer::Track::prependADTSHeader(
-        const sp<ABuffer> &accessUnit) const {
-    CHECK_EQ(mCSD.size(), 1u);
-
-    const uint8_t *codec_specific_data = mCSD.itemAt(0)->data();
-
-    const uint32_t aac_frame_length = accessUnit->size() + 7;
-
-    sp<ABuffer> dup = new ABuffer(aac_frame_length);
-
-    unsigned profile = (codec_specific_data[0] >> 3) - 1;
-
-    unsigned sampling_freq_index =
-        ((codec_specific_data[0] & 7) << 1)
-        | (codec_specific_data[1] >> 7);
-
-    unsigned channel_configuration =
-        (codec_specific_data[1] >> 3) & 0x0f;
-
-    uint8_t *ptr = dup->data();
-
-    *ptr++ = 0xff;
-    *ptr++ = 0xf9;  // b11111001, ID=1(MPEG-2), layer=0, protection_absent=1
-
-    *ptr++ =
-        profile << 6
-        | sampling_freq_index << 2
-        | ((channel_configuration >> 2) & 1);  // private_bit=0
-
-    // original_copy=0, home=0, copyright_id_bit=0, copyright_id_start=0
-    *ptr++ =
-        (channel_configuration & 3) << 6
-        | aac_frame_length >> 11;
-    *ptr++ = (aac_frame_length >> 3) & 0xff;
-    *ptr++ = (aac_frame_length & 7) << 5;
-
-    // adts_buffer_fullness=0, number_of_raw_data_blocks_in_frame=0
-    *ptr++ = 0;
-
-    memcpy(ptr, accessUnit->data(), accessUnit->size());
-
-    return dup;
-}
-
-size_t TSPacketizer::Track::countDescriptors() const {
-    return mDescriptors.size();
-}
-
-sp<ABuffer> TSPacketizer::Track::descriptorAt(size_t index) const {
-    CHECK_LT(index, mDescriptors.size());
-    return mDescriptors.itemAt(index);
-}
-
-void TSPacketizer::Track::finalize() {
-    if (mFinalized) {
-        return;
-    }
-
-    if (isH264()) {
-        {
-            // AVC video descriptor (40)
-
-            sp<ABuffer> descriptor = new ABuffer(6);
-            uint8_t *data = descriptor->data();
-            data[0] = 40;  // descriptor_tag
-            data[1] = 4;  // descriptor_length
-
-            if (mCSD.size() > 0) {
-                CHECK_GE(mCSD.size(), 1u);
-                const sp<ABuffer> &sps = mCSD.itemAt(0);
-                CHECK(!memcmp("\x00\x00\x00\x01", sps->data(), 4));
-                CHECK_GE(sps->size(), 7u);
-                // profile_idc, constraint_set*, level_idc
-                memcpy(&data[2], sps->data() + 4, 3);
-            } else {
-                int32_t profileIdc, levelIdc, constraintSet;
-                CHECK(mFormat->findInt32("profile-idc", &profileIdc));
-                CHECK(mFormat->findInt32("level-idc", &levelIdc));
-                CHECK(mFormat->findInt32("constraint-set", &constraintSet));
-                CHECK_GE(profileIdc, 0);
-                CHECK_GE(levelIdc, 0);
-                data[2] = profileIdc;    // profile_idc
-                data[3] = constraintSet; // constraint_set*
-                data[4] = levelIdc;      // level_idc
-            }
-
-            // AVC_still_present=0, AVC_24_hour_picture_flag=0, reserved
-            data[5] = 0x3f;
-
-            mDescriptors.push_back(descriptor);
-        }
-
-        {
-            // AVC timing and HRD descriptor (42)
-
-            sp<ABuffer> descriptor = new ABuffer(4);
-            uint8_t *data = descriptor->data();
-            data[0] = 42;  // descriptor_tag
-            data[1] = 2;  // descriptor_length
-
-            // hrd_management_valid_flag = 0
-            // reserved = 111111b
-            // picture_and_timing_info_present = 0
-
-            data[2] = 0x7e;
-
-            // fixed_frame_rate_flag = 0
-            // temporal_poc_flag = 0
-            // picture_to_display_conversion_flag = 0
-            // reserved = 11111b
-            data[3] = 0x1f;
-
-            mDescriptors.push_back(descriptor);
-        }
-    } else if (isPCMAudio()) {
-        // LPCM audio stream descriptor (0x83)
-
-        int32_t channelCount;
-        CHECK(mFormat->findInt32("channel-count", &channelCount));
-        CHECK_EQ(channelCount, 2);
-
-        int32_t sampleRate;
-        CHECK(mFormat->findInt32("sample-rate", &sampleRate));
-        CHECK(sampleRate == 44100 || sampleRate == 48000);
-
-        sp<ABuffer> descriptor = new ABuffer(4);
-        uint8_t *data = descriptor->data();
-        data[0] = 0x83;  // descriptor_tag
-        data[1] = 2;  // descriptor_length
-
-        unsigned sampling_frequency = (sampleRate == 44100) ? 1 : 2;
-
-        data[2] = (sampling_frequency << 5)
-                    | (3 /* reserved */ << 1)
-                    | 0 /* emphasis_flag */;
-
-        data[3] =
-            (1 /* number_of_channels = stereo */ << 5)
-            | 0xf /* reserved */;
-
-        mDescriptors.push_back(descriptor);
-    }
-
-    mFinalized = true;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-TSPacketizer::TSPacketizer(uint32_t flags)
-    : mFlags(flags),
-      mPATContinuityCounter(0),
-      mPMTContinuityCounter(0) {
-    initCrcTable();
-
-    if (flags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR)) {
-        int32_t hdcpVersion;
-        if (flags & EMIT_HDCP20_DESCRIPTOR) {
-            CHECK(!(flags & EMIT_HDCP21_DESCRIPTOR));
-            hdcpVersion = 0x20;
-        } else {
-            CHECK(!(flags & EMIT_HDCP20_DESCRIPTOR));
-
-            // HDCP2.0 _and_ HDCP 2.1 specs say to set the version
-            // inside the HDCP descriptor to 0x20!!!
-            hdcpVersion = 0x20;
-        }
-
-        // HDCP descriptor
-        sp<ABuffer> descriptor = new ABuffer(7);
-        uint8_t *data = descriptor->data();
-        data[0] = 0x05;  // descriptor_tag
-        data[1] = 5;  // descriptor_length
-        data[2] = 'H';
-        data[3] = 'D';
-        data[4] = 'C';
-        data[5] = 'P';
-        data[6] = hdcpVersion;
-
-        mProgramInfoDescriptors.push_back(descriptor);
-    }
-}
-
-TSPacketizer::~TSPacketizer() {
-}
-
-ssize_t TSPacketizer::addTrack(const sp<AMessage> &format) {
-    AString mime;
-    CHECK(format->findString("mime", &mime));
-
-    unsigned PIDStart;
-    bool isVideo = !strncasecmp("video/", mime.c_str(), 6);
-    bool isAudio = !strncasecmp("audio/", mime.c_str(), 6);
-
-    if (isVideo) {
-        PIDStart = 0x1011;
-    } else if (isAudio) {
-        PIDStart = 0x1100;
-    } else {
-        return ERROR_UNSUPPORTED;
-    }
-
-    unsigned streamType;
-    unsigned streamIDStart;
-    unsigned streamIDStop;
-
-    if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
-        streamType = 0x1b;
-        streamIDStart = 0xe0;
-        streamIDStop = 0xef;
-    } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
-        streamType = 0x0f;
-        streamIDStart = 0xc0;
-        streamIDStop = 0xdf;
-    } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_RAW)) {
-        streamType = 0x83;
-        streamIDStart = 0xbd;
-        streamIDStop = 0xbd;
-    } else {
-        return ERROR_UNSUPPORTED;
-    }
-
-    size_t numTracksOfThisType = 0;
-    unsigned PID = PIDStart;
-
-    for (size_t i = 0; i < mTracks.size(); ++i) {
-        const sp<Track> &track = mTracks.itemAt(i);
-
-        if (track->streamType() == streamType) {
-            ++numTracksOfThisType;
-        }
-
-        if ((isAudio && track->isAudio()) || (isVideo && track->isVideo())) {
-            ++PID;
-        }
-    }
-
-    unsigned streamID = streamIDStart + numTracksOfThisType;
-    if (streamID > streamIDStop) {
-        return -ERANGE;
-    }
-
-    sp<Track> track = new Track(format, PID, streamType, streamID);
-    return mTracks.add(track);
-}
-
-status_t TSPacketizer::extractCSDIfNecessary(size_t trackIndex) {
-    if (trackIndex >= mTracks.size()) {
-        return -ERANGE;
-    }
-
-    const sp<Track> &track = mTracks.itemAt(trackIndex);
-    track->extractCSDIfNecessary();
-
-    return OK;
-}
-
-status_t TSPacketizer::packetize(
-        size_t trackIndex,
-        const sp<ABuffer> &_accessUnit,
-        sp<ABuffer> *packets,
-        uint32_t flags,
-        const uint8_t *PES_private_data, size_t PES_private_data_len,
-        size_t numStuffingBytes) {
-    sp<ABuffer> accessUnit = _accessUnit;
-
-    int64_t timeUs;
-    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-    packets->clear();
-
-    if (trackIndex >= mTracks.size()) {
-        return -ERANGE;
-    }
-
-    const sp<Track> &track = mTracks.itemAt(trackIndex);
-
-    if (track->isH264() && (flags & PREPEND_SPS_PPS_TO_IDR_FRAMES)
-            && IsIDR(accessUnit)) {
-        // prepend codec specific data, i.e. SPS and PPS.
-        accessUnit = track->prependCSD(accessUnit);
-    } else if (track->isAAC() && track->lacksADTSHeader()) {
-        CHECK(!(flags & IS_ENCRYPTED));
-        accessUnit = track->prependADTSHeader(accessUnit);
-    }
-
-    // 0x47
-    // transport_error_indicator = b0
-    // payload_unit_start_indicator = b1
-    // transport_priority = b0
-    // PID
-    // transport_scrambling_control = b00
-    // adaptation_field_control = b??
-    // continuity_counter = b????
-    // -- payload follows
-    // packet_startcode_prefix = 0x000001
-    // stream_id
-    // PES_packet_length = 0x????
-    // reserved = b10
-    // PES_scrambling_control = b00
-    // PES_priority = b0
-    // data_alignment_indicator = b1
-    // copyright = b0
-    // original_or_copy = b0
-    // PTS_DTS_flags = b10  (PTS only)
-    // ESCR_flag = b0
-    // ES_rate_flag = b0
-    // DSM_trick_mode_flag = b0
-    // additional_copy_info_flag = b0
-    // PES_CRC_flag = b0
-    // PES_extension_flag = b0
-    // PES_header_data_length = 0x05
-    // reserved = b0010 (PTS)
-    // PTS[32..30] = b???
-    // reserved = b1
-    // PTS[29..15] = b??? ???? ???? ???? (15 bits)
-    // reserved = b1
-    // PTS[14..0] = b??? ???? ???? ???? (15 bits)
-    // reserved = b1
-    // the first fragment of "buffer" follows
-
-    // Each transport packet (except for the last one contributing to the PES
-    // payload) must contain a multiple of 16 bytes of payload per HDCP spec.
-    bool alignPayload =
-        (mFlags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR));
-
-    /*
-       a) The very first PES transport stream packet contains
-
-       4 bytes of TS header
-       ... padding
-       14 bytes of static PES header
-       PES_private_data_len + 1 bytes (only if PES_private_data_len > 0)
-       numStuffingBytes bytes
-
-       followed by the payload
-
-       b) Subsequent PES transport stream packets contain
-
-       4 bytes of TS header
-       ... padding
-
-       followed by the payload
-    */
-
-    size_t PES_packet_length = accessUnit->size() + 8 + numStuffingBytes;
-    if (PES_private_data_len > 0) {
-        PES_packet_length += PES_private_data_len + 1;
-    }
-
-    size_t numTSPackets = 1;
-
-    {
-        // Make sure the PES header fits into a single TS packet:
-        size_t PES_header_size = 14 + numStuffingBytes;
-        if (PES_private_data_len > 0) {
-            PES_header_size += PES_private_data_len + 1;
-        }
-
-        CHECK_LE(PES_header_size, 188u - 4u);
-
-        size_t sizeAvailableForPayload = 188 - 4 - PES_header_size;
-        size_t numBytesOfPayload = accessUnit->size();
-
-        if (numBytesOfPayload > sizeAvailableForPayload) {
-            numBytesOfPayload = sizeAvailableForPayload;
-
-            if (alignPayload && numBytesOfPayload > 16) {
-                numBytesOfPayload -= (numBytesOfPayload % 16);
-            }
-        }
-
-        size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload;
-        ALOGV("packet 1 contains %zd padding bytes and %zd bytes of payload",
-              numPaddingBytes, numBytesOfPayload);
-
-        size_t numBytesOfPayloadRemaining = accessUnit->size() - numBytesOfPayload;
-
-#if 0
-        // The following hopefully illustrates the logic that led to the
-        // more efficient computation in the #else block...
-
-        while (numBytesOfPayloadRemaining > 0) {
-            size_t sizeAvailableForPayload = 188 - 4;
-
-            size_t numBytesOfPayload = numBytesOfPayloadRemaining;
-
-            if (numBytesOfPayload > sizeAvailableForPayload) {
-                numBytesOfPayload = sizeAvailableForPayload;
-
-                if (alignPayload && numBytesOfPayload > 16) {
-                    numBytesOfPayload -= (numBytesOfPayload % 16);
-                }
-            }
-
-            size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload;
-            ALOGI("packet %zd contains %zd padding bytes and %zd bytes of payload",
-                    numTSPackets + 1, numPaddingBytes, numBytesOfPayload);
-
-            numBytesOfPayloadRemaining -= numBytesOfPayload;
-            ++numTSPackets;
-        }
-#else
-        // This is how many bytes of payload each subsequent TS packet
-        // can contain at most.
-        sizeAvailableForPayload = 188 - 4;
-        size_t sizeAvailableForAlignedPayload = sizeAvailableForPayload;
-        if (alignPayload) {
-            // We're only going to use a subset of the available space
-            // since we need to make each fragment a multiple of 16 in size.
-            sizeAvailableForAlignedPayload -=
-                (sizeAvailableForAlignedPayload % 16);
-        }
-
-        size_t numFullTSPackets =
-            numBytesOfPayloadRemaining / sizeAvailableForAlignedPayload;
-
-        numTSPackets += numFullTSPackets;
-
-        numBytesOfPayloadRemaining -=
-            numFullTSPackets * sizeAvailableForAlignedPayload;
-
-        // numBytesOfPayloadRemaining < sizeAvailableForAlignedPayload
-        if (numFullTSPackets == 0 && numBytesOfPayloadRemaining > 0) {
-            // There wasn't enough payload left to form a full aligned payload, so
-            // the last packet doesn't have to be aligned.
-            ++numTSPackets;
-        } else if (numFullTSPackets > 0
-                && numBytesOfPayloadRemaining
-                    + sizeAvailableForAlignedPayload > sizeAvailableForPayload) {
-            // The last packet emitted had a full aligned payload and together
-            // with the bytes remaining does exceed the unaligned payload
-            // size, so we need another packet.
-            ++numTSPackets;
-        }
-#endif
-    }
-
-    if (flags & EMIT_PAT_AND_PMT) {
-        numTSPackets += 2;
-    }
-
-    if (flags & EMIT_PCR) {
-        ++numTSPackets;
-    }
-
-    sp<ABuffer> buffer = new ABuffer(numTSPackets * 188);
-    uint8_t *packetDataStart = buffer->data();
-
-    if (flags & EMIT_PAT_AND_PMT) {
-        // Program Association Table (PAT):
-        // 0x47
-        // transport_error_indicator = b0
-        // payload_unit_start_indicator = b1
-        // transport_priority = b0
-        // PID = b0000000000000 (13 bits)
-        // transport_scrambling_control = b00
-        // adaptation_field_control = b01 (no adaptation field, payload only)
-        // continuity_counter = b????
-        // skip = 0x00
-        // --- payload follows
-        // table_id = 0x00
-        // section_syntax_indicator = b1
-        // must_be_zero = b0
-        // reserved = b11
-        // section_length = 0x00d
-        // transport_stream_id = 0x0000
-        // reserved = b11
-        // version_number = b00001
-        // current_next_indicator = b1
-        // section_number = 0x00
-        // last_section_number = 0x00
-        //   one program follows:
-        //   program_number = 0x0001
-        //   reserved = b111
-        //   program_map_PID = kPID_PMT (13 bits!)
-        // CRC = 0x????????
-
-        if (++mPATContinuityCounter == 16) {
-            mPATContinuityCounter = 0;
-        }
-
-        uint8_t *ptr = packetDataStart;
-        *ptr++ = 0x47;
-        *ptr++ = 0x40;
-        *ptr++ = 0x00;
-        *ptr++ = 0x10 | mPATContinuityCounter;
-        *ptr++ = 0x00;
-
-        uint8_t *crcDataStart = ptr;
-        *ptr++ = 0x00;
-        *ptr++ = 0xb0;
-        *ptr++ = 0x0d;
-        *ptr++ = 0x00;
-        *ptr++ = 0x00;
-        *ptr++ = 0xc3;
-        *ptr++ = 0x00;
-        *ptr++ = 0x00;
-        *ptr++ = 0x00;
-        *ptr++ = 0x01;
-        *ptr++ = 0xe0 | (kPID_PMT >> 8);
-        *ptr++ = kPID_PMT & 0xff;
-
-        CHECK_EQ(ptr - crcDataStart, 12);
-        uint32_t crc = htonl(crc32(crcDataStart, ptr - crcDataStart));
-        memcpy(ptr, &crc, 4);
-        ptr += 4;
-
-        size_t sizeLeft = packetDataStart + 188 - ptr;
-        memset(ptr, 0xff, sizeLeft);
-
-        packetDataStart += 188;
-
-        // Program Map (PMT):
-        // 0x47
-        // transport_error_indicator = b0
-        // payload_unit_start_indicator = b1
-        // transport_priority = b0
-        // PID = kPID_PMT (13 bits)
-        // transport_scrambling_control = b00
-        // adaptation_field_control = b01 (no adaptation field, payload only)
-        // continuity_counter = b????
-        // skip = 0x00
-        // -- payload follows
-        // table_id = 0x02
-        // section_syntax_indicator = b1
-        // must_be_zero = b0
-        // reserved = b11
-        // section_length = 0x???
-        // program_number = 0x0001
-        // reserved = b11
-        // version_number = b00001
-        // current_next_indicator = b1
-        // section_number = 0x00
-        // last_section_number = 0x00
-        // reserved = b111
-        // PCR_PID = kPCR_PID (13 bits)
-        // reserved = b1111
-        // program_info_length = 0x???
-        //   program_info_descriptors follow
-        // one or more elementary stream descriptions follow:
-        //   stream_type = 0x??
-        //   reserved = b111
-        //   elementary_PID = b? ???? ???? ???? (13 bits)
-        //   reserved = b1111
-        //   ES_info_length = 0x000
-        // CRC = 0x????????
-
-        if (++mPMTContinuityCounter == 16) {
-            mPMTContinuityCounter = 0;
-        }
-
-        ptr = packetDataStart;
-        *ptr++ = 0x47;
-        *ptr++ = 0x40 | (kPID_PMT >> 8);
-        *ptr++ = kPID_PMT & 0xff;
-        *ptr++ = 0x10 | mPMTContinuityCounter;
-        *ptr++ = 0x00;
-
-        crcDataStart = ptr;
-        *ptr++ = 0x02;
-
-        *ptr++ = 0x00;  // section_length to be filled in below.
-        *ptr++ = 0x00;
-
-        *ptr++ = 0x00;
-        *ptr++ = 0x01;
-        *ptr++ = 0xc3;
-        *ptr++ = 0x00;
-        *ptr++ = 0x00;
-        *ptr++ = 0xe0 | (kPID_PCR >> 8);
-        *ptr++ = kPID_PCR & 0xff;
-
-        size_t program_info_length = 0;
-        for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) {
-            program_info_length += mProgramInfoDescriptors.itemAt(i)->size();
-        }
-
-        CHECK_LT(program_info_length, 0x400u);
-        *ptr++ = 0xf0 | (program_info_length >> 8);
-        *ptr++ = (program_info_length & 0xff);
-
-        for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) {
-            const sp<ABuffer> &desc = mProgramInfoDescriptors.itemAt(i);
-            memcpy(ptr, desc->data(), desc->size());
-            ptr += desc->size();
-        }
-
-        for (size_t i = 0; i < mTracks.size(); ++i) {
-            const sp<Track> &track = mTracks.itemAt(i);
-
-            // Make sure all the descriptors have been added.
-            track->finalize();
-
-            *ptr++ = track->streamType();
-            *ptr++ = 0xe0 | (track->PID() >> 8);
-            *ptr++ = track->PID() & 0xff;
-
-            size_t ES_info_length = 0;
-            for (size_t i = 0; i < track->countDescriptors(); ++i) {
-                ES_info_length += track->descriptorAt(i)->size();
-            }
-            CHECK_LE(ES_info_length, 0xfffu);
-
-            *ptr++ = 0xf0 | (ES_info_length >> 8);
-            *ptr++ = (ES_info_length & 0xff);
-
-            for (size_t i = 0; i < track->countDescriptors(); ++i) {
-                const sp<ABuffer> &descriptor = track->descriptorAt(i);
-                memcpy(ptr, descriptor->data(), descriptor->size());
-                ptr += descriptor->size();
-            }
-        }
-
-        size_t section_length = ptr - (crcDataStart + 3) + 4 /* CRC */;
-
-        crcDataStart[1] = 0xb0 | (section_length >> 8);
-        crcDataStart[2] = section_length & 0xff;
-
-        crc = htonl(crc32(crcDataStart, ptr - crcDataStart));
-        memcpy(ptr, &crc, 4);
-        ptr += 4;
-
-        sizeLeft = packetDataStart + 188 - ptr;
-        memset(ptr, 0xff, sizeLeft);
-
-        packetDataStart += 188;
-    }
-
-    if (flags & EMIT_PCR) {
-        // PCR stream
-        // 0x47
-        // transport_error_indicator = b0
-        // payload_unit_start_indicator = b1
-        // transport_priority = b0
-        // PID = kPCR_PID (13 bits)
-        // transport_scrambling_control = b00
-        // adaptation_field_control = b10 (adaptation field only, no payload)
-        // continuity_counter = b0000 (does not increment)
-        // adaptation_field_length = 183
-        // discontinuity_indicator = b0
-        // random_access_indicator = b0
-        // elementary_stream_priority_indicator = b0
-        // PCR_flag = b1
-        // OPCR_flag = b0
-        // splicing_point_flag = b0
-        // transport_private_data_flag = b0
-        // adaptation_field_extension_flag = b0
-        // program_clock_reference_base = b?????????????????????????????????
-        // reserved = b111111
-        // program_clock_reference_extension = b?????????
-
-        int64_t nowUs = ALooper::GetNowUs();
-
-        uint64_t PCR = nowUs * 27;  // PCR based on a 27MHz clock
-        uint64_t PCR_base = PCR / 300;
-        uint32_t PCR_ext = PCR % 300;
-
-        uint8_t *ptr = packetDataStart;
-        *ptr++ = 0x47;
-        *ptr++ = 0x40 | (kPID_PCR >> 8);
-        *ptr++ = kPID_PCR & 0xff;
-        *ptr++ = 0x20;
-        *ptr++ = 0xb7;  // adaptation_field_length
-        *ptr++ = 0x10;
-        *ptr++ = (PCR_base >> 25) & 0xff;
-        *ptr++ = (PCR_base >> 17) & 0xff;
-        *ptr++ = (PCR_base >> 9) & 0xff;
-        *ptr++ = (PCR_base >> 1) & 0xff;
-        *ptr++ = ((PCR_base & 1) << 7) | 0x7e | ((PCR_ext >> 8) & 1);
-        *ptr++ = (PCR_ext & 0xff);
-
-        size_t sizeLeft = packetDataStart + 188 - ptr;
-        memset(ptr, 0xff, sizeLeft);
-
-        packetDataStart += 188;
-    }
-
-    uint64_t PTS = (timeUs * 9ll) / 100ll;
-
-    if (PES_packet_length >= 65536) {
-        // This really should only happen for video.
-        CHECK(track->isVideo());
-
-        // It's valid to set this to 0 for video according to the specs.
-        PES_packet_length = 0;
-    }
-
-    size_t sizeAvailableForPayload = 188 - 4 - 14 - numStuffingBytes;
-    if (PES_private_data_len > 0) {
-        sizeAvailableForPayload -= PES_private_data_len + 1;
-    }
-
-    size_t copy = accessUnit->size();
-
-    if (copy > sizeAvailableForPayload) {
-        copy = sizeAvailableForPayload;
-
-        if (alignPayload && copy > 16) {
-            copy -= (copy % 16);
-        }
-    }
-
-    size_t numPaddingBytes = sizeAvailableForPayload - copy;
-
-    uint8_t *ptr = packetDataStart;
-    *ptr++ = 0x47;
-    *ptr++ = 0x40 | (track->PID() >> 8);
-    *ptr++ = track->PID() & 0xff;
-
-    *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10)
-                | track->incrementContinuityCounter();
-
-    if (numPaddingBytes > 0) {
-        *ptr++ = numPaddingBytes - 1;
-        if (numPaddingBytes >= 2) {
-            *ptr++ = 0x00;
-            memset(ptr, 0xff, numPaddingBytes - 2);
-            ptr += numPaddingBytes - 2;
-        }
-    }
-
-    *ptr++ = 0x00;
-    *ptr++ = 0x00;
-    *ptr++ = 0x01;
-    *ptr++ = track->streamID();
-    *ptr++ = PES_packet_length >> 8;
-    *ptr++ = PES_packet_length & 0xff;
-    *ptr++ = 0x84;
-    *ptr++ = (PES_private_data_len > 0) ? 0x81 : 0x80;
-
-    size_t headerLength = 0x05 + numStuffingBytes;
-    if (PES_private_data_len > 0) {
-        headerLength += 1 + PES_private_data_len;
-    }
-
-    *ptr++ = headerLength;
-
-    *ptr++ = 0x20 | (((PTS >> 30) & 7) << 1) | 1;
-    *ptr++ = (PTS >> 22) & 0xff;
-    *ptr++ = (((PTS >> 15) & 0x7f) << 1) | 1;
-    *ptr++ = (PTS >> 7) & 0xff;
-    *ptr++ = ((PTS & 0x7f) << 1) | 1;
-
-    if (PES_private_data_len > 0) {
-        *ptr++ = 0x8e;  // PES_private_data_flag, reserved.
-        memcpy(ptr, PES_private_data, PES_private_data_len);
-        ptr += PES_private_data_len;
-    }
-
-    for (size_t i = 0; i < numStuffingBytes; ++i) {
-        *ptr++ = 0xff;
-    }
-
-    memcpy(ptr, accessUnit->data(), copy);
-    ptr += copy;
-
-    CHECK_EQ(ptr, packetDataStart + 188);
-    packetDataStart += 188;
-
-    size_t offset = copy;
-    while (offset < accessUnit->size()) {
-        // for subsequent fragments of "buffer":
-        // 0x47
-        // transport_error_indicator = b0
-        // payload_unit_start_indicator = b0
-        // transport_priority = b0
-        // PID = b0 0001 1110 ???? (13 bits) [0x1e0 + 1 + sourceIndex]
-        // transport_scrambling_control = b00
-        // adaptation_field_control = b??
-        // continuity_counter = b????
-        // the fragment of "buffer" follows.
-
-        size_t sizeAvailableForPayload = 188 - 4;
-
-        size_t copy = accessUnit->size() - offset;
-
-        if (copy > sizeAvailableForPayload) {
-            copy = sizeAvailableForPayload;
-
-            if (alignPayload && copy > 16) {
-                copy -= (copy % 16);
-            }
-        }
-
-        size_t numPaddingBytes = sizeAvailableForPayload - copy;
-
-        uint8_t *ptr = packetDataStart;
-        *ptr++ = 0x47;
-        *ptr++ = 0x00 | (track->PID() >> 8);
-        *ptr++ = track->PID() & 0xff;
-
-        *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10)
-                    | track->incrementContinuityCounter();
-
-        if (numPaddingBytes > 0) {
-            *ptr++ = numPaddingBytes - 1;
-            if (numPaddingBytes >= 2) {
-                *ptr++ = 0x00;
-                memset(ptr, 0xff, numPaddingBytes - 2);
-                ptr += numPaddingBytes - 2;
-            }
-        }
-
-        memcpy(ptr, accessUnit->data() + offset, copy);
-        ptr += copy;
-        CHECK_EQ(ptr, packetDataStart + 188);
-
-        offset += copy;
-        packetDataStart += 188;
-    }
-
-    CHECK(packetDataStart == buffer->data() + buffer->capacity());
-
-    *packets = buffer;
-
-    return OK;
-}
-
-void TSPacketizer::initCrcTable() {
-    uint32_t poly = 0x04C11DB7;
-
-    for (int i = 0; i < 256; i++) {
-        uint32_t crc = i << 24;
-        for (int j = 0; j < 8; j++) {
-            crc = (crc << 1) ^ ((crc & 0x80000000) ? (poly) : 0);
-        }
-        mCrcTable[i] = crc;
-    }
-}
-
-uint32_t TSPacketizer::crc32(const uint8_t *start, size_t size) const {
-    uint32_t crc = 0xFFFFFFFF;
-    const uint8_t *p;
-
-    for (p = start; p < start + size; ++p) {
-        crc = (crc << 8) ^ mCrcTable[((crc >> 24) ^ *p) & 0xFF];
-    }
-
-    return crc;
-}
-
-sp<ABuffer> TSPacketizer::prependCSD(
-        size_t trackIndex, const sp<ABuffer> &accessUnit) const {
-    CHECK_LT(trackIndex, mTracks.size());
-
-    const sp<Track> &track = mTracks.itemAt(trackIndex);
-    CHECK(track->isH264() && IsIDR(accessUnit));
-
-    int64_t timeUs;
-    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-    sp<ABuffer> accessUnit2 = track->prependCSD(accessUnit);
-
-    accessUnit2->meta()->setInt64("timeUs", timeUs);
-
-    return accessUnit2;
-}
-
-}  // namespace android
-
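For reference, the PSI sections built above (PAT and PMT) are protected by the table-driven MPEG-2 CRC-32 that initCrcTable() and crc32() implement: polynomial 0x04C11DB7, initial value 0xFFFFFFFF, no bit reflection, no final XOR. Below is a minimal standalone C++ sketch of the same computation, run over the 12 PAT section bytes emitted above and assuming kPID_PMT = 0x100 as declared in the header that follows; it is an illustration only, not code from this tree.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // CRC-32/MPEG-2: poly 0x04C11DB7, init 0xFFFFFFFF, no reflection,
    // no final XOR -- the same scheme as initCrcTable()/crc32() above.
    static uint32_t table[256];

    static void initTable() {
        for (uint32_t i = 0; i < 256; ++i) {
            uint32_t crc = i << 24;
            for (int j = 0; j < 8; ++j) {
                crc = (crc << 1) ^ ((crc & 0x80000000u) ? 0x04C11DB7u : 0u);
            }
            table[i] = crc;
        }
    }

    static uint32_t crc32Mpeg2(const uint8_t *data, size_t size) {
        uint32_t crc = 0xFFFFFFFFu;
        for (size_t i = 0; i < size; ++i) {
            crc = (crc << 8) ^ table[((crc >> 24) ^ data[i]) & 0xFFu];
        }
        return crc;
    }

    int main() {
        initTable();
        // The 12 PAT section bytes written before the CRC above
        // (table_id .. program_map_PID), with kPID_PMT = 0x100.
        const uint8_t pat[12] = {0x00, 0xb0, 0x0d, 0x00, 0x00, 0xc3,
                                 0x00, 0x00, 0x00, 0x01, 0xe1, 0x00};
        std::printf("PAT CRC_32 = 0x%08x\n",
                    (unsigned)crc32Mpeg2(pat, sizeof(pat)));
        return 0;
    }

The resulting CRC is appended in network byte order, which is why the packetizer wraps it in htonl() before the memcpy().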
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.h b/media/libstagefright/wifi-display/source/TSPacketizer.h
deleted file mode 100644
index 0dcb179..0000000
--- a/media/libstagefright/wifi-display/source/TSPacketizer.h
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef TS_PACKETIZER_H_
-
-#define TS_PACKETIZER_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-#include <utils/Vector.h>
-
-namespace android {
-
-struct ABuffer;
-struct AMessage;
-
-// Forms the packets of a transport stream given access units.
-// Emits metadata tables (PAT and PMT) and timestamp stream (PCR) based
-// on flags.
-struct TSPacketizer : public RefBase {
-    enum {
-        EMIT_HDCP20_DESCRIPTOR = 1,
-        EMIT_HDCP21_DESCRIPTOR = 2,
-    };
-    explicit TSPacketizer(uint32_t flags);
-
-    // Returns trackIndex or error.
-    ssize_t addTrack(const sp<AMessage> &format);
-
-    enum {
-        EMIT_PAT_AND_PMT                = 1,
-        EMIT_PCR                        = 2,
-        IS_ENCRYPTED                    = 4,
-        PREPEND_SPS_PPS_TO_IDR_FRAMES   = 8,
-    };
-    status_t packetize(
-            size_t trackIndex, const sp<ABuffer> &accessUnit,
-            sp<ABuffer> *packets,
-            uint32_t flags,
-            const uint8_t *PES_private_data, size_t PES_private_data_len,
-            size_t numStuffingBytes = 0);
-
-    status_t extractCSDIfNecessary(size_t trackIndex);
-
-    // XXX to be removed once encoder config option takes care of this for
-    // encrypted mode.
-    sp<ABuffer> prependCSD(
-            size_t trackIndex, const sp<ABuffer> &accessUnit) const;
-
-protected:
-    virtual ~TSPacketizer();
-
-private:
-    enum {
-        kPID_PMT = 0x100,
-        kPID_PCR = 0x1000,
-    };
-
-    struct Track;
-
-    uint32_t mFlags;
-    Vector<sp<Track> > mTracks;
-
-    Vector<sp<ABuffer> > mProgramInfoDescriptors;
-
-    unsigned mPATContinuityCounter;
-    unsigned mPMTContinuityCounter;
-
-    uint32_t mCrcTable[256];
-
-    void initCrcTable();
-    uint32_t crc32(const uint8_t *start, size_t size) const;
-
-    DISALLOW_EVIL_CONSTRUCTORS(TSPacketizer);
-};
-
-}  // namespace android
-
-#endif  // TS_PACKETIZER_H_
-
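The EMIT_PCR flag declared above drives the timestamp path implemented in the .cpp deleted before this header: the PCR is sampled from a 27 MHz clock and split into a 33-bit base (90 kHz) plus a 9-bit extension, while PES headers carry a 90 kHz PTS. The standalone sketch below mirrors that arithmetic and the standard 6-byte PCR packing so the bit layout can be checked in isolation; it is an illustration only, not code from this tree.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Pack a PCR sampled from a microsecond clock into the 6-byte
    // adaptation-field layout: 33-bit base, 6 reserved bits, 9-bit extension.
    static void packPCR(int64_t nowUs, uint8_t out[6]) {
        uint64_t pcr = (uint64_t)nowUs * 27;    // 27 MHz ticks
        uint64_t base = pcr / 300;              // 90 kHz part, 33 bits
        uint32_t ext = (uint32_t)(pcr % 300);   // 0..299, 9 bits

        out[0] = (base >> 25) & 0xff;
        out[1] = (base >> 17) & 0xff;
        out[2] = (base >> 9) & 0xff;
        out[3] = (base >> 1) & 0xff;
        out[4] = ((base & 1) << 7) | 0x7e | ((ext >> 8) & 1);
        out[5] = ext & 0xff;
    }

    int main() {
        int64_t nowUs = 1000000;                // 1 second into the stream
        uint8_t pcr[6];
        packPCR(nowUs, pcr);

        // 90 kHz PTS for the same instant, as in the PES header code above.
        uint64_t pts = (nowUs * 9ll) / 100ll;

        std::printf("PTS = %llu (90 kHz ticks)\nPCR bytes:",
                    (unsigned long long)pts);
        for (size_t i = 0; i < 6; ++i) {
            std::printf(" %02x", pcr[i]);
        }
        std::printf("\n");
        return 0;
    }

In the packetizer itself the PCR is taken from ALooper::GetNowUs() and the PTS from each access unit's "timeUs" value.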
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
deleted file mode 100644
index 4695e5d..0000000
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
+++ /dev/null
@@ -1,1737 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "WifiDisplaySource"
-#include <utils/Log.h>
-
-#include "WifiDisplaySource.h"
-#include "PlaybackSession.h"
-#include "Parameters.h"
-#include "rtp/RTPSender.h"
-
-#include <binder/IServiceManager.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <media/IHDCP.h>
-#include <media/IMediaPlayerService.h>
-#include <media/IRemoteDisplayClient.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ParsedMessage.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-#include <arpa/inet.h>
-#include <cutils/properties.h>
-
-#include <ctype.h>
-
-namespace android {
-
-// static
-const int64_t WifiDisplaySource::kReaperIntervalUs;
-const int64_t WifiDisplaySource::kTeardownTriggerTimeouSecs;
-const int64_t WifiDisplaySource::kPlaybackSessionTimeoutSecs;
-const int64_t WifiDisplaySource::kPlaybackSessionTimeoutUs;
-const AString WifiDisplaySource::sUserAgent = MakeUserAgent();
-
-WifiDisplaySource::WifiDisplaySource(
-        const String16 &opPackageName,
-        const sp<ANetworkSession> &netSession,
-        const sp<IRemoteDisplayClient> &client,
-        const char *path)
-    : mOpPackageName(opPackageName),
-      mState(INITIALIZED),
-      mNetSession(netSession),
-      mClient(client),
-      mSessionID(0),
-      mStopReplyID(NULL),
-      mChosenRTPPort(-1),
-      mUsingPCMAudio(false),
-      mClientSessionID(0),
-      mReaperPending(false),
-      mNextCSeq(1),
-      mUsingHDCP(false),
-      mIsHDCP2_0(false),
-      mHDCPPort(0),
-      mHDCPInitializationComplete(false),
-      mSetupTriggerDeferred(false),
-      mPlaybackSessionEstablished(false) {
-    if (path != NULL) {
-        mMediaPath.setTo(path);
-    }
-
-    mSupportedSourceVideoFormats.disableAll();
-
-    mSupportedSourceVideoFormats.setNativeResolution(
-            VideoFormats::RESOLUTION_CEA, 5);  // 1280x720 p30
-
-    // Enable all resolutions up to 1280x720p30
-    mSupportedSourceVideoFormats.enableResolutionUpto(
-            VideoFormats::RESOLUTION_CEA, 5,
-            VideoFormats::PROFILE_CHP,  // Constrained High Profile
-            VideoFormats::LEVEL_32);    // Level 3.2
-}
-
-WifiDisplaySource::~WifiDisplaySource() {
-}
-
-static status_t PostAndAwaitResponse(
-        const sp<AMessage> &msg, sp<AMessage> *response) {
-    status_t err = msg->postAndAwaitResponse(response);
-
-    if (err != OK) {
-        return err;
-    }
-
-    if (response == NULL || !(*response)->findInt32("err", &err)) {
-        err = OK;
-    }
-
-    return err;
-}
-
-status_t WifiDisplaySource::start(const char *iface) {
-    CHECK_EQ(mState, INITIALIZED);
-
-    sp<AMessage> msg = new AMessage(kWhatStart, this);
-    msg->setString("iface", iface);
-
-    sp<AMessage> response;
-    return PostAndAwaitResponse(msg, &response);
-}
-
-status_t WifiDisplaySource::stop() {
-    sp<AMessage> msg = new AMessage(kWhatStop, this);
-
-    sp<AMessage> response;
-    return PostAndAwaitResponse(msg, &response);
-}
-
-status_t WifiDisplaySource::pause() {
-    sp<AMessage> msg = new AMessage(kWhatPause, this);
-
-    sp<AMessage> response;
-    return PostAndAwaitResponse(msg, &response);
-}
-
-status_t WifiDisplaySource::resume() {
-    sp<AMessage> msg = new AMessage(kWhatResume, this);
-
-    sp<AMessage> response;
-    return PostAndAwaitResponse(msg, &response);
-}
-
-void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatStart:
-        {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
-            AString iface;
-            CHECK(msg->findString("iface", &iface));
-
-            status_t err = OK;
-
-            ssize_t colonPos = iface.find(":");
-
-            unsigned long port;
-
-            if (colonPos >= 0) {
-                const char *s = iface.c_str() + colonPos + 1;
-
-                char *end;
-                port = strtoul(s, &end, 10);
-
-                if (end == s || *end != '\0' || port > 65535) {
-                    err = -EINVAL;
-                } else {
-                    iface.erase(colonPos, iface.size() - colonPos);
-                }
-            } else {
-                port = kWifiDisplayDefaultPort;
-            }
-
-            if (err == OK) {
-                if (inet_aton(iface.c_str(), &mInterfaceAddr) != 0) {
-                    sp<AMessage> notify = new AMessage(kWhatRTSPNotify, this);
-
-                    err = mNetSession->createRTSPServer(
-                            mInterfaceAddr, port, notify, &mSessionID);
-                } else {
-                    err = -EINVAL;
-                }
-            }
-
-            mState = AWAITING_CLIENT_CONNECTION;
-
-            sp<AMessage> response = new AMessage;
-            response->setInt32("err", err);
-            response->postReply(replyID);
-            break;
-        }
-
-        case kWhatRTSPNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatError:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    AString detail;
-                    CHECK(msg->findString("detail", &detail));
-
-                    ALOGE("An error occurred in session %d (%d, '%s/%s').",
-                          sessionID,
-                          err,
-                          detail.c_str(),
-                          strerror(-err));
-
-                    mNetSession->destroySession(sessionID);
-
-                    if (sessionID == mClientSessionID) {
-                        mClientSessionID = 0;
-
-                        mClient->onDisplayError(
-                                IRemoteDisplayClient::kDisplayErrorUnknown);
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatClientConnected:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    if (mClientSessionID > 0) {
-                        ALOGW("A client tried to connect, but we already "
-                              "have one.");
-
-                        mNetSession->destroySession(sessionID);
-                        break;
-                    }
-
-                    CHECK_EQ(mState, AWAITING_CLIENT_CONNECTION);
-
-                    CHECK(msg->findString("client-ip", &mClientInfo.mRemoteIP));
-                    CHECK(msg->findString("server-ip", &mClientInfo.mLocalIP));
-
-                    if (mClientInfo.mRemoteIP == mClientInfo.mLocalIP) {
-                        // Disallow connections from the local interface
-                        // for security reasons.
-                        mNetSession->destroySession(sessionID);
-                        break;
-                    }
-
-                    CHECK(msg->findInt32(
-                                "server-port", &mClientInfo.mLocalPort));
-                    mClientInfo.mPlaybackSessionID = -1;
-
-                    mClientSessionID = sessionID;
-
-                    ALOGI("We now have a client (%d) connected.", sessionID);
-
-                    mState = AWAITING_CLIENT_SETUP;
-
-                    status_t err = sendM1(sessionID);
-                    CHECK_EQ(err, (status_t)OK);
-                    break;
-                }
-
-                case ANetworkSession::kWhatData:
-                {
-                    status_t err = onReceiveClientData(msg);
-
-                    if (err != OK) {
-                        mClient->onDisplayError(
-                                IRemoteDisplayClient::kDisplayErrorUnknown);
-                    }
-
-#if 0
-                    // testing only.
-                    char val[PROPERTY_VALUE_MAX];
-                    if (property_get("media.wfd.trigger", val, NULL)) {
-                        if (!strcasecmp(val, "pause") && mState == PLAYING) {
-                            mState = PLAYING_TO_PAUSED;
-                            sendTrigger(mClientSessionID, TRIGGER_PAUSE);
-                        } else if (!strcasecmp(val, "play")
-                                    && mState == PAUSED) {
-                            mState = PAUSED_TO_PLAYING;
-                            sendTrigger(mClientSessionID, TRIGGER_PLAY);
-                        }
-                    }
-#endif
-                    break;
-                }
-
-                case ANetworkSession::kWhatNetworkStall:
-                {
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatStop:
-        {
-            CHECK(msg->senderAwaitsResponse(&mStopReplyID));
-
-            CHECK_LT(mState, AWAITING_CLIENT_TEARDOWN);
-
-            if (mState >= AWAITING_CLIENT_PLAY) {
-                // We have a session, i.e. a previous SETUP succeeded.
-
-                status_t err = sendTrigger(
-                        mClientSessionID, TRIGGER_TEARDOWN);
-
-                if (err == OK) {
-                    mState = AWAITING_CLIENT_TEARDOWN;
-
-                    (new AMessage(kWhatTeardownTriggerTimedOut, this))->post(
-                            kTeardownTriggerTimeouSecs * 1000000ll);
-
-                    break;
-                }
-
-                // fall through.
-            }
-
-            finishStop();
-            break;
-        }
-
-        case kWhatPause:
-        {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
-            status_t err = OK;
-
-            if (mState != PLAYING) {
-                err = INVALID_OPERATION;
-            } else {
-                mState = PLAYING_TO_PAUSED;
-                sendTrigger(mClientSessionID, TRIGGER_PAUSE);
-            }
-
-            sp<AMessage> response = new AMessage;
-            response->setInt32("err", err);
-            response->postReply(replyID);
-            break;
-        }
-
-        case kWhatResume:
-        {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
-            status_t err = OK;
-
-            if (mState != PAUSED) {
-                err = INVALID_OPERATION;
-            } else {
-                mState = PAUSED_TO_PLAYING;
-                sendTrigger(mClientSessionID, TRIGGER_PLAY);
-            }
-
-            sp<AMessage> response = new AMessage;
-            response->setInt32("err", err);
-            response->postReply(replyID);
-            break;
-        }
-
-        case kWhatReapDeadClients:
-        {
-            mReaperPending = false;
-
-            if (mClientSessionID == 0
-                    || mClientInfo.mPlaybackSession == NULL) {
-                break;
-            }
-
-            if (mClientInfo.mPlaybackSession->getLastLifesignUs()
-                    + kPlaybackSessionTimeoutUs < ALooper::GetNowUs()) {
-                ALOGI("playback session timed out, reaping.");
-
-                mNetSession->destroySession(mClientSessionID);
-                mClientSessionID = 0;
-
-                mClient->onDisplayError(
-                        IRemoteDisplayClient::kDisplayErrorUnknown);
-            } else {
-                scheduleReaper();
-            }
-            break;
-        }
-
-        case kWhatPlaybackSessionNotify:
-        {
-            int32_t playbackSessionID;
-            CHECK(msg->findInt32("playbackSessionID", &playbackSessionID));
-
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            if (what == PlaybackSession::kWhatSessionDead) {
-                ALOGI("playback session wants to quit.");
-
-                mClient->onDisplayError(
-                        IRemoteDisplayClient::kDisplayErrorUnknown);
-            } else if (what == PlaybackSession::kWhatSessionEstablished) {
-                mPlaybackSessionEstablished = true;
-
-                if (mClient != NULL) {
-                    if (!mSinkSupportsVideo) {
-                        mClient->onDisplayConnected(
-                                NULL,  // SurfaceTexture
-                                0, // width,
-                                0, // height,
-                                mUsingHDCP
-                                    ? IRemoteDisplayClient::kDisplayFlagSecure
-                                    : 0,
-                                0);
-                    } else {
-                        size_t width, height;
-
-                        CHECK(VideoFormats::GetConfiguration(
-                                    mChosenVideoResolutionType,
-                                    mChosenVideoResolutionIndex,
-                                    &width,
-                                    &height,
-                                    NULL /* framesPerSecond */,
-                                    NULL /* interlaced */));
-
-                        mClient->onDisplayConnected(
-                                mClientInfo.mPlaybackSession
-                                    ->getSurfaceTexture(),
-                                width,
-                                height,
-                                mUsingHDCP
-                                    ? IRemoteDisplayClient::kDisplayFlagSecure
-                                    : 0,
-                                playbackSessionID);
-                    }
-                }
-
-                finishPlay();
-
-                if (mState == ABOUT_TO_PLAY) {
-                    mState = PLAYING;
-                }
-            } else if (what == PlaybackSession::kWhatSessionDestroyed) {
-                disconnectClient2();
-            } else {
-                CHECK_EQ(what, PlaybackSession::kWhatBinaryData);
-
-                int32_t channel;
-                CHECK(msg->findInt32("channel", &channel));
-
-                sp<ABuffer> data;
-                CHECK(msg->findBuffer("data", &data));
-
-                CHECK_LE(channel, 0xff);
-                CHECK_LE(data->size(), 0xffffu);
-
-                int32_t sessionID;
-                CHECK(msg->findInt32("sessionID", &sessionID));
-
-                char header[4];
-                header[0] = '$';
-                header[1] = channel;
-                header[2] = data->size() >> 8;
-                header[3] = data->size() & 0xff;
-
-                mNetSession->sendRequest(
-                        sessionID, header, sizeof(header));
-
-                mNetSession->sendRequest(
-                        sessionID, data->data(), data->size());
-            }
-            break;
-        }
-
-        case kWhatKeepAlive:
-        {
-            int32_t sessionID;
-            CHECK(msg->findInt32("sessionID", &sessionID));
-
-            if (mClientSessionID != sessionID) {
-                // Obsolete event, client is already gone.
-                break;
-            }
-
-            sendM16(sessionID);
-            break;
-        }
-
-        case kWhatTeardownTriggerTimedOut:
-        {
-            if (mState == AWAITING_CLIENT_TEARDOWN) {
-                ALOGI("TEARDOWN trigger timed out, forcing disconnection.");
-
-                CHECK(mStopReplyID != NULL);
-                finishStop();
-                break;
-            }
-            break;
-        }
-
-        case kWhatHDCPNotify:
-        {
-            int32_t msgCode, ext1, ext2;
-            CHECK(msg->findInt32("msg", &msgCode));
-            CHECK(msg->findInt32("ext1", &ext1));
-            CHECK(msg->findInt32("ext2", &ext2));
-
-            ALOGI("Saw HDCP notification code %d, ext1 %d, ext2 %d",
-                    msgCode, ext1, ext2);
-
-            switch (msgCode) {
-                case HDCPModule::HDCP_INITIALIZATION_COMPLETE:
-                {
-                    mHDCPInitializationComplete = true;
-
-                    if (mSetupTriggerDeferred) {
-                        mSetupTriggerDeferred = false;
-
-                        sendTrigger(mClientSessionID, TRIGGER_SETUP);
-                    }
-                    break;
-                }
-
-                case HDCPModule::HDCP_SHUTDOWN_COMPLETE:
-                case HDCPModule::HDCP_SHUTDOWN_FAILED:
-                {
-                    // Ugly hack to make sure that the call to
-                    // HDCPObserver::notify is completely handled before
-                    // we clear the HDCP instance and unload the shared
-                    // library :(
-                    (new AMessage(kWhatFinishStop2, this))->post(300000ll);
-                    break;
-                }
-
-                default:
-                {
-                    ALOGE("HDCP failure, shutting down.");
-
-                    mClient->onDisplayError(
-                            IRemoteDisplayClient::kDisplayErrorUnknown);
-                    break;
-                }
-            }
-            break;
-        }
-
-        case kWhatFinishStop2:
-        {
-            finishStop2();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void WifiDisplaySource::registerResponseHandler(
-        int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) {
-    ResponseID id;
-    id.mSessionID = sessionID;
-    id.mCSeq = cseq;
-    mResponseHandlers.add(id, func);
-}
-
-status_t WifiDisplaySource::sendM1(int32_t sessionID) {
-    AString request = "OPTIONS * RTSP/1.0\r\n";
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append(
-            "Require: org.wfa.wfd1.0\r\n"
-            "\r\n");
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM1Response);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySource::sendM3(int32_t sessionID) {
-    AString body =
-        "wfd_content_protection\r\n"
-        "wfd_video_formats\r\n"
-        "wfd_audio_codecs\r\n"
-        "wfd_client_rtp_ports\r\n";
-
-    AString request = "GET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append("Content-Type: text/parameters\r\n");
-    request.append(AStringPrintf("Content-Length: %d\r\n", body.size()));
-    request.append("\r\n");
-    request.append(body);
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM3Response);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySource::sendM4(int32_t sessionID) {
-    CHECK_EQ(sessionID, mClientSessionID);
-
-    AString body;
-
-    if (mSinkSupportsVideo) {
-        body.append("wfd_video_formats: ");
-
-        VideoFormats chosenVideoFormat;
-        chosenVideoFormat.disableAll();
-        chosenVideoFormat.setNativeResolution(
-                mChosenVideoResolutionType, mChosenVideoResolutionIndex);
-        chosenVideoFormat.setProfileLevel(
-                mChosenVideoResolutionType, mChosenVideoResolutionIndex,
-                mChosenVideoProfile, mChosenVideoLevel);
-
-        body.append(chosenVideoFormat.getFormatSpec(true /* forM4Message */));
-        body.append("\r\n");
-    }
-
-    if (mSinkSupportsAudio) {
-        body.append(
-                AStringPrintf("wfd_audio_codecs: %s\r\n",
-                             (mUsingPCMAudio
-                                ? "LPCM 00000002 00" // 2 ch PCM 48kHz
-                                : "AAC 00000001 00")));  // 2 ch AAC 48kHz
-    }
-
-    body.append(
-            AStringPrintf(
-                "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n",
-                mClientInfo.mLocalIP.c_str()));
-
-    body.append(
-            AStringPrintf(
-                "wfd_client_rtp_ports: %s\r\n", mWfdClientRtpPorts.c_str()));
-
-    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append("Content-Type: text/parameters\r\n");
-    request.append(AStringPrintf("Content-Length: %d\r\n", body.size()));
-    request.append("\r\n");
-    request.append(body);
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM4Response);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySource::sendTrigger(
-        int32_t sessionID, TriggerType triggerType) {
-    AString body = "wfd_trigger_method: ";
-    switch (triggerType) {
-        case TRIGGER_SETUP:
-            body.append("SETUP");
-            break;
-        case TRIGGER_TEARDOWN:
-            ALOGI("Sending TEARDOWN trigger.");
-            body.append("TEARDOWN");
-            break;
-        case TRIGGER_PAUSE:
-            body.append("PAUSE");
-            break;
-        case TRIGGER_PLAY:
-            body.append("PLAY");
-            break;
-        default:
-            TRESPASS();
-    }
-
-    body.append("\r\n");
-
-    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append("Content-Type: text/parameters\r\n");
-    request.append(AStringPrintf("Content-Length: %d\r\n", body.size()));
-    request.append("\r\n");
-    request.append(body);
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM5Response);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySource::sendM16(int32_t sessionID) {
-    AString request = "GET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
-    AppendCommonResponse(&request, mNextCSeq);
-
-    CHECK_EQ(sessionID, mClientSessionID);
-    request.append(
-            AStringPrintf("Session: %d\r\n", mClientInfo.mPlaybackSessionID));
-    request.append("\r\n");  // Empty body
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM16Response);
-
-    ++mNextCSeq;
-
-    scheduleKeepAlive(sessionID);
-
-    return OK;
-}
-
-status_t WifiDisplaySource::onReceiveM1Response(
-        int32_t /* sessionID */, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    return OK;
-}
-
-// sink_audio_list := ("LPCM"|"AAC"|"AC3" HEXDIGIT*8 HEXDIGIT*2)
-//                       (", " sink_audio_list)*
-static void GetAudioModes(const char *s, const char *prefix, uint32_t *modes) {
-    *modes = 0;
-
-    size_t prefixLen = strlen(prefix);
-
-    // Walk the comma-separated list until the end of the string.
-    while (*s != '\0') {
-        if (!strncmp(s, prefix, prefixLen) && s[prefixLen] == ' ') {
-            unsigned latency;
-            if (sscanf(&s[prefixLen + 1], "%08x %02x", modes, &latency) != 2) {
-                *modes = 0;
-            }
-
-            return;
-        }
-
-        const char *commaPos = strchr(s, ',');
-        if (commaPos != NULL) {
-            s = commaPos + 1;
-
-            while (isspace(*s)) {
-                ++s;
-            }
-        } else {
-            break;
-        }
-    }
-}
-
-status_t WifiDisplaySource::onReceiveM3Response(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    sp<Parameters> params =
-        Parameters::Parse(msg->getContent(), strlen(msg->getContent()));
-
-    if (params == NULL) {
-        return ERROR_MALFORMED;
-    }
-
-    AString value;
-    if (!params->findParameter("wfd_client_rtp_ports", &value)) {
-        ALOGE("Sink doesn't report its choice of wfd_client_rtp_ports.");
-        return ERROR_MALFORMED;
-    }
-
-    unsigned port0 = 0, port1 = 0;
-    if (sscanf(value.c_str(),
-               "RTP/AVP/UDP;unicast %u %u mode=play",
-               &port0,
-               &port1) == 2
-        || sscanf(value.c_str(),
-               "RTP/AVP/TCP;unicast %u %u mode=play",
-               &port0,
-               &port1) == 2) {
-            if (port0 == 0 || port0 > 65535 || port1 != 0) {
-                ALOGE("Sink chose its wfd_client_rtp_ports poorly (%s)",
-                      value.c_str());
-
-                return ERROR_MALFORMED;
-            }
-    } else if (strcmp(value.c_str(), "RTP/AVP/TCP;interleaved mode=play")) {
-        ALOGE("Unsupported value for wfd_client_rtp_ports (%s)",
-              value.c_str());
-
-        return ERROR_UNSUPPORTED;
-    }
-
-    mWfdClientRtpPorts = value;
-    mChosenRTPPort = port0;
-
-    if (!params->findParameter("wfd_video_formats", &value)) {
-        ALOGE("Sink doesn't report its choice of wfd_video_formats.");
-        return ERROR_MALFORMED;
-    }
-
-    mSinkSupportsVideo = false;
-
-    if  (!(value == "none")) {
-        mSinkSupportsVideo = true;
-        if (!mSupportedSinkVideoFormats.parseFormatSpec(value.c_str())) {
-            ALOGE("Failed to parse sink provided wfd_video_formats (%s)",
-                  value.c_str());
-
-            return ERROR_MALFORMED;
-        }
-
-        if (!VideoFormats::PickBestFormat(
-                    mSupportedSinkVideoFormats,
-                    mSupportedSourceVideoFormats,
-                    &mChosenVideoResolutionType,
-                    &mChosenVideoResolutionIndex,
-                    &mChosenVideoProfile,
-                    &mChosenVideoLevel)) {
-            ALOGE("Sink and source share no commonly supported video "
-                  "formats.");
-
-            return ERROR_UNSUPPORTED;
-        }
-
-        size_t width, height, framesPerSecond;
-        bool interlaced;
-        CHECK(VideoFormats::GetConfiguration(
-                    mChosenVideoResolutionType,
-                    mChosenVideoResolutionIndex,
-                    &width,
-                    &height,
-                    &framesPerSecond,
-                    &interlaced));
-
-        ALOGI("Picked video resolution %zu x %zu %c%zu",
-              width, height, interlaced ? 'i' : 'p', framesPerSecond);
-
-        ALOGI("Picked AVC profile %d, level %d",
-              mChosenVideoProfile, mChosenVideoLevel);
-    } else {
-        ALOGI("Sink doesn't support video at all.");
-    }
-
-    if (!params->findParameter("wfd_audio_codecs", &value)) {
-        ALOGE("Sink doesn't report its choice of wfd_audio_codecs.");
-        return ERROR_MALFORMED;
-    }
-
-    mSinkSupportsAudio = false;
-
-    if  (!(value == "none")) {
-        mSinkSupportsAudio = true;
-
-        uint32_t modes;
-        GetAudioModes(value.c_str(), "AAC", &modes);
-
-        bool supportsAAC = (modes & 1) != 0;  // AAC 2ch 48kHz
-
-        GetAudioModes(value.c_str(), "LPCM", &modes);
-
-        bool supportsPCM = (modes & 2) != 0;  // LPCM 2ch 48kHz
-
-        if (supportsPCM
-                && property_get_bool("media.wfd.use-pcm-audio", false)) {
-            ALOGI("Using PCM audio.");
-            mUsingPCMAudio = true;
-        } else if (supportsAAC) {
-            ALOGI("Using AAC audio.");
-            mUsingPCMAudio = false;
-        } else if (supportsPCM) {
-            ALOGI("Using PCM audio.");
-            mUsingPCMAudio = true;
-        } else {
-            ALOGI("Sink doesn't support an audio format we do.");
-            return ERROR_UNSUPPORTED;
-        }
-    } else {
-        ALOGI("Sink doesn't support audio at all.");
-    }
-
-    if (!mSinkSupportsVideo && !mSinkSupportsAudio) {
-        ALOGE("Sink supports neither video nor audio...");
-        return ERROR_UNSUPPORTED;
-    }
-
-    mUsingHDCP = false;
-    if (!params->findParameter("wfd_content_protection", &value)) {
-        ALOGI("Sink doesn't appear to support content protection.");
-    } else if (value == "none") {
-        ALOGI("Sink does not support content protection.");
-    } else {
-        mUsingHDCP = true;
-
-        bool isHDCP2_0 = false;
-        if (value.startsWith("HDCP2.0 ")) {
-            isHDCP2_0 = true;
-        } else if (!value.startsWith("HDCP2.1 ")) {
-            ALOGE("malformed wfd_content_protection: '%s'", value.c_str());
-
-            return ERROR_MALFORMED;
-        }
-
-        int32_t hdcpPort;
-        if (!ParsedMessage::GetInt32Attribute(
-                    value.c_str() + 8, "port", &hdcpPort)
-                || hdcpPort < 1 || hdcpPort > 65535) {
-            return ERROR_MALFORMED;
-        }
-
-        mIsHDCP2_0 = isHDCP2_0;
-        mHDCPPort = hdcpPort;
-
-        status_t err = makeHDCP();
-        if (err != OK) {
-            ALOGE("Unable to instantiate HDCP component. "
-                  "Not using HDCP after all.");
-
-            mUsingHDCP = false;
-        }
-    }
-
-    return sendM4(sessionID);
-}
-
-status_t WifiDisplaySource::onReceiveM4Response(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    if (mUsingHDCP && !mHDCPInitializationComplete) {
-        ALOGI("Deferring SETUP trigger until HDCP initialization completes.");
-
-        mSetupTriggerDeferred = true;
-        return OK;
-    }
-
-    return sendTrigger(sessionID, TRIGGER_SETUP);
-}
-
-status_t WifiDisplaySource::onReceiveM5Response(
-        int32_t /* sessionID */, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    return OK;
-}
-
-status_t WifiDisplaySource::onReceiveM16Response(
-        int32_t sessionID, const sp<ParsedMessage> & /* msg */) {
-    // If only the response was required to include a "Session:" header...
-
-    CHECK_EQ(sessionID, mClientSessionID);
-
-    if (mClientInfo.mPlaybackSession != NULL) {
-        mClientInfo.mPlaybackSession->updateLiveness();
-    }
-
-    return OK;
-}
-
-void WifiDisplaySource::scheduleReaper() {
-    if (mReaperPending) {
-        return;
-    }
-
-    mReaperPending = true;
-    (new AMessage(kWhatReapDeadClients, this))->post(kReaperIntervalUs);
-}
-
-void WifiDisplaySource::scheduleKeepAlive(int32_t sessionID) {
-    // We need to send updates at least 5 secs before the timeout is set to
-    // expire, so make sure the timeout is greater than 5 secs to begin with.
-    CHECK_GT(kPlaybackSessionTimeoutUs, 5000000ll);
-
-    sp<AMessage> msg = new AMessage(kWhatKeepAlive, this);
-    msg->setInt32("sessionID", sessionID);
-    msg->post(kPlaybackSessionTimeoutUs - 5000000ll);
-}
-
-status_t WifiDisplaySource::onReceiveClientData(const sp<AMessage> &msg) {
-    int32_t sessionID;
-    CHECK(msg->findInt32("sessionID", &sessionID));
-
-    sp<RefBase> obj;
-    CHECK(msg->findObject("data", &obj));
-
-    sp<ParsedMessage> data =
-        static_cast<ParsedMessage *>(obj.get());
-
-    ALOGV("session %d received '%s'",
-          sessionID, data->debugString().c_str());
-
-    AString method;
-    AString uri;
-    data->getRequestField(0, &method);
-
-    int32_t cseq;
-    if (!data->findInt32("cseq", &cseq)) {
-        sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */);
-        return ERROR_MALFORMED;
-    }
-
-    if (method.startsWith("RTSP/")) {
-        // This is a response.
-
-        ResponseID id;
-        id.mSessionID = sessionID;
-        id.mCSeq = cseq;
-
-        ssize_t index = mResponseHandlers.indexOfKey(id);
-
-        if (index < 0) {
-            ALOGW("Received unsolicited server response, cseq %d", cseq);
-            return ERROR_MALFORMED;
-        }
-
-        HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index);
-        mResponseHandlers.removeItemsAt(index);
-
-        status_t err = (this->*func)(sessionID, data);
-
-        if (err != OK) {
-            ALOGW("Response handler for session %d, cseq %d returned "
-                  "err %d (%s)",
-                  sessionID, cseq, err, strerror(-err));
-
-            return err;
-        }
-
-        return OK;
-    }
-
-    AString version;
-    data->getRequestField(2, &version);
-    if (!(version == AString("RTSP/1.0"))) {
-        sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq);
-        return ERROR_UNSUPPORTED;
-    }
-
-    status_t err;
-    if (method == "OPTIONS") {
-        err = onOptionsRequest(sessionID, cseq, data);
-    } else if (method == "SETUP") {
-        err = onSetupRequest(sessionID, cseq, data);
-    } else if (method == "PLAY") {
-        err = onPlayRequest(sessionID, cseq, data);
-    } else if (method == "PAUSE") {
-        err = onPauseRequest(sessionID, cseq, data);
-    } else if (method == "TEARDOWN") {
-        err = onTeardownRequest(sessionID, cseq, data);
-    } else if (method == "GET_PARAMETER") {
-        err = onGetParameterRequest(sessionID, cseq, data);
-    } else if (method == "SET_PARAMETER") {
-        err = onSetParameterRequest(sessionID, cseq, data);
-    } else {
-        sendErrorResponse(sessionID, "405 Method Not Allowed", cseq);
-
-        err = ERROR_UNSUPPORTED;
-    }
-
-    return err;
-}
-
-status_t WifiDisplaySource::onOptionsRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    int32_t playbackSessionID;
-    sp<PlaybackSession> playbackSession =
-        findPlaybackSession(data, &playbackSessionID);
-
-    if (playbackSession != NULL) {
-        playbackSession->updateLiveness();
-    }
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-
-    response.append(
-            "Public: org.wfa.wfd1.0, SETUP, TEARDOWN, PLAY, PAUSE, "
-            "GET_PARAMETER, SET_PARAMETER\r\n");
-
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-
-    if (err == OK) {
-        err = sendM3(sessionID);
-    }
-
-    return err;
-}
-
-status_t WifiDisplaySource::onSetupRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    CHECK_EQ(sessionID, mClientSessionID);
-    if (mClientInfo.mPlaybackSessionID != -1) {
-        // We only support a single playback session per client.
-        // This is due to the reversed keep-alive design in the wfd specs...
-        sendErrorResponse(sessionID, "400 Bad Request", cseq);
-        return ERROR_MALFORMED;
-    }
-
-    AString transport;
-    if (!data->findString("transport", &transport)) {
-        sendErrorResponse(sessionID, "400 Bad Request", cseq);
-        return ERROR_MALFORMED;
-    }
-
-    RTPSender::TransportMode rtpMode = RTPSender::TRANSPORT_UDP;
-
-    int clientRtp, clientRtcp;
-    if (transport.startsWith("RTP/AVP/TCP;")) {
-        AString interleaved;
-        if (ParsedMessage::GetAttribute(
-                    transport.c_str(), "interleaved", &interleaved)
-                && sscanf(interleaved.c_str(), "%d-%d",
-                          &clientRtp, &clientRtcp) == 2) {
-            rtpMode = RTPSender::TRANSPORT_TCP_INTERLEAVED;
-        } else {
-            bool badRequest = false;
-
-            AString clientPort;
-            if (!ParsedMessage::GetAttribute(
-                        transport.c_str(), "client_port", &clientPort)) {
-                badRequest = true;
-            } else if (sscanf(clientPort.c_str(), "%d-%d",
-                              &clientRtp, &clientRtcp) == 2) {
-            } else if (sscanf(clientPort.c_str(), "%d", &clientRtp) == 1) {
-                // No RTCP.
-                clientRtcp = -1;
-            } else {
-                badRequest = true;
-            }
-
-            if (badRequest) {
-                sendErrorResponse(sessionID, "400 Bad Request", cseq);
-                return ERROR_MALFORMED;
-            }
-
-            rtpMode = RTPSender::TRANSPORT_TCP;
-        }
-    } else if (transport.startsWith("RTP/AVP;unicast;")
-            || transport.startsWith("RTP/AVP/UDP;unicast;")) {
-        bool badRequest = false;
-
-        AString clientPort;
-        if (!ParsedMessage::GetAttribute(
-                    transport.c_str(), "client_port", &clientPort)) {
-            badRequest = true;
-        } else if (sscanf(clientPort.c_str(), "%d-%d",
-                          &clientRtp, &clientRtcp) == 2) {
-        } else if (sscanf(clientPort.c_str(), "%d", &clientRtp) == 1) {
-            // No RTCP.
-            clientRtcp = -1;
-        } else {
-            badRequest = true;
-        }
-
-        if (badRequest) {
-            sendErrorResponse(sessionID, "400 Bad Request", cseq);
-            return ERROR_MALFORMED;
-        }
-#if 1
-    // The older LG dongles apparently don't specify client_port=xxx.
-    } else if (transport == "RTP/AVP/UDP;unicast") {
-        clientRtp = 19000;
-        clientRtcp = -1;
-#endif
-    } else {
-        sendErrorResponse(sessionID, "461 Unsupported Transport", cseq);
-        return ERROR_UNSUPPORTED;
-    }
-
-    int32_t playbackSessionID = makeUniquePlaybackSessionID();
-
-    sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, this);
-    notify->setInt32("playbackSessionID", playbackSessionID);
-    notify->setInt32("sessionID", sessionID);
-
-    sp<PlaybackSession> playbackSession =
-        new PlaybackSession(
-                mOpPackageName, mNetSession, notify, mInterfaceAddr, mHDCP, mMediaPath.c_str());
-
-    looper()->registerHandler(playbackSession);
-
-    AString uri;
-    data->getRequestField(1, &uri);
-
-    if (strncasecmp("rtsp://", uri.c_str(), 7)) {
-        sendErrorResponse(sessionID, "400 Bad Request", cseq);
-        return ERROR_MALFORMED;
-    }
-
-    if (!(uri.startsWith("rtsp://") && uri.endsWith("/wfd1.0/streamid=0"))) {
-        sendErrorResponse(sessionID, "404 Not found", cseq);
-        return ERROR_MALFORMED;
-    }
-
-    RTPSender::TransportMode rtcpMode = RTPSender::TRANSPORT_UDP;
-    if (clientRtcp < 0) {
-        rtcpMode = RTPSender::TRANSPORT_NONE;
-    }
-
-    status_t err = playbackSession->init(
-            mClientInfo.mRemoteIP.c_str(),
-            clientRtp,
-            rtpMode,
-            clientRtcp,
-            rtcpMode,
-            mSinkSupportsAudio,
-            mUsingPCMAudio,
-            mSinkSupportsVideo,
-            mChosenVideoResolutionType,
-            mChosenVideoResolutionIndex,
-            mChosenVideoProfile,
-            mChosenVideoLevel);
-
-    if (err != OK) {
-        looper()->unregisterHandler(playbackSession->id());
-        playbackSession.clear();
-    }
-
-    switch (err) {
-        case OK:
-            break;
-        case -ENOENT:
-            sendErrorResponse(sessionID, "404 Not Found", cseq);
-            return err;
-        default:
-            sendErrorResponse(sessionID, "403 Forbidden", cseq);
-            return err;
-    }
-
-    mClientInfo.mPlaybackSessionID = playbackSessionID;
-    mClientInfo.mPlaybackSession = playbackSession;
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq, playbackSessionID);
-
-    if (rtpMode == RTPSender::TRANSPORT_TCP_INTERLEAVED) {
-        response.append(
-                AStringPrintf(
-                    "Transport: RTP/AVP/TCP;interleaved=%d-%d;",
-                    clientRtp, clientRtcp));
-    } else {
-        int32_t serverRtp = playbackSession->getRTPPort();
-
-        AString transportString = "UDP";
-        if (rtpMode == RTPSender::TRANSPORT_TCP) {
-            transportString = "TCP";
-        }
-
-        if (clientRtcp >= 0) {
-            response.append(
-                    AStringPrintf(
-                        "Transport: RTP/AVP/%s;unicast;client_port=%d-%d;"
-                        "server_port=%d-%d\r\n",
-                        transportString.c_str(),
-                        clientRtp, clientRtcp, serverRtp, serverRtp + 1));
-        } else {
-            response.append(
-                    AStringPrintf(
-                        "Transport: RTP/AVP/%s;unicast;client_port=%d;"
-                        "server_port=%d\r\n",
-                        transportString.c_str(),
-                        clientRtp, serverRtp));
-        }
-    }
-
-    response.append("\r\n");
-
-    err = mNetSession->sendRequest(sessionID, response.c_str());
-
-    if (err != OK) {
-        return err;
-    }
-
-    mState = AWAITING_CLIENT_PLAY;
-
-    scheduleReaper();
-    scheduleKeepAlive(sessionID);
-
-    return OK;
-}
-
-status_t WifiDisplaySource::onPlayRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    int32_t playbackSessionID;
-    sp<PlaybackSession> playbackSession =
-        findPlaybackSession(data, &playbackSessionID);
-
-    if (playbackSession == NULL) {
-        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
-        return ERROR_MALFORMED;
-    }
-
-    if (mState != AWAITING_CLIENT_PLAY
-     && mState != PAUSED_TO_PLAYING
-     && mState != PAUSED) {
-        ALOGW("Received PLAY request but we're in state %d", mState);
-
-        sendErrorResponse(
-                sessionID, "455 Method Not Valid in This State", cseq);
-
-        return INVALID_OPERATION;
-    }
-
-    ALOGI("Received PLAY request.");
-    if (mPlaybackSessionEstablished) {
-        finishPlay();
-    } else {
-        ALOGI("deferring PLAY request until session established.");
-    }
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq, playbackSessionID);
-    response.append("Range: npt=now-\r\n");
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-
-    if (err != OK) {
-        return err;
-    }
-
-    if (mState == PAUSED_TO_PLAYING || mPlaybackSessionEstablished) {
-        mState = PLAYING;
-        return OK;
-    }
-
-    CHECK_EQ(mState, AWAITING_CLIENT_PLAY);
-    mState = ABOUT_TO_PLAY;
-
-    return OK;
-}
-
-void WifiDisplaySource::finishPlay() {
-    const sp<PlaybackSession> &playbackSession =
-        mClientInfo.mPlaybackSession;
-
-    status_t err = playbackSession->play();
-    CHECK_EQ(err, (status_t)OK);
-}
-
-status_t WifiDisplaySource::onPauseRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    int32_t playbackSessionID;
-    sp<PlaybackSession> playbackSession =
-        findPlaybackSession(data, &playbackSessionID);
-
-    if (playbackSession == NULL) {
-        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
-        return ERROR_MALFORMED;
-    }
-
-    ALOGI("Received PAUSE request.");
-
-    if (mState != PLAYING_TO_PAUSED && mState != PLAYING) {
-        return INVALID_OPERATION;
-    }
-
-    status_t err = playbackSession->pause();
-    CHECK_EQ(err, (status_t)OK);
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq, playbackSessionID);
-    response.append("\r\n");
-
-    err = mNetSession->sendRequest(sessionID, response.c_str());
-
-    if (err != OK) {
-        return err;
-    }
-
-    mState = PAUSED;
-
-    return err;
-}
-
-status_t WifiDisplaySource::onTeardownRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    ALOGI("Received TEARDOWN request.");
-
-    int32_t playbackSessionID;
-    sp<PlaybackSession> playbackSession =
-        findPlaybackSession(data, &playbackSessionID);
-
-    if (playbackSession == NULL) {
-        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
-        return ERROR_MALFORMED;
-    }
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq, playbackSessionID);
-    response.append("Connection: close\r\n");
-    response.append("\r\n");
-
-    mNetSession->sendRequest(sessionID, response.c_str());
-
-    if (mState == AWAITING_CLIENT_TEARDOWN) {
-        CHECK(mStopReplyID != NULL);
-        finishStop();
-    } else {
-        mClient->onDisplayError(IRemoteDisplayClient::kDisplayErrorUnknown);
-    }
-
-    return OK;
-}
-
-void WifiDisplaySource::finishStop() {
-    ALOGV("finishStop");
-
-    mState = STOPPING;
-
-    disconnectClientAsync();
-}
-
-void WifiDisplaySource::finishStopAfterDisconnectingClient() {
-    ALOGV("finishStopAfterDisconnectingClient");
-
-    if (mHDCP != NULL) {
-        ALOGI("Initiating HDCP shutdown.");
-        mHDCP->shutdownAsync();
-        return;
-    }
-
-    finishStop2();
-}
-
-void WifiDisplaySource::finishStop2() {
-    ALOGV("finishStop2");
-
-    if (mHDCP != NULL) {
-        mHDCP->setObserver(NULL);
-        mHDCPObserver.clear();
-        mHDCP.clear();
-    }
-
-    if (mSessionID != 0) {
-        mNetSession->destroySession(mSessionID);
-        mSessionID = 0;
-    }
-
-    ALOGI("We're stopped.");
-    mState = STOPPED;
-
-    status_t err = OK;
-
-    sp<AMessage> response = new AMessage;
-    response->setInt32("err", err);
-    response->postReply(mStopReplyID);
-}
-
-status_t WifiDisplaySource::onGetParameterRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    int32_t playbackSessionID;
-    sp<PlaybackSession> playbackSession =
-        findPlaybackSession(data, &playbackSessionID);
-
-    if (playbackSession == NULL) {
-        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
-        return ERROR_MALFORMED;
-    }
-
-    playbackSession->updateLiveness();
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq, playbackSessionID);
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    return err;
-}
-
-status_t WifiDisplaySource::onSetParameterRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    int32_t playbackSessionID;
-    sp<PlaybackSession> playbackSession =
-        findPlaybackSession(data, &playbackSessionID);
-
-    if (playbackSession == NULL) {
-        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
-        return ERROR_MALFORMED;
-    }
-
-    if (strstr(data->getContent(), "wfd_idr_request\r\n")) {
-        playbackSession->requestIDRFrame();
-    }
-
-    playbackSession->updateLiveness();
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq, playbackSessionID);
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    return err;
-}
-
-// static
-void WifiDisplaySource::AppendCommonResponse(
-        AString *response, int32_t cseq, int32_t playbackSessionID) {
-    time_t now = time(NULL);
-    struct tm *now2 = gmtime(&now);
-    char buf[128];
-    strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2);
-
-    response->append("Date: ");
-    response->append(buf);
-    response->append("\r\n");
-
-    response->append(AStringPrintf("Server: %s\r\n", sUserAgent.c_str()));
-
-    if (cseq >= 0) {
-        response->append(AStringPrintf("CSeq: %d\r\n", cseq));
-    }
-
-    if (playbackSessionID >= 0ll) {
-        response->append(
-                AStringPrintf(
-                    "Session: %d;timeout=%lld\r\n",
-                    playbackSessionID, kPlaybackSessionTimeoutSecs));
-    }
-}
-
-void WifiDisplaySource::sendErrorResponse(
-        int32_t sessionID,
-        const char *errorDetail,
-        int32_t cseq) {
-    AString response;
-    response.append("RTSP/1.0 ");
-    response.append(errorDetail);
-    response.append("\r\n");
-
-    AppendCommonResponse(&response, cseq);
-
-    response.append("\r\n");
-
-    mNetSession->sendRequest(sessionID, response.c_str());
-}
-
-int32_t WifiDisplaySource::makeUniquePlaybackSessionID() const {
-    return rand();
-}
-
-sp<WifiDisplaySource::PlaybackSession> WifiDisplaySource::findPlaybackSession(
-        const sp<ParsedMessage> &data, int32_t *playbackSessionID) const {
-    if (!data->findInt32("session", playbackSessionID)) {
-        // XXX the older dongles do not always include a "Session:" header.
-        *playbackSessionID = mClientInfo.mPlaybackSessionID;
-        return mClientInfo.mPlaybackSession;
-    }
-
-    if (*playbackSessionID != mClientInfo.mPlaybackSessionID) {
-        return NULL;
-    }
-
-    return mClientInfo.mPlaybackSession;
-}
-
-void WifiDisplaySource::disconnectClientAsync() {
-    ALOGV("disconnectClient");
-
-    if (mClientInfo.mPlaybackSession == NULL) {
-        disconnectClient2();
-        return;
-    }
-
-    if (mClientInfo.mPlaybackSession != NULL) {
-        ALOGV("Destroying PlaybackSession");
-        mClientInfo.mPlaybackSession->destroyAsync();
-    }
-}
-
-void WifiDisplaySource::disconnectClient2() {
-    ALOGV("disconnectClient2");
-
-    if (mClientInfo.mPlaybackSession != NULL) {
-        looper()->unregisterHandler(mClientInfo.mPlaybackSession->id());
-        mClientInfo.mPlaybackSession.clear();
-    }
-
-    if (mClientSessionID != 0) {
-        mNetSession->destroySession(mClientSessionID);
-        mClientSessionID = 0;
-    }
-
-    mClient->onDisplayDisconnected();
-
-    finishStopAfterDisconnectingClient();
-}
-
-struct WifiDisplaySource::HDCPObserver : public BnHDCPObserver {
-    explicit HDCPObserver(const sp<AMessage> &notify);
-
-    virtual void notify(
-            int msg, int ext1, int ext2, const Parcel *obj);
-
-private:
-    sp<AMessage> mNotify;
-
-    DISALLOW_EVIL_CONSTRUCTORS(HDCPObserver);
-};
-
-WifiDisplaySource::HDCPObserver::HDCPObserver(
-        const sp<AMessage> &notify)
-    : mNotify(notify) {
-}
-
-void WifiDisplaySource::HDCPObserver::notify(
-        int msg, int ext1, int ext2, const Parcel * /* obj */) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("msg", msg);
-    notify->setInt32("ext1", ext1);
-    notify->setInt32("ext2", ext2);
-    notify->post();
-}
-
-status_t WifiDisplaySource::makeHDCP() {
-    sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> binder = sm->getService(String16("media.player"));
-
-    sp<IMediaPlayerService> service =
-        interface_cast<IMediaPlayerService>(binder);
-
-    CHECK(service != NULL);
-
-    mHDCP = service->makeHDCP(true /* createEncryptionModule */);
-
-    if (mHDCP == NULL) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    sp<AMessage> notify = new AMessage(kWhatHDCPNotify, this);
-    mHDCPObserver = new HDCPObserver(notify);
-
-    status_t err = mHDCP->setObserver(mHDCPObserver);
-
-    if (err != OK) {
-        ALOGE("Failed to set HDCP observer.");
-
-        mHDCPObserver.clear();
-        mHDCP.clear();
-
-        return err;
-    }
-
-    ALOGI("Initiating HDCP negotiation w/ host %s:%d",
-            mClientInfo.mRemoteIP.c_str(), mHDCPPort);
-
-    err = mHDCP->initAsync(mClientInfo.mRemoteIP.c_str(), mHDCPPort);
-
-    if (err != OK) {
-        return err;
-    }
-
-    return OK;
-}
-
-}  // namespace android
-
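
For reference, the Transport header assembled in the UDP branch above resolves to a
single response line of the following shape (port numbers are illustrative, not taken
from this change):

    Transport: RTP/AVP/UDP;unicast;client_port=19000-19001;server_port=15550-15551

When the sink advertises no RTCP port, only single client_port/server_port values are
emitted, and the TCP-interleaved branch instead reports interleaved channel numbers
built from the client RTP/RTCP values.
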
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
deleted file mode 100644
index c25a675..0000000
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef WIFI_DISPLAY_SOURCE_H_
-
-#define WIFI_DISPLAY_SOURCE_H_
-
-#include "VideoFormats.h"
-
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ANetworkSession.h>
-
-#include <netinet/in.h>
-
-#include <utils/String16.h>
-
-namespace android {
-
-struct AReplyToken;
-struct IHDCP;
-class IRemoteDisplayClient;
-struct ParsedMessage;
-
-// Represents the RTSP server acting as a wifi display source.
-// Manages incoming connections, sets up Playback sessions as necessary.
-struct WifiDisplaySource : public AHandler {
-    static const unsigned kWifiDisplayDefaultPort = 7236;
-
-    WifiDisplaySource(
-            const String16 &opPackageName,
-            const sp<ANetworkSession> &netSession,
-            const sp<IRemoteDisplayClient> &client,
-            const char *path = NULL);
-
-    status_t start(const char *iface);
-    status_t stop();
-
-    status_t pause();
-    status_t resume();
-
-protected:
-    virtual ~WifiDisplaySource();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    struct PlaybackSession;
-    struct HDCPObserver;
-
-    enum State {
-        INITIALIZED,
-        AWAITING_CLIENT_CONNECTION,
-        AWAITING_CLIENT_SETUP,
-        AWAITING_CLIENT_PLAY,
-        ABOUT_TO_PLAY,
-        PLAYING,
-        PLAYING_TO_PAUSED,
-        PAUSED,
-        PAUSED_TO_PLAYING,
-        AWAITING_CLIENT_TEARDOWN,
-        STOPPING,
-        STOPPED,
-    };
-
-    enum {
-        kWhatStart,
-        kWhatRTSPNotify,
-        kWhatStop,
-        kWhatPause,
-        kWhatResume,
-        kWhatReapDeadClients,
-        kWhatPlaybackSessionNotify,
-        kWhatKeepAlive,
-        kWhatHDCPNotify,
-        kWhatFinishStop2,
-        kWhatTeardownTriggerTimedOut,
-    };
-
-    struct ResponseID {
-        int32_t mSessionID;
-        int32_t mCSeq;
-
-        bool operator<(const ResponseID &other) const {
-            return mSessionID < other.mSessionID
-                || (mSessionID == other.mSessionID
-                        && mCSeq < other.mCSeq);
-        }
-    };
-
-    typedef status_t (WifiDisplaySource::*HandleRTSPResponseFunc)(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    static const int64_t kReaperIntervalUs = 1000000ll;
-
-    // We request that the dongle send us a "TEARDOWN" in order to
-    // perform an orderly shutdown. We're willing to wait up to 2 secs
-    // for this message to arrive, after that we'll force a disconnect
-    // instead.
-    static const int64_t kTeardownTriggerTimeouSecs = 2;
-
-    static const int64_t kPlaybackSessionTimeoutSecs = 30;
-
-    static const int64_t kPlaybackSessionTimeoutUs =
-        kPlaybackSessionTimeoutSecs * 1000000ll;
-
-    static const AString sUserAgent;
-
-    String16 mOpPackageName;
-
-    State mState;
-    VideoFormats mSupportedSourceVideoFormats;
-    sp<ANetworkSession> mNetSession;
-    sp<IRemoteDisplayClient> mClient;
-    AString mMediaPath;
-    struct in_addr mInterfaceAddr;
-    int32_t mSessionID;
-
-    sp<AReplyToken> mStopReplyID;
-
-    AString mWfdClientRtpPorts;
-    int32_t mChosenRTPPort;  // extracted from "wfd_client_rtp_ports"
-
-    bool mSinkSupportsVideo;
-    VideoFormats mSupportedSinkVideoFormats;
-
-    VideoFormats::ResolutionType mChosenVideoResolutionType;
-    size_t mChosenVideoResolutionIndex;
-    VideoFormats::ProfileType mChosenVideoProfile;
-    VideoFormats::LevelType mChosenVideoLevel;
-
-    bool mSinkSupportsAudio;
-
-    bool mUsingPCMAudio;
-    int32_t mClientSessionID;
-
-    struct ClientInfo {
-        AString mRemoteIP;
-        AString mLocalIP;
-        int32_t mLocalPort;
-        int32_t mPlaybackSessionID;
-        sp<PlaybackSession> mPlaybackSession;
-    };
-    ClientInfo mClientInfo;
-
-    bool mReaperPending;
-
-    int32_t mNextCSeq;
-
-    KeyedVector<ResponseID, HandleRTSPResponseFunc> mResponseHandlers;
-
-    // HDCP specific section >>>>
-    bool mUsingHDCP;
-    bool mIsHDCP2_0;
-    int32_t mHDCPPort;
-    sp<IHDCP> mHDCP;
-    sp<HDCPObserver> mHDCPObserver;
-
-    bool mHDCPInitializationComplete;
-    bool mSetupTriggerDeferred;
-
-    bool mPlaybackSessionEstablished;
-
-    status_t makeHDCP();
-    // <<<< HDCP specific section
-
-    status_t sendM1(int32_t sessionID);
-    status_t sendM3(int32_t sessionID);
-    status_t sendM4(int32_t sessionID);
-
-    enum TriggerType {
-        TRIGGER_SETUP,
-        TRIGGER_TEARDOWN,
-        TRIGGER_PAUSE,
-        TRIGGER_PLAY,
-    };
-
-    // M5
-    status_t sendTrigger(int32_t sessionID, TriggerType triggerType);
-
-    status_t sendM16(int32_t sessionID);
-
-    status_t onReceiveM1Response(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveM3Response(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveM4Response(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveM5Response(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveM16Response(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    void registerResponseHandler(
-            int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func);
-
-    status_t onReceiveClientData(const sp<AMessage> &msg);
-
-    status_t onOptionsRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    status_t onSetupRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    status_t onPlayRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    status_t onPauseRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    status_t onTeardownRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    status_t onGetParameterRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    status_t onSetParameterRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void sendErrorResponse(
-            int32_t sessionID,
-            const char *errorDetail,
-            int32_t cseq);
-
-    static void AppendCommonResponse(
-            AString *response, int32_t cseq, int32_t playbackSessionID = -1ll);
-
-    void scheduleReaper();
-    void scheduleKeepAlive(int32_t sessionID);
-
-    int32_t makeUniquePlaybackSessionID() const;
-
-    sp<PlaybackSession> findPlaybackSession(
-            const sp<ParsedMessage> &data, int32_t *playbackSessionID) const;
-
-    void finishStop();
-    void disconnectClientAsync();
-    void disconnectClient2();
-    void finishStopAfterDisconnectingClient();
-    void finishStop2();
-
-    void finishPlay();
-
-    DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySource);
-};
-
-}  // namespace android
-
-#endif  // WIFI_DISPLAY_SOURCE_H_
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 3507284..a4fa342 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -15,10 +15,7 @@
 
     shared_libs: [
         "libexpat",
-        "libutils",
         "liblog",
-        "libcutils",
-        "libstagefright_foundation",
         "libstagefright_omx_utils",
     ],
 
diff --git a/media/mtp/MtpDatabase.h b/media/mtp/IMtpDatabase.h
similarity index 76%
rename from media/mtp/MtpDatabase.h
rename to media/mtp/IMtpDatabase.h
index f3f9720..d09a984 100644
--- a/media/mtp/MtpDatabase.h
+++ b/media/mtp/IMtpDatabase.h
@@ -14,8 +14,8 @@
  * limitations under the License.
  */
 
-#ifndef _MTP_DATABASE_H
-#define _MTP_DATABASE_H
+#ifndef _I_MTP_DATABASE_H
+#define _I_MTP_DATABASE_H
 
 #include "MtpTypes.h"
 
@@ -25,27 +25,24 @@
 class MtpProperty;
 class MtpObjectInfo;
 
-class MtpDatabase {
+class IMtpDatabase {
 public:
-    virtual ~MtpDatabase() {}
+    virtual ~IMtpDatabase() {}
 
-    // called from SendObjectInfo to reserve a database entry for the incoming file
+    // Called from SendObjectInfo to reserve a database entry for the incoming file.
     virtual MtpObjectHandle         beginSendObject(const char* path,
                                             MtpObjectFormat format,
                                             MtpObjectHandle parent,
-                                            MtpStorageID storage,
-                                            uint64_t size,
-                                            time_t modified) = 0;
+                                            MtpStorageID storage) = 0;
 
-    // called to report success or failure of the SendObject file transfer
-    // success should signal a notification of the new object's creation,
-    // failure should remove the database entry created in beginSendObject
-    virtual void                    endSendObject(const char* path,
-                                            MtpObjectHandle handle,
-                                            MtpObjectFormat format,
+    // Called to report success or failure of the SendObject file transfer.
+    virtual void                    endSendObject(MtpObjectHandle handle,
                                             bool succeeded) = 0;
-
-    virtual void                    doScanDirectory(const char* path) = 0;
+
+    // Called to rescan a file, such as after an edit.
+    virtual void                    rescanFile(const char* path,
+                                            MtpObjectHandle handle,
+                                            MtpObjectFormat format) = 0;
 
     virtual MtpObjectHandleList*    getObjectList(MtpStorageID storageID,
                                             MtpObjectFormat format,
@@ -93,7 +90,8 @@
                                             int64_t& outFileLength,
                                             MtpObjectFormat& outFormat) = 0;
 
-    virtual MtpResponseCode         deleteFile(MtpObjectHandle handle) = 0;
+    virtual MtpResponseCode         beginDeleteObject(MtpObjectHandle handle) = 0;
+    virtual void                    endDeleteObject(MtpObjectHandle handle, bool succeeded) = 0;
 
     virtual MtpObjectHandleList*    getObjectReferences(MtpObjectHandle handle) = 0;
 
@@ -105,14 +103,18 @@
 
     virtual MtpProperty*            getDevicePropertyDesc(MtpDeviceProperty property) = 0;
 
-    virtual MtpResponseCode         moveObject(MtpObjectHandle handle, MtpObjectHandle newParent,
-                                            MtpStorageID newStorage, MtpString& newPath) = 0;
+    virtual MtpResponseCode         beginMoveObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                            MtpStorageID newStorage) = 0;
 
-    virtual void                    sessionStarted() = 0;
+    virtual void                    endMoveObject(MtpObjectHandle oldParent, MtpObjectHandle newParent,
+                                            MtpStorageID oldStorage, MtpStorageID newStorage,
+                                            MtpObjectHandle handle, bool succeeded) = 0;
 
-    virtual void                    sessionEnded() = 0;
+    virtual MtpResponseCode         beginCopyObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                            MtpStorageID newStorage);
+    virtual void                    endCopyObject(MtpObjectHandle handle, bool succeeded);
 };
 
 }; // namespace android
 
-#endif // _MTP_DATABASE_H
+#endif // _I_MTP_DATABASE_H
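
A minimal sketch (not part of this patch) of how a caller is expected to drive the
split move hooks declared above: validate and reserve in beginMoveObject(), perform
the filesystem change, then report the outcome through endMoveObject() so the
database entry is either committed or rolled back. The move handling in MtpServer.cpp
below follows the same shape; the helper name and the simple same-storage rename()
are illustrative only.

    #include <cstdio>            // rename()
    #include "IMtpDatabase.h"

    using namespace android;

    static MtpResponseCode moveObjectTwoPhase(IMtpDatabase* db, MtpObjectHandle handle,
            MtpObjectHandle oldParent, MtpObjectHandle newParent,
            MtpStorageID oldStorage, MtpStorageID newStorage,
            const char* fromPath, const char* toPath) {
        MtpResponseCode result = db->beginMoveObject(handle, newParent, newStorage);
        if (result != MTP_RESPONSE_OK)
            return result;                        // database rejected the move up front

        if (rename(fromPath, toPath) != 0)        // same-storage move; a cross-storage
            result = MTP_RESPONSE_GENERAL_ERROR;  // move would copy and then delete

        // Always report back so the reserved entry is committed or rolled back.
        db->endMoveObject(oldParent, newParent, oldStorage, newStorage, handle,
                result == MTP_RESPONSE_OK);
        return result;
    }
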
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index bb0414d..cfda0a6 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <algorithm>
 #include <android-base/logging.h>
 #include <android-base/properties.h>
 #include <chrono>
@@ -31,7 +32,7 @@
 #define LOG_TAG "MtpServer"
 
 #include "MtpDebug.h"
-#include "MtpDatabase.h"
+#include "IMtpDatabase.h"
 #include "MtpDevHandle.h"
 #include "MtpFfsCompatHandle.h"
 #include "MtpFfsHandle.h"
@@ -99,7 +100,7 @@
     MTP_EVENT_DEVICE_PROP_CHANGED,
 };
 
-MtpServer::MtpServer(MtpDatabase* database, bool ptp,
+MtpServer::MtpServer(IMtpDatabase* database, bool ptp,
                     const MtpString& deviceInfoManufacturer,
                     const MtpString& deviceInfoModel,
                     const MtpString& deviceInfoDeviceVersion,
@@ -150,21 +151,17 @@
 
 void MtpServer::removeStorage(MtpStorage* storage) {
     Mutex::Autolock autoLock(mMutex);
-
-    for (size_t i = 0; i < mStorages.size(); i++) {
-        if (mStorages[i] == storage) {
-            mStorages.removeAt(i);
-            sendStoreRemoved(storage->getStorageID());
-            break;
-        }
+    auto iter = std::find(mStorages.begin(), mStorages.end(), storage);
+    if (iter != mStorages.end()) {
+        sendStoreRemoved(storage->getStorageID());
+        mStorages.erase(iter);
     }
 }
 
 MtpStorage* MtpServer::getStorage(MtpStorageID id) {
     if (id == 0)
         return mStorages[0];
-    for (size_t i = 0; i < mStorages.size(); i++) {
-        MtpStorage* storage = mStorages[i];
+    for (MtpStorage *storage : mStorages) {
         if (storage->getStorageID() == id)
             return storage;
     }
@@ -265,9 +262,6 @@
     }
     mObjectEditList.clear();
 
-    if (mSessionOpen)
-        mDatabase->sessionEnded();
-
     sHandle->close();
 }
 
@@ -335,7 +329,7 @@
 }
 
 void MtpServer::commitEdit(ObjectEdit* edit) {
-    mDatabase->endSendObject((const char *)edit->mPath, edit->mHandle, edit->mFormat, true);
+    mDatabase->rescanFile((const char *)edit->mPath, edit->mHandle, edit->mFormat);
 }
 
 
@@ -348,9 +342,9 @@
     mResponse.reset();
 
     if (mSendObjectHandle != kInvalidObjectHandle && operation != MTP_OPERATION_SEND_OBJECT) {
-        // FIXME - need to delete mSendObjectHandle from the database
-        ALOGE("expected SendObject after SendObjectInfo");
         mSendObjectHandle = kInvalidObjectHandle;
+        mSendObjectFormat = 0;
+        mSendObjectModifiedTime = 0;
     }
 
     int containertype = mRequest.getContainerType();
@@ -526,8 +520,6 @@
     mSessionID = mRequest.getParameter(1);
     mSessionOpen = true;
 
-    mDatabase->sessionStarted();
-
     return MTP_RESPONSE_OK;
 }
 
@@ -536,7 +528,6 @@
         return MTP_RESPONSE_SESSION_NOT_OPEN;
     mSessionID = 0;
     mSessionOpen = false;
-    mDatabase->sessionEnded();
     return MTP_RESPONSE_OK;
 }
 
@@ -604,6 +595,8 @@
         return MTP_RESPONSE_INVALID_STORAGE_ID;
 
     MtpObjectHandleList* handles = mDatabase->getObjectList(storageID, format, parent);
+    if (handles == NULL)
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
     mData.putAUInt32(handles);
     delete handles;
     return MTP_RESPONSE_OK;
@@ -991,26 +984,25 @@
     }
 
     ALOGD("path: %s parent: %d storageID: %08X", (const char*)path, parent, storageID);
-    MtpObjectHandle handle = mDatabase->beginSendObject((const char*)path,
-            format, parent, storageID, mSendObjectFileSize, modifiedTime);
+    MtpObjectHandle handle = mDatabase->beginSendObject((const char*)path, format,
+            parent, storageID);
     if (handle == kInvalidObjectHandle) {
         return MTP_RESPONSE_GENERAL_ERROR;
     }
 
-  if (format == MTP_FORMAT_ASSOCIATION) {
+    if (format == MTP_FORMAT_ASSOCIATION) {
         int ret = makeFolder((const char *)path);
         if (ret)
             return MTP_RESPONSE_GENERAL_ERROR;
 
         // SendObject does not get sent for directories, so call endSendObject here instead
-        mDatabase->endSendObject(path, handle, MTP_FORMAT_ASSOCIATION, MTP_RESPONSE_OK);
-    } else {
-        mSendObjectFilePath = path;
-        // save the handle for the SendObject call, which should follow
-        mSendObjectHandle = handle;
-        mSendObjectFormat = format;
-        mSendObjectModifiedTime = modifiedTime;
+        mDatabase->endSendObject(handle, MTP_RESPONSE_OK);
     }
+    mSendObjectFilePath = path;
+    // save the handle for the SendObject call, which should follow
+    mSendObjectHandle = handle;
+    mSendObjectFormat = format;
+    mSendObjectModifiedTime = modifiedTime;
 
     mResponse.setParameter(1, storageID);
     mResponse.setParameter(2, parent);
@@ -1061,6 +1053,10 @@
         path += "/";
     path += info.mName;
 
+    result = mDatabase->beginMoveObject(objectHandle, parent, storageID);
+    if (result != MTP_RESPONSE_OK)
+        return result;
+
     if (info.mStorageID == storageID) {
         ALOGV("Moving file from %s to %s", (const char*)fromPath, (const char*)path);
         if (rename(fromPath, path)) {
@@ -1087,8 +1083,8 @@
     }
 
     // If the move failed, undo the database change
-    if (result == MTP_RESPONSE_OK)
-        result = mDatabase->moveObject(objectHandle, parent, storageID, path);
+    mDatabase->endMoveObject(info.mParent, parent, info.mStorageID, storageID, objectHandle,
+            result == MTP_RESPONSE_OK);
 
     return result;
 }
@@ -1139,8 +1135,7 @@
         path += "/";
     path += info.mName;
 
-    MtpObjectHandle handle = mDatabase->beginSendObject((const char*)path,
-            format, parent, storageID, fileLength, info.mDateModified);
+    MtpObjectHandle handle = mDatabase->beginCopyObject(objectHandle, parent, storageID);
     if (handle == kInvalidObjectHandle) {
         return MTP_RESPONSE_GENERAL_ERROR;
     }
@@ -1158,9 +1153,7 @@
         }
     }
 
-    mDatabase->endSendObject(path, handle, format, result);
-    if (format == MTP_FORMAT_ASSOCIATION)
-        mDatabase->doScanDirectory(path);
+    mDatabase->endCopyObject(handle, result);
     mResponse.setParameter(1, handle);
     return result;
 }
@@ -1190,6 +1183,15 @@
     }
     initialData = ret - MTP_CONTAINER_HEADER_SIZE;
 
+    if (mSendObjectFormat == MTP_FORMAT_ASSOCIATION) {
+        if (initialData != 0)
+            ALOGE("Expected folder size to be 0!");
+        mSendObjectHandle = kInvalidObjectHandle;
+        mSendObjectFormat = 0;
+        mSendObjectModifiedTime = 0;
+        return result;
+    }
+
     mtp_file_range  mfr;
     mfr.fd = open(mSendObjectFilePath, O_RDWR | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR);
     if (mfr.fd < 0) {
@@ -1255,8 +1257,7 @@
     // reset so we don't attempt to send the data back
     mData.reset();
 
-    mDatabase->endSendObject(mSendObjectFilePath, mSendObjectHandle, mSendObjectFormat,
-            result == MTP_RESPONSE_OK);
+    mDatabase->endSendObject(mSendObjectHandle, result == MTP_RESPONSE_OK);
     mSendObjectHandle = kInvalidObjectHandle;
     mSendObjectFormat = 0;
     mSendObjectModifiedTime = 0;
@@ -1282,16 +1283,18 @@
     MtpString filePath;
     int64_t fileLength;
     int result = mDatabase->getObjectFilePath(handle, filePath, fileLength, format);
-    if (result == MTP_RESPONSE_OK) {
-        ALOGV("deleting %s", (const char *)filePath);
-        result = mDatabase->deleteFile(handle);
-        // Don't delete the actual files unless the database deletion is allowed
-        if (result == MTP_RESPONSE_OK) {
-            deletePath((const char *)filePath);
-        }
-    }
+    if (result != MTP_RESPONSE_OK)
+        return result;
 
-    return result;
+    // Don't delete the actual files unless the database deletion is allowed
+    result = mDatabase->beginDeleteObject(handle);
+    if (result != MTP_RESPONSE_OK)
+        return result;
+
+    bool success = deletePath((const char *)filePath);
+
+    mDatabase->endDeleteObject(handle, success);
+    return success ? result : MTP_RESPONSE_PARTIAL_DELETION;
 }
 
 MtpResponseCode MtpServer::doGetObjectPropDesc() {
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
index 0204b09..af371eb 100644
--- a/media/mtp/MtpServer.h
+++ b/media/mtp/MtpServer.h
@@ -32,13 +32,13 @@
 
 namespace android {
 
-class MtpDatabase;
+class IMtpDatabase;
 class MtpStorage;
 
 class MtpServer {
 
 private:
-    MtpDatabase*        mDatabase;
+    IMtpDatabase*       mDatabase;
 
     // appear as a PTP device
     bool                mPtp;
@@ -98,7 +98,7 @@
     Vector<ObjectEdit*>  mObjectEditList;
 
 public:
-                        MtpServer(MtpDatabase* database, bool ptp,
+                        MtpServer(IMtpDatabase* database, bool ptp,
                                     const MtpString& deviceInfoManufacturer,
                                     const MtpString& deviceInfoModel,
                                     const MtpString& deviceInfoDeviceVersion,
diff --git a/media/mtp/MtpStorage.cpp b/media/mtp/MtpStorage.cpp
index d77ca72..a147325 100644
--- a/media/mtp/MtpStorage.cpp
+++ b/media/mtp/MtpStorage.cpp
@@ -17,7 +17,6 @@
 #define LOG_TAG "MtpStorage"
 
 #include "MtpDebug.h"
-#include "MtpDatabase.h"
 #include "MtpStorage.h"
 
 #include <sys/types.h>
@@ -33,14 +32,12 @@
 namespace android {
 
 MtpStorage::MtpStorage(MtpStorageID id, const char* filePath,
-        const char* description, uint64_t reserveSpace,
-        bool removable, uint64_t maxFileSize)
+        const char* description, bool removable, uint64_t maxFileSize)
     :   mStorageID(id),
         mFilePath(filePath),
         mDescription(description),
         mMaxCapacity(0),
         mMaxFileSize(maxFileSize),
-        mReserveSpace(reserveSpace),
         mRemovable(removable)
 {
     ALOGV("MtpStorage id: %d path: %s\n", id, filePath);
@@ -75,8 +72,7 @@
     struct statfs   stat;
     if (statfs(getPath(), &stat))
         return -1;
-    uint64_t freeSpace = (uint64_t)stat.f_bavail * (uint64_t)stat.f_bsize;
-    return (freeSpace > mReserveSpace ? freeSpace - mReserveSpace : 0);
+    return (uint64_t)stat.f_bavail * (uint64_t)stat.f_bsize;
 }
 
 const char* MtpStorage::getDescription() const {
diff --git a/media/mtp/MtpStorage.h b/media/mtp/MtpStorage.h
index e5a2e57..cb7e333 100644
--- a/media/mtp/MtpStorage.h
+++ b/media/mtp/MtpStorage.h
@@ -32,13 +32,11 @@
     MtpString               mDescription;
     uint64_t                mMaxCapacity;
     uint64_t                mMaxFileSize;
-    // amount of free space to leave unallocated
-    uint64_t                mReserveSpace;
     bool                    mRemovable;
 
 public:
                             MtpStorage(MtpStorageID id, const char* filePath,
-                                    const char* description, uint64_t reserveSpace,
+                                    const char* description,
                                     bool removable, uint64_t maxFileSize);
     virtual                 ~MtpStorage();
 
diff --git a/media/mtp/MtpUtils.cpp b/media/mtp/MtpUtils.cpp
index 3f5648b..51cfd7d 100644
--- a/media/mtp/MtpUtils.cpp
+++ b/media/mtp/MtpUtils.cpp
@@ -204,29 +204,39 @@
         if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' && name[2] == 0))) {
             continue;
         }
-        pathStr.append(name);
+        string childPath = pathStr + name;
+        int success;
         if (entry->d_type == DT_DIR) {
-            deleteRecursive(pathStr.c_str());
-            rmdir(pathStr.c_str());
+            deleteRecursive(childPath.c_str());
+            success = rmdir(childPath.c_str());
         } else {
-            unlink(pathStr.c_str());
+            success = unlink(childPath.c_str());
         }
+        if (success == -1)
+            PLOG(ERROR) << "Deleting path " << childPath << " failed";
     }
     closedir(dir);
 }
 
-void deletePath(const char* path) {
+bool deletePath(const char* path) {
     struct stat statbuf;
+    int success;
     if (stat(path, &statbuf) == 0) {
         if (S_ISDIR(statbuf.st_mode)) {
+            // rmdir will fail if the directory is non empty, so
+            // there is no need to keep errors from deleteRecursive
             deleteRecursive(path);
-            rmdir(path);
+            success = rmdir(path);
         } else {
-            unlink(path);
+            success = unlink(path);
         }
     } else {
-        PLOG(ERROR) << "deletePath stat failed for " << path;;
+        PLOG(ERROR) << "deletePath stat failed for " << path;
+        return false;
     }
+    if (success == -1)
+        PLOG(ERROR) << "Deleting path " << path << " failed";
+    return success == 0;
 }
 
 }  // namespace android
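
A short sketch of the new deletePath() contract: the boolean now reports whether
everything under the path was actually removed, which the delete handling in
MtpServer.cpp earlier in this change maps to MTP_RESPONSE_PARTIAL_DELETION. The
function name and path below are purely illustrative.

    #include "MtpUtils.h"

    static bool pruneDirectory(const char* path /* e.g. a stale thumbnail dir */) {
        // true only if every child entry (and the path itself) was removed
        bool fullyRemoved = android::deletePath(path);
        if (!fullyRemoved) {
            // Some entries survived; an MTP caller reports partial deletion here.
        }
        return fullyRemoved;
    }
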
diff --git a/media/mtp/MtpUtils.h b/media/mtp/MtpUtils.h
index b7c72f5..744546b 100644
--- a/media/mtp/MtpUtils.h
+++ b/media/mtp/MtpUtils.h
@@ -33,8 +33,7 @@
 int makeFolder(const char *path);
 int copyRecursive(const char *fromPath, const char *toPath);
 int copyFile(const char *fromPath, const char *toPath);
-void deleteRecursive(const char* path);
-void deletePath(const char* path);
+bool deletePath(const char* path);
 
 }; // namespace android
 
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 0d48de1..cea2f9e 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -67,9 +67,9 @@
     shared_libs: [
         "libbinder",
         "libmedia",
+        "libmedia_omx",
         "libmedia_jni",
         "libmediadrm",
-        "libskia",
         "libstagefright",
         "libstagefright_foundation",
         "liblog",
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index c4ff537..20b1667 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -37,8 +37,8 @@
         mTimestamp(timestamp), mWidth(width), mHeight(height), mNumPlanes(numPlanes) {
 }
 
-// Can only be called by free() with mLock hold
 AImage::~AImage() {
+    Mutex::Autolock _l(mLock);
     if (!mIsClosed) {
         LOG_ALWAYS_FATAL(
                 "Error: AImage %p is deleted before returning buffer to AImageReader!", this);
@@ -78,7 +78,6 @@
         ALOGE("Cannot free AImage before close!");
         return;
     }
-    Mutex::Autolock _l(mLock);
     delete this;
 }
 
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index fd6ecb5..be635ff 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -657,7 +657,7 @@
 
 EXPORT
 media_status_t AImageReader_getWindow(AImageReader* reader, /*out*/ANativeWindow** window) {
-    ALOGE("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
     if (reader == nullptr || window == nullptr) {
         ALOGE("%s: invalid argument. reader %p, window %p",
                 __FUNCTION__, reader, window);
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 128edba..6b20bca 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -52,6 +52,7 @@
 
 enum {
     kWhatActivityNotify,
+    kWhatAsyncNotify,
     kWhatRequestActivityNotifications,
     kWhatStopActivityNotifications,
 };
@@ -88,6 +89,11 @@
     bool mRequestedActivityNotification;
     OnCodecEvent mCallback;
     void *mCallbackUserData;
+
+    sp<AMessage> mAsyncNotify;
+    mutable Mutex mAsyncCallbackLock;
+    AMediaCodecOnAsyncNotifyCallback mAsyncCallback;
+    void *mAsyncCallbackUserData;
 };
 
 CodecHandler::CodecHandler(AMediaCodec *codec) {
@@ -128,6 +134,147 @@
             break;
         }
 
+        case kWhatAsyncNotify:
+        {
+             int32_t cbID;
+             if (!msg->findInt32("callbackID", &cbID)) {
+                 ALOGE("kWhatAsyncNotify: callbackID is expected.");
+                 break;
+             }
+
+             ALOGV("kWhatAsyncNotify: cbID = %d", cbID);
+
+             switch (cbID) {
+                 case MediaCodec::CB_INPUT_AVAILABLE:
+                 {
+                     int32_t index;
+                     if (!msg->findInt32("index", &index)) {
+                         ALOGE("CB_INPUT_AVAILABLE: index is expected.");
+                         break;
+                     }
+
+                     Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
+                     if (mCodec->mAsyncCallbackUserData != NULL
+                         && mCodec->mAsyncCallback.onAsyncInputAvailable != NULL) {
+                         mCodec->mAsyncCallback.onAsyncInputAvailable(
+                                 mCodec,
+                                 mCodec->mAsyncCallbackUserData,
+                                 index);
+                     }
+
+                     break;
+                 }
+
+                 case MediaCodec::CB_OUTPUT_AVAILABLE:
+                 {
+                     int32_t index;
+                     size_t offset;
+                     size_t size;
+                     int64_t timeUs;
+                     int32_t flags;
+
+                     if (!msg->findInt32("index", &index)) {
+                         ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
+                         break;
+                     }
+                     if (!msg->findSize("offset", &offset)) {
+                         ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
+                         break;
+                     }
+                     if (!msg->findSize("size", &size)) {
+                         ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
+                         break;
+                     }
+                     if (!msg->findInt64("timeUs", &timeUs)) {
+                         ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
+                         break;
+                     }
+                     if (!msg->findInt32("flags", &flags)) {
+                         ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
+                         break;
+                     }
+
+                     AMediaCodecBufferInfo bufferInfo = {
+                         (int32_t)offset,
+                         (int32_t)size,
+                         timeUs,
+                         (uint32_t)flags};
+
+                     Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
+                     if (mCodec->mAsyncCallbackUserData != NULL
+                         && mCodec->mAsyncCallback.onAsyncOutputAvailable != NULL) {
+                         mCodec->mAsyncCallback.onAsyncOutputAvailable(
+                                 mCodec,
+                                 mCodec->mAsyncCallbackUserData,
+                                 index,
+                                 &bufferInfo);
+                     }
+
+                     break;
+                 }
+
+                 case MediaCodec::CB_OUTPUT_FORMAT_CHANGED:
+                 {
+                     sp<AMessage> format;
+                     if (!msg->findMessage("format", &format)) {
+                         ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
+                         break;
+                     }
+
+                     AMediaFormat *aMediaFormat = AMediaFormat_fromMsg(&format);
+
+                     Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
+                     if (mCodec->mAsyncCallbackUserData != NULL
+                         && mCodec->mAsyncCallback.onAsyncFormatChanged != NULL) {
+                         mCodec->mAsyncCallback.onAsyncFormatChanged(
+                                 mCodec,
+                                 mCodec->mAsyncCallbackUserData,
+                                 aMediaFormat);
+                     }
+
+                     break;
+                 }
+
+                 case MediaCodec::CB_ERROR:
+                 {
+                     status_t err;
+                     int32_t actionCode;
+                     AString detail;
+                     if (!msg->findInt32("err", &err)) {
+                         ALOGE("CB_ERROR: err is expected.");
+                         break;
+                     }
+                     if (!msg->findInt32("action", &actionCode)) {
+                         ALOGE("CB_ERROR: action is expected.");
+                         break;
+                     }
+                     msg->findString("detail", &detail);
+                     ALOGE("Decoder reported error(0x%x), actionCode(%d), detail(%s)",
+                           err, actionCode, detail.c_str());
+
+                     Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
+                     if (mCodec->mAsyncCallbackUserData != NULL
+                         && mCodec->mAsyncCallback.onAsyncError != NULL) {
+                         mCodec->mAsyncCallback.onAsyncError(
+                                 mCodec,
+                                 mCodec->mAsyncCallbackUserData,
+                                 translate_error(err),
+                                 actionCode,
+                                 detail.c_str());
+                     }
+
+                     break;
+                 }
+
+                 default:
+                 {
+                     ALOGE("kWhatAsyncNotify: callbackID(%d) is unexpected.", cbID);
+                     break;
+                 }
+             }
+             break;
+        }
+
         case kWhatStopActivityNotifications:
         {
             sp<AReplyToken> replyID;
@@ -162,7 +309,7 @@
     size_t res = mData->mLooper->start(
             false,      // runOnCallingThread
             true,       // canCallJava XXX
-            PRIORITY_FOREGROUND);
+            PRIORITY_AUDIO);
     if (res != OK) {
         ALOGE("Failed to start the looper");
         AMediaCodec_delete(mData);
@@ -183,6 +330,9 @@
     mData->mRequestedActivityNotification = false;
     mData->mCallback = NULL;
 
+    mData->mAsyncCallback = {};
+    mData->mAsyncCallbackUserData = NULL;
+
     return mData;
 }
 
@@ -222,6 +372,32 @@
 }
 
 EXPORT
+media_status_t AMediaCodec_getName(
+        AMediaCodec *mData,
+        char** out_name) {
+    if (out_name == NULL) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    AString compName;
+    status_t err = mData->mCodec->getName(&compName);
+    if (err != OK) {
+        return translate_error(err);
+    }
+    *out_name = strdup(compName.c_str());
+    return AMEDIA_OK;
+}
+
+EXPORT
+void AMediaCodec_releaseName(
+        AMediaCodec * /* mData */,
+        char* name) {
+    if (name != NULL) {
+        free(name);
+    }
+}
+
+EXPORT
 media_status_t AMediaCodec_configure(
         AMediaCodec *mData,
         const AMediaFormat* format,
@@ -236,8 +412,40 @@
         surface = (Surface*) window;
     }
 
-    return translate_error(mData->mCodec->configure(nativeFormat, surface,
-            crypto ? crypto->mCrypto : NULL, flags));
+    status_t err = mData->mCodec->configure(nativeFormat, surface,
+            crypto ? crypto->mCrypto : NULL, flags);
+    if (err != OK) {
+        ALOGE("configure: err(%d), failed with format: %s",
+              err, nativeFormat->debugString(0).c_str());
+    }
+    return translate_error(err);
+}
+
+EXPORT
+media_status_t AMediaCodec_setAsyncNotifyCallback(
+        AMediaCodec *mData,
+        AMediaCodecOnAsyncNotifyCallback callback,
+        void *userdata) {
+    if (mData->mAsyncNotify == NULL && userdata != NULL) {
+        mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
+        status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
+        if (err != OK) {
+            ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
+            return translate_error(err);
+        }
+    }
+
+    Mutex::Autolock _l(mData->mAsyncCallbackLock);
+    mData->mAsyncCallback = callback;
+    mData->mAsyncCallbackUserData = userdata;
+
+    return AMEDIA_OK;
+}
+
+
+EXPORT
+media_status_t AMediaCodec_releaseCrypto(AMediaCodec *mData) {
+    return translate_error(mData->mCodec->releaseCrypto());
 }
 
 EXPORT
@@ -282,6 +490,19 @@
 
 EXPORT
 uint8_t* AMediaCodec_getInputBuffer(AMediaCodec *mData, size_t idx, size_t *out_size) {
+    if (mData->mAsyncNotify != NULL) {
+        // Asynchronous mode
+        sp<MediaCodecBuffer> abuf;
+        if (mData->mCodec->getInputBuffer(idx, &abuf) != 0) {
+            return NULL;
+        }
+
+        if (out_size != NULL) {
+            *out_size = abuf->capacity();
+        }
+        return abuf->data();
+    }
+
     android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
     if (mData->mCodec->getInputBuffers(&abufs) == 0) {
         size_t n = abufs.size();
@@ -304,6 +525,19 @@
 
 EXPORT
 uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec *mData, size_t idx, size_t *out_size) {
+    if (mData->mAsyncNotify != NULL) {
+        // Asynchronous mode
+        sp<MediaCodecBuffer> abuf;
+        if (mData->mCodec->getOutputBuffer(idx, &abuf) != 0) {
+            return NULL;
+        }
+
+        if (out_size != NULL) {
+            *out_size = abuf->capacity();
+        }
+        return abuf->data();
+    }
+
     android::Vector<android::sp<android::MediaCodecBuffer> > abufs;
     if (mData->mCodec->getOutputBuffers(&abufs) == 0) {
         size_t n = abufs.size();
@@ -367,6 +601,20 @@
 }
 
 EXPORT
+AMediaFormat* AMediaCodec_getInputFormat(AMediaCodec *mData) {
+    sp<AMessage> format;
+    mData->mCodec->getInputFormat(&format);
+    return AMediaFormat_fromMsg(&format);
+}
+
+EXPORT
+AMediaFormat* AMediaCodec_getBufferFormat(AMediaCodec *mData, size_t index) {
+    sp<AMessage> format;
+    mData->mCodec->getOutputFormat(index, &format);
+    return AMediaFormat_fromMsg(&format);
+}
+
+EXPORT
 media_status_t AMediaCodec_releaseOutputBuffer(AMediaCodec *mData, size_t idx, bool render) {
     if (render) {
         return translate_error(mData->mCodec->renderOutputBufferAndRelease(idx));
@@ -535,6 +783,16 @@
     return translate_error(err);
 }
 
+EXPORT
+bool AMediaCodecActionCode_isRecoverable(int32_t actionCode) {
+    return (actionCode == ACTION_CODE_RECOVERABLE);
+}
+
+EXPORT
+bool AMediaCodecActionCode_isTransient(int32_t actionCode) {
+    return (actionCode == ACTION_CODE_TRANSIENT);
+}
+
 
 EXPORT
 void AMediaCodecCryptoInfo_setPattern(AMediaCodecCryptoInfo *info,
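
A compact usage sketch (not part of this patch) for the NDK entry points added above:
AMediaCodec_getName()/AMediaCodec_releaseName() and AMediaCodec_setAsyncNotifyCallback().
The handler names are hypothetical, and the callback struct is assumed to list its
members in the order onAsyncInputAvailable, onAsyncOutputAvailable, onAsyncFormatChanged,
onAsyncError, matching the typedefs in NdkMediaCodec.h; codec configuration and start
come from the pre-existing API.

    #include <media/NdkMediaCodec.h>

    static void onInput(AMediaCodec* codec, void* userdata, int32_t index) {
        // fill and queue input buffer `index` here
    }
    static void onOutput(AMediaCodec* codec, void* userdata, int32_t index,
                         AMediaCodecBufferInfo* info) {
        AMediaCodec_releaseOutputBuffer(codec, index, false /* render */);
    }
    static void onFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
        // inspect the new output format
    }
    static void onError(AMediaCodec* codec, void* userdata, media_status_t err,
                        int32_t actionCode, const char* detail) {
        if (!AMediaCodecActionCode_isRecoverable(actionCode)
                && !AMediaCodecActionCode_isTransient(actionCode)) {
            // fatal error: stop and recreate the codec
        }
    }

    static media_status_t startAsyncDecoder(AMediaCodec* codec, AMediaFormat* format) {
        char* name = NULL;
        if (AMediaCodec_getName(codec, &name) == AMEDIA_OK) {
            // inspect or log `name`, then hand the string back to the library
            AMediaCodec_releaseName(codec, name);
        }

        // Per this change, the async callback is only installed when userdata is
        // non-null, so pass some context object.
        static int sContext;
        AMediaCodecOnAsyncNotifyCallback cb = { onInput, onOutput, onFormatChanged, onError };
        media_status_t err = AMediaCodec_setAsyncNotifyCallback(codec, cb, &sContext);
        if (err != AMEDIA_OK) return err;

        err = AMediaCodec_configure(codec, format, NULL /* surface */, NULL /* crypto */, 0);
        if (err != AMEDIA_OK) return err;
        return AMediaCodec_start(codec);
    }
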
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index ee27520..a9025c0 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -125,6 +125,14 @@
                 ret.appendFormat("double(%f)", val);
                 break;
             }
+            case AMessage::kTypeRect:
+            {
+                int32_t left, top, right, bottom;
+                f->findRect(name, &left, &top, &right, &bottom);
+                ret.appendFormat("Rect(%" PRId32 ", %" PRId32 ", %" PRId32 ", %" PRId32 ")",
+                                 left, top, right, bottom);
+                break;
+            }
             case AMessage::kTypeString:
             {
                 AString val;
@@ -165,11 +173,22 @@
 }
 
 EXPORT
+bool AMediaFormat_getDouble(AMediaFormat* format, const char *name, double *out) {
+    return format->mFormat->findDouble(name, out);
+}
+
+EXPORT
 bool AMediaFormat_getSize(AMediaFormat* format, const char *name, size_t *out) {
     return format->mFormat->findSize(name, out);
 }
 
 EXPORT
+bool AMediaFormat_getRect(AMediaFormat* format, const char *name,
+                          int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) {
+    return format->mFormat->findRect(name, left, top, right, bottom);
+}
+
+EXPORT
 bool AMediaFormat_getBuffer(AMediaFormat* format, const char *name, void** data, size_t *outsize) {
     sp<ABuffer> buf;
     if (format->mFormat->findBuffer(name, &buf)) {
@@ -216,6 +235,22 @@
 }
 
 EXPORT
+void AMediaFormat_setDouble(AMediaFormat* format, const char* name, double value) {
+    format->mFormat->setDouble(name, value);
+}
+
+EXPORT
+void AMediaFormat_setSize(AMediaFormat* format, const char* name, size_t value) {
+    format->mFormat->setSize(name, value);
+}
+
+EXPORT
+void AMediaFormat_setRect(AMediaFormat* format, const char *name,
+                          int32_t left, int32_t top, int32_t right, int32_t bottom) {
+    format->mFormat->setRect(name, left, top, right, bottom);
+}
+
+EXPORT
 void AMediaFormat_setString(AMediaFormat* format, const char* name, const char* value) {
     // AMessage::setString() makes a copy of the string
     format->mFormat->setString(name, value, strlen(value));
@@ -233,30 +268,61 @@
 }
 
 
+EXPORT const char* AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR = "aac-drc-cut-level";
+EXPORT const char* AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR = "aac-drc-boost-level";
+EXPORT const char* AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION = "aac-drc-heavy-compression";
+EXPORT const char* AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL = "aac-target-ref-level";
+EXPORT const char* AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL = "aac-encoded-target-level";
+EXPORT const char* AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT = "aac-max-output-channel_count";
 EXPORT const char* AMEDIAFORMAT_KEY_AAC_PROFILE = "aac-profile";
+EXPORT const char* AMEDIAFORMAT_KEY_AAC_SBR_MODE = "aac-sbr-mode";
+EXPORT const char* AMEDIAFORMAT_KEY_AUDIO_SESSION_ID = "audio-session-id";
+EXPORT const char* AMEDIAFORMAT_KEY_BITRATE_MODE = "bitrate-mode";
 EXPORT const char* AMEDIAFORMAT_KEY_BIT_RATE = "bitrate";
+EXPORT const char* AMEDIAFORMAT_KEY_CAPTURE_RATE = "capture-rate";
 EXPORT const char* AMEDIAFORMAT_KEY_CHANNEL_COUNT = "channel-count";
 EXPORT const char* AMEDIAFORMAT_KEY_CHANNEL_MASK = "channel-mask";
 EXPORT const char* AMEDIAFORMAT_KEY_COLOR_FORMAT = "color-format";
+EXPORT const char* AMEDIAFORMAT_KEY_COLOR_RANGE = "color-range";
+EXPORT const char* AMEDIAFORMAT_KEY_COLOR_STANDARD = "color-standard";
+EXPORT const char* AMEDIAFORMAT_KEY_COLOR_TRANSFER = "color-transfer";
+EXPORT const char* AMEDIAFORMAT_KEY_COMPLEXITY = "complexity";
+EXPORT const char* AMEDIAFORMAT_KEY_DISPLAY_CROP = "crop";
 EXPORT const char* AMEDIAFORMAT_KEY_DURATION = "durationUs";
 EXPORT const char* AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL = "flac-compression-level";
 EXPORT const char* AMEDIAFORMAT_KEY_FRAME_RATE = "frame-rate";
+EXPORT const char* AMEDIAFORMAT_KEY_GRID_COLS = "grid-cols";
+EXPORT const char* AMEDIAFORMAT_KEY_GRID_HEIGHT = "grid-height";
+EXPORT const char* AMEDIAFORMAT_KEY_GRID_ROWS = "grid-rows";
+EXPORT const char* AMEDIAFORMAT_KEY_GRID_WIDTH = "grid-width";
+EXPORT const char* AMEDIAFORMAT_KEY_HDR_STATIC_INFO = "hdr-static-info";
 EXPORT const char* AMEDIAFORMAT_KEY_HEIGHT = "height";
+EXPORT const char* AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD = "intra-refresh-period";
 EXPORT const char* AMEDIAFORMAT_KEY_IS_ADTS = "is-adts";
 EXPORT const char* AMEDIAFORMAT_KEY_IS_AUTOSELECT = "is-autoselect";
 EXPORT const char* AMEDIAFORMAT_KEY_IS_DEFAULT = "is-default";
 EXPORT const char* AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE = "is-forced-subtitle";
 EXPORT const char* AMEDIAFORMAT_KEY_I_FRAME_INTERVAL = "i-frame-interval";
 EXPORT const char* AMEDIAFORMAT_KEY_LANGUAGE = "language";
+EXPORT const char* AMEDIAFORMAT_KEY_LATENCY = "latency";
+EXPORT const char* AMEDIAFORMAT_KEY_LEVEL = "level";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_WIDTH = "max-width";
 EXPORT const char* AMEDIAFORMAT_KEY_MIME = "mime";
+EXPORT const char* AMEDIAFORMAT_KEY_OPERATING_RATE = "operating-rate";
+EXPORT const char* AMEDIAFORMAT_KEY_PCM_ENCODING = "pcm-encoding";
+EXPORT const char* AMEDIAFORMAT_KEY_PRIORITY = "priority";
+EXPORT const char* AMEDIAFORMAT_KEY_PROFILE = "profile";
 EXPORT const char* AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP = "push-blank-buffers-on-shutdown";
 EXPORT const char* AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER = "repeat-previous-frame-after";
+EXPORT const char* AMEDIAFORMAT_KEY_ROTATION = "rotation-degrees";
 EXPORT const char* AMEDIAFORMAT_KEY_SAMPLE_RATE = "sample-rate";
-EXPORT const char* AMEDIAFORMAT_KEY_WIDTH = "width";
+EXPORT const char* AMEDIAFORMAT_KEY_SLICE_HEIGHT = "slice-height";
 EXPORT const char* AMEDIAFORMAT_KEY_STRIDE = "stride";
+EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYERING = "ts-schema";
+EXPORT const char* AMEDIAFORMAT_KEY_TRACK_ID = "track-id";
+EXPORT const char* AMEDIAFORMAT_KEY_WIDTH = "width";
 
 
 } // extern "C"
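
A small sketch exercising the accessors added above. AMEDIAFORMAT_KEY_DISPLAY_CROP
maps to the "crop" rect entry of an output format, and whether a key such as
AMEDIAFORMAT_KEY_CAPTURE_RATE is stored as a double depends on the producer, so both
lookups are guarded. AMediaCodec_getOutputFormat() and AMediaFormat_delete() are
pre-existing NDK calls; the function name is illustrative.

    #include <media/NdkMediaCodec.h>
    #include <media/NdkMediaFormat.h>

    static void inspectOutputFormat(AMediaCodec* codec) {
        AMediaFormat* fmt = AMediaCodec_getOutputFormat(codec);

        int32_t left, top, right, bottom;
        if (AMediaFormat_getRect(fmt, AMEDIAFORMAT_KEY_DISPLAY_CROP,
                                 &left, &top, &right, &bottom)) {
            // visible region of the decoded frame
        }

        double captureRate;
        if (AMediaFormat_getDouble(fmt, AMEDIAFORMAT_KEY_CAPTURE_RATE, &captureRate)) {
            // present only when the producer stored it as a double
        }

        AMediaFormat_delete(fmt);
    }
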
diff --git a/media/ndk/OWNERS b/media/ndk/OWNERS
new file mode 100644
index 0000000..11e8340
--- /dev/null
+++ b/media/ndk/OWNERS
@@ -0,0 +1,5 @@
+marcone@google.com
+# For AImage/AImageReader
+etalvala@google.com
+yinchiayeh@google.com
+zhijunhe@google.com
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index a8667c9..e3b99d0 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -305,9 +305,9 @@
 /**
  * AImageReader constructor similar to {@link AImageReader_new} that takes an additional parameter
  * for the consumer usage. All other parameters and the return values are identical to those passed
- * to {@line AImageReader_new}.
+ * to {@link AImageReader_new}.
  *
- * <p>If the {@code format} is {@link AIMAGE_FORMAT_PRIVATE}, the created {@link AImageReader}
+ * <p>If the \c format is {@link AIMAGE_FORMAT_PRIVATE}, the created {@link AImageReader}
  * will produce images whose contents are not directly accessible by the application. The application can
  * still acquire images from this {@link AImageReader} and access {@link AHardwareBuffer} via
  * {@link AImage_getHardwareBuffer()}. The {@link AHardwareBuffer} gained this way can then
@@ -322,7 +322,7 @@
  * AImageReader}s using other format such as {@link AIMAGE_FORMAT_YUV_420_888}.</p>
  *
  * <p>Note that not all format and usage flag combination is supported by the {@link AImageReader},
- * especially if {@code format} is {@link AIMAGE_FORMAT_PRIVATE}, {@code usage} must not include either
+ * especially if \c format is {@link AIMAGE_FORMAT_PRIVATE}, \c usage must not include either
  * {@link AHARDWAREBUFFER_USAGE_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_READ_OFTEN}</p>
  *
  * @param width The default width in pixels of the Images that this reader will produce.
@@ -367,7 +367,7 @@
         int32_t width, int32_t height, int32_t format, uint64_t usage, int32_t maxImages,
         /*out*/ AImageReader** reader);
 
-/*
+/**
  * Acquire the next {@link AImage} from the image reader's queue asynchronously.
  *
  * <p>AImageReader acquire method similar to {@link AImageReader_acquireNextImage} that takes an
@@ -377,7 +377,7 @@
  * @param acquireFenceFd A sync fence fd defined in {@link sync.h}, which is used to signal when the
  *         buffer is ready to consume. When synchronization fence is not needed, fence will be set
  *         to -1 and the {@link AImage} returned is ready for use immediately. Otherwise, user shall
- *         use syscalls such as {@code poll()}, {@code epoll()}, {@code select()} to wait for the
+ *         use syscalls such as \c poll(), \c epoll(), \c select() to wait for the
  *         fence fd to change status before attempting to access the {@link AImage} returned.
  *
  * @see sync.h
@@ -386,7 +386,7 @@
 media_status_t AImageReader_acquireNextImageAsync(
         AImageReader* reader, /*out*/AImage** image, /*out*/int* acquireFenceFd);
 
-/*
+/**
  * Acquire the latest {@link AImage} from the image reader's queue asynchronously, dropping older
  * images.
  *
@@ -397,7 +397,7 @@
  * @param acquireFenceFd A sync fence fd defined in {@link sync.h}, which is used to signal when the
  *         buffer is ready to consume. When synchronization fence is not needed, fence will be set
  *         to -1 and the {@link AImage} returned is ready for use immediately. Otherwise, user shall
- *         use syscalls such as {@code poll()}, {@code epoll()}, {@code select()} to wait for the
+ *         use syscalls such as \c poll(), \c epoll(), \c select() to wait for the
  *         fence fd to change status before attempting to access the {@link AImage} returned.
  *
  * @see sync.h
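
Editor's illustrative note: the documentation above ends with the fence-fd contract for the asynchronous acquire calls — a fence of -1 means the image is ready immediately, otherwise the caller must wait on the fd before touching the image. A minimal consumer-side sketch of that pattern follows; the poll timeout, the error handling, and the helper name are assumptions for illustration, only the NDK calls come from the header.

#include <errno.h>
#include <poll.h>
#include <unistd.h>

#include <media/NdkImageReader.h>

// Hedged sketch: acquire the next image asynchronously and, if a fence fd is
// returned, wait for it to signal before using the AImage.
static media_status_t acquireAndWait(AImageReader* reader, AImage** outImage) {
    int fenceFd = -1;
    media_status_t status = AImageReader_acquireNextImageAsync(reader, outImage, &fenceFd);
    if (status != AMEDIA_OK) {
        return status;
    }
    if (fenceFd >= 0) {
        // The image is not ready yet: block until the sync fence signals.
        struct pollfd pfd;
        pfd.fd = fenceFd;
        pfd.events = POLLIN;
        pfd.revents = 0;
        while (poll(&pfd, 1, 3000 /* ms */) == -1 && errno == EINTR) { }
        close(fenceFd);
    }
    // The AImage can now be accessed, e.g. via AImage_getHardwareBuffer().
    return AMEDIA_OK;
}
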
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 144de2d..f4a51d0 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -53,11 +53,63 @@
 typedef struct AMediaCodecCryptoInfo AMediaCodecCryptoInfo;
 
 enum {
+    AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG = 2,
     AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM = 4,
+    AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME = 8,
+
     AMEDIACODEC_CONFIGURE_FLAG_ENCODE = 1,
     AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED = -3,
     AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED = -2,
-    AMEDIACODEC_INFO_TRY_AGAIN_LATER = -1
+    AMEDIACODEC_INFO_TRY_AGAIN_LATER = -1,
+};
+
+/**
+ * Called when an input buffer becomes available.
+ * The specified index is the index of the available input buffer.
+ */
+typedef void (*AMediaCodecOnAsyncInputAvailable)(
+        AMediaCodec *codec,
+        void *userdata,
+        int32_t index);
+/**
+ * Called when an output buffer becomes available.
+ * The specified index is the index of the available output buffer.
+ * The specified bufferInfo contains information regarding the available output buffer.
+ */
+typedef void (*AMediaCodecOnAsyncOutputAvailable)(
+        AMediaCodec *codec,
+        void *userdata,
+        int32_t index,
+        AMediaCodecBufferInfo *bufferInfo);
+/**
+ * Called when the output format has changed.
+ * The specified format contains the new output format.
+ */
+typedef void (*AMediaCodecOnAsyncFormatChanged)(
+        AMediaCodec *codec,
+        void *userdata,
+        AMediaFormat *format);
+/**
+ * Called when the MediaCodec encountered an error.
+ * The specified actionCode indicates the possible actions that the client can take,
+ * and it can be checked by calling AMediaCodecActionCode_isRecoverable or
+ * AMediaCodecActionCode_isTransient. If both AMediaCodecActionCode_isRecoverable()
+ * and AMediaCodecActionCode_isTransient() return false, then the codec error is fatal
+ * and the codec must be deleted.
+ * The specified detail may contain more detailed messages about this error.
+ */
+typedef void (*AMediaCodecOnAsyncError)(
+        AMediaCodec *codec,
+        void *userdata,
+        media_status_t error,
+        int32_t actionCode,
+        const char *detail);
+
+struct AMediaCodecOnAsyncNotifyCallback {
+      AMediaCodecOnAsyncInputAvailable  onAsyncInputAvailable;
+      AMediaCodecOnAsyncOutputAvailable onAsyncOutputAvailable;
+      AMediaCodecOnAsyncFormatChanged   onAsyncFormatChanged;
+      AMediaCodecOnAsyncError           onAsyncError;
 };
 
 #if __ANDROID_API__ >= 21
@@ -180,6 +232,12 @@
 AMediaFormat* AMediaCodec_getOutputFormat(AMediaCodec*);
 
 /**
+ * Get the format of the buffer. The specified buffer index must have been previously obtained from
+ * dequeueOutputBuffer.
+ */
+AMediaFormat* AMediaCodec_getBufferFormat(AMediaCodec*, size_t index);
+
+/**
  * If you are done with a buffer, use this call to return the buffer to
  * the codec. If you previously specified a surface when configuring this
  * video decoder you can optionally render the buffer.
@@ -283,6 +341,71 @@
 
 #endif /* __ANDROID_API__ >= 26 */
 
+#if __ANDROID_API__ >= 28
+
+/**
+ * Get the component name. If the codec was created by createDecoderByType
+ * or createEncoderByType, what component is chosen is not known beforehand.
+ * Caller shall call AMediaCodec_releaseName to free the returned pointer.
+ */
+media_status_t AMediaCodec_getName(AMediaCodec*, char** out_name);
+
+/**
+ * Free the memory pointed to by name, which is returned by AMediaCodec_getName.
+ */
+void AMediaCodec_releaseName(AMediaCodec*, char* name);
+
+/**
+ * Set an asynchronous callback for actionable AMediaCodec events.
+ * When the asynchronous callback is enabled, the client should not call
+ * AMediaCodec_getInputBuffers(), AMediaCodec_getOutputBuffers(),
+ * AMediaCodec_dequeueInputBuffer() or AMediaCodec_dequeueOutputBuffer().
+ *
+ * Also, AMediaCodec_flush() behaves differently in asynchronous mode.
+ * After calling AMediaCodec_flush(), you must call AMediaCodec_start() to
+ * "resume" receiving input buffers, even if an input surface was created.
+ *
+ * Refer to the definition of AMediaCodecOnAsyncNotifyCallback for how each
+ * callback function is called and what is passed to it.
+ * The specified userdata is the pointer passed to those callback functions when
+ * they are called.
+ *
+ * All callbacks are fired on one NDK internal thread.
+ * AMediaCodec_setAsyncNotifyCallback should not be called on the callback thread.
+ * No heavy-duty tasks should be performed on the callback thread.
+ */
+media_status_t AMediaCodec_setAsyncNotifyCallback(
+        AMediaCodec*,
+        AMediaCodecOnAsyncNotifyCallback callback,
+        void *userdata);
+
+/**
+ * Release the crypto if applicable.
+ */
+media_status_t AMediaCodec_releaseCrypto(AMediaCodec*);
+
+/**
+ * Call this after AMediaCodec_configure() returns successfully to get the input
+ * format accepted by the codec. Do this to determine what optional configuration
+ * parameters were supported by the codec.
+ */
+AMediaFormat* AMediaCodec_getInputFormat(AMediaCodec*);
+
+/**
+ * Returns true if the codec cannot proceed further, but can be recovered by stopping,
+ * configuring, and starting again.
+ */
+bool AMediaCodecActionCode_isRecoverable(int32_t actionCode);
+
+/**
+ * Returns true if the codec error is a transient issue, perhaps due to
+ * resource constraints, and the method (or encoding/decoding) may be
+ * retried at a later time.
+ */
+bool AMediaCodecActionCode_isTransient(int32_t actionCode);
+
+#endif /* __ANDROID_API__ >= 28 */
+
 typedef enum {
     AMEDIACODECRYPTOINFO_MODE_CLEAR = 0,
     AMEDIACODECRYPTOINFO_MODE_AES_CTR = 1,
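
Editor's illustrative note: the asynchronous-callback additions above are struct based — the client fills an AMediaCodecOnAsyncNotifyCallback with four function pointers and registers it together with a userdata pointer. A hedged sketch of the wiring follows; the callback bodies and helper names are assumptions for illustration, only the types and NDK entry points come from the header.

#include <media/NdkMediaCodec.h>

// Hedged sketch of registering the API-28 async callbacks. All callbacks fire on
// one NDK-internal thread, so real implementations should hand heavy work to
// another thread rather than doing it here.
static void onInput(AMediaCodec* codec, void* userdata, int32_t index) {
    // Fill the input buffer at 'index' and submit it with AMediaCodec_queueInputBuffer().
}

static void onOutput(AMediaCodec* codec, void* userdata, int32_t index,
                     AMediaCodecBufferInfo* info) {
    // Consume the output, then return the buffer to the codec.
    AMediaCodec_releaseOutputBuffer(codec, index, false /* render */);
}

static void onFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
    // The output format changed; inspect 'format' if needed.
}

static void onError(AMediaCodec* codec, void* userdata, media_status_t error,
                    int32_t actionCode, const char* detail) {
    if (!AMediaCodecActionCode_isRecoverable(actionCode) &&
        !AMediaCodecActionCode_isTransient(actionCode)) {
        // Fatal error (e.g. AMEDIACODEC_ERROR_RECLAIMED): the codec must be deleted.
    }
}

static media_status_t enableAsyncMode(AMediaCodec* codec, void* userdata) {
    AMediaCodecOnAsyncNotifyCallback cb = { onInput, onOutput, onFormatChanged, onError };
    return AMediaCodec_setAsyncNotifyCallback(codec, cb, userdata);
}
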
diff --git a/media/ndk/include/media/NdkMediaError.h b/media/ndk/include/media/NdkMediaError.h
index da61b64..e48fcbe 100644
--- a/media/ndk/include/media/NdkMediaError.h
+++ b/media/ndk/include/media/NdkMediaError.h
@@ -35,6 +35,17 @@
 typedef enum {
     AMEDIA_OK = 0,
 
+    /**
+     * This indicates that a required resource could not be allocated.
+     */
+    AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE = 1100,
+
+    /**
+     * This indicates the resource manager reclaimed the media resource used by the codec.
+     * With this error, the codec must be released, as it has moved to a terminal state.
+     */
+    AMEDIACODEC_ERROR_RECLAIMED             = 1101,
+
     AMEDIA_ERROR_BASE                  = -10000,
     AMEDIA_ERROR_UNKNOWN               = AMEDIA_ERROR_BASE,
     AMEDIA_ERROR_MALFORMED             = AMEDIA_ERROR_BASE - 1,
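
Editor's illustrative note: the two resource-related codes added above are positive values, distinct from the negative AMEDIA_ERROR_* family. A hedged sketch of how a client might react to them is below; which call actually surfaces them is an assumption made for illustration.

#include <media/NdkMediaCodec.h>
#include <media/NdkMediaError.h>

// Hedged sketch: react to the new resource-related status codes.
static bool startOrRecover(AMediaCodec* codec) {
    media_status_t err = AMediaCodec_start(codec);
    if (err == AMEDIACODEC_ERROR_RECLAIMED) {
        // The resource manager reclaimed the codec; it is in a terminal state.
        AMediaCodec_delete(codec);
        return false;
    }
    if (err == AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE) {
        // A required resource could not be allocated; the caller may retry later.
        return false;
    }
    return err == AMEDIA_OK;
}
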
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 018ab76..b6489c7 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -51,6 +51,7 @@
 bool AMediaFormat_getInt32(AMediaFormat*, const char *name, int32_t *out);
 bool AMediaFormat_getInt64(AMediaFormat*, const char *name, int64_t *out);
 bool AMediaFormat_getFloat(AMediaFormat*, const char *name, float *out);
+bool AMediaFormat_getSize(AMediaFormat*, const char *name, size_t *out);
 /**
  * The returned data is owned by the format and remains valid as long as the named entry
  * is part of the format.
@@ -80,33 +81,75 @@
 /**
  * XXX should these be ints/enums that we look up in a table as needed?
  */
+extern const char* AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR;
+extern const char* AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR;
+extern const char* AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION;
+extern const char* AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL;
+extern const char* AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL;
+extern const char* AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT;
 extern const char* AMEDIAFORMAT_KEY_AAC_PROFILE;
+extern const char* AMEDIAFORMAT_KEY_AAC_SBR_MODE;
+extern const char* AMEDIAFORMAT_KEY_AUDIO_SESSION_ID;
+extern const char* AMEDIAFORMAT_KEY_BITRATE_MODE;
 extern const char* AMEDIAFORMAT_KEY_BIT_RATE;
+extern const char* AMEDIAFORMAT_KEY_CAPTURE_RATE;
 extern const char* AMEDIAFORMAT_KEY_CHANNEL_COUNT;
 extern const char* AMEDIAFORMAT_KEY_CHANNEL_MASK;
 extern const char* AMEDIAFORMAT_KEY_COLOR_FORMAT;
+extern const char* AMEDIAFORMAT_KEY_COLOR_RANGE;
+extern const char* AMEDIAFORMAT_KEY_COLOR_STANDARD;
+extern const char* AMEDIAFORMAT_KEY_COLOR_TRANSFER;
+extern const char* AMEDIAFORMAT_KEY_COMPLEXITY;
+extern const char* AMEDIAFORMAT_KEY_DISPLAY_CROP;
 extern const char* AMEDIAFORMAT_KEY_DURATION;
 extern const char* AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL;
 extern const char* AMEDIAFORMAT_KEY_FRAME_RATE;
+extern const char* AMEDIAFORMAT_KEY_GRID_COLS;
+extern const char* AMEDIAFORMAT_KEY_GRID_HEIGHT;
+extern const char* AMEDIAFORMAT_KEY_GRID_ROWS;
+extern const char* AMEDIAFORMAT_KEY_GRID_WIDTH;
+extern const char* AMEDIAFORMAT_KEY_HDR_STATIC_INFO;
 extern const char* AMEDIAFORMAT_KEY_HEIGHT;
+extern const char* AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD;
 extern const char* AMEDIAFORMAT_KEY_IS_ADTS;
 extern const char* AMEDIAFORMAT_KEY_IS_AUTOSELECT;
 extern const char* AMEDIAFORMAT_KEY_IS_DEFAULT;
 extern const char* AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE;
 extern const char* AMEDIAFORMAT_KEY_I_FRAME_INTERVAL;
 extern const char* AMEDIAFORMAT_KEY_LANGUAGE;
+extern const char* AMEDIAFORMAT_KEY_LATENCY;
+extern const char* AMEDIAFORMAT_KEY_LEVEL;
 extern const char* AMEDIAFORMAT_KEY_MAX_HEIGHT;
 extern const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE;
 extern const char* AMEDIAFORMAT_KEY_MAX_WIDTH;
 extern const char* AMEDIAFORMAT_KEY_MIME;
+extern const char* AMEDIAFORMAT_KEY_OPERATING_RATE;
+extern const char* AMEDIAFORMAT_KEY_PCM_ENCODING;
+extern const char* AMEDIAFORMAT_KEY_PRIORITY;
+extern const char* AMEDIAFORMAT_KEY_PROFILE;
 extern const char* AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP;
 extern const char* AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER;
+extern const char* AMEDIAFORMAT_KEY_ROTATION;
 extern const char* AMEDIAFORMAT_KEY_SAMPLE_RATE;
-extern const char* AMEDIAFORMAT_KEY_WIDTH;
+extern const char* AMEDIAFORMAT_KEY_SLICE_HEIGHT;
 extern const char* AMEDIAFORMAT_KEY_STRIDE;
+extern const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYERING;
+extern const char* AMEDIAFORMAT_KEY_TRACK_ID;
+extern const char* AMEDIAFORMAT_KEY_WIDTH;
 
 #endif /* __ANDROID_API__ >= 21 */
 
+#if __ANDROID_API__ >= 28
+bool AMediaFormat_getDouble(AMediaFormat*, const char *name, double *out);
+bool AMediaFormat_getRect(AMediaFormat*, const char *name,
+                          int32_t *left, int32_t *top, int32_t *right, int32_t *bottom);
+
+void AMediaFormat_setDouble(AMediaFormat*, const char* name, double value);
+void AMediaFormat_setSize(AMediaFormat*, const char* name, size_t value);
+void AMediaFormat_setRect(AMediaFormat*, const char* name,
+                          int32_t left, int32_t top, int32_t right, int32_t bottom);
+#endif /* __ANDROID_API__ >= 28 */
+
 __END_DECLS
 
 #endif // _NDK_MEDIA_FORMAT_H
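
Editor's illustrative note: alongside the new key constants, the header now exposes double, size_t, and rectangle accessors behind the API-28 guard. A short hedged sketch of the new setters and a rectangle round trip follows; the specific keys and values used are illustrative assumptions.

#include <media/NdkMediaFormat.h>

// Hedged sketch exercising the API-28 AMediaFormat additions declared above.
static void fillVideoFormat(AMediaFormat* fmt) {
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_ROTATION, 90);
    AMediaFormat_setDouble(fmt, AMEDIAFORMAT_KEY_CAPTURE_RATE, 29.97);
    AMediaFormat_setRect(fmt, AMEDIAFORMAT_KEY_DISPLAY_CROP, 0, 0, 1919, 1079);

    int32_t left, top, right, bottom;
    if (AMediaFormat_getRect(fmt, AMEDIAFORMAT_KEY_DISPLAY_CROP,
                             &left, &top, &right, &bottom)) {
        // The crop rectangle read back from the format.
    }
}
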
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index d7ad370..f2d97cd 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -26,30 +26,63 @@
     AImage_getPlaneRowStride; # introduced=24
     AImage_getTimestamp; # introduced=24
     AImage_getWidth; # introduced=24
+    AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR; # var introduced=28
+    AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR; # var introduced=28
+    AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION; # var introduced=28
+    AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL; # var introduced=28
+    AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL; # var introduced=28
+    AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT; # var introduced=28
     AMEDIAFORMAT_KEY_AAC_PROFILE; # var
+    AMEDIAFORMAT_KEY_AAC_SBR_MODE; # var introduced=28
+    AMEDIAFORMAT_KEY_AUDIO_SESSION_ID; # var introduced=28
+    AMEDIAFORMAT_KEY_BITRATE_MODE; # var introduced=28
     AMEDIAFORMAT_KEY_BIT_RATE; # var
+    AMEDIAFORMAT_KEY_CAPTURE_RATE; # var introduced=28
     AMEDIAFORMAT_KEY_CHANNEL_COUNT; # var
     AMEDIAFORMAT_KEY_CHANNEL_MASK; # var
     AMEDIAFORMAT_KEY_COLOR_FORMAT; # var
+    AMEDIAFORMAT_KEY_COLOR_RANGE; # var introduced=28
+    AMEDIAFORMAT_KEY_COLOR_STANDARD; # var introduced=28
+    AMEDIAFORMAT_KEY_COLOR_TRANSFER; # var introduced=28
+    AMEDIAFORMAT_KEY_COMPLEXITY; # var introduced=28
+    AMEDIAFORMAT_KEY_DISPLAY_CROP; # var introduced=28
     AMEDIAFORMAT_KEY_DURATION; # var
     AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL; # var
     AMEDIAFORMAT_KEY_FRAME_RATE; # var
+    AMEDIAFORMAT_KEY_GRID_COLS; # var introduced=28
+    AMEDIAFORMAT_KEY_GRID_HEIGHT; # var introduced=28
+    AMEDIAFORMAT_KEY_GRID_ROWS; # var introduced=28
+    AMEDIAFORMAT_KEY_GRID_WIDTH; # var introduced=28
+    AMEDIAFORMAT_KEY_HDR_STATIC_INFO; # var introduced=28
     AMEDIAFORMAT_KEY_HEIGHT; # var
+    AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD; # var introduced=28
     AMEDIAFORMAT_KEY_IS_ADTS; # var
     AMEDIAFORMAT_KEY_IS_AUTOSELECT; # var
     AMEDIAFORMAT_KEY_IS_DEFAULT; # var
     AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE; # var
     AMEDIAFORMAT_KEY_I_FRAME_INTERVAL; # var
     AMEDIAFORMAT_KEY_LANGUAGE; # var
+    AMEDIAFORMAT_KEY_LATENCY; # var introduced=28
+    AMEDIAFORMAT_KEY_LEVEL; # var introduced=28
     AMEDIAFORMAT_KEY_MAX_HEIGHT; # var
     AMEDIAFORMAT_KEY_MAX_INPUT_SIZE; # var
     AMEDIAFORMAT_KEY_MAX_WIDTH; # var
     AMEDIAFORMAT_KEY_MIME; # var
+    AMEDIAFORMAT_KEY_OPERATING_RATE; # var introduced=28
+    AMEDIAFORMAT_KEY_PCM_ENCODING; # var introduced=28
+    AMEDIAFORMAT_KEY_PRIORITY; # var introduced=28
+    AMEDIAFORMAT_KEY_PROFILE; # var introduced=28
     AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP; # var
     AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER; # var
+    AMEDIAFORMAT_KEY_ROTATION; # var introduced=28
     AMEDIAFORMAT_KEY_SAMPLE_RATE; # var
+    AMEDIAFORMAT_KEY_SLICE_HEIGHT; # var introduced=28
     AMEDIAFORMAT_KEY_STRIDE; # var
+    AMEDIAFORMAT_KEY_TEMPORAL_LAYERING; # var introduced=28
+    AMEDIAFORMAT_KEY_TRACK_ID; # var introduced=28
     AMEDIAFORMAT_KEY_WIDTH; # var
+    AMediaCodecActionCode_isRecoverable; # introduced=28
+    AMediaCodecActionCode_isTransient; # introduced=28
     AMediaCodecCryptoInfo_delete;
     AMediaCodecCryptoInfo_getClearBytes;
     AMediaCodecCryptoInfo_getEncryptedBytes;
@@ -68,12 +101,16 @@
     AMediaCodec_dequeueOutputBuffer;
     AMediaCodec_flush;
     AMediaCodec_getInputBuffer;
+    AMediaCodec_getInputFormat; # introduced=28
+    AMediaCodec_getName; # introduced=28
     AMediaCodec_getOutputBuffer;
     AMediaCodec_getOutputFormat;
     AMediaCodec_queueInputBuffer;
     AMediaCodec_queueSecureInputBuffer;
+    AMediaCodec_releaseCrypto; # introduced=28
     AMediaCodec_releaseOutputBuffer;
     AMediaCodec_releaseOutputBufferAtTime;
+    AMediaCodec_setAsyncNotifyCallback; # introduced=28
     AMediaCodec_setOutputSurface; # introduced=24
     AMediaCodec_setParameters; # introduced=26
     AMediaCodec_setInputSurface; # introduced=26
@@ -127,16 +164,21 @@
     AMediaExtractor_unselectTrack;
     AMediaFormat_delete;
     AMediaFormat_getBuffer;
+    AMediaFormat_getDouble; # introduced=28
     AMediaFormat_getFloat;
     AMediaFormat_getInt32;
     AMediaFormat_getInt64;
+    AMediaFormat_getRect; # introduced=28
     AMediaFormat_getSize;
     AMediaFormat_getString;
     AMediaFormat_new;
     AMediaFormat_setBuffer;
+    AMediaFormat_setDouble; # introduced=28
     AMediaFormat_setFloat;
     AMediaFormat_setInt32;
     AMediaFormat_setInt64;
+    AMediaFormat_setRect; # introduced=28
+    AMediaFormat_setSize; # introduced=28
     AMediaFormat_setString;
     AMediaFormat_toString;
     AMediaMuxer_addTrack;
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 72917dd..f2bc6d0 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -24,7 +24,6 @@
     ],
     shared_libs: [
         "libbinder",
-        "libcutils",
         "liblog",
         "libutils",
         "libmemunreachable",
diff --git a/media/utils/OWNERS b/media/utils/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/media/utils/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/packages/MediaUpdate/Android.mk b/packages/MediaUpdate/Android.mk
new file mode 100644
index 0000000..4a71401
--- /dev/null
+++ b/packages/MediaUpdate/Android.mk
@@ -0,0 +1,34 @@
+#
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_PACKAGE_NAME := MediaUpdate
+LOCAL_MODULE_OWNER := google
+LOCAL_PRIVILEGED_MODULE := true
+
+# TODO: create a separate key for this package.
+LOCAL_CERTIFICATE := platform
+
+# TODO: Use System SDK once public APIs are approved
+# LOCAL_SDK_VERSION := system_current
+
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
+LOCAL_PROGUARD_FLAG_FILES := proguard.cfg
+
+include $(BUILD_PACKAGE)
diff --git a/packages/MediaUpdate/AndroidManifest.xml b/packages/MediaUpdate/AndroidManifest.xml
new file mode 100644
index 0000000..061ae44
--- /dev/null
+++ b/packages/MediaUpdate/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.media.update"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application
+        android:label="Media Components Update"
+        android:multiArch="true"
+        android:allowBackup="false"
+        android:extractNativeLibs="false">
+    </application>
+
+</manifest>
diff --git a/packages/MediaUpdate/proguard.cfg b/packages/MediaUpdate/proguard.cfg
new file mode 100644
index 0000000..874dbf5
--- /dev/null
+++ b/packages/MediaUpdate/proguard.cfg
@@ -0,0 +1,20 @@
+#
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Keep entry point for updatable Java classes
+-keep public class com.android.media.update.ApiFactory {
+   public static java.lang.Object initialize(android.content.Context);
+}
diff --git a/packages/MediaUpdate/src/com/android/media/update/ApiFactory.java b/packages/MediaUpdate/src/com/android/media/update/ApiFactory.java
new file mode 100644
index 0000000..1cdd177
--- /dev/null
+++ b/packages/MediaUpdate/src/com/android/media/update/ApiFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.update;
+
+import android.content.Context;
+import android.media.update.MediaController2Provider;
+import android.media.update.StaticProvider;
+import android.media.update.ViewProvider;
+import android.widget.MediaController2;
+
+import com.android.widget.MediaController2Impl;
+
+public class ApiFactory implements StaticProvider {
+    private final Context mContext;
+
+    public ApiFactory(Context context) {
+        mContext = context;
+    }
+
+    public static Object initialize(Context context) throws ReflectiveOperationException {
+        return new ApiFactory(context);
+    }
+
+    @Override
+    public MediaController2Provider createMediaController2(
+            MediaController2 instance, ViewProvider superProvider) {
+        return new MediaController2Impl(instance, superProvider);
+    }
+}
diff --git a/packages/MediaUpdate/src/com/android/widget/MediaController2Impl.java b/packages/MediaUpdate/src/com/android/widget/MediaController2Impl.java
new file mode 100644
index 0000000..d322a20
--- /dev/null
+++ b/packages/MediaUpdate/src/com/android/widget/MediaController2Impl.java
@@ -0,0 +1,192 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.widget;
+
+import android.graphics.Canvas;
+import android.media.session.MediaController;
+import android.media.update.MediaController2Provider;
+import android.media.update.ViewProvider;
+import android.view.KeyEvent;
+import android.view.MotionEvent;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.MediaController2;
+
+public class MediaController2Impl implements MediaController2Provider {
+    private final MediaController2 mInstance;
+    private final ViewProvider mSuperProvider;
+
+    public MediaController2Impl(MediaController2 instance, ViewProvider superProvider) {
+        mInstance = instance;
+        mSuperProvider = superProvider;
+
+        // TODO: Implement
+    }
+
+    @Override
+    public void setController_impl(MediaController controller) {
+        // TODO: Implement
+    }
+
+    @Override
+    public void setAnchorView_impl(View view) {
+        // TODO: Implement
+    }
+
+    @Override
+    public void show_impl() {
+        // TODO: Implement
+    }
+
+    @Override
+    public void show_impl(int timeout) {
+        // TODO: Implement
+    }
+
+    @Override
+    public boolean isShowing_impl() {
+        // TODO: Implement
+        return false;
+    }
+
+    @Override
+    public void hide_impl() {
+        // TODO: Implement
+    }
+
+    @Override
+    public void setPrevNextListeners_impl(OnClickListener next, OnClickListener prev) {
+        // TODO: Implement
+    }
+
+    @Override
+    public void showCCButton_impl() {
+        // TODO: Implement
+    }
+
+    @Override
+    public boolean isPlaying_impl() {
+        // TODO: Implement
+        return false;
+    }
+
+    @Override
+    public int getCurrentPosition_impl() {
+        // TODO: Implement
+        return 0;
+    }
+
+    @Override
+    public int getBufferPercentage_impl() {
+        // TODO: Implement
+        return 0;
+    }
+
+    @Override
+    public boolean canPause_impl() {
+        // TODO: Implement
+        return false;
+    }
+
+    @Override
+    public boolean canSeekBackward_impl() {
+        // TODO: Implement
+        return false;
+    }
+
+    @Override
+    public boolean canSeekForward_impl() {
+        // TODO: Implement
+        return false;
+    }
+
+    @Override
+    public void showSubtitle_impl() {
+        // TODO: Implement
+    }
+
+    @Override
+    public void hideSubtitle_impl() {
+        // TODO: Implement
+    }
+
+    @Override
+    public void onAttachedToWindow_impl() {
+        mSuperProvider.onAttachedToWindow_impl();
+        // TODO: Implement
+    }
+
+    @Override
+    public void onDetachedFromWindow_impl() {
+        mSuperProvider.onDetachedFromWindow_impl();
+        // TODO: Implement
+    }
+
+    @Override
+    public void onLayout_impl(boolean changed, int left, int top, int right, int bottom) {
+        mSuperProvider.onLayout_impl(changed, left, top, right, bottom);
+        // TODO: Implement
+    }
+
+    @Override
+    public void draw_impl(Canvas canvas) {
+        mSuperProvider.draw_impl(canvas);
+        // TODO: Implement
+    }
+
+    @Override
+    public CharSequence getAccessibilityClassName_impl() {
+        // TODO: Implement
+        return MediaController2.class.getName();
+    }
+
+    @Override
+    public boolean onTouchEvent_impl(MotionEvent ev) {
+        // TODO: Implement
+        return mSuperProvider.onTouchEvent_impl(ev);
+    }
+
+    @Override
+    public boolean onTrackballEvent_impl(MotionEvent ev) {
+        // TODO: Implement
+        return mSuperProvider.onTrackballEvent_impl(ev);
+    }
+
+    @Override
+    public boolean onKeyDown_impl(int keyCode, KeyEvent event) {
+        // TODO: Implement
+        return mSuperProvider.onKeyDown_impl(keyCode, event);
+    }
+
+    @Override
+    public void onFinishInflate_impl() {
+        mSuperProvider.onFinishInflate_impl();
+        // TODO: Implement
+    }
+
+    @Override
+    public boolean dispatchKeyEvent_impl(KeyEvent event) {
+        // TODO: Implement
+        return mSuperProvider.dispatchKeyEvent_impl(event);
+    }
+
+    @Override
+    public void setEnabled_impl(boolean enabled) {
+        mSuperProvider.setEnabled_impl(enabled);
+        // TODO: Implement
+    }
+}
diff --git a/packages/OWNERS b/packages/OWNERS
new file mode 100644
index 0000000..bbc4cef
--- /dev/null
+++ b/packages/OWNERS
@@ -0,0 +1,6 @@
+akersten@google.com
+dwkang@google.com
+jaewan@google.com
+marcone@google.com
+sungsoo@google.com
+wjia@google.com
diff --git a/services/OWNERS b/services/OWNERS
index d500dce..d5d00da 100644
--- a/services/OWNERS
+++ b/services/OWNERS
@@ -1,4 +1,4 @@
 elaurent@google.com
 etalvala@google.com
-gkasten@android.com
+gkasten@google.com
 hunga@google.com
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index d0454d4..7419e64 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -51,6 +51,7 @@
     libmedialogservice \
     libmediautils \
     libnbaio \
+    libnblog \
     libpowermanager \
     libserviceutility \
     libmediautils \
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 91f4d37..4d5e094 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -641,38 +641,56 @@
 
 // IAudioFlinger interface
 
-
-sp<IAudioTrack> AudioFlinger::createTrack(
-        audio_stream_type_t streamType,
-        uint32_t sampleRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        size_t *frameCount,
-        audio_output_flags_t *flags,
-        const sp<IMemory>& sharedBuffer,
-        audio_io_handle_t output,
-        pid_t pid,
-        pid_t tid,
-        audio_session_t *sessionId,
-        int clientUid,
-        status_t *status,
-        audio_port_handle_t portId)
+sp<IAudioTrack> AudioFlinger::createTrack(const CreateTrackInput& input,
+                                          CreateTrackOutput& output,
+                                          status_t *status)
 {
     sp<PlaybackThread::Track> track;
     sp<TrackHandle> trackHandle;
     sp<Client> client;
     status_t lStatus;
-    audio_session_t lSessionId;
+    audio_stream_type_t streamType;
+    audio_port_handle_t portId;
 
+    bool updatePid = (input.clientInfo.clientPid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (pid == -1 || !isTrustedCallingUid(callingUid)) {
+    uid_t clientUid = input.clientInfo.clientUid;
+    if (!isTrustedCallingUid(callingUid)) {
+        ALOGW_IF(clientUid != callingUid,
+                "%s uid %d tried to pass itself off as %d",
+                __FUNCTION__, callingUid, clientUid);
+        clientUid = callingUid;
+        updatePid = true;
+    }
+    pid_t clientPid = input.clientInfo.clientPid;
+    if (updatePid) {
         const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(pid != -1 && pid != callingPid,
+        ALOGW_IF(clientPid != -1 && clientPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, pid);
-        pid = callingPid;
+                 __func__, callingUid, callingPid, clientPid);
+        clientPid = callingPid;
     }
 
+    audio_session_t sessionId = input.sessionId;
+    if (sessionId == AUDIO_SESSION_ALLOCATE) {
+        sessionId = (audio_session_t) newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    } else if (audio_unique_id_get_use(sessionId) != AUDIO_UNIQUE_ID_USE_SESSION) {
+        lStatus = BAD_VALUE;
+        goto Exit;
+    }
+
+    output.sessionId = sessionId;
+    output.outputId = AUDIO_IO_HANDLE_NONE;
+    output.selectedDeviceId = input.selectedDeviceId;
+
+    lStatus = AudioSystem::getOutputForAttr(&input.attr, &output.outputId, sessionId, &streamType,
+                                            clientUid, &input.config, input.flags,
+                                            &output.selectedDeviceId, &portId);
+
+    if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
+        ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
+        goto Exit;
+    }
     // client AudioTrack::set already implements AUDIO_STREAM_DEFAULT => AUDIO_STREAM_MUSIC,
     // but if someone uses binder directly they could bypass that and cause us to crash
     if (uint32_t(streamType) >= AUDIO_STREAM_CNT) {
@@ -681,91 +699,76 @@
         goto Exit;
     }
 
-    // further sample rate checks are performed by createTrack_l() depending on the thread type
-    if (sampleRate == 0) {
-        ALOGE("createTrack() invalid sample rate %u", sampleRate);
-        lStatus = BAD_VALUE;
-        goto Exit;
-    }
-
     // further channel mask checks are performed by createTrack_l() depending on the thread type
-    if (!audio_is_output_channel(channelMask)) {
-        ALOGE("createTrack() invalid channel mask %#x", channelMask);
+    if (!audio_is_output_channel(input.config.channel_mask)) {
+        ALOGE("createTrack() invalid channel mask %#x", input.config.channel_mask);
         lStatus = BAD_VALUE;
         goto Exit;
     }
 
     // further format checks are performed by createTrack_l() depending on the thread type
-    if (!audio_is_valid_format(format)) {
-        ALOGE("createTrack() invalid format %#x", format);
-        lStatus = BAD_VALUE;
-        goto Exit;
-    }
-
-    if (sharedBuffer != 0 && sharedBuffer->pointer() == NULL) {
-        ALOGE("createTrack() sharedBuffer is non-0 but has NULL pointer()");
+    if (!audio_is_valid_format(input.config.format)) {
+        ALOGE("createTrack() invalid format %#x", input.config.format);
         lStatus = BAD_VALUE;
         goto Exit;
     }
 
     {
         Mutex::Autolock _l(mLock);
-        PlaybackThread *thread = checkPlaybackThread_l(output);
+        PlaybackThread *thread = checkPlaybackThread_l(output.outputId);
         if (thread == NULL) {
-            ALOGE("no playback thread found for output handle %d", output);
+            ALOGE("no playback thread found for output handle %d", output.outputId);
             lStatus = BAD_VALUE;
             goto Exit;
         }
 
-        client = registerPid(pid);
+        client = registerPid(clientPid);
 
         PlaybackThread *effectThread = NULL;
-        if (sessionId != NULL && *sessionId != AUDIO_SESSION_ALLOCATE) {
-            if (audio_unique_id_get_use(*sessionId) != AUDIO_UNIQUE_ID_USE_SESSION) {
-                ALOGE("createTrack() invalid session ID %d", *sessionId);
-                lStatus = BAD_VALUE;
-                goto Exit;
-            }
-            lSessionId = *sessionId;
-            // check if an effect chain with the same session ID is present on another
-            // output thread and move it here.
-            for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
-                sp<PlaybackThread> t = mPlaybackThreads.valueAt(i);
-                if (mPlaybackThreads.keyAt(i) != output) {
-                    uint32_t sessions = t->hasAudioSession(lSessionId);
-                    if (sessions & ThreadBase::EFFECT_SESSION) {
-                        effectThread = t.get();
-                        break;
-                    }
+        // check if an effect chain with the same session ID is present on another
+        // output thread and move it here.
+        for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+            sp<PlaybackThread> t = mPlaybackThreads.valueAt(i);
+            if (mPlaybackThreads.keyAt(i) != output.outputId) {
+                uint32_t sessions = t->hasAudioSession(sessionId);
+                if (sessions & ThreadBase::EFFECT_SESSION) {
+                    effectThread = t.get();
+                    break;
                 }
             }
-        } else {
-            // if no audio session id is provided, create one here
-            lSessionId = (audio_session_t) nextUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
-            if (sessionId != NULL) {
-                *sessionId = lSessionId;
-            }
         }
-        ALOGV("createTrack() lSessionId: %d", lSessionId);
+        ALOGV("createTrack() sessionId: %d", sessionId);
 
-        track = thread->createTrack_l(client, streamType, sampleRate, format,
-                channelMask, frameCount, sharedBuffer, lSessionId, flags, tid,
-                clientUid, &lStatus, portId);
+        output.sampleRate = input.config.sample_rate;
+        output.frameCount = input.frameCount;
+        output.notificationFrameCount = input.notificationFrameCount;
+        output.flags = input.flags;
+
+        track = thread->createTrack_l(client, streamType, &output.sampleRate, input.config.format,
+                                      input.config.channel_mask,
+                                      &output.frameCount, &output.notificationFrameCount,
+                                      input.notificationsPerBuffer, input.speed,
+                                      input.sharedBuffer, sessionId, &output.flags,
+                                      input.clientInfo.clientTid, clientUid, &lStatus, portId);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
+        output.afFrameCount = thread->frameCount();
+        output.afSampleRate = thread->sampleRate();
+        output.afLatencyMs = thread->latency();
+
         // move effect chain to this output thread if an effect on same session was waiting
         // for a track to be created
         if (lStatus == NO_ERROR && effectThread != NULL) {
             // no risk of deadlock because AudioFlinger::mLock is held
             Mutex::Autolock _dl(thread->mLock);
             Mutex::Autolock _sl(effectThread->mLock);
-            moveEffectChain_l(lSessionId, effectThread, thread, true);
+            moveEffectChain_l(sessionId, effectThread, thread, true);
         }
 
         // Look for sync events awaiting for a session to be used.
         for (size_t i = 0; i < mPendingSyncEvents.size(); i++) {
-            if (mPendingSyncEvents[i]->triggerSession() == lSessionId) {
+            if (mPendingSyncEvents[i]->triggerSession() == sessionId) {
                 if (thread->isValidSyncEvent(mPendingSyncEvents[i])) {
                     if (lStatus == NO_ERROR) {
                         (void) track->setSyncEvent(mPendingSyncEvents[i]);
@@ -778,7 +781,7 @@
             }
         }
 
-        setAudioHwSyncForSession_l(thread, lSessionId);
+        setAudioHwSyncForSession_l(thread, sessionId);
     }
 
     if (lStatus != NO_ERROR) {
@@ -798,6 +801,9 @@
     trackHandle = new TrackHandle(track);
 
 Exit:
+    if (lStatus != NO_ERROR && output.outputId != AUDIO_IO_HANDLE_NONE) {
+        AudioSystem::releaseOutput(output.outputId, streamType, sessionId);
+    }
     *status = lStatus;
     return trackHandle;
 }
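
Editor's illustrative note: the createTrack() refactor above replaces a long positional parameter list with a single input struct and an output struct that the service fills in (adjusted sample rate, frame counts, granted flags, and the af* thread parameters). The sketch below only restates that shape as a generic pattern; the struct and field names echo the ones used above but are not the real IAudioFlinger definitions.

#include <cstddef>
#include <cstdint>

// Illustrative-only structs mirroring the input/output pattern of createTrack().
struct CreateTrackInputSketch {
    uint32_t sampleRate;
    size_t   frameCount;
    size_t   notificationFrameCount;
    uint32_t flags;
};

struct CreateTrackOutputSketch {
    uint32_t sampleRate;             // may be adjusted by the service
    size_t   frameCount;             // frame count actually chosen
    size_t   notificationFrameCount;
    uint32_t flags;                  // flags actually granted (e.g. FAST dropped)
    size_t   afFrameCount;           // mixer thread parameters reported back
    uint32_t afSampleRate;
    uint32_t afLatencyMs;
};

// The service validates the request, fills the output struct, and reports
// status through a single out-parameter, mirroring the refactored signature.
static int createTrackSketch(const CreateTrackInputSketch& in, CreateTrackOutputSketch& out) {
    out.sampleRate = in.sampleRate;
    out.frameCount = in.frameCount;
    out.notificationFrameCount = in.notificationFrameCount;
    out.flags = in.flags;
    out.afFrameCount = 0;
    out.afSampleRate = 0;
    out.afLatencyMs = 0;
    return 0;  // NO_ERROR in the real code
}
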
@@ -1566,120 +1572,147 @@
 
 // ----------------------------------------------------------------------------
 
-sp<IAudioRecord> AudioFlinger::openRecord(
-        audio_io_handle_t input,
-        uint32_t sampleRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        const String16& opPackageName,
-        size_t *frameCount,
-        audio_input_flags_t *flags,
-        pid_t pid,
-        pid_t tid,
-        int clientUid,
-        audio_session_t *sessionId,
-        size_t *notificationFrames,
-        sp<IMemory>& cblk,
-        sp<IMemory>& buffers,
-        status_t *status,
-        audio_port_handle_t portId)
+sp<media::IAudioRecord> AudioFlinger::createRecord(const CreateRecordInput& input,
+                                                   CreateRecordOutput& output,
+                                                   status_t *status)
 {
     sp<RecordThread::RecordTrack> recordTrack;
     sp<RecordHandle> recordHandle;
     sp<Client> client;
     status_t lStatus;
-    audio_session_t lSessionId;
+    audio_session_t sessionId = input.sessionId;
+    audio_port_handle_t portId;
 
-    cblk.clear();
-    buffers.clear();
+    output.cblk.clear();
+    output.buffers.clear();
+    output.inputId = AUDIO_IO_HANDLE_NONE;
 
-    bool updatePid = (pid == -1);
+    bool updatePid = (input.clientInfo.clientPid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    uid_t clientUid = input.clientInfo.clientUid;
     if (!isTrustedCallingUid(callingUid)) {
-        ALOGW_IF((uid_t)clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid, clientUid);
+        ALOGW_IF(clientUid != callingUid,
+                "%s uid %d tried to pass itself off as %d",
+                __FUNCTION__, callingUid, clientUid);
         clientUid = callingUid;
         updatePid = true;
     }
-
+    pid_t clientPid = input.clientInfo.clientPid;
     if (updatePid) {
         const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(pid != -1 && pid != callingPid,
+        ALOGW_IF(clientPid != -1 && clientPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, pid);
-        pid = callingPid;
+                 __func__, callingUid, callingPid, clientPid);
+        clientPid = callingPid;
     }
 
     // check calling permissions
-    if (!recordingAllowed(opPackageName, tid, clientUid)) {
-        ALOGE("openRecord() permission denied: recording not allowed");
+    if (!recordingAllowed(input.opPackageName, input.clientInfo.clientTid, clientUid)) {
+        ALOGE("createRecord() permission denied: recording not allowed");
         lStatus = PERMISSION_DENIED;
         goto Exit;
     }
-
-    // further sample rate checks are performed by createRecordTrack_l()
-    if (sampleRate == 0) {
-        ALOGE("openRecord() invalid sample rate %u", sampleRate);
-        lStatus = BAD_VALUE;
-        goto Exit;
-    }
-
     // we don't yet support anything other than linear PCM
-    if (!audio_is_valid_format(format) || !audio_is_linear_pcm(format)) {
-        ALOGE("openRecord() invalid format %#x", format);
+    if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
+        ALOGE("createRecord() invalid format %#x", input.config.format);
         lStatus = BAD_VALUE;
         goto Exit;
     }
 
     // further channel mask checks are performed by createRecordTrack_l()
-    if (!audio_is_input_channel(channelMask)) {
-        ALOGE("openRecord() invalid channel mask %#x", channelMask);
+    if (!audio_is_input_channel(input.config.channel_mask)) {
+        ALOGE("createRecord() invalid channel mask %#x", input.config.channel_mask);
         lStatus = BAD_VALUE;
         goto Exit;
     }
 
+    if (sessionId == AUDIO_SESSION_ALLOCATE) {
+        sessionId = (audio_session_t) newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    } else if (audio_unique_id_get_use(sessionId) != AUDIO_UNIQUE_ID_USE_SESSION) {
+        lStatus = BAD_VALUE;
+        goto Exit;
+    }
+
+    output.sessionId = sessionId;
+    output.selectedDeviceId = input.selectedDeviceId;
+    output.flags = input.flags;
+
+    client = registerPid(clientPid);
+
+    // Not a conventional loop, but a retry loop for at most two iterations total.
+    // Try first, possibly with the FAST flag, then try again without the FAST flag if that fails.
+    // Exits the loop via break on success, or via goto Exit on error.
+    // The sp<> references will be dropped when re-entering scope.
+    // The lack of indentation is deliberate, to reduce code churn and ease merges.
+    for (;;) {
+    // release previously opened input if retrying.
+    if (output.inputId != AUDIO_IO_HANDLE_NONE) {
+        recordTrack.clear();
+        AudioSystem::releaseInput(output.inputId, sessionId);
+        output.inputId = AUDIO_IO_HANDLE_NONE;
+    }
+    lStatus = AudioSystem::getInputForAttr(&input.attr, &output.inputId,
+                                      sessionId,
+                                    // FIXME compare to AudioTrack
+                                      clientPid,
+                                      clientUid,
+                                      &input.config,
+                                      output.flags, &output.selectedDeviceId, &portId);
+
     {
         Mutex::Autolock _l(mLock);
-        RecordThread *thread = checkRecordThread_l(input);
+        RecordThread *thread = checkRecordThread_l(output.inputId);
         if (thread == NULL) {
-            ALOGE("openRecord() checkRecordThread_l failed");
+            ALOGE("createRecord() checkRecordThread_l failed");
             lStatus = BAD_VALUE;
             goto Exit;
         }
 
-        client = registerPid(pid);
+        ALOGV("createRecord() lSessionId: %d input %d", sessionId, output.inputId);
 
-        if (sessionId != NULL && *sessionId != AUDIO_SESSION_ALLOCATE) {
-            if (audio_unique_id_get_use(*sessionId) != AUDIO_UNIQUE_ID_USE_SESSION) {
-                lStatus = BAD_VALUE;
-                goto Exit;
-            }
-            lSessionId = *sessionId;
-        } else {
-            // if no audio session id is provided, create one here
-            lSessionId = (audio_session_t) nextUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
-            if (sessionId != NULL) {
-                *sessionId = lSessionId;
-            }
-        }
-        ALOGV("openRecord() lSessionId: %d input %d", lSessionId, input);
+        output.sampleRate = input.config.sample_rate;
+        output.frameCount = input.frameCount;
+        output.notificationFrameCount = input.notificationFrameCount;
 
-        recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask,
-                                                  frameCount, lSessionId, notificationFrames,
-                                                  clientUid, flags, tid, &lStatus, portId);
+        recordTrack = thread->createRecordTrack_l(client, &output.sampleRate,
+                                                  input.config.format, input.config.channel_mask,
+                                                  &output.frameCount, sessionId,
+                                                  &output.notificationFrameCount,
+                                                  clientUid, &output.flags,
+                                                  input.clientInfo.clientTid,
+                                                  &lStatus, portId);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (recordTrack == 0));
 
-        if (lStatus == NO_ERROR) {
-            // Check if one effect chain was awaiting for an AudioRecord to be created on this
-            // session and move it to this thread.
-            sp<EffectChain> chain = getOrphanEffectChain_l(lSessionId);
-            if (chain != 0) {
-                Mutex::Autolock _l(thread->mLock);
-                thread->addEffectChain_l(chain);
-            }
+        // lStatus == BAD_TYPE means FAST flag was rejected: request a new input from
+        // audio policy manager without FAST constraint
+        if (lStatus == BAD_TYPE) {
+            continue;
         }
+
+        if (lStatus != NO_ERROR) {
+            goto Exit;
+        }
+
+        // Check if one effect chain was awaiting for an AudioRecord to be created on this
+        // session and move it to this thread.
+        sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
+        if (chain != 0) {
+            Mutex::Autolock _l(thread->mLock);
+            thread->addEffectChain_l(chain);
+        }
+        break;
+    }
+    // End of retry loop.
+    // The lack of indentation is deliberate, to reduce code churn and ease merges.
     }
 
+    output.cblk = recordTrack->getCblk();
+    output.buffers = recordTrack->getBuffers();
+
+    // return handle to client
+    recordHandle = new RecordHandle(recordTrack);
+
+Exit:
     if (lStatus != NO_ERROR) {
         // remove local strong reference to Client before deleting the RecordTrack so that the
         // Client destructor is called by the TrackBase destructor with mClientLock held
@@ -1690,16 +1723,11 @@
             client.clear();
         }
         recordTrack.clear();
-        goto Exit;
+        if (output.inputId != AUDIO_IO_HANDLE_NONE) {
+            AudioSystem::releaseInput(output.inputId, sessionId);
+        }
     }
 
-    cblk = recordTrack->getCblk();
-    buffers = recordTrack->getBuffers();
-
-    // return handle to client
-    recordHandle = new RecordHandle(recordTrack);
-
-Exit:
     *status = lStatus;
     return recordHandle;
 }
@@ -2014,8 +2042,8 @@
                                   uint32_t *latencyMs,
                                   audio_output_flags_t flags)
 {
-    ALOGI("openOutput() this %p, module %d Device %x, SamplingRate %d, Format %#08x, Channels %x, "
-              "flags %x",
+    ALOGI("openOutput() this %p, module %d Device %#x, SamplingRate %d, Format %#08x, "
+              "Channels %#x, flags %#x",
               this, module,
               (devices != NULL) ? *devices : 0,
               config->sample_rate,
@@ -2257,8 +2285,8 @@
     sp<StreamInHalInterface> inStream;
     status_t status = inHwHal->openInputStream(
             *input, devices, &halconfig, flags, address.string(), source, &inStream);
-    ALOGV("openInput_l() openInputStream returned input %p, devices %x, SamplingRate %d"
-           ", Format %#x, Channels %x, flags %#x, status %d addr %s",
+    ALOGV("openInput_l() openInputStream returned input %p, devices %#x, SamplingRate %d"
+           ", Format %#x, Channels %#x, flags %#x, status %d addr %s",
             inStream.get(),
             devices,
             halconfig.sample_rate,
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 63898a0..bc73ffd 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -33,7 +33,6 @@
 #include <media/IAudioFlinger.h>
 #include <media/IAudioFlingerClient.h>
 #include <media/IAudioTrack.h>
-#include <media/IAudioRecord.h>
 #include <media/AudioSystem.h>
 #include <media/AudioTrack.h>
 #include <media/MmapStreamInterface.h>
@@ -72,10 +71,12 @@
 
 #include <powermanager/IPowerManager.h>
 
-#include <media/nbaio/NBLog.h>
+#include <media/nblog/NBLog.h>
 #include <private/media/AudioEffectShared.h>
 #include <private/media/AudioTrackShared.h>
 
+#include "android/media/BnAudioRecord.h"
+
 namespace android {
 
 class AudioMixer;
@@ -113,39 +114,13 @@
     virtual     status_t    dump(int fd, const Vector<String16>& args);
 
     // IAudioFlinger interface, in binder opcode order
-    virtual sp<IAudioTrack> createTrack(
-                                audio_stream_type_t streamType,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                size_t *pFrameCount,
-                                audio_output_flags_t *flags,
-                                const sp<IMemory>& sharedBuffer,
-                                audio_io_handle_t output,
-                                pid_t pid,
-                                pid_t tid,
-                                audio_session_t *sessionId,
-                                int clientUid,
-                                status_t *status /*non-NULL*/,
-                                audio_port_handle_t portId);
+    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
+                                        CreateTrackOutput& output,
+                                        status_t *status);
 
-    virtual sp<IAudioRecord> openRecord(
-                                audio_io_handle_t input,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                const String16& opPackageName,
-                                size_t *pFrameCount,
-                                audio_input_flags_t *flags,
-                                pid_t pid,
-                                pid_t tid,
-                                int clientUid,
-                                audio_session_t *sessionId,
-                                size_t *notificationFrames,
-                                sp<IMemory>& cblk,
-                                sp<IMemory>& buffers,
-                                status_t *status /*non-NULL*/,
-                                audio_port_handle_t portId);
+    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
+                                                 CreateRecordOutput& output,
+                                                 status_t *status);
 
     virtual     uint32_t    sampleRate(audio_io_handle_t ioHandle) const;
     virtual     audio_format_t format(audio_io_handle_t output) const;
@@ -537,6 +512,13 @@
     };
 
     // --- PlaybackThread ---
+#ifdef FLOAT_EFFECT_CHAIN
+#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
+using effect_buffer_t = float;
+#else
+#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_16_BIT
+using effect_buffer_t = int16_t;
+#endif
 
 #include "Threads.h"
 
@@ -556,10 +538,10 @@
         virtual void        pause();
         virtual status_t    attachAuxEffect(int effectId);
         virtual status_t    setParameters(const String8& keyValuePairs);
-        virtual VolumeShaper::Status applyVolumeShaper(
-                const sp<VolumeShaper::Configuration>& configuration,
-                const sp<VolumeShaper::Operation>& operation) override;
-        virtual sp<VolumeShaper::State> getVolumeShaperState(int id) override;
+        virtual media::VolumeShaper::Status applyVolumeShaper(
+                const sp<media::VolumeShaper::Configuration>& configuration,
+                const sp<media::VolumeShaper::Operation>& operation) override;
+        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) override;
         virtual status_t    getTimestamp(AudioTimestamp& timestamp);
         virtual void        signal(); // signal playback thread for a change in control block
 
@@ -571,15 +553,13 @@
     };
 
     // server side of the client's IAudioRecord
-    class RecordHandle : public android::BnAudioRecord {
+    class RecordHandle : public android::media::BnAudioRecord {
     public:
         explicit RecordHandle(const sp<RecordThread::RecordTrack>& recordTrack);
         virtual             ~RecordHandle();
-        virtual status_t    start(int /*AudioSystem::sync_event_t*/ event,
-                audio_session_t triggerSession);
-        virtual void        stop();
-        virtual status_t onTransact(
-            uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
+        virtual binder::Status    start(int /*AudioSystem::sync_event_t*/ event,
+                int /*audio_session_t*/ triggerSession);
+        virtual binder::Status   stop();
     private:
         const sp<RecordThread::RecordTrack> mRecordTrack;
 
diff --git a/services/audioflinger/BufLog.cpp b/services/audioflinger/BufLog.cpp
index 9680eb5..2780290 100644
--- a/services/audioflinger/BufLog.cpp
+++ b/services/audioflinger/BufLog.cpp
@@ -121,7 +121,7 @@
     } else {
         mTag[0] = 0;
     }
-    ALOGV("Creating BufLogStream id:%d tag:%s format:%d ch:%d sr:%d maxbytes:%zu", mId, mTag,
+    ALOGV("Creating BufLogStream id:%d tag:%s format:%#x ch:%d sr:%d maxbytes:%zu", mId, mTag,
             mFormat, mChannels, mSamplingRate, mMaxBytes);
 
     //open file (s), info about tag, format, etc.
diff --git a/services/audioflinger/Configuration.h b/services/audioflinger/Configuration.h
index 845697a..6e0f2b6 100644
--- a/services/audioflinger/Configuration.h
+++ b/services/audioflinger/Configuration.h
@@ -41,4 +41,12 @@
 // uncomment to log CPU statistics every n wall clock seconds
 //#define DEBUG_CPU_USAGE 10
 
+// define FLOAT_EFFECT_CHAIN to request float effects (falls back to int16_t if unavailable)
+#define FLOAT_EFFECT_CHAIN
+
+#ifdef FLOAT_EFFECT_CHAIN
+// define FLOAT_AUX to process aux effect buffers in float (FLOAT_EFFECT_CHAIN must be defined)
+#define FLOAT_AUX
+#endif
+
 #endif // ANDROID_AUDIOFLINGER_CONFIGURATION_H
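
Editor's illustrative note: Configuration.h now turns on FLOAT_EFFECT_CHAIN, and AudioFlinger.h (above) maps it onto EFFECT_BUFFER_FORMAT and the effect_buffer_t alias. The sketch below only restates that selection and shows how a processing buffer could be declared generically over it; the helper itself is an assumption, not code from the patch.

#include <cstdint>
#include <vector>

// Mirrors the selection made in AudioFlinger.h: float samples when
// FLOAT_EFFECT_CHAIN is defined, int16_t samples otherwise.
#ifdef FLOAT_EFFECT_CHAIN
using effect_buffer_t = float;
#else
using effect_buffer_t = int16_t;
#endif

// Hypothetical helper: one interleaved, zero-initialized buffer in whichever
// sample type the effect chain was built for.
static std::vector<effect_buffer_t> makeEffectBuffer(size_t frameCount, uint32_t channelCount) {
    return std::vector<effect_buffer_t>(frameCount * channelCount, effect_buffer_t(0));
}
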
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index bd5f146..b4ff0d6 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -19,6 +19,8 @@
 #define LOG_TAG "AudioFlinger"
 //#define LOG_NDEBUG 0
 
+#include <algorithm>
+
 #include "Configuration.h"
 #include <utils/Log.h>
 #include <system/audio_effects/effect_aec.h>
@@ -47,8 +49,6 @@
 #define ALOGVV(a...) do { } while(0)
 #endif
 
-#define min(a, b) ((a) < (b) ? (a) : (b))
-
 namespace android {
 
 // ----------------------------------------------------------------------------
@@ -67,12 +67,19 @@
     : mPinned(pinned),
       mThread(thread), mChain(chain), mId(id), mSessionId(sessionId),
       mDescriptor(*desc),
-      // mConfig is set by configure() and not used before then
+      // clear mConfig to ensure consistent initial value of buffer framecount
+      // in case buffers are associated by setInBuffer() or setOutBuffer()
+      // prior to configure().
+      mConfig{{}, {}},
       mStatus(NO_INIT), mState(IDLE),
-      // mMaxDisableWaitCnt is set by configure() and not used before then
-      // mDisableWaitCnt is set by process() and updateState() and not used before then
+      mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
+      mDisableWaitCnt(0),    // set by process() and updateState()
       mSuspended(false),
+      mOffloaded(false),
       mAudioFlinger(thread->mAudioFlinger)
+#ifdef FLOAT_EFFECT_CHAIN
+      , mSupportsFloat(false)
+#endif
 {
     ALOGV("Constructor %p pinned %d", this, pinned);
     int lStatus;
@@ -285,56 +292,158 @@
         return;
     }
 
+    // TODO: Implement multichannel effects; here outChannelCount == FCC_2 == 2
+    const uint32_t inChannelCount =
+            audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
+    const uint32_t outChannelCount =
+            audio_channel_count_from_out_mask(mConfig.outputCfg.channels);
+    const bool auxType =
+            (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY;
+
+    // safeInputOutputSampleCount is 0 if the channel counts of the input and output
+    // buffers do not match. This prevents automatic accumulation or copying between the
+    // input and output effect buffers without an intermediary effect process.
+    // TODO: consider implementing channel conversion.
+    const size_t safeInputOutputSampleCount =
+            inChannelCount != outChannelCount ? 0
+                    : outChannelCount * std::min(
+                            mConfig.inputCfg.buffer.frameCount,
+                            mConfig.outputCfg.buffer.frameCount);
+    const auto accumulateInputToOutput = [this, safeInputOutputSampleCount]() {
+#ifdef FLOAT_EFFECT_CHAIN
+        accumulate_float(
+                mConfig.outputCfg.buffer.f32,
+                mConfig.inputCfg.buffer.f32,
+                safeInputOutputSampleCount);
+#else
+        accumulate_i16(
+                mConfig.outputCfg.buffer.s16,
+                mConfig.inputCfg.buffer.s16,
+                safeInputOutputSampleCount);
+#endif
+    };
+    const auto copyInputToOutput = [this, safeInputOutputSampleCount]() {
+#ifdef FLOAT_EFFECT_CHAIN
+        memcpy(
+                mConfig.outputCfg.buffer.f32,
+                mConfig.inputCfg.buffer.f32,
+                safeInputOutputSampleCount * sizeof(*mConfig.outputCfg.buffer.f32));
+
+#else
+        memcpy(
+                mConfig.outputCfg.buffer.s16,
+                mConfig.inputCfg.buffer.s16,
+                safeInputOutputSampleCount * sizeof(*mConfig.outputCfg.buffer.s16));
+#endif
+    };
+
     if (isProcessEnabled()) {
-        // do 32 bit to 16 bit conversion for auxiliary effect input buffer
-        if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
-            ditherAndClamp(mConfig.inputCfg.buffer.s32,
-                                        mConfig.inputCfg.buffer.s32,
-                                        mConfig.inputCfg.buffer.frameCount/2);
-        }
         int ret;
         if (isProcessImplemented()) {
-            // do the actual processing in the effect engine
-            ret = mEffectInterface->process();
-        } else {
-            if (mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) {
-                size_t frameCnt = mConfig.inputCfg.buffer.frameCount * FCC_2;  //always stereo here
-                int16_t *in = mConfig.inputCfg.buffer.s16;
-                int16_t *out = mConfig.outputCfg.buffer.s16;
+            if (auxType) {
+                // We overwrite the aux input buffer here and clear after processing.
+#ifdef FLOAT_EFFECT_CHAIN
+                if (mSupportsFloat) {
+#ifndef FLOAT_AUX
+                    // Do in-place float conversion for auxiliary effect input buffer.
+                    static_assert(sizeof(float) <= sizeof(int32_t),
+                            "in-place conversion requires sizeof(float) <= sizeof(int32_t)");
 
-                if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-                    for (size_t i = 0; i < frameCnt; i++) {
-                        out[i] = clamp16((int32_t)out[i] + (int32_t)in[i]);
+                    memcpy_to_float_from_q4_27(
+                            mConfig.inputCfg.buffer.f32,
+                            mConfig.inputCfg.buffer.s32,
+                            mConfig.inputCfg.buffer.frameCount);
+#endif // !FLOAT_AUX
+                } else
+#endif // FLOAT_EFFECT_CHAIN
+                {
+#ifdef FLOAT_AUX
+                    memcpy_to_i16_from_float(
+                            mConfig.inputCfg.buffer.s16,
+                            mConfig.inputCfg.buffer.f32,
+                            mConfig.inputCfg.buffer.frameCount);
+#else
+                    memcpy_to_i16_from_q4_27(
+                            mConfig.inputCfg.buffer.s16,
+                            mConfig.inputCfg.buffer.s32,
+                            mConfig.inputCfg.buffer.frameCount);
+#endif
+                }
+            }
+#ifdef FLOAT_EFFECT_CHAIN
+            if (!mSupportsFloat) { // convert input to int16_t as effect doesn't support float.
+                if (!auxType) {
+                    if (mInConversionBuffer.get() == nullptr) {
+                        ALOGW("%s: mInConversionBuffer is null, bypassing", __func__);
+                        goto data_bypass;
                     }
+                    memcpy_to_i16_from_float(
+                            mInConversionBuffer->audioBuffer()->s16,
+                            mInBuffer->audioBuffer()->f32,
+                            inChannelCount * mConfig.inputCfg.buffer.frameCount);
+                }
+                if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+                    if (mOutConversionBuffer.get() == nullptr) {
+                        ALOGW("%s: mOutConversionBuffer is null, bypassing", __func__);
+                        goto data_bypass;
+                    }
+                    memcpy_to_i16_from_float(
+                            mOutConversionBuffer->audioBuffer()->s16,
+                            mOutBuffer->audioBuffer()->f32,
+                            outChannelCount * mConfig.outputCfg.buffer.frameCount);
+                }
+            }
+#endif
+
+            ret = mEffectInterface->process();
+
+#ifdef FLOAT_EFFECT_CHAIN
+            if (!mSupportsFloat) { // convert output int16_t back to float.
+                memcpy_to_float_from_i16(
+                        mOutBuffer->audioBuffer()->f32,
+                        mOutConversionBuffer->audioBuffer()->s16,
+                        outChannelCount * mConfig.outputCfg.buffer.frameCount);
+            }
+#endif
+        } else {
+#ifdef FLOAT_EFFECT_CHAIN
+            data_bypass:
+#endif
+            if (!auxType  /* aux effects do not require data bypass */
+                    && mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) {
+                if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+                    accumulateInputToOutput();
                 } else {
-                    memcpy(mConfig.outputCfg.buffer.raw, mConfig.inputCfg.buffer.raw,
-                           frameCnt * sizeof(int16_t));
+                    copyInputToOutput();
                 }
             }
             ret = -ENODATA;
         }
+
         // force transition to IDLE state when engine is ready
         if (mState == STOPPED && ret == -ENODATA) {
             mDisableWaitCnt = 1;
         }
 
         // clear auxiliary effect input buffer for next accumulation
-        if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
-            memset(mConfig.inputCfg.buffer.raw, 0,
-                   mConfig.inputCfg.buffer.frameCount*sizeof(int32_t));
+        if (auxType) {
+#ifdef FLOAT_AUX
+            const size_t size =
+                    mConfig.inputCfg.buffer.frameCount * inChannelCount * sizeof(float);
+#else
+            const size_t size =
+                    mConfig.inputCfg.buffer.frameCount * inChannelCount * sizeof(int32_t);
+#endif
+            memset(mConfig.inputCfg.buffer.raw, 0, size);
         }
     } else if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_INSERT &&
+                // mInBuffer->audioBuffer()->raw != mOutBuffer->audioBuffer()->raw
                 mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) {
         // If an insert effect is idle and input buffer is different from output buffer,
         // accumulate input onto output
         sp<EffectChain> chain = mChain.promote();
-        if (chain != 0 && chain->activeTrackCnt() != 0) {
-            size_t frameCnt = mConfig.inputCfg.buffer.frameCount * FCC_2;  //always stereo here
-            int16_t *in = mConfig.inputCfg.buffer.s16;
-            int16_t *out = mConfig.outputCfg.buffer.s16;
-            for (size_t i = 0; i < frameCnt; i++) {
-                out[i] = clamp16((int32_t)out[i] + (int32_t)in[i]);
-            }
+        if (chain.get() != nullptr && chain->activeTrackCnt() != 0) {
+            accumulateInputToOutput();
         }
     }
 }
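Note: accumulate_float()/accumulate_i16() and the memcpy_to_* conversions above come from the audio_utils primitives. Conceptually the accumulate step is an element-wise add of the input buffer into the output buffer; a purely illustrative sketch of the float case, not the library implementation:

    static inline void accumulate_float_sketch(float *dst, const float *src, size_t count) {
        for (size_t i = 0; i < count; ++i) {
            dst[i] += src[i];   // no clamp16() needed on the float path
        }
    }

safeInputOutputSampleCount already folds the channel count into 'count', so a channel-count mismatch between input and output simply turns these helpers into no-ops.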
@@ -349,6 +458,7 @@
 
 status_t AudioFlinger::EffectModule::configure()
 {
+    ALOGVV("configure() started");
     status_t status;
     sp<ThreadBase> thread;
     uint32_t size;
@@ -384,8 +494,8 @@
         }
     }
 
-    mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
-    mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+    mConfig.inputCfg.format = EFFECT_BUFFER_FORMAT;
+    mConfig.outputCfg.format = EFFECT_BUFFER_FORMAT;
     mConfig.inputCfg.samplingRate = thread->sampleRate();
     mConfig.outputCfg.samplingRate = mConfig.inputCfg.samplingRate;
     mConfig.inputCfg.bufferProvider.cookie = NULL;
@@ -413,12 +523,6 @@
     mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
     mConfig.inputCfg.buffer.frameCount = thread->frameCount();
     mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
-    if (mInBuffer != 0) {
-        mInBuffer->setFrameCount(mConfig.inputCfg.buffer.frameCount);
-    }
-    if (mOutBuffer != 0) {
-        mOutBuffer->setFrameCount(mConfig.outputCfg.buffer.frameCount);
-    }
 
     ALOGV("configure() %p thread %p buffer %p framecount %zu",
             this, thread.get(), mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
@@ -430,39 +534,72 @@
                                        &mConfig,
                                        &size,
                                        &cmdStatus);
-    if (status == 0) {
+    if (status == NO_ERROR) {
         status = cmdStatus;
+#ifdef FLOAT_EFFECT_CHAIN
+        mSupportsFloat = true;
+#endif
     }
-
-    if (status == 0 &&
-            (memcmp(&mDescriptor.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0)) {
-        uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
-        effect_param_t *p = (effect_param_t *)buf32;
-
-        p->psize = sizeof(uint32_t);
-        p->vsize = sizeof(uint32_t);
-        size = sizeof(int);
-        *(int32_t *)p->data = VISUALIZER_PARAM_LATENCY;
-
-        uint32_t latency = 0;
-        PlaybackThread *pbt = thread->mAudioFlinger->checkPlaybackThread_l(thread->mId);
-        if (pbt != NULL) {
-            latency = pbt->latency_l();
+#ifdef FLOAT_EFFECT_CHAIN
+    else {
+        ALOGV("EFFECT_CMD_SET_CONFIG failed with float format, retry with int16_t.");
+        mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+        mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+        status = mEffectInterface->command(EFFECT_CMD_SET_CONFIG,
+                                           sizeof(effect_config_t),
+                                           &mConfig,
+                                           &size,
+                                           &cmdStatus);
+        if (status == NO_ERROR) {
+            status = cmdStatus;
+            mSupportsFloat = false;
+            ALOGVV("config worked with 16 bit");
+        } else {
+            ALOGE("%s failed %d with int16_t (as well as float)", __func__, status);
         }
+    }
+#endif
 
-        *((int32_t *)p->data + 1)= latency;
-        mEffectInterface->command(EFFECT_CMD_SET_PARAM,
-                                  sizeof(effect_param_t) + 8,
-                                  &buf32,
-                                  &size,
-                                  &cmdStatus);
+    if (status == NO_ERROR) {
+        // Establish Buffer strategy
+        setInBuffer(mInBuffer);
+        setOutBuffer(mOutBuffer);
+
+        // Update visualizer latency
+        if (memcmp(&mDescriptor.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) {
+            uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
+            effect_param_t *p = (effect_param_t *)buf32;
+
+            p->psize = sizeof(uint32_t);
+            p->vsize = sizeof(uint32_t);
+            size = sizeof(int);
+            *(int32_t *)p->data = VISUALIZER_PARAM_LATENCY;
+
+            uint32_t latency = 0;
+            PlaybackThread *pbt = thread->mAudioFlinger->checkPlaybackThread_l(thread->mId);
+            if (pbt != NULL) {
+                latency = pbt->latency_l();
+            }
+
+            *((int32_t *)p->data + 1)= latency;
+            mEffectInterface->command(EFFECT_CMD_SET_PARAM,
+                    sizeof(effect_param_t) + 8,
+                    &buf32,
+                    &size,
+                    &cmdStatus);
+        }
     }
 
-    mMaxDisableWaitCnt = (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate) /
-            (1000 * mConfig.outputCfg.buffer.frameCount);
+    // mConfig.outputCfg.buffer.frameCount cannot be zero.
+    mMaxDisableWaitCnt = (uint32_t)std::max(
+            (uint64_t)1, // mMaxDisableWaitCnt must be greater than zero.
+            (uint64_t)MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate
+                / ((uint64_t)1000 * mConfig.outputCfg.buffer.frameCount));
 
 exit:
+    // TODO: consider clearing mConfig on error.
     mStatus = status;
+    ALOGVV("configure ended");
     return status;
 }
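Note: the new mMaxDisableWaitCnt expression widens the intermediate product to 64 bits and clamps the result to at least 1. Restated as a standalone helper with a worked value (the helper name is hypothetical, the arithmetic is the same as above):

    #include <algorithm>
    #include <cstdint>

    static uint32_t maxDisableWaitCnt(uint64_t maxDisableTimeMs, uint64_t sampleRate,
                                      uint64_t frameCount /* non-zero, see comment above */) {
        // e.g. 10000 ms * 48000 Hz / (1000 * 960 frames) = 500 process() periods
        return (uint32_t)std::max<uint64_t>(
                1, maxDisableTimeMs * sampleRate / (1000 * frameCount));
    }
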
 
@@ -774,6 +911,9 @@
 }
 
 void AudioFlinger::EffectModule::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    ALOGVV("setInBuffer %p",(&buffer));
+
+    // mConfig.inputCfg.buffer.frameCount may be zero if configure() is not called yet.
     if (buffer != 0) {
         mConfig.inputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.inputCfg.buffer.frameCount);
@@ -782,9 +922,42 @@
     }
     mInBuffer = buffer;
     mEffectInterface->setInBuffer(buffer);
+
+#ifdef FLOAT_EFFECT_CHAIN
+    // aux effects do in place conversion to float - we don't allocate mInConversionBuffer.
+    // Theoretically insert effects can also do in-place conversions (destroying
+    // the original buffer) when the output buffer is identical to the input buffer,
+    // but we don't optimize for it here.
+    const bool auxType = (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY;
+    if (!auxType && !mSupportsFloat && mInBuffer.get() != nullptr) {
+        // we need to translate - create hidl shared buffer and intercept
+        const size_t inFrameCount = mConfig.inputCfg.buffer.frameCount;
+        const int inChannels = audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
+        const size_t size = inChannels * inFrameCount * sizeof(int16_t);
+
+        ALOGV("%s: setInBuffer updating for inChannels:%d inFrameCount:%zu total size:%zu",
+                __func__, inChannels, inFrameCount, size);
+
+        if (size > 0 && (mInConversionBuffer.get() == nullptr
+                || size > mInConversionBuffer->getSize())) {
+            mInConversionBuffer.clear();
+            ALOGV("%s: allocating mInConversionBuffer %zu", __func__, size);
+            (void)EffectBufferHalInterface::allocate(size, &mInConversionBuffer);
+        }
+        if (mInConversionBuffer.get() != nullptr) {
+            mInConversionBuffer->setFrameCount(inFrameCount);
+            mEffectInterface->setInBuffer(mInConversionBuffer);
+        } else if (size > 0) {
+            ALOGE("%s cannot create mInConversionBuffer", __func__);
+        }
+    }
+#endif
 }
 
 void AudioFlinger::EffectModule::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    ALOGVV("setOutBuffer %p",(&buffer));
+
+    // mConfig.outputCfg.buffer.frameCount may be zero if configure() is not called yet.
     if (buffer != 0) {
         mConfig.outputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.outputCfg.buffer.frameCount);
@@ -793,6 +966,32 @@
     }
     mOutBuffer = buffer;
     mEffectInterface->setOutBuffer(buffer);
+
+#ifdef FLOAT_EFFECT_CHAIN
+    // Note: Any effect that does not accumulate does not need mOutConversionBuffer and
+    // can do in-place conversion from int16_t to float.  We don't optimize here.
+    if (!mSupportsFloat && mOutBuffer.get() != nullptr) {
+        const size_t outFrameCount = mConfig.outputCfg.buffer.frameCount;
+        const int outChannels = audio_channel_count_from_out_mask(mConfig.outputCfg.channels);
+        const size_t size = outChannels * outFrameCount * sizeof(int16_t);
+
+        ALOGV("%s: setOutBuffer updating for outChannels:%d outFrameCount:%zu total size:%zu",
+                __func__, outChannels, outFrameCount, size);
+
+        if (size > 0 && (mOutConversionBuffer.get() == nullptr
+                || size > mOutConversionBuffer->getSize())) {
+            mOutConversionBuffer.clear();
+            ALOGV("%s: allocating mOutConversionBuffer %zu", __func__, size);
+            (void)EffectBufferHalInterface::allocate(size, &mOutConversionBuffer);
+        }
+        if (mOutConversionBuffer.get() != nullptr) {
+            mOutConversionBuffer->setFrameCount(outFrameCount);
+            mEffectInterface->setOutBuffer(mOutConversionBuffer);
+        } else if (size > 0) {
+            ALOGE("%s cannot create mOutConversionBuffer", __func__);
+        }
+    }
+#endif
 }
 
 status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
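Note: when EFFECT_CMD_SET_CONFIG rejects the float format, setInBuffer()/setOutBuffer() interpose int16_t conversion buffers and process() translates around the engine call. Reduced to a sketch (the wrapper and the process callable are hypothetical; the memcpy_* helpers are the audio_utils primitives used above):

    #include <audio_utils/primitives.h>
    #include <cstddef>
    #include <cstdint>

    template <typename ProcessFn>   // e.g. [&] { mEffectInterface->process(); }
    void processWithInt16Shim(ProcessFn process,
                              const float *inF, int16_t *in16, size_t inSamples,
                              int16_t *out16, float *outF, size_t outSamples) {
        memcpy_to_i16_from_float(in16, inF, inSamples);     // chain float -> effect int16_t
        process();                                          // engine runs in int16_t
        memcpy_to_float_from_i16(outF, out16, outSamples);  // effect int16_t -> chain float
    }

The accumulate case additionally pre-converts the existing float output into mOutConversionBuffer before process(), so the engine has valid int16_t data to accumulate onto.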
@@ -1063,15 +1262,26 @@
     return s;
 }
 
+static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
+    std::stringstream ss;
+
+    if (buffer.get() == nullptr) {
+        return "nullptr"; // intentionally distinct from the pointer strings below
+    } else if (buffer->externalData() != nullptr) {
+        ss << (isInput ? buffer->externalData() : buffer->audioBuffer()->raw)
+                << " -> "
+                << (isInput ? buffer->audioBuffer()->raw : buffer->externalData());
+    } else {
+        ss << buffer->audioBuffer()->raw;
+    }
+    return ss.str();
+}
 
 void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args __unused)
 {
-    const size_t SIZE = 256;
-    char buffer[SIZE];
     String8 result;
 
-    snprintf(buffer, SIZE, "\tEffect ID %d:\n", mId);
-    result.append(buffer);
+    result.appendFormat("\tEffect ID %d:\n", mId);
 
     bool locked = AudioFlinger::dumpTryLock(mLock);
     // failed to lock - AudioFlinger is probably deadlocked
@@ -1080,59 +1290,64 @@
     }
 
     result.append("\t\tSession Status State Engine:\n");
-    snprintf(buffer, SIZE, "\t\t%05d   %03d    %03d   %p\n",
+    result.appendFormat("\t\t%05d   %03d    %03d   %p\n",
             mSessionId, mStatus, mState, mEffectInterface.get());
-    result.append(buffer);
 
     result.append("\t\tDescriptor:\n");
     char uuidStr[64];
     AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
-    snprintf(buffer, SIZE, "\t\t- UUID: %s\n", uuidStr);
-    result.append(buffer);
+    result.appendFormat("\t\t- UUID: %s\n", uuidStr);
     AudioEffect::guidToString(&mDescriptor.type, uuidStr, sizeof(uuidStr));
-    snprintf(buffer, SIZE, "\t\t- TYPE: %s\n", uuidStr);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "\t\t- apiVersion: %08X\n\t\t- flags: %08X (%s)\n",
+    result.appendFormat("\t\t- TYPE: %s\n", uuidStr);
+    result.appendFormat("\t\t- apiVersion: %08X\n\t\t- flags: %08X (%s)\n",
             mDescriptor.apiVersion,
             mDescriptor.flags,
             effectFlagsToString(mDescriptor.flags).string());
-    result.append(buffer);
-    snprintf(buffer, SIZE, "\t\t- name: %s\n",
+    result.appendFormat("\t\t- name: %s\n",
             mDescriptor.name);
-    result.append(buffer);
-    snprintf(buffer, SIZE, "\t\t- implementor: %s\n",
+
+    result.appendFormat("\t\t- implementor: %s\n",
             mDescriptor.implementor);
-    result.append(buffer);
+
+    result.appendFormat("\t\t- data: %s\n", mSupportsFloat ? "float" : "int16");
 
     result.append("\t\t- Input configuration:\n");
-    result.append("\t\t\tFrames  Smp rate Channels Format Buffer\n");
-    snprintf(buffer, SIZE, "\t\t\t%05zu   %05d    %08x %6d (%s) %p\n",
+    result.append("\t\t\tBuffer     Frames  Smp rate Channels Format\n");
+    result.appendFormat("\t\t\t%p %05zu   %05d    %08x %6d (%s)\n",
+            mConfig.inputCfg.buffer.raw,
             mConfig.inputCfg.buffer.frameCount,
             mConfig.inputCfg.samplingRate,
             mConfig.inputCfg.channels,
             mConfig.inputCfg.format,
-            formatToString((audio_format_t)mConfig.inputCfg.format).c_str(),
-            mConfig.inputCfg.buffer.raw);
-    result.append(buffer);
+            formatToString((audio_format_t)mConfig.inputCfg.format).c_str());
 
     result.append("\t\t- Output configuration:\n");
     result.append("\t\t\tBuffer     Frames  Smp rate Channels Format\n");
-    snprintf(buffer, SIZE, "\t\t\t%p %05zu   %05d    %08x %d (%s)\n",
+    result.appendFormat("\t\t\t%p %05zu   %05d    %08x %6d (%s)\n",
             mConfig.outputCfg.buffer.raw,
             mConfig.outputCfg.buffer.frameCount,
             mConfig.outputCfg.samplingRate,
             mConfig.outputCfg.channels,
             mConfig.outputCfg.format,
             formatToString((audio_format_t)mConfig.outputCfg.format).c_str());
-    result.append(buffer);
 
-    snprintf(buffer, SIZE, "\t\t%zu Clients:\n", mHandles.size());
-    result.append(buffer);
+#ifdef FLOAT_EFFECT_CHAIN
+
+    result.appendFormat("\t\t- HAL buffers:\n"
+            "\t\t\tIn(%s) InConversion(%s) Out(%s) OutConversion(%s)\n",
+            dumpInOutBuffer(true /* isInput */, mInBuffer).c_str(),
+            dumpInOutBuffer(true /* isInput */, mInConversionBuffer).c_str(),
+            dumpInOutBuffer(false /* isInput */, mOutBuffer).c_str(),
+            dumpInOutBuffer(false /* isInput */, mOutConversionBuffer).c_str());
+#endif
+
+    result.appendFormat("\t\t%zu Clients:\n", mHandles.size());
     result.append("\t\t\t  Pid Priority Ctrl Locked client server\n");
+    char buffer[256];
     for (size_t i = 0; i < mHandles.size(); ++i) {
         EffectHandle *handle = mHandles[i];
         if (handle != NULL && !handle->disconnected()) {
-            handle->dumpToBuffer(buffer, SIZE);
+            handle->dumpToBuffer(buffer, sizeof(buffer));
             result.append(buffer);
         }
     }
@@ -1602,8 +1817,11 @@
     // and sample format changes for effects.
     // Currently effects processing is only available for stereo, AUDIO_FORMAT_PCM_16_BIT
     // (4 bytes frame size)
+
     const size_t frameSize =
-            audio_bytes_per_sample(AUDIO_FORMAT_PCM_16_BIT) * min(FCC_2, thread->channelCount());
+            audio_bytes_per_sample(EFFECT_BUFFER_FORMAT)
+            * std::min((uint32_t)FCC_2, thread->channelCount());
+
     memset(mInBuffer->audioBuffer()->raw, 0, thread->frameCount() * frameSize);
     mInBuffer->commit();
 }
@@ -1718,8 +1936,13 @@
         // calling the process in effect engine
         size_t numSamples = thread->frameCount();
         sp<EffectBufferHalInterface> halBuffer;
+#ifdef FLOAT_EFFECT_CHAIN
+        status_t result = EffectBufferHalInterface::allocate(
+                numSamples * sizeof(float), &halBuffer);
+#else
         status_t result = EffectBufferHalInterface::allocate(
                 numSamples * sizeof(int32_t), &halBuffer);
+#endif
         if (result != OK) return result;
         effect->setInBuffer(halBuffer);
         // auxiliary effects output samples to chain input buffer for further processing
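Note: the auxiliary chain input buffer is mono, so numSamples here equals the thread frame count. Under FLOAT_EFFECT_CHAIN it holds float samples (and with FLOAT_AUX the tracks also accumulate into it in float); otherwise it stays the legacy 32-bit Q4.27 accumulator that process() converts down. Sizing, purely as an illustration:

    // 960-frame thread, mono aux buffer:
    //   FLOAT_EFFECT_CHAIN: 960 * sizeof(float)   = 3840 bytes
    //   legacy:             960 * sizeof(int32_t) = 3840 bytes (Q4.27 fixed point)
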
@@ -1959,19 +2182,6 @@
     }
 }
 
-static void dumpInOutBuffer(
-        char *dump, size_t dumpSize, bool isInput, EffectBufferHalInterface *buffer) {
-    if (buffer == nullptr) {
-        snprintf(dump, dumpSize, "%p", buffer);
-    } else if (buffer->externalData() != nullptr) {
-        snprintf(dump, dumpSize, "%p -> %p",
-                isInput ? buffer->externalData() : buffer->audioBuffer()->raw,
-                isInput ? buffer->audioBuffer()->raw : buffer->externalData());
-    } else {
-        snprintf(dump, dumpSize, "%p", buffer->audioBuffer()->raw);
-    }
-}
-
 void AudioFlinger::EffectChain::dump(int fd, const Vector<String16>& args)
 {
     const size_t SIZE = 256;
@@ -1989,15 +2199,13 @@
             result.append("\tCould not lock mutex:\n");
         }
 
-        char inBufferStr[64], outBufferStr[64];
-        dumpInOutBuffer(inBufferStr, sizeof(inBufferStr), true, mInBuffer.get());
-        dumpInOutBuffer(outBufferStr, sizeof(outBufferStr), false, mOutBuffer.get());
-        snprintf(buffer, SIZE, "\t%-*s%-*s   Active tracks:\n",
-                (int)strlen(inBufferStr), "In buffer    ",
-                (int)strlen(outBufferStr), "Out buffer      ");
-        result.append(buffer);
-        snprintf(buffer, SIZE, "\t%s   %s   %d\n", inBufferStr, outBufferStr, mActiveTrackCnt);
-        result.append(buffer);
+        const std::string inBufferStr = dumpInOutBuffer(true /* isInput */, mInBuffer);
+        const std::string outBufferStr = dumpInOutBuffer(false /* isInput */, mOutBuffer);
+        result.appendFormat("\t%-*s%-*s   Active tracks:\n",
+                (int)inBufferStr.size(), "In buffer    ",
+                (int)outBufferStr.size(), "Out buffer      ");
+        result.appendFormat("\t%s   %s   %d\n",
+                inBufferStr.c_str(), outBufferStr.c_str(), mActiveTrackCnt);
         write(fd, result.string(), result.size());
 
         for (size_t i = 0; i < numEffects; ++i) {
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index e29798b..eea3208 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -168,6 +168,12 @@
     bool     mSuspended;            // effect is suspended: temporarily disabled by framework
     bool     mOffloaded;            // effect is currently offloaded to the audio DSP
     wp<AudioFlinger>    mAudioFlinger;
+
+#ifdef FLOAT_EFFECT_CHAIN
+    bool    mSupportsFloat;         // effect supports float processing
+    sp<EffectBufferHalInterface> mInConversionBuffer;  // Buffers for HAL conversion if needed.
+    sp<EffectBufferHalInterface> mOutConversionBuffer;
+#endif
 };
 
 // The EffectHandle class implements the IEffect interface. It provides resources
@@ -308,14 +314,14 @@
     void setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
         mInBuffer = buffer;
     }
-    int16_t *inBuffer() const {
-        return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
+    effect_buffer_t *inBuffer() const {
+        return mInBuffer != 0 ? reinterpret_cast<effect_buffer_t*>(mInBuffer->ptr()) : NULL;
     }
     void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
         mOutBuffer = buffer;
     }
-    int16_t *outBuffer() const {
-        return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
+    effect_buffer_t *outBuffer() const {
+        return mOutBuffer != 0 ? reinterpret_cast<effect_buffer_t*>(mOutBuffer->ptr()) : NULL;
     }
 
     void incTrackCnt() { android_atomic_inc(&mTrackCnt); }
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index c10fa05..ace586c 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -138,8 +138,6 @@
 
 void FastMixer::onStateChange()
 {
-    // log that audio was turned on/off
-    LOG_AUDIO_STATE();
     const FastMixerState * const current = (const FastMixerState *) mCurrent;
     const FastMixerState * const previous = (const FastMixerState *) mPrevious;
     FastMixerDumpState * const dumpState = (FastMixerDumpState *) mDumpState;
@@ -336,7 +334,13 @@
 
 void FastMixer::onWork()
 {
-    LOG_HIST_TS();
+    // TODO: pass an ID parameter to indicate which time series we want to write to in NBLog.cpp
+    // Or: pass both of these into a single call with a boolean
+    if (mIsWarm) {
+        LOG_HIST_TS();
+    } else {
+        LOG_AUDIO_STATE();
+    }
     const FastMixerState * const current = (const FastMixerState *) mCurrent;
     FastMixerDumpState * const dumpState = (FastMixerDumpState *) mDumpState;
     const FastMixerState::Command command = mCommand;
diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/FastMixerState.h
index 5a55c7a..2be1e91 100644
--- a/services/audioflinger/FastMixerState.h
+++ b/services/audioflinger/FastMixerState.h
@@ -21,7 +21,7 @@
 #include <system/audio.h>
 #include <media/ExtendedAudioBufferProvider.h>
 #include <media/nbaio/NBAIO.h>
-#include <media/nbaio/NBLog.h>
+#include <media/nblog/NBLog.h>
 #include "FastThreadState.h"
 
 namespace android {
diff --git a/services/audioflinger/FastThread.cpp b/services/audioflinger/FastThread.cpp
index 85865b7..dc15487 100644
--- a/services/audioflinger/FastThread.cpp
+++ b/services/audioflinger/FastThread.cpp
@@ -297,7 +297,8 @@
                     size_t i = mBounds & (mDumpState->mSamplingN - 1);
                     mBounds = (mBounds & 0xFFFF0000) | ((mBounds + 1) & 0xFFFF);
                     if (mFull) {
-                        mBounds += 0x10000;
+                        //mBounds += 0x10000;
+                        __builtin_add_overflow(mBounds, 0x10000, &mBounds);
                     } else if (!(mBounds & (mDumpState->mSamplingN - 1))) {
                         mFull = true;
                     }
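Note: the previous "mBounds += 0x10000" relied on unsigned wraparound, which the unsigned-integer-overflow sanitizer flags; __builtin_add_overflow performs the same wrapping add explicitly and returns an overflow flag that is deliberately ignored here. Minimal standalone illustration:

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint32_t bounds = 0xFFFF0000u;
        (void)__builtin_add_overflow(bounds, 0x10000u, &bounds);  // wraps to 0 by design
        printf("%u\n", bounds);                                   // prints 0
        return 0;
    }
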
diff --git a/services/audioflinger/FastThreadState.h b/services/audioflinger/FastThreadState.h
index f18f846..54c0dc6 100644
--- a/services/audioflinger/FastThreadState.h
+++ b/services/audioflinger/FastThreadState.h
@@ -19,7 +19,7 @@
 
 #include "Configuration.h"
 #include <stdint.h>
-#include <media/nbaio/NBLog.h>
+#include <media/nblog/NBLog.h>
 
 namespace android {
 
diff --git a/services/audioflinger/OWNERS b/services/audioflinger/OWNERS
index 703e4d2..d02d9e0 100644
--- a/services/audioflinger/OWNERS
+++ b/services/audioflinger/OWNERS
@@ -1,3 +1,4 @@
 hunga@google.com
 jmtrivi@google.com
 mnaganov@google.com
+gkasten@google.com
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 1c1a989..e97bb06 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -68,8 +68,8 @@
             status_t    attachAuxEffect(int EffectId);
             void        setAuxBuffer(int EffectId, int32_t *buffer);
             int32_t     *auxBuffer() const { return mAuxBuffer; }
-            void        setMainBuffer(int16_t *buffer) { mMainBuffer = buffer; }
-            int16_t     *mainBuffer() const { return mMainBuffer; }
+            void        setMainBuffer(effect_buffer_t *buffer) { mMainBuffer = buffer; }
+            effect_buffer_t *mainBuffer() const { return mMainBuffer; }
             int         auxEffectId() const { return mAuxEffectId; }
     virtual status_t    getTimestamp(AudioTimestamp& timestamp);
             void        signal();
@@ -82,11 +82,11 @@
     virtual bool        isFastTrack() const { return (mFlags & AUDIO_OUTPUT_FLAG_FAST) != 0; }
 
 // implement volume handling.
-   VolumeShaper::Status applyVolumeShaper(
-                                const sp<VolumeShaper::Configuration>& configuration,
-                                const sp<VolumeShaper::Operation>& operation);
-sp<VolumeShaper::State> getVolumeShaperState(int id);
-    sp<VolumeHandler>   getVolumeHandler() { return mVolumeHandler; }
+    media::VolumeShaper::Status applyVolumeShaper(
+                                const sp<media::VolumeShaper::Configuration>& configuration,
+                                const sp<media::VolumeShaper::Operation>& operation);
+    sp<media::VolumeShaper::State> getVolumeShaperState(int id);
+    sp<media::VolumeHandler>   getVolumeHandler() { return mVolumeHandler; }
 
 protected:
     // for numerous
@@ -150,7 +150,8 @@
                                     // allocated statically at track creation time,
                                     // and is even allocated (though unused) for fast tracks
                                     // FIXME don't allocate track name for fast tracks
-    int16_t             *mMainBuffer;
+    effect_buffer_t     *mMainBuffer;
+
     int32_t             *mAuxBuffer;
     int                 mAuxEffectId;
     bool                mHasVolumeController;
@@ -163,7 +164,7 @@
 
     ExtendedTimestamp  mSinkTimestamp;
 
-    sp<VolumeHandler>  mVolumeHandler; // handles multiple VolumeShaper configs and operations
+    sp<media::VolumeHandler>  mVolumeHandler; // handles multiple VolumeShaper configs and operations
 
 private:
     // The following fields are only for fast tracks, and should be in a subclass
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 2aa14e6..d5def48 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1412,7 +1412,7 @@
     bool chainCreated = false;
 
     ALOGD_IF((mType == OFFLOAD) && !effect->isOffloadable(),
-             "addEffect_l() on offloaded thread %p: effect %s does not support offload flags %x",
+             "addEffect_l() on offloaded thread %p: effect %s does not support offload flags %#x",
                     this, effect->desc().name, effect->desc().flags);
 
     if (chain == 0) {
@@ -1837,10 +1837,13 @@
 sp<AudioFlinger::PlaybackThread::Track> AudioFlinger::PlaybackThread::createTrack_l(
         const sp<AudioFlinger::Client>& client,
         audio_stream_type_t streamType,
-        uint32_t sampleRate,
+        uint32_t *pSampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
         size_t *pFrameCount,
+        size_t *pNotificationFrameCount,
+        uint32_t notificationsPerBuffer,
+        float speed,
         const sp<IMemory>& sharedBuffer,
         audio_session_t sessionId,
         audio_output_flags_t *flags,
@@ -1850,9 +1853,16 @@
         audio_port_handle_t portId)
 {
     size_t frameCount = *pFrameCount;
+    size_t notificationFrameCount = *pNotificationFrameCount;
     sp<Track> track;
     status_t lStatus;
     audio_output_flags_t outputFlags = mOutput->flags;
+    audio_output_flags_t requestedFlags = *flags;
+
+    if (*pSampleRate == 0) {
+        *pSampleRate = mSampleRate;
+    }
+    uint32_t sampleRate = *pSampleRate;
 
     // special case for FAST flag considered OK if fast mixer is present
     if (hasFastMixer()) {
@@ -1929,36 +1939,114 @@
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_FAST);
       }
     }
-    // For normal PCM streaming tracks, update minimum frame count.
-    // For compatibility with AudioTrack calculation, buffer depth is forced
-    // to be at least 2 x the normal mixer frame count and cover audio hardware latency.
-    // This is probably too conservative, but legacy application code may depend on it.
-    // If you change this calculation, also review the start threshold which is related.
-    if (!(*flags & AUDIO_OUTPUT_FLAG_FAST)
-            && audio_has_proportional_frames(format) && sharedBuffer == 0) {
-        // this must match AudioTrack.cpp calculateMinFrameCount().
-        // TODO: Move to a common library
-        uint32_t latencyMs = 0;
-        lStatus = mOutput->stream->getLatency(&latencyMs);
-        if (lStatus != OK) {
-            ALOGE("Error when retrieving output stream latency: %d", lStatus);
+
+    if (!audio_has_proportional_frames(format)) {
+        if (sharedBuffer != 0) {
+            // Same comment as below about ignoring frameCount parameter for set()
+            frameCount = sharedBuffer->size();
+        } else if (frameCount == 0) {
+            frameCount = mNormalFrameCount;
+        }
+        if (notificationFrameCount != frameCount) {
+            notificationFrameCount = frameCount;
+        }
+    } else if (sharedBuffer != 0) {
+        // FIXME: Ensure client side memory buffers need
+        // not have additional alignment beyond sample
+        // (e.g. 16 bit stereo accessed as 32 bit frame).
+        size_t alignment = audio_bytes_per_sample(format);
+        if (alignment & 1) {
+            // for AUDIO_FORMAT_PCM_24_BIT_PACKED (not exposed through Java).
+            alignment = 1;
+        }
+        uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
+        size_t frameSize = channelCount * audio_bytes_per_sample(format);
+        if (channelCount > 1) {
+            // More than 2 channels does not require stronger alignment than stereo
+            alignment <<= 1;
+        }
+        if (((uintptr_t)sharedBuffer->pointer() & (alignment - 1)) != 0) {
+            ALOGE("Invalid buffer alignment: address %p, channel count %u",
+                  sharedBuffer->pointer(), channelCount);
+            lStatus = BAD_VALUE;
             goto Exit;
         }
-        uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate);
-        if (minBufCount < 2) {
-            minBufCount = 2;
+
+        // When initializing a shared buffer AudioTrack via constructors,
+        // there's no frameCount parameter.
+        // But when initializing a shared buffer AudioTrack via set(),
+        // there _is_ a frameCount parameter.  We silently ignore it.
+        frameCount = sharedBuffer->size() / frameSize;
+    } else {
+        size_t minFrameCount = 0;
+        // For fast tracks we try to respect the application's request for notifications per buffer.
+        if (*flags & AUDIO_OUTPUT_FLAG_FAST) {
+            if (notificationsPerBuffer > 0) {
+                // Avoid possible arithmetic overflow during multiplication.
+                if (notificationsPerBuffer > SIZE_MAX / mFrameCount) {
+                    ALOGE("Requested notificationPerBuffer=%u ignored for HAL frameCount=%zu",
+                          notificationsPerBuffer, mFrameCount);
+                } else {
+                    minFrameCount = mFrameCount * notificationsPerBuffer;
+                }
+            }
+        } else {
+            // For normal PCM streaming tracks, update minimum frame count.
+            // Buffer depth is forced to be at least 2 x the normal mixer frame count and
+            // cover audio hardware latency.
+            // This is probably too conservative, but legacy application code may depend on it.
+            // If you change this calculation, also review the start threshold which is related.
+            uint32_t latencyMs = latency_l();
+            if (latencyMs == 0) {
+                ALOGE("Error when retrieving output stream latency");
+                lStatus = UNKNOWN_ERROR;
+                goto Exit;
+            }
+
+            minFrameCount = AudioSystem::calculateMinFrameCount(latencyMs, mNormalFrameCount,
+                                mSampleRate, sampleRate, speed /*, 0 mNotificationsPerBufferReq*/);
+
         }
-        // For normal mixing tracks, if speed is > 1.0f (normal), AudioTrack
-        // or the client should compute and pass in a larger buffer request.
-        size_t minFrameCount =
-                minBufCount * sourceFramesNeededWithTimestretch(
-                        sampleRate, mNormalFrameCount,
-                        mSampleRate, AUDIO_TIMESTRETCH_SPEED_NORMAL /*speed*/);
-        if (frameCount < minFrameCount) { // including frameCount == 0
+        if (frameCount < minFrameCount) {
             frameCount = minFrameCount;
         }
     }
+
+    // Make sure that the application is notified with sufficient margin before underrun.
+    // The client can divide the AudioTrack buffer into sub-buffers,
+    // and expresses its preference to the server as the notification frame count.
+    if (sharedBuffer == 0 && audio_is_linear_pcm(format)) {
+        size_t maxNotificationFrames;
+        if (*flags & AUDIO_OUTPUT_FLAG_FAST) {
+            // notify every HAL buffer, regardless of the size of the track buffer
+            maxNotificationFrames = mFrameCount;
+        } else {
+            // For normal tracks, use at least double-buffering if no sample rate conversion,
+            // or at least triple-buffering if there is sample rate conversion
+            const int nBuffering = sampleRate == mSampleRate ? 2 : 3;
+            maxNotificationFrames = frameCount / nBuffering;
+            // If client requested a fast track but this was denied, then use the smaller maximum.
+            if (requestedFlags & AUDIO_OUTPUT_FLAG_FAST) {
+                size_t maxNotificationFramesFastDenied = FMS_20 * sampleRate / 1000;
+                if (maxNotificationFrames > maxNotificationFramesFastDenied) {
+                    maxNotificationFrames = maxNotificationFramesFastDenied;
+                }
+            }
+        }
+        if (notificationFrameCount == 0 || notificationFrameCount > maxNotificationFrames) {
+            if (notificationFrameCount == 0) {
+                ALOGD("Client defaulted notificationFrames to %zu for frameCount %zu",
+                    maxNotificationFrames, frameCount);
+            } else {
+                ALOGW("Client adjusted notificationFrames from %zu to %zu for frameCount %zu",
+                      notificationFrameCount, maxNotificationFrames, frameCount);
+            }
+            notificationFrameCount = maxNotificationFrames;
+        }
+    }
+
     *pFrameCount = frameCount;
+    *pNotificationFrameCount = notificationFrameCount;
 
     switch (mType) {
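Note: two pieces of arithmetic in this hunk are easy to misread; worked values under a common 48 kHz, 16-bit stereo configuration (illustrative only, and assuming FMS_20 is the 20 ms constant used elsewhere in this file):

    // Shared-buffer path: alignment starts at bytes-per-sample (2) and is doubled for
    // more than one channel, so the stereo buffer address must be 4-byte aligned and
    //   frameCount = sharedBuffer->size() / (2 ch * 2 bytes per sample).
    //
    // Streaming path without resampling: nBuffering = 2, so a 1920-frame track buffer
    // defaults to a notification every 960 frames; if a FAST request was denied, the
    // cap is FMS_20 * 48000 / 1000 = 960 frames (20 ms).
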
 
@@ -2449,7 +2537,7 @@
     free(mEffectBuffer);
     mEffectBuffer = NULL;
     if (mEffectBufferEnabled) {
-        mEffectBufferFormat = AUDIO_FORMAT_PCM_16_BIT; // Note: Effects support 16b only
+        mEffectBufferFormat = EFFECT_BUFFER_FORMAT;
         mEffectBufferSize = mNormalFrameCount * mChannelCount
                 * audio_bytes_per_sample(mEffectBufferFormat);
         (void)posix_memalign(&mEffectBuffer, 32, mEffectBufferSize);
@@ -2638,6 +2726,7 @@
 // shared by MIXER and DIRECT, overridden by DUPLICATING
 ssize_t AudioFlinger::PlaybackThread::threadLoop_write()
 {
+    LOG_HIST_TS();
     mInWrite = true;
     ssize_t bytesWritten;
     const size_t offset = mCurrentWriteLength - mBytesRemaining;
@@ -2795,8 +2884,7 @@
             &halInBuffer);
     if (result != OK) return result;
     halOutBuffer = halInBuffer;
-    int16_t *buffer = reinterpret_cast<int16_t*>(halInBuffer->externalData());
-
+    effect_buffer_t *buffer = reinterpret_cast<effect_buffer_t*>(halInBuffer->externalData());
     ALOGV("addEffectChain_l() %p on thread %p for session %d", chain.get(), this, session);
     if (session > AUDIO_SESSION_OUTPUT_MIX) {
         // Only one effect chain can be present in direct output thread and it uses
@@ -2804,10 +2892,14 @@
         if (mType != DIRECT) {
             size_t numSamples = mNormalFrameCount * mChannelCount;
             status_t result = EffectBufferHalInterface::allocate(
-                    numSamples * sizeof(int16_t),
+                    numSamples * sizeof(effect_buffer_t),
                     &halInBuffer);
             if (result != OK) return result;
+#ifdef FLOAT_EFFECT_CHAIN
+            buffer = halInBuffer->audioBuffer()->f32;
+#else
             buffer = halInBuffer->audioBuffer()->s16;
+#endif
             ALOGV("addEffectChain_l() creating new input buffer %p session %d",
                     buffer, session);
         }
@@ -2882,7 +2974,7 @@
             for (size_t i = 0; i < mTracks.size(); ++i) {
                 sp<Track> track = mTracks[i];
                 if (session == track->sessionId()) {
-                    track->setMainBuffer(reinterpret_cast<int16_t*>(mSinkBuffer));
+                    track->setMainBuffer(reinterpret_cast<effect_buffer_t*>(mSinkBuffer));
                     chain->decTrackCnt();
                 }
             }
@@ -3116,6 +3208,10 @@
 
                     threadLoop_standby();
 
+                    // This is where we go into standby
+                    if (!mStandby) {
+                        LOG_AUDIO_STATE();
+                    }
                     mStandby = true;
                 }
 
@@ -3601,7 +3697,7 @@
         // mNormalSink below
 {
     ALOGV("MixerThread() id=%d device=%#x type=%d", id, device, type);
-    ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%u, mFormat=%d, mFrameSize=%zu, "
+    ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
             "mFrameCount=%zu, mNormalFrameCount=%zu",
             mSampleRate, mChannelMask, mChannelCount, mFormat, mFrameSize, mFrameCount,
             mNormalFrameCount);
@@ -3669,7 +3765,7 @@
         NBAIO_Format origformat = format;
 #endif
         // adjust format to match that of the Fast Mixer
-        ALOGV("format changed from %d to %d", format.mFormat, fastMixerFormat);
+        ALOGV("format changed from %#x to %#x", format.mFormat, fastMixerFormat);
         format.mFormat = fastMixerFormat;
         format.mFrameSize = audio_bytes_per_sample(format.mFormat) * format.mChannelCount;
 
@@ -4205,10 +4301,16 @@
                     // because we're about to decrement the last sp<> on those tracks.
                     block = FastMixerStateQueue::BLOCK_UNTIL_ACKED;
                 } else {
-                    LOG_ALWAYS_FATAL("fast track %d should have been active; "
+                    // ALOGW rather than LOG_ALWAYS_FATAL because it seems there are cases where an
+                    // AudioTrack may start (which may not be with a start() but with a write()
+                    // after underrun) and immediately paused or released.  In that case the
+                    // FastTrack state hasn't had time to update.
+                    // TODO Remove the ALOGW when this theory is confirmed.
+                    ALOGW("fast track %d should have been active; "
                             "mState=%d, mTrackMask=%#x, recentUnderruns=%u, isShared=%d",
                             j, track->mState, state->mTrackMask, recentUnderruns,
                             track->sharedBuffer() != 0);
+                    // Since the FastMixer state already has the track inactive, do nothing here.
                 }
                 tracksToRemove->add(track);
                 // Avoids a misleading display in dumpsys
@@ -4461,7 +4563,7 @@
                 mAudioMixer->setParameter(
                         name,
                         AudioMixer::TRACK,
-                        AudioMixer::MIXER_FORMAT, (void *)AUDIO_FORMAT_PCM_16_BIT);
+                        AudioMixer::MIXER_FORMAT, (void *)EFFECT_BUFFER_FORMAT);
                 mAudioMixer->setParameter(
                         name,
                         AudioMixer::TRACK,
@@ -5453,7 +5555,7 @@
         mPausedWriteLength(0), mPausedBytesRemaining(0), mKeepWakeLock(true),
         mOffloadUnderrunPosition(~0LL)
 {
-    //FIXME: mStandby should be set to true by ThreadBase constructor
+    //FIXME: mStandby should be set to true by ThreadBase constructor
     mStandby = true;
     mKeepWakeLock = property_get_bool("ro.audio.offload_wakelock", true /* default_value */);
 }
@@ -6612,12 +6714,12 @@
 // RecordThread::createRecordTrack_l() must be called with AudioFlinger::mLock held
 sp<AudioFlinger::RecordThread::RecordTrack> AudioFlinger::RecordThread::createRecordTrack_l(
         const sp<AudioFlinger::Client>& client,
-        uint32_t sampleRate,
+        uint32_t *pSampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
         size_t *pFrameCount,
         audio_session_t sessionId,
-        size_t *notificationFrames,
+        size_t *pNotificationFrameCount,
         uid_t uid,
         audio_input_flags_t *flags,
         pid_t tid,
@@ -6625,16 +6727,30 @@
         audio_port_handle_t portId)
 {
     size_t frameCount = *pFrameCount;
+    size_t notificationFrameCount = *pNotificationFrameCount;
     sp<RecordTrack> track;
     status_t lStatus;
     audio_input_flags_t inputFlags = mInput->flags;
+    audio_input_flags_t requestedFlags = *flags;
+    uint32_t sampleRate;
+
+    lStatus = initCheck();
+    if (lStatus != NO_ERROR) {
+        ALOGE("createRecordTrack_l() audio driver not initialized");
+        goto Exit;
+    }
+
+    if (*pSampleRate == 0) {
+        *pSampleRate = mSampleRate;
+    }
+    sampleRate = *pSampleRate;
 
     // special case for FAST flag considered OK if fast capture is present
     if (hasFastCapture()) {
         inputFlags = (audio_input_flags_t)(inputFlags | AUDIO_INPUT_FLAG_FAST);
     }
 
-    // Check if requested flags are compatible with output stream flags
+    // Check if requested flags are compatible with input stream flags
     if ((*flags & inputFlags) != *flags) {
         ALOGW("createRecordTrack_l(): mismatch between requested flags (%08x) and"
                 " input flags (%08x)",
@@ -6689,12 +6805,20 @@
       }
     }
 
+    // If FAST or RAW flags were corrected, ask caller to request new input from audio policy
+    if ((*flags & AUDIO_INPUT_FLAG_FAST) !=
+            (requestedFlags & AUDIO_INPUT_FLAG_FAST)) {
+        *flags = (audio_input_flags_t) (*flags & ~(AUDIO_INPUT_FLAG_FAST | AUDIO_INPUT_FLAG_RAW));
+        lStatus = BAD_TYPE;
+        goto Exit;
+    }
+
     // compute track buffer size in frames, and suggest the notification frame count
     if (*flags & AUDIO_INPUT_FLAG_FAST) {
         // fast track: frame count is exactly the pipe depth
         frameCount = mPipeFramesP2;
         // ignore requested notificationFrames, and always notify exactly once every HAL buffer
-        *notificationFrames = mFrameCount;
+        notificationFrameCount = mFrameCount;
     } else {
         // not fast track: max notification period is resampled equivalent of one HAL buffer time
         //                 or 20 ms if there is a fast capture
@@ -6713,17 +6837,12 @@
         const size_t minFrameCount = maxNotificationFrames *
                 max(kMinNotifications, minNotificationsByMs);
         frameCount = max(frameCount, minFrameCount);
-        if (*notificationFrames == 0 || *notificationFrames > maxNotificationFrames) {
-            *notificationFrames = maxNotificationFrames;
+        if (notificationFrameCount == 0 || notificationFrameCount > maxNotificationFrames) {
+            notificationFrameCount = maxNotificationFrames;
         }
     }
     *pFrameCount = frameCount;
-
-    lStatus = initCheck();
-    if (lStatus != NO_ERROR) {
-        ALOGE("createRecordTrack_l() audio driver not initialized");
-        goto Exit;
-    }
+    *pNotificationFrameCount = notificationFrameCount;
 
     { // scope for mLock
         Mutex::Autolock _l(mLock);
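Note: returning BAD_TYPE after stripping AUDIO_INPUT_FLAG_FAST|RAW is a contract with the caller: the record path is expected to re-run input selection with the downgraded flags and call back in. A caller-side sketch of that retry, with all names hypothetical, only to show the shape of the contract:

    audio_input_flags_t flags = requestedFlags;
    status_t status = openRecordOnInput(&flags /* , ... */);  // wraps createRecordTrack_l()
    if (status == BAD_TYPE) {
        // FAST/RAW were cleared by the thread; pick a new input for the reduced flags
        // and try exactly once more.
        status = openRecordOnInput(&flags /* , ... */);
    }
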
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index dd2b89b..17f26c5 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -485,6 +485,7 @@
                 // Updated by updateSuspendedSessions_l() only.
                 KeyedVector< audio_session_t, KeyedVector< int, sp<SuspendedSessionDesc> > >
                                         mSuspendedSessions;
+                // TODO: add comment and adjust size as needed
                 static const size_t     kLogSize = 4 * 1024;
                 sp<NBLog::Writer>       mNBLogWriter;
                 bool                    mSystemReady;
@@ -705,10 +706,13 @@
                 sp<Track>   createTrack_l(
                                 const sp<AudioFlinger::Client>& client,
                                 audio_stream_type_t streamType,
-                                uint32_t sampleRate,
+                                uint32_t *sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
                                 size_t *pFrameCount,
+                                size_t *pNotificationFrameCount,
+                                uint32_t notificationsPerBuffer,
+                                float speed,
                                 const sp<IMemory>& sharedBuffer,
                                 audio_session_t sessionId,
                                 audio_output_flags_t *flags,
@@ -737,11 +741,10 @@
     virtual     String8     getParameters(const String8& keys);
     virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0);
                 status_t    getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames);
-                // FIXME rename mixBuffer() to sinkBuffer() and remove int16_t* dependency.
                 // Consider also removing and passing an explicit mMainBuffer initialization
                 // parameter to AF::PlaybackThread::Track::Track().
-                int16_t     *mixBuffer() const {
-                    return reinterpret_cast<int16_t *>(mSinkBuffer); };
+                effect_buffer_t *sinkBuffer() const {
+                    return reinterpret_cast<effect_buffer_t *>(mSinkBuffer); };
 
     virtual     void detachAuxEffect_l(int effectId);
                 status_t attachAuxEffect(const sp<AudioFlinger::PlaybackThread::Track>& track,
@@ -984,6 +987,7 @@
     sp<NBAIO_Source>        mTeeSource;
 #endif
     uint32_t                mScreenState;   // cached copy of gScreenState
+    // TODO: add comment and adjust size as needed
     static const size_t     kFastMixerLogSize = 8 * 1024;
     sp<NBLog::Writer>       mFastMixerNBLogWriter;
 
@@ -1323,12 +1327,12 @@
 
             sp<AudioFlinger::RecordThread::RecordTrack>  createRecordTrack_l(
                     const sp<AudioFlinger::Client>& client,
-                    uint32_t sampleRate,
+                    uint32_t *pSampleRate,
                     audio_format_t format,
                     audio_channel_mask_t channelMask,
                     size_t *pFrameCount,
                     audio_session_t sessionId,
-                    size_t *notificationFrames,
+                    size_t *pNotificationFrameCount,
                     uid_t uid,
                     audio_input_flags_t *flags,
                     pid_t tid,
@@ -1456,6 +1460,7 @@
             // If a fast capture is present, the Pipe as IMemory, otherwise clear
             sp<IMemory>                         mPipeMemory;
 
+            // TODO: add comment and adjust size as needed
             static const size_t                 kFastCaptureLogSize = 4 * 1024;
             sp<NBLog::Writer>                   mFastCaptureNBLogWriter;
 
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index d4ce0b4..a3ea756 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -192,7 +192,7 @@
                                     // where for AudioTrack (but not AudioRecord),
                                     // 8-bit PCM samples are stored as 16-bit
     const size_t        mFrameCount;// size of track buffer given at createTrack() or
-                                    // openRecord(), and then adjusted as needed
+                                    // createRecord(), and then adjusted as needed
 
     const audio_session_t mSessionId;
     uid_t               mUid;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index fe93367..cdd8ca0 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -52,6 +52,7 @@
 
 namespace android {
 
+using media::VolumeShaper;
 // ----------------------------------------------------------------------------
 //      TrackBase
 // ----------------------------------------------------------------------------
@@ -394,12 +395,12 @@
     mSharedBuffer(sharedBuffer),
     mStreamType(streamType),
     mName(-1),  // see note below
-    mMainBuffer(thread->mixBuffer()),
+    mMainBuffer(thread->sinkBuffer()),
     mAuxBuffer(NULL),
     mAuxEffectId(0), mHasVolumeController(false),
     mPresentationCompleteFrames(0),
     mFrameMap(16 /* sink-frame-to-track-frame map memory */),
-    mVolumeHandler(new VolumeHandler(sampleRate)),
+    mVolumeHandler(new media::VolumeHandler(sampleRate)),
     // mSinkTimestamp
     mFastIndex(-1),
     mCachedVolume(1.0),
@@ -1562,14 +1563,16 @@
     mRecordTrack->destroy();
 }
 
-status_t AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event,
-        audio_session_t triggerSession) {
+binder::Status AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event,
+        int /*audio_session_t*/ triggerSession) {
     ALOGV("RecordHandle::start()");
-    return mRecordTrack->start((AudioSystem::sync_event_t)event, triggerSession);
+    return binder::Status::fromStatusT(
+        mRecordTrack->start((AudioSystem::sync_event_t)event, (audio_session_t) triggerSession));
 }
 
-void AudioFlinger::RecordHandle::stop() {
+binder::Status AudioFlinger::RecordHandle::stop() {
     stop_nonvirtual();
+    return binder::Status::ok();
 }
 
 void AudioFlinger::RecordHandle::stop_nonvirtual() {
@@ -1577,12 +1580,6 @@
     mRecordTrack->stop();
 }
 
-status_t AudioFlinger::RecordHandle::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    return BnAudioRecord::onTransact(code, data, reply, flags);
-}
-
 // ----------------------------------------------------------------------------
 
 // RecordTrack constructor must be called with AudioFlinger::mLock and ThreadBase::mLock held
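
The RecordHandle change above reflects the move to an AIDL-generated IAudioRecord interface: server-side methods now return binder::Status instead of status_t, and the hand-written onTransact() forwarding is no longer needed. A minimal sketch of the conversion pattern, assuming only <binder/Status.h> (legacyStart is a hypothetical helper, not AOSP code):

    #include <binder/Status.h>
    #include <utils/Errors.h>

    using android::status_t;
    using android::NO_ERROR;
    using android::binder::Status;

    // Hypothetical legacy helper that still reports a status_t.
    static status_t legacyStart() { return NO_ERROR; }

    // Binder-facing wrappers: wrap a status_t, or report plain success.
    static Status startForBinder() { return Status::fromStatusT(legacyStart()); }
    static Status stopForBinder()  { return Status::ok(); }
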
diff --git a/services/audioflinger/TypedLogger.h b/services/audioflinger/TypedLogger.h
index 7e77e89..38c3c02 100644
--- a/services/audioflinger/TypedLogger.h
+++ b/services/audioflinger/TypedLogger.h
@@ -18,7 +18,9 @@
 #ifndef ANDROID_TYPED_LOGGER_H
 #define ANDROID_TYPED_LOGGER_H
 
-#include <media/nbaio/NBLog.h>
+// This is the client API for the typed logger.
+
+#include <media/nblog/NBLog.h>
 #include <algorithm>
 
 /*
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 7b19f58..f2cb25f 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -109,12 +109,7 @@
     //
 
     // request an output appropriate for playback of the supplied stream type and parameters
-    virtual audio_io_handle_t getOutput(audio_stream_type_t stream,
-                                        uint32_t samplingRate,
-                                        audio_format_t format,
-                                        audio_channel_mask_t channelMask,
-                                        audio_output_flags_t flags,
-                                        const audio_offload_info_t *offloadInfo) = 0;
+    virtual audio_io_handle_t getOutput(audio_stream_type_t stream) = 0;
     virtual status_t getOutputForAttr(const audio_attributes_t *attr,
                                         audio_io_handle_t *output,
                                         audio_session_t session,
@@ -360,6 +355,6 @@
 extern "C" void destroyAudioPolicyManager(AudioPolicyInterface *interface);
 
 
-}; // namespace android
+} // namespace android
 
 #endif // ANDROID_AUDIOPOLICY_INTERFACE_H
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 31f0550..9bd68e1 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -35,7 +35,8 @@
  * A device mask for all audio input devices that are considered "virtual" when evaluating
  * active inputs in getActiveInputs()
  */
-#define APM_AUDIO_IN_DEVICE_VIRTUAL_ALL  (AUDIO_DEVICE_IN_REMOTE_SUBMIX)
+#define APM_AUDIO_IN_DEVICE_VIRTUAL_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX|\
+        AUDIO_DEVICE_IN_BUS|AUDIO_DEVICE_IN_FM_TUNER)
 
 
 /**
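
The widened APM_AUDIO_IN_DEVICE_VIRTUAL_ALL mask above determines which input device types are ignored when looking for "real" active inputs. A sketch of how such a mask is typically consulted, assuming the macro defined above (the actual helper in policy.h may differ; the function name here is illustrative):

    #include <policy.h>
    #include <system/audio.h>

    // Illustrative check: an input device is "virtual" when, after stripping the
    // input direction bit, its single type bit is covered by the mask.
    static inline bool isVirtualInputDevice(audio_devices_t device)
    {
        if ((device & AUDIO_DEVICE_BIT_IN) == 0) {
            return false;                       // not an input device type
        }
        device = (audio_devices_t)(device & ~AUDIO_DEVICE_BIT_IN);
        return __builtin_popcount(device) == 1 &&
               (device & ~APM_AUDIO_IN_DEVICE_VIRTUAL_ALL) == 0;
    }
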
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioCollections.h b/services/audiopolicy/common/managerdefinitions/include/AudioCollections.h
index 8f00d22..f86e75a 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioCollections.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioCollections.h
@@ -41,4 +41,4 @@
     status_t dump(int fd, int spaces) const;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioGain.h b/services/audiopolicy/common/managerdefinitions/include/AudioGain.h
index cea5c0b..4ac508f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioGain.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioGain.h
@@ -66,4 +66,4 @@
     bool              mUseInChannelMask;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index b169bac..b25d6d4 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -34,8 +34,8 @@
 class AudioInputDescriptor: public AudioPortConfig, public AudioSessionInfoProvider
 {
 public:
-    explicit AudioInputDescriptor(const sp<IOProfile>& profile);
-    void setIoHandle(audio_io_handle_t ioHandle);
+    explicit AudioInputDescriptor(const sp<IOProfile>& profile,
+                                  AudioPolicyClientInterface *clientInterface);
     audio_port_handle_t getId() const;
     audio_module_handle_t getModuleHandle() const;
     uint32_t getOpenRefCount() const;
@@ -73,6 +73,20 @@
 
     void setPatchHandle(audio_patch_handle_t handle);
 
+    status_t open(const audio_config_t *config,
+                  audio_devices_t device,
+                  const String8& address,
+                  audio_source_t source,
+                  audio_input_flags_t flags,
+                  audio_io_handle_t *input);
+    // Called when a stream is about to be started.
+    // Note: called after AudioSession::changeActiveCount(1)
+    status_t start();
+    // Called after a stream is stopped
+    // Note: called after AudioSession::changeActiveCount(-1)
+    void stop();
+    void close();
+
 private:
     audio_patch_handle_t          mPatchHandle;
     audio_port_handle_t           mId;
@@ -85,6 +99,7 @@
     // a particular input started and prevent preemption of this active input by this session.
     // We also inherit sessions from the preempted input to avoid a 3 way preemption loop etc...
     SortedVector<audio_session_t> mPreemptedSessions;
+    AudioPolicyClientInterface *mClientInterface;
 };
 
 class AudioInputCollection :
@@ -112,4 +127,4 @@
 };
 
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index c09cb5a..2803ec1 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -101,8 +101,6 @@
 
     status_t    dump(int fd);
 
-    void setIoHandle(audio_io_handle_t ioHandle);
-
     virtual audio_devices_t device() const;
     virtual bool sharesHwModuleWith(const sp<AudioOutputDescriptor>& outputDesc);
     virtual audio_devices_t supportedDevices();
@@ -122,6 +120,20 @@
                            const struct audio_port_config *srcConfig = NULL) const;
     virtual void toAudioPort(struct audio_port *port) const;
 
+            status_t open(const audio_config_t *config,
+                          audio_devices_t device,
+                          const String8& address,
+                          audio_stream_type_t stream,
+                          audio_output_flags_t flags,
+                          audio_io_handle_t *output);
+            // Called when a stream is about to be started
+            // Note: called before changeRefCount(1);
+            status_t start();
+            // Called after a stream is stopped.
+            // Note: called after changeRefCount(-1);
+            void stop();
+            void close();
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
@@ -208,4 +220,4 @@
 };
 
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h b/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h
index 385f257..c1c3f3c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h
@@ -16,6 +16,7 @@
 
 #pragma once
 
+#include "HandleGenerator.h"
 #include <system/audio.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
@@ -24,7 +25,7 @@
 
 namespace android {
 
-class AudioPatch : public RefBase
+class AudioPatch : public RefBase, private HandleGenerator<audio_patch_handle_t>
 {
 public:
     AudioPatch(const struct audio_patch *patch, uid_t uid);
@@ -35,9 +36,6 @@
     struct audio_patch mPatch;
     uid_t mUid;
     audio_patch_handle_t mAfPatchHandle;
-
-private:
-    static volatile int32_t mNextUniqueId;
 };
 
 class AudioPatchCollection : public DefaultKeyedVector<audio_patch_handle_t, sp<AudioPatch> >
@@ -52,4 +50,4 @@
     status_t dump(int fd) const;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index f2756b5..43f6ed6 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -39,14 +39,13 @@
                       DeviceVector &availableOutputDevices,
                       DeviceVector &availableInputDevices,
                       sp<DeviceDescriptor> &defaultOutputDevices,
-                      bool &isSpeakerDrcEnabled,
                       VolumeCurvesCollection *volumes = nullptr)
         : mHwModules(hwModules),
           mAvailableOutputDevices(availableOutputDevices),
           mAvailableInputDevices(availableInputDevices),
           mDefaultOutputDevices(defaultOutputDevices),
           mVolumeCurves(volumes),
-          mIsSpeakerDrcEnabled(isSpeakerDrcEnabled)
+          mIsSpeakerDrcEnabled(false)
     {}
 
     void setVolumes(const VolumeCurvesCollection &volumes)
@@ -80,6 +79,8 @@
         mAvailableOutputDevices.add(availableOutputDevices);
     }
 
+    bool isSpeakerDrcEnabled() const { return mIsSpeakerDrcEnabled; }
+
     void setSpeakerDrcEnabled(bool isSpeakerDrcEnabled)
     {
         mIsSpeakerDrcEnabled = isSpeakerDrcEnabled;
@@ -112,7 +113,7 @@
         mAvailableOutputDevices.add(mDefaultOutputDevices);
         mAvailableInputDevices.add(defaultInputDevice);
 
-        module = new HwModule("primary");
+        module = new HwModule(AUDIO_HARDWARE_MODULE_ID_PRIMARY);
 
         sp<OutputProfile> outProfile;
         outProfile = new OutputProfile(String8("primary"));
@@ -121,7 +122,7 @@
                 new AudioProfile(AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 44100));
         outProfile->addSupportedDevice(mDefaultOutputDevices);
         outProfile->setFlags(AUDIO_OUTPUT_FLAG_PRIMARY);
-        module->mOutputProfiles.add(outProfile);
+        module->addOutputProfile(outProfile);
 
         sp<InputProfile> inProfile;
         inProfile = new InputProfile(String8("primary"));
@@ -129,7 +130,7 @@
         inProfile->addAudioProfile(
                 new AudioProfile(AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000));
         inProfile->addSupportedDevice(defaultInputDevice);
-        module->mInputProfiles.add(inProfile);
+        module->addInputProfile(inProfile);
 
         mHwModules.add(module);
     }
@@ -140,7 +141,10 @@
     DeviceVector &mAvailableInputDevices;
     sp<DeviceDescriptor> &mDefaultOutputDevices;
     VolumeCurvesCollection *mVolumeCurves;
-    bool &mIsSpeakerDrcEnabled;
+    // TODO: remove when the legacy conf file is removed. True on devices that use DRC on the
+    // DEVICE_CATEGORY_SPEAKER path to boost soft sounds; used to adjust volume curves accordingly.
+    // Note: also remove speaker_drc_enabled from the global configuration of the XML config file.
+    // TODO: remove when the legacy conf file is removed. True on devices that use DRC on the
+    // DEVICE_CATEGORY_SPEAKER path to boost soft sounds; used to adjust volume curves accordingly.
+    // Note: also remove speaker_drc_enabled from the global configuration of the XML config file.
+    bool mIsSpeakerDrcEnabled;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 0bacef7..8fc6fe9 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -83,4 +83,4 @@
     status_t dump(int fd) const;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPort.h b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
index 4f79ed2..caf3c02 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
@@ -18,6 +18,7 @@
 
 #include "AudioCollections.h"
 #include "AudioProfile.h"
+#include "HandleGenerator.h"
 #include <utils/String8.h>
 #include <utils/Vector.h>
 #include <utils/RefBase.h>
@@ -32,7 +33,7 @@
 class AudioRoute;
 typedef Vector<sp<AudioGain> > AudioGainCollection;
 
-class AudioPort : public virtual RefBase
+class AudioPort : public virtual RefBase, private HandleGenerator<audio_port_handle_t>
 {
 public:
     AudioPort(const String8& name, audio_port_type_t type,  audio_port_role_t role) :
@@ -83,12 +84,7 @@
     bool hasDynamicAudioProfile() const { return mProfiles.hasDynamicProfile(); }
 
     // searches for an exact match
-    status_t checkExactAudioProfile(uint32_t samplingRate,
-                                    audio_channel_mask_t channelMask,
-                                    audio_format_t format) const
-    {
-        return mProfiles.checkExactProfile(samplingRate, channelMask, format);
-    }
+    virtual status_t checkExactAudioProfile(const struct audio_port_config *config) const;
 
     // searches for a compatible match, currently implemented for input
     // parameters are input|output, returned value is the best match.
@@ -152,7 +148,6 @@
     uint32_t mFlags; // attribute flags mask (e.g primary output, direct output...).
     AudioProfileVector mProfiles; // AudioProfiles supported by this port (format, Rates, Channels)
     AudioRouteVector mRoutes; // Routes involving this port
-    static volatile int32_t mNextUniqueId;
 };
 
 class AudioPortConfig : public virtual RefBase
@@ -176,4 +171,4 @@
     struct audio_gain_config mGain;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioProfile.h b/services/audiopolicy/common/managerdefinitions/include/AudioProfile.h
index 404e27d..8741c66 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioProfile.h
@@ -349,4 +349,4 @@
 
 bool operator == (const AudioProfile &left, const AudioProfile &right);
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioRoute.h b/services/audiopolicy/common/managerdefinitions/include/AudioRoute.h
index df54f48..6b24fde 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioRoute.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioRoute.h
@@ -55,4 +55,4 @@
 
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioSession.h b/services/audiopolicy/common/managerdefinitions/include/AudioSession.h
index cedf22d..0d19373 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioSession.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioSession.h
@@ -102,4 +102,4 @@
     status_t dump(int fd, int spaces) const;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioSourceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioSourceDescriptor.h
index 7e1e24d..0d90f42 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioSourceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioSourceDescriptor.h
@@ -56,4 +56,4 @@
     status_t dump(int fd) const;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/ConfigParsingUtils.h b/services/audiopolicy/common/managerdefinitions/include/ConfigParsingUtils.h
index ee95ceb..a007854 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ConfigParsingUtils.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ConfigParsingUtils.h
@@ -56,4 +56,4 @@
     static status_t loadHwModule(cnode *root, sp<HwModule> &module, AudioPolicyConfig &config);
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 1a644d7..92a4c3e 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -91,4 +91,4 @@
     audio_devices_t mDeviceTypes;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index 9ea0aea..04831c6 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -69,4 +69,4 @@
     static const uint32_t MAX_EFFECTS_MEMORY = 512;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/Gains.h b/services/audiopolicy/common/managerdefinitions/include/Gains.h
index 34afc8c..8332af9 100644
--- a/services/audiopolicy/common/managerdefinitions/include/Gains.h
+++ b/services/audiopolicy/common/managerdefinitions/include/Gains.h
@@ -56,4 +56,4 @@
     static const VolumeCurvePoint *sVolumeProfiles[AUDIO_STREAM_CNT][DEVICE_CATEGORY_CNT];
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/HandleGenerator.h b/services/audiopolicy/common/managerdefinitions/include/HandleGenerator.h
new file mode 100644
index 0000000..737a2e0
--- /dev/null
+++ b/services/audiopolicy/common/managerdefinitions/include/HandleGenerator.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+#include <limits>
+
+namespace android {
+
+template<typename T>
+class HandleGenerator {
+  protected:
+    static T getNextHandle();
+};
+
+template<typename T>
+T HandleGenerator<T>::getNextHandle() {
+    static std::atomic<uint32_t> mNextUniqueId(1);
+    uint32_t id = mNextUniqueId++;
+    while (id > std::numeric_limits<T>::max()) {
+        id -= std::numeric_limits<T>::max();
+    }
+    return static_cast<T>(id);
+}
+
+}  // namespace android
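
HandleGenerator replaces the per-class android_atomic_inc counters removed below for AudioPatch and AudioPort handles. A minimal usage sketch, assuming the header added above (DemoPatch and demo_handle_t are illustrative, not AOSP types):

    #include "HandleGenerator.h"
    #include <cstdint>

    namespace android {

    typedef int32_t demo_handle_t;   // stand-in for audio_patch_handle_t and friends

    class DemoPatch : private HandleGenerator<demo_handle_t> {
    public:
        DemoPatch() : mHandle(getNextHandle()) {}   // process-wide unique, wraps at max()
        demo_handle_t handle() const { return mHandle; }
    private:
        const demo_handle_t mHandle;
    };

    }  // namespace android
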
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index 29b6b9c..cb9f49e 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -44,12 +44,10 @@
 
     const char *getName() const { return mName.string(); }
 
-
     const DeviceVector &getDeclaredDevices() const { return mDeclaredDevices; }
     void setDeclaredDevices(const DeviceVector &devices);
 
     const InputProfileCollection &getInputProfiles() const { return mInputProfiles; }
-
     const OutputProfileCollection &getOutputProfiles() const { return mOutputProfiles; }
 
     void setProfiles(const IOProfileCollection &profiles);
@@ -76,6 +74,7 @@
     status_t removeInputProfile(const String8& name);
 
     audio_module_handle_t getHandle() const { return mHandle; }
+    void setHandle(audio_module_handle_t handle);
 
     sp<AudioPort> findPortByTagName(const String8 &tagName) const
     {
@@ -85,14 +84,13 @@
     // TODO remove from here (split serialization)
     void dump(int fd);
 
+private:
+    void refreshSupportedDevices();
+
     const String8 mName; // base name of the audio HW module (primary, a2dp ...)
     audio_module_handle_t mHandle;
     OutputProfileCollection mOutputProfiles; // output profiles exposed by this module
     InputProfileCollection mInputProfiles;  // input profiles exposed by this module
-
-private:
-    void refreshSupportedDevices();
-
     uint32_t mHalVersion; // audio HAL API version
     DeviceVector mDeclaredDevices; // devices declared in audio_policy configuration file.
     AudioRouteVector mRoutes;
@@ -114,4 +112,4 @@
     status_t dump(int fd) const;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index ec04ef7..24fe7cb 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -34,7 +34,11 @@
 {
 public:
     IOProfile(const String8 &name, audio_port_role_t role)
-        : AudioPort(name, AUDIO_PORT_TYPE_MIX, role) {}
+        : AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
+          maxOpenCount((role == AUDIO_PORT_ROLE_SOURCE) ? 1 : 0),
+          curOpenCount(0),
+          maxActiveCount(1),
+          curActiveCount(0) {}
 
     // For a Profile aka MixPort, tag name and name are equivalent.
     virtual const String8 getTagName() const { return getName(); }
@@ -103,6 +107,34 @@
 
     const DeviceVector &getSupportedDevices() const { return mSupportedDevices; }
 
+    bool canOpenNewIo() {
+        if (maxOpenCount == 0 || curOpenCount < maxOpenCount) {
+            return true;
+        }
+        return false;
+    }
+
+    bool canStartNewIo() {
+        if (maxActiveCount == 0 || curActiveCount < maxActiveCount) {
+            return true;
+        }
+        return false;
+    }
+
+    // Maximum number of input or output streams that can be simultaneously opened for this profile.
+    // By convention 0 means no limit. To respect legacy behavior, initialized to 1 for output
+    // profiles and 0 for input profiles
+    uint32_t     maxOpenCount;
+    // Number of streams currently opened for this profile.
+    uint32_t     curOpenCount;
+    // Maximum number of input or output streams that can be simultaneously active for this profile.
+    // By convention 0 means no limit. To respect legacy behavior, initialized to 1 for both
+    // output and input profiles (matching the constructor above).
+    uint32_t     maxActiveCount;
+    // Number of streams currently active for this profile. This is not the number of active clients
+    // (AudioTrack or AudioRecord) but the number of active HAL streams.
+    uint32_t     curActiveCount;
+
 private:
     DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
 };
@@ -119,4 +151,4 @@
     explicit OutputProfile(const String8 &name) : IOProfile(name, AUDIO_PORT_ROLE_SOURCE) {}
 };
 
-}; // namespace android
+} // namespace android
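
The new counters formalize how many HAL streams a mix port may have open and active at once. A standalone sketch of the bookkeeping convention (plain C++, not the AOSP classes), matching the open, start, stop, close sequence that the descriptor code later in this change performs against curOpenCount and curActiveCount:

    #include <cassert>
    #include <cstdint>

    // Stand-in for the IOProfile counters; 0 means "no limit".
    struct ProfileCounters {
        uint32_t maxOpenCount = 1, curOpenCount = 0;
        uint32_t maxActiveCount = 1, curActiveCount = 0;

        bool canOpenNewIo() const  { return maxOpenCount == 0 || curOpenCount < maxOpenCount; }
        bool canStartNewIo() const { return maxActiveCount == 0 || curActiveCount < maxActiveCount; }
    };

    int main()
    {
        ProfileCounters p;
        assert(p.canOpenNewIo());
        p.curOpenCount++;            // descriptor open() succeeded
        assert(p.canStartNewIo());
        p.curActiveCount++;          // descriptor start() succeeded
        assert(!p.canStartNewIo());  // a second concurrent start would be refused
        p.curActiveCount--;          // stop()
        p.curOpenCount--;            // close()
        return 0;
    }
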
diff --git a/services/audiopolicy/common/managerdefinitions/include/IVolumeCurvesCollection.h b/services/audiopolicy/common/managerdefinitions/include/IVolumeCurvesCollection.h
index a3de686..e1f6b08 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IVolumeCurvesCollection.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IVolumeCurvesCollection.h
@@ -25,6 +25,8 @@
 class IVolumeCurvesCollection
 {
 public:
+    virtual ~IVolumeCurvesCollection() = default;
+
     virtual void clearCurrentVolumeIndex(audio_stream_type_t stream) = 0;
     virtual void addCurrentVolumeIndex(audio_stream_type_t stream, audio_devices_t device,
                                        int index) = 0;
@@ -46,9 +48,6 @@
                                          audio_devices_t device) const = 0;
 
     virtual status_t dump(int fd) const = 0;
-
-protected:
-    virtual ~IVolumeCurvesCollection() {}
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/Serializer.h b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
index 078b582..29de848 100644
--- a/services/audiopolicy/common/managerdefinitions/include/Serializer.h
+++ b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
@@ -92,6 +92,8 @@
         static const char name[];
         static const char role[];
         static const char flags[];
+        static const char maxOpenCount[];
+        static const char maxActiveCount[];
     };
 
     typedef IOProfile Element;
@@ -234,4 +236,4 @@
     // Children are: ModulesTraits, VolumeTraits
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/SessionRoute.h b/services/audiopolicy/common/managerdefinitions/include/SessionRoute.h
index 75bfd9d..fc2c273 100644
--- a/services/audiopolicy/common/managerdefinitions/include/SessionRoute.h
+++ b/services/audiopolicy/common/managerdefinitions/include/SessionRoute.h
@@ -115,4 +115,4 @@
     const session_route_map_type_t mMapType;
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/SoundTriggerSession.h b/services/audiopolicy/common/managerdefinitions/include/SoundTriggerSession.h
index 420e6d7..f895599 100644
--- a/services/audiopolicy/common/managerdefinitions/include/SoundTriggerSession.h
+++ b/services/audiopolicy/common/managerdefinitions/include/SoundTriggerSession.h
@@ -30,4 +30,4 @@
     status_t acquireSession(audio_session_t session, audio_io_handle_t ioHandle);
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h
index 8822927..50b1037 100644
--- a/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h
@@ -107,4 +107,4 @@
     void setVolumeIndexMax(audio_stream_type_t stream,int volIndexMax);
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h b/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h
index fc95eb9..63c19d1 100644
--- a/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h
+++ b/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h
@@ -58,4 +58,4 @@
 template <>
 const RuleTypeConverter::Table RuleTypeConverter::mTable[];
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h b/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h
index e7fcefc..3e6b2b4 100644
--- a/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h
+++ b/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h
@@ -135,7 +135,13 @@
 
     float volIndexToDb(device_category deviceCat, int indexInUi) const
     {
-        return getCurvesFor(deviceCat)->volIndexToDb(indexInUi, mIndexMin, mIndexMax);
+        sp<VolumeCurve> vc = getCurvesFor(deviceCat);
+        if (vc != 0) {
+            return vc->volIndexToDb(indexInUi, mIndexMin, mIndexMax);
+        } else {
+            ALOGE("Invalid device category %d for Volume Curve", deviceCat);
+            return 0.0f;
+        }
     }
 
     void dump(int fd, int spaces, bool curvePoints = false) const;
@@ -230,4 +236,4 @@
     }
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioCollections.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioCollections.cpp
index 635fe4d..ca67b87 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioCollections.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioCollections.cpp
@@ -27,14 +27,12 @@
 
 sp<AudioPort> AudioPortVector::findByTagName(const String8 &tagName) const
 {
-    sp<AudioPort> port = 0;
-    for (size_t i = 0; i < size(); i++) {
-        if (itemAt(i)->getTagName() == tagName) {
-            port = itemAt(i);
-            break;
+    for (const auto& port : *this) {
+        if (port->getTagName() == tagName) {
+            return port;
         }
     }
-    return port;
+    return nullptr;
 }
 
 status_t AudioRouteVector::dump(int fd, int spaces) const
@@ -55,4 +53,4 @@
     return NO_ERROR;
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioGain.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioGain.cpp
index e454941..193d4a6 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioGain.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioGain.cpp
@@ -126,4 +126,4 @@
     write(fd, result.string(), result.size());
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 2492ed6..92332fb 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "APM::AudioInputDescriptor"
 //#define LOG_NDEBUG 0
 
+#include <AudioPolicyInterface.h>
 #include "AudioInputDescriptor.h"
 #include "IOProfile.h"
 #include "AudioGain.h"
@@ -26,10 +27,12 @@
 
 namespace android {
 
-AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile)
+AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile,
+                                           AudioPolicyClientInterface *clientInterface)
     : mIoHandle(0),
       mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL),
-      mProfile(profile), mPatchHandle(AUDIO_PATCH_HANDLE_NONE), mId(0)
+      mProfile(profile), mPatchHandle(AUDIO_PATCH_HANDLE_NONE), mId(0),
+      mClientInterface(clientInterface)
 {
     if (profile != NULL) {
         profile->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
@@ -39,12 +42,6 @@
     }
 }
 
-void AudioInputDescriptor::setIoHandle(audio_io_handle_t ioHandle)
-{
-    mId = AudioPort::getNextUniqueId();
-    mIoHandle = ioHandle;
-}
-
 audio_module_handle_t AudioInputDescriptor::getModuleHandle() const
 {
     if (mProfile == 0) {
@@ -192,6 +189,93 @@
     return config;
 }
 
+status_t AudioInputDescriptor::open(const audio_config_t *config,
+                                       audio_devices_t device,
+                                       const String8& address,
+                                       audio_source_t source,
+                                       audio_input_flags_t flags,
+                                       audio_io_handle_t *input)
+{
+    audio_config_t lConfig;
+    if (config == nullptr) {
+        lConfig = AUDIO_CONFIG_INITIALIZER;
+        lConfig.sample_rate = mSamplingRate;
+        lConfig.channel_mask = mChannelMask;
+        lConfig.format = mFormat;
+    } else {
+        lConfig = *config;
+    }
+
+    mDevice = device;
+
+    ALOGV("opening input for device %08x address %s profile %p name %s",
+          mDevice, address.string(), mProfile.get(), mProfile->getName().string());
+
+    status_t status = mClientInterface->openInput(mProfile->getModuleHandle(),
+                                                  input,
+                                                  &lConfig,
+                                                  &mDevice,
+                                                  address,
+                                                  source,
+                                                  flags);
+    LOG_ALWAYS_FATAL_IF(mDevice != device,
+                        "%s openInput returned device %08x when given device %08x",
+                        __FUNCTION__, mDevice, device);
+
+    if (status == NO_ERROR) {
+        LOG_ALWAYS_FATAL_IF(*input == AUDIO_IO_HANDLE_NONE,
+                            "%s openInput returned input handle %d for device %08x",
+                            __FUNCTION__, *input, device);
+        mSamplingRate = lConfig.sample_rate;
+        mChannelMask = lConfig.channel_mask;
+        mFormat = lConfig.format;
+        mId = AudioPort::getNextUniqueId();
+        mIoHandle = *input;
+        mProfile->curOpenCount++;
+    }
+
+    return status;
+}
+
+status_t AudioInputDescriptor::start()
+{
+    if (getAudioSessionCount(true/*activeOnly*/) == 1) {
+        if (!mProfile->canStartNewIo()) {
+            ALOGI("%s mProfile->curActiveCount %d", __func__, mProfile->curActiveCount);
+            return INVALID_OPERATION;
+        }
+        mProfile->curActiveCount++;
+    }
+    return NO_ERROR;
+}
+
+void AudioInputDescriptor::stop()
+{
+    if (!isActive()) {
+        LOG_ALWAYS_FATAL_IF(mProfile->curActiveCount < 1,
+                            "%s invalid profile active count %u",
+                            __func__, mProfile->curActiveCount);
+        mProfile->curActiveCount--;
+    }
+}
+
+void AudioInputDescriptor::close()
+{
+    if (mIoHandle != AUDIO_IO_HANDLE_NONE) {
+        mClientInterface->closeInput(mIoHandle);
+        LOG_ALWAYS_FATAL_IF(mProfile->curOpenCount < 1, "%s profile open count %u",
+                            __FUNCTION__, mProfile->curOpenCount);
+        // do not call stop() here as stop() is supposed to be called after
+        // AudioSession::changeActiveCount(-1) and we don't know how many sessions
+        // are still active at this time
+        if (isActive()) {
+            mProfile->curActiveCount--;
+        }
+        mProfile->curOpenCount--;
+        mIoHandle = AUDIO_IO_HANDLE_NONE;
+    }
+}
+
 status_t AudioInputDescriptor::dump(int fd)
 {
     const size_t SIZE = 256;
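
A hypothetical caller sequence for the new AudioInputDescriptor lifecycle (this is not AudioPolicyManager code; captureOnce and its parameters are illustrative), showing how the profile's open/active counters stay balanced:

    #include <system/audio.h>
    #include <utils/String8.h>
    #include "AudioInputDescriptor.h"

    using namespace android;

    static status_t captureOnce(const sp<AudioInputDescriptor>& desc,
                                audio_devices_t device, const String8& address)
    {
        audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
        // A null config makes open() fall back to the descriptor's own
        // sampling rate, channel mask and format.
        status_t status = desc->open(nullptr, device, address,
                                     AUDIO_SOURCE_MIC, AUDIO_INPUT_FLAG_NONE, &input);
        if (status != NO_ERROR) {
            return status;           // open limit reached or HAL failure
        }
        // ... AudioSession::changeActiveCount(+1) happens before start() ...
        status = desc->start();      // bumps curActiveCount for the first active session
        if (status == NO_ERROR) {
            // capture runs here
            // ... AudioSession::changeActiveCount(-1) happens before stop() ...
            desc->stop();            // drops curActiveCount once no session is active
        }
        desc->close();               // drops curOpenCount, releases the HAL stream
        return status;
    }
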
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 8593444..044d6db 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -23,6 +23,7 @@
 #include "AudioGain.h"
 #include "Volume.h"
 #include "HwModule.h"
+#include <media/AudioParameter.h>
 #include <media/AudioPolicy.h>
 
 // A device mask for all audio output devices that are considered "remote" when evaluating
@@ -46,17 +47,17 @@
     for (int i = 0; i < NUM_STRATEGIES; i++) {
         mStrategyMutedByDevice[i] = false;
     }
-    if (port != NULL) {
-        port->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
-        if (port->mGains.size() > 0) {
-            port->mGains[0]->getDefaultConfig(&mGain);
+    if (mPort.get() != nullptr) {
+        mPort->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
+        if (mPort->mGains.size() > 0) {
+            mPort->mGains[0]->getDefaultConfig(&mGain);
         }
     }
 }
 
 audio_module_handle_t AudioOutputDescriptor::getModuleHandle() const
 {
-    return mPort->getModuleHandle();
+    return mPort.get() != nullptr ? mPort->getModuleHandle() : AUDIO_MODULE_HANDLE_NONE;
 }
 
 audio_port_handle_t AudioOutputDescriptor::getId() const
@@ -175,9 +176,9 @@
     dstConfig->ext.mix.usecase.stream = AUDIO_STREAM_DEFAULT;
 }
 
-void AudioOutputDescriptor::toAudioPort(
-                                                    struct audio_port *port) const
+void AudioOutputDescriptor::toAudioPort(struct audio_port *port) const
 {
+    // Should not be called for duplicated ports, see SwAudioOutputDescriptor::toAudioPortConfig.
     mPort->toAudioPort(port);
     port->id = mId;
     port->ext.mix.hw_module = getModuleHandle();
@@ -231,13 +232,6 @@
     }
 }
 
-void SwAudioOutputDescriptor::setIoHandle(audio_io_handle_t ioHandle)
-{
-    mId = AudioPort::getNextUniqueId();
-    mIoHandle = ioHandle;
-}
-
-
 status_t SwAudioOutputDescriptor::dump(int fd)
 {
     const size_t SIZE = 256;
@@ -387,6 +381,130 @@
     return changed;
 }
 
+status_t SwAudioOutputDescriptor::open(const audio_config_t *config,
+                                       audio_devices_t device,
+                                       const String8& address,
+                                       audio_stream_type_t stream,
+                                       audio_output_flags_t flags,
+                                       audio_io_handle_t *output)
+{
+    audio_config_t lConfig;
+    if (config == nullptr) {
+        lConfig = AUDIO_CONFIG_INITIALIZER;
+        lConfig.sample_rate = mSamplingRate;
+        lConfig.channel_mask = mChannelMask;
+        lConfig.format = mFormat;
+    } else {
+        lConfig = *config;
+    }
+
+    mDevice = device;
+    // if the selected profile is offloaded and no offload info was specified,
+    // create a default one
+    if ((mProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) &&
+            lConfig.offload_info.format == AUDIO_FORMAT_DEFAULT) {
+        flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+        lConfig.offload_info = AUDIO_INFO_INITIALIZER;
+        lConfig.offload_info.sample_rate = lConfig.sample_rate;
+        lConfig.offload_info.channel_mask = lConfig.channel_mask;
+        lConfig.offload_info.format = lConfig.format;
+        lConfig.offload_info.stream_type = stream;
+        lConfig.offload_info.duration_us = -1;
+        lConfig.offload_info.has_video = true; // conservative
+        lConfig.offload_info.is_streaming = true; // likely
+    }
+
+    mFlags = (audio_output_flags_t)(mFlags | flags);
+
+    ALOGV("opening output for device %08x address %s profile %p name %s",
+          mDevice, address.string(), mProfile.get(), mProfile->getName().string());
+
+    status_t status = mClientInterface->openOutput(mProfile->getModuleHandle(),
+                                                   output,
+                                                   &lConfig,
+                                                   &mDevice,
+                                                   address,
+                                                   &mLatency,
+                                                   mFlags);
+    LOG_ALWAYS_FATAL_IF(mDevice != device,
+                        "%s openOutput returned device %08x when given device %08x",
+                        __FUNCTION__, mDevice, device);
+
+    if (status == NO_ERROR) {
+        LOG_ALWAYS_FATAL_IF(*output == AUDIO_IO_HANDLE_NONE,
+                            "%s openOutput returned output handle %d for device %08x",
+                            __FUNCTION__, *output, device);
+        mSamplingRate = lConfig.sample_rate;
+        mChannelMask = lConfig.channel_mask;
+        mFormat = lConfig.format;
+        mId = AudioPort::getNextUniqueId();
+        mIoHandle = *output;
+        mProfile->curOpenCount++;
+    }
+
+    return status;
+}
+
+status_t SwAudioOutputDescriptor::start()
+{
+    if (isDuplicated()) {
+        status_t status = mOutput1->start();
+        if (status != NO_ERROR) {
+            return status;
+        }
+        status = mOutput2->start();
+        if (status != NO_ERROR) {
+            mOutput1->stop();
+            return status;
+        }
+        return NO_ERROR;
+    }
+    if (!isActive()) {
+        if (!mProfile->canStartNewIo()) {
+            return INVALID_OPERATION;
+        }
+        mProfile->curActiveCount++;
+    }
+    return NO_ERROR;
+}
+
+void SwAudioOutputDescriptor::stop()
+{
+    if (isDuplicated()) {
+        mOutput1->stop();
+        mOutput2->stop();
+        return;
+    }
+
+    if (!isActive()) {
+        LOG_ALWAYS_FATAL_IF(mProfile->curActiveCount < 1,
+                            "%s invalid profile active count %u",
+                            __func__, mProfile->curActiveCount);
+        mProfile->curActiveCount--;
+    }
+}
+
+void SwAudioOutputDescriptor::close()
+{
+    if (mIoHandle != AUDIO_IO_HANDLE_NONE) {
+        AudioParameter param;
+        param.add(String8("closing"), String8("true"));
+        mClientInterface->setParameters(mIoHandle, param.toString());
+
+        mClientInterface->closeOutput(mIoHandle);
+
+        LOG_ALWAYS_FATAL_IF(mProfile->curOpenCount < 1, "%s profile open count %u",
+                            __FUNCTION__, mProfile->curOpenCount);
+        // do not call stop() here as stop() is supposed to be called after changeRefCount(-1)
+        // and we don't know how many streams are still active at this time
+        if (isActive()) {
+            mProfile->curActiveCount--;
+        }
+        mProfile->curOpenCount--;
+        mIoHandle = AUDIO_IO_HANDLE_NONE;
+    }
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<AudioSourceDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
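
Before closing, SwAudioOutputDescriptor::close() above warns the HAL with a "closing=true" parameter. A small sketch of the AudioParameter key/value mechanism it relies on, assuming media/AudioParameter.h (buildClosingHint is an illustrative helper):

    #include <media/AudioParameter.h>
    #include <utils/String8.h>

    using android::AudioParameter;
    using android::String8;

    static String8 buildClosingHint()
    {
        AudioParameter param;
        param.add(String8("closing"), String8("true"));
        return param.toString();   // serialized as "closing=true" for setParameters()
    }
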
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
index 32606ea..a9fe48d 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
@@ -22,15 +22,12 @@
 #include "TypeConverter.h"
 
 #include <log/log.h>
-#include <cutils/atomic.h>
 #include <utils/String8.h>
 
 namespace android {
 
-int32_t volatile AudioPatch::mNextUniqueId = 1;
-
 AudioPatch::AudioPatch(const struct audio_patch *patch, uid_t uid) :
-    mHandle(static_cast<audio_patch_handle_t>(android_atomic_inc(&mNextUniqueId))),
+    mHandle(HandleGenerator<audio_patch_handle_t>::getNextHandle()),
     mPatch(*patch),
     mUid(uid),
     mAfPatchHandle(AUDIO_PATCH_HANDLE_NONE)
@@ -176,4 +173,4 @@
     return NO_ERROR;
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
index fcf9070..094ff65 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
@@ -21,7 +21,6 @@
 #include "HwModule.h"
 #include "AudioGain.h"
 #include <policy.h>
-#include <cutils/atomic.h>
 
 #ifndef ARRAY_SIZE
 #define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
@@ -29,8 +28,6 @@
 
 namespace android {
 
-int32_t volatile AudioPort::mNextUniqueId = 1;
-
 // --- AudioPort class implementation
 void AudioPort::attach(const sp<HwModule>& module)
 {
@@ -40,31 +37,22 @@
 // Note that is a different namespace than AudioFlinger unique IDs
 audio_port_handle_t AudioPort::getNextUniqueId()
 {
-    return static_cast<audio_port_handle_t>(android_atomic_inc(&mNextUniqueId));
+    return getNextHandle();
 }
 
 audio_module_handle_t AudioPort::getModuleHandle() const
 {
-    if (mModule == 0) {
-        return AUDIO_MODULE_HANDLE_NONE;
-    }
-    return mModule->mHandle;
+    return mModule != 0 ? mModule->getHandle() : AUDIO_MODULE_HANDLE_NONE;
 }
 
 uint32_t AudioPort::getModuleVersionMajor() const
 {
-    if (mModule == 0) {
-        return 0;
-    }
-    return mModule->getHalVersionMajor();
+    return mModule != 0 ? mModule->getHalVersionMajor() : 0;
 }
 
 const char *AudioPort::getModuleName() const
 {
-    if (mModule == 0) {
-        return "invalid module";
-    }
-    return mModule->getName();
+    return mModule != 0 ? mModule->getName() : "invalid module";
 }
 
 void AudioPort::toAudioPort(struct audio_port *port) const
@@ -74,11 +62,11 @@
     SortedVector<audio_format_t> flatenedFormats;
     SampleRateVector flatenedRates;
     ChannelsVector flatenedChannels;
-    for (size_t profileIndex = 0; profileIndex < mProfiles.size(); profileIndex++) {
-        if (mProfiles[profileIndex]->isValid()) {
-            audio_format_t formatToExport = mProfiles[profileIndex]->getFormat();
-            const SampleRateVector &ratesToExport = mProfiles[profileIndex]->getSampleRates();
-            const ChannelsVector &channelsToExport = mProfiles[profileIndex]->getChannels();
+    for (const auto& profile : mProfiles) {
+        if (profile->isValid()) {
+            audio_format_t formatToExport = profile->getFormat();
+            const SampleRateVector &ratesToExport = profile->getSampleRates();
+            const ChannelsVector &channelsToExport = profile->getChannels();
 
             if (flatenedFormats.indexOf(formatToExport) < 0) {
                 flatenedFormats.add(formatToExport);
@@ -130,14 +118,12 @@
 
 void AudioPort::importAudioPort(const sp<AudioPort>& port, bool force __unused)
 {
-    size_t indexToImport;
-    for (indexToImport = 0; indexToImport < port->mProfiles.size(); indexToImport++) {
-        const sp<AudioProfile> &profileToImport = port->mProfiles[indexToImport];
+    for (const auto& profileToImport : port->mProfiles) {
         if (profileToImport->isValid()) {
             // Import only valid port, i.e. valid format, non empty rates and channels masks
             bool hasSameProfile = false;
-            for (size_t profileIndex = 0; profileIndex < mProfiles.size(); profileIndex++) {
-                if (*mProfiles[profileIndex] == *profileToImport) {
+            for (const auto& profile : mProfiles) {
+                if (*profile == *profileToImport) {
                     // never import a profile twice
                     hasSameProfile = true;
                     break;
@@ -151,6 +137,26 @@
     }
 }
 
+status_t AudioPort::checkExactAudioProfile(const struct audio_port_config *config) const
+{
+    status_t status = NO_ERROR;
+    auto config_mask = config->config_mask;
+    if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
+        config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
+        status = checkGain(&config->gain, config->gain.index);
+        if (status != NO_ERROR) {
+            return status;
+        }
+    }
+    if (config_mask != 0) {
+        // TODO should we check sample_rate / channel_mask / format separately?
+        status = mProfiles.checkExactProfile(config->sample_rate,
+                                             config->channel_mask,
+                                             config->format);
+    }
+    return status;
+}
+
 void AudioPort::pickSamplingRate(uint32_t &pickedRate,const SampleRateVector &samplingRates) const
 {
     pickedRate = 0;
@@ -402,9 +408,7 @@
         status = NO_INIT;
         goto exit;
     }
-    status = audioport->checkExactAudioProfile(config->sample_rate,
-                                               config->channel_mask,
-                                               config->format);
+    status = audioport->checkExactAudioProfile(config);
     if (status != NO_ERROR) {
         goto exit;
     }
@@ -418,10 +422,6 @@
         mFormat = config->format;
     }
     if (config->config_mask & AUDIO_PORT_CONFIG_GAIN) {
-        status = audioport->checkGain(&config->gain, config->gain.index);
-        if (status != NO_ERROR) {
-            goto exit;
-        }
         mGain = config->gain;
     }
 
@@ -479,4 +479,4 @@
     }
 }
 
-}; // namespace android
+} // namespace android
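
checkExactAudioProfile() now takes the whole audio_port_config: it validates the gain when AUDIO_PORT_CONFIG_GAIN is set and the rate, channel mask, and format against the profile tables otherwise, which is why applyAudioPortConfig no longer calls checkGain() itself. A hypothetical caller sketch (makeStereoPcmRequest is illustrative, not AOSP code):

    #include <system/audio.h>

    // Build a request that asks the port to validate only the three stream
    // parameters; adding AUDIO_PORT_CONFIG_GAIN would also exercise checkGain().
    static audio_port_config makeStereoPcmRequest()
    {
        audio_port_config config = {};
        config.config_mask = AUDIO_PORT_CONFIG_SAMPLE_RATE |
                             AUDIO_PORT_CONFIG_CHANNEL_MASK |
                             AUDIO_PORT_CONFIG_FORMAT;
        config.sample_rate = 48000;
        config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
        config.format = AUDIO_FORMAT_PCM_16_BIT;
        return config;
        // port->checkExactAudioProfile(&config) succeeds only if a profile
        // matches all selected fields exactly.
    }
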
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp
index 98f7a94..fd6fc1c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp
@@ -233,8 +233,7 @@
         return NO_ERROR;
     }
 
-    for (size_t i = 0; i < size(); i++) {
-        const sp<AudioProfile> profile = itemAt(i);
+    for (const auto& profile : *this) {
         if (profile->checkExact(samplingRate, channelMask, format) == NO_ERROR) {
             return NO_ERROR;
         }
@@ -288,4 +287,4 @@
     return AudioPort::compareFormats((*profile1)->getFormat(), (*profile2)->getFormat());
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp
index 5b57d3d..7cda46b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp
@@ -290,4 +290,4 @@
     return NO_ERROR;
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp b/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp
index e5888e2..1e105f5 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp
@@ -416,4 +416,4 @@
     return NO_ERROR;
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index a2c1165..3b1e751 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -60,7 +60,7 @@
 void DeviceVector::refreshTypes()
 {
     mDeviceTypes = AUDIO_DEVICE_NONE;
-    for(size_t i = 0; i < size(); i++) {
+    for (size_t i = 0; i < size(); i++) {
         mDeviceTypes |= itemAt(i)->type();
     }
     ALOGV("DeviceVector::refreshTypes() mDeviceTypes %08x", mDeviceTypes);
@@ -68,7 +68,7 @@
 
 ssize_t DeviceVector::indexOf(const sp<DeviceDescriptor>& item) const
 {
-    for(size_t i = 0; i < size(); i++) {
+    for (size_t i = 0; i < size(); i++) {
         if (item->equals(itemAt(i))) {
             return i;
         }
@@ -78,12 +78,15 @@
 
 void DeviceVector::add(const DeviceVector &devices)
 {
-    for (size_t i = 0; i < devices.size(); i++) {
-        sp<DeviceDescriptor> device = devices.itemAt(i);
+    bool added = false;
+    for (const auto& device : devices) {
         if (indexOf(device) < 0 && SortedVector::add(device) >= 0) {
-            refreshTypes();
+            added = true;
         }
     }
+    if (added) {
+        refreshTypes();
+    }
 }
 
 ssize_t DeviceVector::add(const sp<DeviceDescriptor>& item)
@@ -148,14 +151,12 @@
 
 sp<DeviceDescriptor> DeviceVector::getDeviceFromId(audio_port_handle_t id) const
 {
-    sp<DeviceDescriptor> device;
-    for (size_t i = 0; i < size(); i++) {
-        if (itemAt(i)->getId() == id) {
-            device = itemAt(i);
-            break;
+    for (const auto& device : *this) {
+        if (device->getId() == id) {
+            return device;
         }
     }
-    return device;
+    return nullptr;
 }
 
 DeviceVector DeviceVector::getDevicesFromType(audio_devices_t type) const
@@ -180,11 +181,9 @@
         audio_devices_t type, const String8& address) const
 {
     DeviceVector devices;
-    for (size_t i = 0; i < size(); i++) {
-        if (itemAt(i)->type() == type) {
-            if (itemAt(i)->mAddress == address) {
-                devices.add(itemAt(i));
-            }
+    for (const auto& device : *this) {
+        if (device->type() == type && device->mAddress == address) {
+            devices.add(device);
         }
     }
     return devices;
@@ -192,14 +191,12 @@
 
 sp<DeviceDescriptor> DeviceVector::getDeviceFromTagName(const String8 &tagName) const
 {
-    sp<DeviceDescriptor> device;
-    for (size_t i = 0; i < size(); i++) {
-        if (itemAt(i)->getTagName() == tagName) {
-            device = itemAt(i);
-            break;
+    for (const auto& device : *this) {
+        if (device->getTagName() == tagName) {
+            return device;
         }
     }
-    return device;
+    return nullptr;
 }
 
 status_t DeviceVector::dump(int fd, const String8 &tag, int spaces, bool verbose) const
@@ -248,7 +245,8 @@
     // without the test?
     // This has been demonstrated to NOT be true (at start up)
     // ALOG_ASSERT(mModule != NULL);
-    dstConfig->ext.device.hw_module = mModule != 0 ? mModule->mHandle : AUDIO_MODULE_HANDLE_NONE;
+    dstConfig->ext.device.hw_module =
+            mModule != 0 ? mModule->getHandle() : AUDIO_MODULE_HANDLE_NONE;
     strncpy(dstConfig->ext.device.address, mAddress.string(), AUDIO_DEVICE_MAX_ADDRESS_LEN);
 }
 
@@ -259,7 +257,7 @@
     port->id = mId;
     toAudioPortConfig(&port->active_config);
     port->ext.device.type = mDeviceType;
-    port->ext.device.hw_module = mModule->mHandle;
+    port->ext.device.hw_module = mModule->getHandle();
     strncpy(port->ext.device.address, mAddress.string(), AUDIO_DEVICE_MAX_ADDRESS_LEN);
 }
 
@@ -312,4 +310,4 @@
     AudioPort::log("  ");
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/Gains.cpp b/services/audiopolicy/common/managerdefinitions/src/Gains.cpp
index e3fc9a8..b2dafdd 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Gains.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Gains.cpp
@@ -235,4 +235,4 @@
     return decibels;
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index cc56fb8..aef7dbe 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -154,10 +154,9 @@
 DeviceVector HwModule::getRouteSourceDevices(const sp<AudioRoute> &route) const
 {
     DeviceVector sourceDevices;
-    Vector <sp<AudioPort> > sources = route->getSources();
-    for (size_t i = 0; i < sources.size(); i++) {
-        if (sources[i]->getType() == AUDIO_PORT_TYPE_DEVICE) {
-            sourceDevices.add(mDeclaredDevices.getDeviceFromTagName(sources[i]->getTagName()));
+    for (const auto& source : route->getSources()) {
+        if (source->getType() == AUDIO_PORT_TYPE_DEVICE) {
+            sourceDevices.add(mDeclaredDevices.getDeviceFromTagName(source->getTagName()));
         }
     }
     return sourceDevices;
@@ -173,17 +172,15 @@
 void HwModule::refreshSupportedDevices()
 {
     // Now updating the streams (aka IOProfile until now) supported devices
-    for (size_t i = 0; i < mInputProfiles.size(); i++) {
-        sp<IOProfile> stream = mInputProfiles[i];
+    for (const auto& stream : mInputProfiles) {
         DeviceVector sourceDevices;
-        const AudioRouteVector &routes = stream->getRoutes();
-        for (size_t j = 0; j < routes.size(); j++) {
-            sp<AudioPort> sink = routes[j]->getSink();
+        for (const auto& route : stream->getRoutes()) {
+            sp<AudioPort> sink = route->getSink();
             if (sink == 0 || stream != sink) {
                 ALOGE("%s: Invalid route attached to input stream", __FUNCTION__);
                 continue;
             }
-            DeviceVector sourceDevicesForRoute = getRouteSourceDevices(routes[j]);
+            DeviceVector sourceDevicesForRoute = getRouteSourceDevices(route);
             if (sourceDevicesForRoute.isEmpty()) {
                 ALOGE("%s: invalid source devices for %s", __FUNCTION__, stream->getName().string());
                 continue;
@@ -196,17 +193,15 @@
         }
         stream->setSupportedDevices(sourceDevices);
     }
-    for (size_t i = 0; i < mOutputProfiles.size(); i++) {
-        sp<IOProfile> stream = mOutputProfiles[i];
+    for (const auto& stream : mOutputProfiles) {
         DeviceVector sinkDevices;
-        const AudioRouteVector &routes = stream->getRoutes();
-        for (size_t j = 0; j < routes.size(); j++) {
-            sp<AudioPort> source = routes[j]->getSources().findByTagName(stream->getTagName());
+        for (const auto& route : stream->getRoutes()) {
+            sp<AudioPort> source = route->getSources().findByTagName(stream->getTagName());
             if (source == 0 || stream != source) {
                 ALOGE("%s: Invalid route attached to output stream", __FUNCTION__);
                 continue;
             }
-            sp<DeviceDescriptor> sinkDevice = getRouteSinkDevice(routes[j]);
+            sp<DeviceDescriptor> sinkDevice = getRouteSinkDevice(route);
             if (sinkDevice == 0) {
                 ALOGE("%s: invalid sink device for %s", __FUNCTION__, stream->getName().string());
                 continue;
@@ -217,6 +212,12 @@
     }
 }
 
+void HwModule::setHandle(audio_module_handle_t handle) {
+    ALOGW_IF(mHandle != AUDIO_MODULE_HANDLE_NONE,
+            "HwModule handle is changing from %d to %d", mHandle, handle);
+    mHandle = handle;
+}
+
 void HwModule::dump(int fd)
 {
     const size_t SIZE = 256;
@@ -252,60 +253,40 @@
 
 sp <HwModule> HwModuleCollection::getModuleFromName(const char *name) const
 {
-    sp <HwModule> module;
-
-    for (size_t i = 0; i < size(); i++)
-    {
-        if (strcmp(itemAt(i)->getName(), name) == 0) {
-            return itemAt(i);
+    for (const auto& module : *this) {
+        if (strcmp(module->getName(), name) == 0) {
+            return module;
         }
     }
-    return module;
+    return nullptr;
 }
 
-
 sp <HwModule> HwModuleCollection::getModuleForDevice(audio_devices_t device) const
 {
-    sp <HwModule> module;
-
-    for (size_t i = 0; i < size(); i++) {
-        if (itemAt(i)->getHandle() == 0) {
-            continue;
-        }
-        if (audio_is_output_device(device)) {
-            for (size_t j = 0; j < itemAt(i)->mOutputProfiles.size(); j++)
-            {
-                if (itemAt(i)->mOutputProfiles[j]->supportDevice(device)) {
-                    return itemAt(i);
-                }
-            }
-        } else {
-            for (size_t j = 0; j < itemAt(i)->mInputProfiles.size(); j++) {
-                if (itemAt(i)->mInputProfiles[j]->supportDevice(device)) {
-                    return itemAt(i);
-                }
+    for (const auto& module : *this) {
+        const auto& profiles = audio_is_output_device(device) ?
+                module->getOutputProfiles() : module->getInputProfiles();
+        for (const auto& profile : profiles) {
+            if (profile->supportDevice(device)) {
+                return module;
             }
         }
     }
-    return module;
+    return nullptr;
 }
 
-sp<DeviceDescriptor>  HwModuleCollection::getDeviceDescriptor(const audio_devices_t device,
-                                                              const char *device_address,
-                                                              const char *device_name,
-                                                              bool matchAdress) const
+sp<DeviceDescriptor> HwModuleCollection::getDeviceDescriptor(const audio_devices_t device,
+                                                             const char *device_address,
+                                                             const char *device_name,
+                                                             bool matchAdress) const
 {
-    String8 address = (device_address == NULL) ? String8("") : String8(device_address);
+    String8 address = (device_address == nullptr) ? String8("") : String8(device_address);
     // handle legacy remote submix case where the address was not always specified
     if (device_distinguishes_on_address(device) && (address.length() == 0)) {
         address = String8("0");
     }
 
-    for (size_t i = 0; i < size(); i++) {
-        const sp<HwModule> hwModule = itemAt(i);
-        if (hwModule->mHandle == 0) {
-            continue;
-        }
+    for (const auto& hwModule : *this) {
         DeviceVector declaredDevices = hwModule->getDeclaredDevices();
         DeviceVector deviceList = declaredDevices.getDevicesFromTypeAddr(device, address);
         if (!deviceList.isEmpty()) {
@@ -340,4 +321,5 @@
     return NO_ERROR;
 }
 
+
 } //namespace android
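An illustrative sketch of the lookup pattern the HwModuleCollection refactor settles on (not part of the patch): iterate the collection with a range-based for and return nullptr when nothing matches, so callers can test the returned sp<> directly. Type and method names mirror the code above; the usual managerdefinitions and libutils headers are assumed to be in scope.

    static sp<HwModule> findModuleByName(const HwModuleCollection &modules, const char *name) {
        for (const auto &module : modules) {
            if (strcmp(module->getName(), name) == 0) {
                return module;  // first match wins
            }
        }
        return nullptr;  // callers can simply check "if (module == nullptr)"
    }
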
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 74ef4ec..69dd06b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -71,7 +71,13 @@
             return false;
         }
     } else {
-        if (checkExactAudioProfile(samplingRate, channelMask, format) != NO_ERROR) {
+        const struct audio_port_config config = {
+            .config_mask = AUDIO_PORT_CONFIG_ALL & ~AUDIO_PORT_CONFIG_GAIN,
+            .sample_rate = samplingRate,
+            .channel_mask = channelMask,
+            .format = format,
+        };
+        if (checkExactAudioProfile(&config) != NO_ERROR) {
             return false;
         }
     }
@@ -122,6 +128,16 @@
     result.append("\n");
     write(fd, result.string(), result.size());
     mSupportedDevices.dump(fd, String8("Supported"), 4, false);
+
+    result.clear();
+    snprintf(buffer, SIZE, "\n    - maxOpenCount: %u - curOpenCount: %u\n",
+             maxOpenCount, curOpenCount);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "    - maxActiveCount: %u - curActiveCount: %u\n",
+             maxActiveCount, curActiveCount);
+    result.append(buffer);
+
+    write(fd, result.string(), result.size());
 }
 
 void IOProfile::log()
@@ -129,4 +145,4 @@
     // @TODO: forward log to AudioPort
 }
 
-}; // namespace android
+} // namespace android
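A minimal sketch of the one-off audio_port_config built by the new isCompatibleProfile() path (illustrative, not part of the patch). Designated initializers, a Clang extension before C++20 that the patch itself relies on, zero every field that is not listed, and clearing AUDIO_PORT_CONFIG_GAIN from the mask keeps gain out of the exact comparison. The wrapper function name is hypothetical; <system/audio.h> is assumed for the types and constants.

    #include <system/audio.h>

    static audio_port_config makeExactCheckConfig(uint32_t samplingRate,
                                                  audio_channel_mask_t channelMask,
                                                  audio_format_t format) {
        const audio_port_config config = {
            .config_mask = AUDIO_PORT_CONFIG_ALL & ~AUDIO_PORT_CONFIG_GAIN,
            .sample_rate = samplingRate,
            .channel_mask = channelMask,
            .format = format,
        };
        return config;
    }
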
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index a224004..a253113 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -140,19 +140,19 @@
     }
 
     string minValueMBLiteral = getXmlAttribute(root, Attributes::minValueMB);
-    uint32_t minValueMB;
+    int32_t minValueMB;
     if (!minValueMBLiteral.empty() && convertTo(minValueMBLiteral, minValueMB)) {
         gain->setMinValueInMb(minValueMB);
     }
 
     string maxValueMBLiteral = getXmlAttribute(root, Attributes::maxValueMB);
-    uint32_t maxValueMB;
+    int32_t maxValueMB;
     if (!maxValueMBLiteral.empty() && convertTo(maxValueMBLiteral, maxValueMB)) {
         gain->setMaxValueInMb(maxValueMB);
     }
 
     string defaultValueMBLiteral = getXmlAttribute(root, Attributes::defaultValueMB);
-    uint32_t defaultValueMB;
+    int32_t defaultValueMB;
     if (!defaultValueMBLiteral.empty() && convertTo(defaultValueMBLiteral, defaultValueMB)) {
         gain->setDefaultValueInMb(defaultValueMB);
     }
@@ -217,6 +217,8 @@
 const char MixPortTraits::Attributes::name[] = "name";
 const char MixPortTraits::Attributes::role[] = "role";
 const char MixPortTraits::Attributes::flags[] = "flags";
+const char MixPortTraits::Attributes::maxOpenCount[] = "maxOpenCount";
+const char MixPortTraits::Attributes::maxActiveCount[] = "maxActiveCount";
 
 status_t MixPortTraits::deserialize(_xmlDoc *doc, const _xmlNode *child, PtrElement &mixPort,
                                     PtrSerializingCtx /*serializingContext*/)
@@ -259,6 +261,14 @@
             mixPort->setFlags(InputFlagConverter::maskFromString(flags));
         }
     }
+    string maxOpenCount = getXmlAttribute(child, Attributes::maxOpenCount);
+    if (!maxOpenCount.empty()) {
+        convertTo(maxOpenCount, mixPort->maxOpenCount);
+    }
+    string maxActiveCount = getXmlAttribute(child, Attributes::maxActiveCount);
+    if (!maxActiveCount.empty()) {
+        convertTo(maxActiveCount, mixPort->maxActiveCount);
+    }
     // Deserialize children
     AudioGainTraits::Collection gains;
     deserializeCollection<AudioGainTraits>(doc, child, gains, NULL);
@@ -638,4 +648,4 @@
     return android::OK;
 }
 
-}; // namespace android
+} // namespace android
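A sketch of how the new optional mixPort attributes behave when parsed (illustrative, not part of the patch): if the attribute is absent the profile keeps its current value, otherwise convertTo() overwrites it. getXmlAttribute() and convertTo() are the helpers already used throughout Serializer.cpp; the default of 1 and the helper name below are assumptions for the example, and convertTo() is assumed to leave the output untouched when conversion fails.

    static uint32_t parseOptionalCount(const _xmlNode *node, const char *attrName,
                                       uint32_t defaultValue = 1) {
        uint32_t value = defaultValue;
        std::string literal = getXmlAttribute(node, attrName);
        if (!literal.empty()) {
            convertTo(literal, value);  // keeps defaultValue on conversion failure (assumed)
        }
        return value;
    }
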
diff --git a/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp
index b3019e1..65649fb 100644
--- a/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp
@@ -223,4 +223,4 @@
     return NO_ERROR;
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index 0362037..7273d0d 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -65,4 +65,4 @@
 template class TypeConverter<RouteFlagTraits>;
 template class TypeConverter<RuleTraits>;
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/VolumeCurve.cpp b/services/audiopolicy/common/managerdefinitions/src/VolumeCurve.cpp
index 14caf7c..ec861c1 100644
--- a/services/audiopolicy/common/managerdefinitions/src/VolumeCurve.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/VolumeCurve.cpp
@@ -138,4 +138,4 @@
     return NO_ERROR;
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/config/audio_policy_configuration.xml b/services/audiopolicy/config/audio_policy_configuration.xml
index 7af2f81..73efe8e 100644
--- a/services/audiopolicy/config/audio_policy_configuration.xml
+++ b/services/audiopolicy/config/audio_policy_configuration.xml
@@ -163,37 +163,16 @@
                        sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
                 <route type="mix" sink="Wired Headphones"
                        sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
-                <route type="mix" sink="Telephony Tx"
-                       sources="voice_tx"/>
                 <route type="mix" sink="primary input"
                        sources="Built-In Mic,Built-In Back Mic,Wired Headset Mic,BT SCO Headset Mic"/>
                 <route type="mix" sink="Telephony Tx"
-                       sources="Built-In Mic,Built-In Back Mic,Wired Headset Mic,BT SCO Headset Mic"/>
+                       sources="Built-In Mic,Built-In Back Mic,Wired Headset Mic,BT SCO Headset Mic,voice_tx"/>
                 <route type="mix" sink="voice_rx"
                        sources="Telephony Rx"/>
             </routes>
 
         </module>
 
-        <!-- HDMI Audio HAL -->
-        <module description="HDMI Audio HAL" name="hdmi" version="2.0">
-            <mixPorts>
-                <mixPort name="hdmi output" role="source">
-                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT" samplingRates="48000"/>
-                </mixPort>
-            </mixPorts>
-            <devicePorts>
-                <devicePort tagName="HDMI Out" type="AUDIO_DEVICE_OUT_AUX_DIGITAL" role="sink">
-                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
-                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
-                </devicePort>
-            </devicePorts>
-            <routes>
-                <route type="mix" sink="HDMI Out"
-                       sources="hdmi output"/>
-            </routes>
-        </module>
-
         <!-- A2dp Audio HAL -->
         <xi:include href="a2dp_audio_policy_configuration.xml"/>
 
diff --git a/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h b/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h
index 567ff9e..04594f5 100644
--- a/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h
+++ b/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h
@@ -137,4 +137,4 @@
     virtual ~AudioPolicyManagerInterface() {}
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/engine/interface/AudioPolicyManagerObserver.h b/services/audiopolicy/engine/interface/AudioPolicyManagerObserver.h
index 846fa48..b7902cf 100644
--- a/services/audiopolicy/engine/interface/AudioPolicyManagerObserver.h
+++ b/services/audiopolicy/engine/interface/AudioPolicyManagerObserver.h
@@ -59,4 +59,4 @@
     virtual ~AudioPolicyManagerObserver() {}
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/engineconfigurable/include/AudioPolicyEngineInstance.h b/services/audiopolicy/engineconfigurable/include/AudioPolicyEngineInstance.h
index 6c4be2c..a597e87 100644
--- a/services/audiopolicy/engineconfigurable/include/AudioPolicyEngineInstance.h
+++ b/services/audiopolicy/engineconfigurable/include/AudioPolicyEngineInstance.h
@@ -76,6 +76,6 @@
 template <>
 AudioPolicyPluginInterface *EngineInstance::queryInterface() const;
 
-}; // namespace audio_policy
+} // namespace audio_policy
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h b/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
index 759d0c9..2e29a9b 100644
--- a/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
+++ b/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
@@ -143,4 +143,4 @@
     virtual ~AudioPolicyPluginInterface() {}
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index bc5e035..328d23d 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -194,7 +194,7 @@
     AudioPolicyManagerObserver *mApmObserver;
 };
 
-}; // namespace audio_policy
+} // namespace audio_policy
 
-}; // namespace android
+} // namespace android
 
diff --git a/services/audiopolicy/manager/AudioPolicyFactory.cpp b/services/audiopolicy/manager/AudioPolicyFactory.cpp
index 9910a1f..3efa1b0 100644
--- a/services/audiopolicy/manager/AudioPolicyFactory.cpp
+++ b/services/audiopolicy/manager/AudioPolicyFactory.cpp
@@ -29,4 +29,4 @@
     delete interface;
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index bdfaf2f..68730a5 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -32,7 +32,6 @@
 
 #include <AudioPolicyManagerInterface.h>
 #include <AudioPolicyEngineInstance.h>
-#include <cutils/atomic.h>
 #include <cutils/properties.h>
 #include <utils/Log.h>
 #include <media/AudioParameter.h>
@@ -88,7 +87,7 @@
                                                          const char *device_name)
 {
     ALOGV("setDeviceConnectionStateInt() device: 0x%X, state %d, address %s name %s",
--            device, state, device_address, device_name);
+            device, state, device_address, device_name);
 
     // connect/disconnect only 1 device at a time
     if (!audio_is_output_device(device) && !audio_is_input_device(device)) return BAD_VALUE;
@@ -183,14 +182,14 @@
         checkOutputForAllStrategies();
         // outputs must be closed after checkOutputForAllStrategies() is executed
         if (!outputs.isEmpty()) {
-            for (size_t i = 0; i < outputs.size(); i++) {
-                sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(outputs[i]);
+            for (audio_io_handle_t output : outputs) {
+                sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
                 // close unused outputs after device disconnection or direct outputs that have been
                 // opened by checkOutputsForDevice() to query dynamic parameters
                 if ((state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) ||
                         (((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) &&
                          (desc->mDirectOpenCount == 0))) {
-                    closeOutput(outputs[i]);
+                    closeOutput(output);
                 }
             }
             // check again after closing A2DP output to reset mA2dpSuspended if needed
@@ -387,9 +386,6 @@
 uint32_t AudioPolicyManager::updateCallRouting(audio_devices_t rxDevice, uint32_t delayMs)
 {
     bool createTxPatch = false;
-    status_t status;
-    audio_patch_handle_t afPatchHandle;
-    DeviceVector deviceList;
     uint32_t muteWaitMs = 0;
 
     if(!hasPrimaryOutput() || mPrimaryOutput->device() == AUDIO_DEVICE_OUT_STUB) {
@@ -421,87 +417,53 @@
             createTxPatch = true;
         }
     } else { // create RX path audio patch
-        struct audio_patch patch;
-
-        patch.num_sources = 1;
-        patch.num_sinks = 1;
-        deviceList = mAvailableOutputDevices.getDevicesFromType(rxDevice);
-        ALOG_ASSERT(!deviceList.isEmpty(),
-                    "updateCallRouting() selected device not in output device list");
-        sp<DeviceDescriptor> rxSinkDeviceDesc = deviceList.itemAt(0);
-        deviceList = mAvailableInputDevices.getDevicesFromType(AUDIO_DEVICE_IN_TELEPHONY_RX);
-        ALOG_ASSERT(!deviceList.isEmpty(),
-                    "updateCallRouting() no telephony RX device");
-        sp<DeviceDescriptor> rxSourceDeviceDesc = deviceList.itemAt(0);
-
-        rxSourceDeviceDesc->toAudioPortConfig(&patch.sources[0]);
-        rxSinkDeviceDesc->toAudioPortConfig(&patch.sinks[0]);
-
-        // request to reuse existing output stream if one is already opened to reach the RX device
-        SortedVector<audio_io_handle_t> outputs =
-                                getOutputsForDevice(rxDevice, mOutputs);
-        audio_io_handle_t output = selectOutput(outputs,
-                                                AUDIO_OUTPUT_FLAG_NONE,
-                                                AUDIO_FORMAT_INVALID);
-        if (output != AUDIO_IO_HANDLE_NONE) {
-            sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
-            ALOG_ASSERT(!outputDesc->isDuplicated(),
-                        "updateCallRouting() RX device output is duplicated");
-            outputDesc->toAudioPortConfig(&patch.sources[1]);
-            patch.sources[1].ext.mix.usecase.stream = AUDIO_STREAM_PATCH;
-            patch.num_sources = 2;
-        }
-
-        afPatchHandle = AUDIO_PATCH_HANDLE_NONE;
-        status = mpClientInterface->createAudioPatch(&patch, &afPatchHandle, delayMs);
-        ALOGW_IF(status != NO_ERROR, "updateCallRouting() error %d creating RX audio patch",
-                                               status);
-        if (status == NO_ERROR) {
-            mCallRxPatch = new AudioPatch(&patch, mUidCached);
-            mCallRxPatch->mAfPatchHandle = afPatchHandle;
-            mCallRxPatch->mUid = mUidCached;
-        }
+        mCallRxPatch = createTelephonyPatch(true /*isRx*/, rxDevice, delayMs);
         createTxPatch = true;
     }
     if (createTxPatch) { // create TX path audio patch
-        struct audio_patch patch;
+        mCallTxPatch = createTelephonyPatch(false /*isRx*/, txDevice, delayMs);
+    }
 
-        patch.num_sources = 1;
-        patch.num_sinks = 1;
-        deviceList = mAvailableInputDevices.getDevicesFromType(txDevice);
-        ALOG_ASSERT(!deviceList.isEmpty(),
-                    "updateCallRouting() selected device not in input device list");
-        sp<DeviceDescriptor> txSourceDeviceDesc = deviceList.itemAt(0);
-        txSourceDeviceDesc->toAudioPortConfig(&patch.sources[0]);
-        deviceList = mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_TELEPHONY_TX);
-        ALOG_ASSERT(!deviceList.isEmpty(),
-                    "updateCallRouting() no telephony TX device");
-        sp<DeviceDescriptor> txSinkDeviceDesc = deviceList.itemAt(0);
-        txSinkDeviceDesc->toAudioPortConfig(&patch.sinks[0]);
+    return muteWaitMs;
+}
 
-        SortedVector<audio_io_handle_t> outputs =
-                                getOutputsForDevice(AUDIO_DEVICE_OUT_TELEPHONY_TX, mOutputs);
-        audio_io_handle_t output = selectOutput(outputs,
-                                                AUDIO_OUTPUT_FLAG_NONE,
-                                                AUDIO_FORMAT_INVALID);
-        // request to reuse existing output stream if one is already opened to reach the TX
-        // path output device
-        if (output != AUDIO_IO_HANDLE_NONE) {
-            sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
-            ALOG_ASSERT(!outputDesc->isDuplicated(),
-                        "updateCallRouting() RX device output is duplicated");
-            outputDesc->toAudioPortConfig(&patch.sources[1]);
-            patch.sources[1].ext.mix.usecase.stream = AUDIO_STREAM_PATCH;
-            patch.num_sources = 2;
-        }
+sp<AudioPatch> AudioPolicyManager::createTelephonyPatch(
+        bool isRx, audio_devices_t device, uint32_t delayMs) {
+    struct audio_patch patch;
+    patch.num_sources = 1;
+    patch.num_sinks = 1;
 
+    sp<DeviceDescriptor> txSourceDeviceDesc;
+    if (isRx) {
+        fillAudioPortConfigForDevice(mAvailableOutputDevices, device, &patch.sinks[0]);
+        fillAudioPortConfigForDevice(
+                mAvailableInputDevices, AUDIO_DEVICE_IN_TELEPHONY_RX, &patch.sources[0]);
+    } else {
+        txSourceDeviceDesc = fillAudioPortConfigForDevice(
+                mAvailableInputDevices, device, &patch.sources[0]);
+        fillAudioPortConfigForDevice(
+                mAvailableOutputDevices, AUDIO_DEVICE_OUT_TELEPHONY_TX, &patch.sinks[0]);
+    }
+
+    audio_devices_t outputDevice = isRx ? device : AUDIO_DEVICE_OUT_TELEPHONY_TX;
+    SortedVector<audio_io_handle_t> outputs = getOutputsForDevice(outputDevice, mOutputs);
+    audio_io_handle_t output = selectOutput(outputs, AUDIO_OUTPUT_FLAG_NONE, AUDIO_FORMAT_INVALID);
+    // request to reuse existing output stream if one is already opened to reach the target device
+    if (output != AUDIO_IO_HANDLE_NONE) {
+        sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
+        ALOG_ASSERT(!outputDesc->isDuplicated(),
+                "%s() %#x device output %d is duplicated", __func__, outputDevice, output);
+        outputDesc->toAudioPortConfig(&patch.sources[1]);
+        patch.sources[1].ext.mix.usecase.stream = AUDIO_STREAM_PATCH;
+        patch.num_sources = 2;
+    }
+
+    if (!isRx) {
         // terminate active capture if on the same HW module as the call TX source device
         // FIXME: would be better to refine to only inputs whose profile connects to the
         // call TX device but this information is not in the audio patch and logic here must be
         // symmetric to the one in startInput()
-        Vector<sp <AudioInputDescriptor> > activeInputs = mInputs.getActiveInputs();
-        for (size_t i = 0; i < activeInputs.size(); i++) {
-            sp<AudioInputDescriptor> activeDesc = activeInputs[i];
+        for (const auto& activeDesc : mInputs.getActiveInputs()) {
             if (activeDesc->hasSameHwModuleAs(txSourceDeviceDesc)) {
                 AudioSessionCollection activeSessions =
                         activeDesc->getAudioSessions(true /*activeOnly*/);
@@ -512,19 +474,29 @@
                 }
             }
         }
-
-        afPatchHandle = AUDIO_PATCH_HANDLE_NONE;
-        status = mpClientInterface->createAudioPatch(&patch, &afPatchHandle, delayMs);
-        ALOGW_IF(status != NO_ERROR, "setPhoneState() error %d creating TX audio patch",
-                                               status);
-        if (status == NO_ERROR) {
-            mCallTxPatch = new AudioPatch(&patch, mUidCached);
-            mCallTxPatch->mAfPatchHandle = afPatchHandle;
-            mCallTxPatch->mUid = mUidCached;
-        }
     }
 
-    return muteWaitMs;
+    audio_patch_handle_t afPatchHandle = AUDIO_PATCH_HANDLE_NONE;
+    status_t status = mpClientInterface->createAudioPatch(&patch, &afPatchHandle, delayMs);
+    ALOGW_IF(status != NO_ERROR,
+            "%s() error %d creating %s audio patch", __func__, status, isRx ? "RX" : "TX");
+    sp<AudioPatch> audioPatch;
+    if (status == NO_ERROR) {
+        audioPatch = new AudioPatch(&patch, mUidCached);
+        audioPatch->mAfPatchHandle = afPatchHandle;
+        audioPatch->mUid = mUidCached;
+    }
+    return audioPatch;
+}
+
+sp<DeviceDescriptor> AudioPolicyManager::fillAudioPortConfigForDevice(
+        const DeviceVector& devices, audio_devices_t device, audio_port_config *config) {
+    DeviceVector deviceList = devices.getDevicesFromType(device);
+    ALOG_ASSERT(!deviceList.isEmpty(),
+            "%s() selected device type %#x is not in devices list", __func__, device);
+    sp<DeviceDescriptor> deviceDesc = deviceList.itemAt(0);
+    deviceDesc->toAudioPortConfig(config);
+    return deviceDesc;
 }
 
 void AudioPolicyManager::setPhoneState(audio_mode_t state)
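A short usage sketch for the factored-out helper above (illustrative only, not part of the patch): createTelephonyPatch() returns a null sp<AudioPatch> when createAudioPatch() fails, so a caller that wants an explicit diagnostic could check the result. The warning below is a hypothetical addition; the patch itself only logs inside the helper.

    sp<AudioPatch> rxPatch = createTelephonyPatch(true /*isRx*/, rxDevice, delayMs);
    if (rxPatch == nullptr) {
        // hypothetical diagnostic, not code from the patch
        ALOGW("updateCallRouting() no RX patch created for device 0x%X", rxDevice);
    }
    mCallRxPatch = rxPatch;
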
@@ -683,9 +655,7 @@
         }
     }
 
-    Vector<sp <AudioInputDescriptor> > activeInputs = mInputs.getActiveInputs();
-    for (size_t i = 0; i < activeInputs.size(); i++) {
-        sp<AudioInputDescriptor> activeDesc = activeInputs[i];
+    for (const auto& activeDesc : mInputs.getActiveInputs()) {
         audio_devices_t newDevice = getNewInputDevice(activeDesc);
         // Force new input selection if the new device can not be reached via current input
         if (activeDesc->mProfile->getSupportedDevices().types() &
@@ -721,12 +691,8 @@
 
     sp<IOProfile> profile;
 
-    for (size_t i = 0; i < mHwModules.size(); i++) {
-        if (mHwModules[i]->mHandle == 0) {
-            continue;
-        }
-        for (size_t j = 0; j < mHwModules[i]->mOutputProfiles.size(); j++) {
-            sp<IOProfile> curProfile = mHwModules[i]->mOutputProfiles[j];
+    for (const auto& hwModule : mHwModules) {
+        for (const auto& curProfile : hwModule->getOutputProfiles()) {
             if (!curProfile->isCompatibleProfile(device, String8(""),
                     samplingRate, NULL /*updatedSamplingRate*/,
                     format, NULL /*updatedFormat*/,
@@ -751,20 +717,22 @@
     return profile;
 }
 
-audio_io_handle_t AudioPolicyManager::getOutput(audio_stream_type_t stream,
-                                                uint32_t samplingRate,
-                                                audio_format_t format,
-                                                audio_channel_mask_t channelMask,
-                                                audio_output_flags_t flags,
-                                                const audio_offload_info_t *offloadInfo)
+audio_io_handle_t AudioPolicyManager::getOutput(audio_stream_type_t stream)
 {
     routing_strategy strategy = getStrategy(stream);
     audio_devices_t device = getDeviceForStrategy(strategy, false /*fromCache*/);
-    ALOGV("getOutput() device %d, stream %d, samplingRate %d, format %x, channelMask %x, flags %x",
-          device, stream, samplingRate, format, channelMask, flags);
 
-    return getOutputForDevice(device, AUDIO_SESSION_ALLOCATE, stream, samplingRate, format,
-                              channelMask, flags, offloadInfo);
+    // Note that related method getOutputForAttr() uses getOutputForDevice() not selectOutput().
+    // We use selectOutput() here since we don't have the desired AudioTrack sample rate,
+    // format, flags, etc. This may result in some discrepancy for functions that utilize
+    // getOutput() solely on audio_stream_type such as AudioSystem::getOutputFrameCount()
+    // and AudioSystem::getOutputSamplingRate().
+
+    SortedVector<audio_io_handle_t> outputs = getOutputsForDevice(device, mOutputs);
+    audio_io_handle_t output = selectOutput(outputs, AUDIO_OUTPUT_FLAG_NONE, AUDIO_FORMAT_INVALID);
+
+    ALOGV("getOutput() stream %d selected device %08x, output %d", stream, device, output);
+    return output;
 }
 
 status_t AudioPolicyManager::getOutputForAttr(const audio_attributes_t *attr,
@@ -825,12 +793,7 @@
     // Explicit routing?
     sp<DeviceDescriptor> deviceDesc;
     if (*selectedDeviceId != AUDIO_PORT_HANDLE_NONE) {
-        for (size_t i = 0; i < mAvailableOutputDevices.size(); i++) {
-            if (mAvailableOutputDevices[i]->getId() == *selectedDeviceId) {
-                deviceDesc = mAvailableOutputDevices[i];
-                break;
-            }
-        }
+        deviceDesc = mAvailableOutputDevices.getDeviceFromId(*selectedDeviceId);
     }
     mOutputRoutes.addRoute(session, *stream, SessionRoute::SOURCE_TYPE_NA, deviceDesc, uid);
 
@@ -841,12 +804,11 @@
         flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_HW_AV_SYNC);
     }
 
-    ALOGV("getOutputForAttr() device 0x%x, samplingRate %d, format %x, channelMask %x, flags %x",
+    ALOGV("getOutputForAttr() device 0x%x, sampling rate %d, format %#x, channel mask %#x, "
+          "flags %#x",
           device, config->sample_rate, config->format, config->channel_mask, flags);
 
-    *output = getOutputForDevice(device, session, *stream,
-                                 config->sample_rate, config->format, config->channel_mask,
-                                 flags, &config->offload_info);
+    *output = getOutputForDevice(device, session, *stream, config, flags);
     if (*output == AUDIO_IO_HANDLE_NONE) {
         mOutputRoutes.removeRoute(session);
         return INVALID_OPERATION;
@@ -865,57 +827,12 @@
         audio_devices_t device,
         audio_session_t session,
         audio_stream_type_t stream,
-        uint32_t samplingRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        audio_output_flags_t flags,
-        const audio_offload_info_t *offloadInfo)
+        const audio_config_t *config,
+        audio_output_flags_t flags)
 {
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
     status_t status;
 
-#ifdef AUDIO_POLICY_TEST
-    if (mCurOutput != 0) {
-        ALOGV("getOutput() test output mCurOutput %d, samplingRate %d, format %d, channelMask %x, mDirectOutput %d",
-                mCurOutput, mTestSamplingRate, mTestFormat, mTestChannels, mDirectOutput);
-
-        if (mTestOutputs[mCurOutput] == 0) {
-            ALOGV("getOutput() opening test output");
-            sp<AudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(NULL,
-                                                                               mpClientInterface);
-            outputDesc->mDevice = mTestDevice;
-            outputDesc->mLatency = mTestLatencyMs;
-            outputDesc->mFlags =
-                    (audio_output_flags_t)(mDirectOutput ? AUDIO_OUTPUT_FLAG_DIRECT : 0);
-            outputDesc->mRefCount[stream] = 0;
-            audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-            config.sample_rate = mTestSamplingRate;
-            config.channel_mask = mTestChannels;
-            config.format = mTestFormat;
-            if (offloadInfo != NULL) {
-                config.offload_info = *offloadInfo;
-            }
-            status = mpClientInterface->openOutput(0,
-                                                  &mTestOutputs[mCurOutput],
-                                                  &config,
-                                                  &outputDesc->mDevice,
-                                                  String8(""),
-                                                  &outputDesc->mLatency,
-                                                  outputDesc->mFlags);
-            if (status == NO_ERROR) {
-                outputDesc->mSamplingRate = config.sample_rate;
-                outputDesc->mFormat = config.format;
-                outputDesc->mChannelMask = config.channel_mask;
-                AudioParameter outputCmd = AudioParameter();
-                outputCmd.addInt(String8("set_id"),mCurOutput);
-                mpClientInterface->setParameters(mTestOutputs[mCurOutput],outputCmd.toString());
-                addOutput(mTestOutputs[mCurOutput], outputDesc);
-            }
-        }
-        return mTestOutputs[mCurOutput];
-    }
-#endif //AUDIO_POLICY_TEST
-
     // open a direct output if required by specified parameters
     //force direct flag if offload flag is set: offloading implies a direct output stream
     // and all common behaviors are driven by checking only the direct flag
@@ -938,7 +855,7 @@
     if (stream == AUDIO_STREAM_TTS) {
         flags = AUDIO_OUTPUT_FLAG_TTS;
     } else if (stream == AUDIO_STREAM_VOICE_CALL &&
-               audio_is_linear_pcm(format)) {
+               audio_is_linear_pcm(config->format)) {
         flags = (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_VOIP_RX |
                                        AUDIO_OUTPUT_FLAG_DIRECT);
         ALOGV("Set VoIP and Direct output flags for PCM format");
@@ -949,8 +866,8 @@
     // skip direct output selection if the request can obviously be attached to a mixed output
     // and not explicitly requested
     if (((flags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) &&
-            audio_is_linear_pcm(format) && samplingRate <= SAMPLE_RATE_HZ_MAX &&
-            audio_channel_count_from_out_mask(channelMask) <= 2) {
+            audio_is_linear_pcm(config->format) && config->sample_rate <= SAMPLE_RATE_HZ_MAX &&
+            audio_channel_count_from_out_mask(config->channel_mask) <= 2) {
         goto non_direct_output;
     }
 
@@ -964,102 +881,63 @@
     if (((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) ||
             !(mEffects.isNonOffloadableEffectEnabled() || mMasterMono)) {
         profile = getProfileForDirectOutput(device,
-                                           samplingRate,
-                                           format,
-                                           channelMask,
+                                           config->sample_rate,
+                                           config->format,
+                                           config->channel_mask,
                                            (audio_output_flags_t)flags);
     }
 
     if (profile != 0) {
-        sp<SwAudioOutputDescriptor> outputDesc = NULL;
-
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (!desc->isDuplicated() && (profile == desc->mProfile)) {
-                outputDesc = desc;
                 // reuse direct output if currently open by the same client
                 // and configured with same parameters
-                if ((samplingRate == outputDesc->mSamplingRate) &&
-                    audio_formats_match(format, outputDesc->mFormat) &&
-                    (channelMask == outputDesc->mChannelMask)) {
-                  if (session == outputDesc->mDirectClientSession) {
-                      outputDesc->mDirectOpenCount++;
-                      ALOGV("getOutput() reusing direct output %d for session %d",
-                            mOutputs.keyAt(i), session);
-                      return mOutputs.keyAt(i);
-                  } else {
-                      ALOGV("getOutput() do not reuse direct output because current client (%d) "
-                            "is not the same as requesting client (%d)",
-                            outputDesc->mDirectClientSession, session);
-                      goto non_direct_output;
-                  }
+                if ((config->sample_rate == desc->mSamplingRate) &&
+                    audio_formats_match(config->format, desc->mFormat) &&
+                    (config->channel_mask == desc->mChannelMask) &&
+                    (session == desc->mDirectClientSession)) {
+                    desc->mDirectOpenCount++;
+                    ALOGV("getOutputForDevice() reusing direct output %d for session %d",
+                        mOutputs.keyAt(i), session);
+                    return mOutputs.keyAt(i);
                 }
             }
         }
-        // close direct output if currently open and configured with different parameters
-        if (outputDesc != NULL) {
-            closeOutput(outputDesc->mIoHandle);
+
+        if (!profile->canOpenNewIo()) {
+            goto non_direct_output;
         }
 
-        // if the selected profile is offloaded and no offload info was specified,
-        // create a default one
-        audio_offload_info_t defaultOffloadInfo = AUDIO_INFO_INITIALIZER;
-        if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) && !offloadInfo) {
-            flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
-            defaultOffloadInfo.sample_rate = samplingRate;
-            defaultOffloadInfo.channel_mask = channelMask;
-            defaultOffloadInfo.format = format;
-            defaultOffloadInfo.stream_type = stream;
-            defaultOffloadInfo.bit_rate = 0;
-            defaultOffloadInfo.duration_us = -1;
-            defaultOffloadInfo.has_video = true; // conservative
-            defaultOffloadInfo.is_streaming = true; // likely
-            offloadInfo = &defaultOffloadInfo;
-        }
+        sp<SwAudioOutputDescriptor> outputDesc =
+                new SwAudioOutputDescriptor(profile, mpClientInterface);
 
-        outputDesc = new SwAudioOutputDescriptor(profile, mpClientInterface);
-        outputDesc->mDevice = device;
-        outputDesc->mLatency = 0;
-        outputDesc->mFlags = (audio_output_flags_t)(outputDesc->mFlags | flags);
-        audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-        config.sample_rate = samplingRate;
-        config.channel_mask = channelMask;
-        config.format = format;
-        if (offloadInfo != NULL) {
-            config.offload_info = *offloadInfo;
-        }
         DeviceVector outputDevices = mAvailableOutputDevices.getDevicesFromType(device);
         String8 address = outputDevices.size() > 0 ? outputDevices.itemAt(0)->mAddress
                 : String8("");
-        status = mpClientInterface->openOutput(profile->getModuleHandle(),
-                                               &output,
-                                               &config,
-                                               &outputDesc->mDevice,
-                                               address,
-                                               &outputDesc->mLatency,
-                                               outputDesc->mFlags);
+
+        status = outputDesc->open(config, device, address, stream, flags, &output);
 
         // only accept an output with the requested parameters
         if (status != NO_ERROR ||
-            (samplingRate != 0 && samplingRate != config.sample_rate) ||
-            (format != AUDIO_FORMAT_DEFAULT && !audio_formats_match(format, config.format)) ||
-            (channelMask != 0 && channelMask != config.channel_mask)) {
-            ALOGV("getOutput() failed opening direct output: output %d samplingRate %d %d,"
-                    "format %d %d, channelMask %04x %04x", output, samplingRate,
-                    outputDesc->mSamplingRate, format, outputDesc->mFormat, channelMask,
-                    outputDesc->mChannelMask);
+            (config->sample_rate != 0 && config->sample_rate != outputDesc->mSamplingRate) ||
+            (config->format != AUDIO_FORMAT_DEFAULT &&
+                    !audio_formats_match(config->format, outputDesc->mFormat)) ||
+            (config->channel_mask != 0 && config->channel_mask != outputDesc->mChannelMask)) {
+            ALOGV("getOutputForDevice() failed opening direct output: output %d sample rate %d %d,"
+                    "format %d %d, channel mask %04x %04x", output, config->sample_rate,
+                    outputDesc->mSamplingRate, config->format, outputDesc->mFormat,
+                    config->channel_mask, outputDesc->mChannelMask);
             if (output != AUDIO_IO_HANDLE_NONE) {
-                mpClientInterface->closeOutput(output);
+                outputDesc->close();
             }
             // fall back to mixer output if possible when the direct output could not be open
-            if (audio_is_linear_pcm(format) && samplingRate <= SAMPLE_RATE_HZ_MAX) {
+            if (audio_is_linear_pcm(config->format) &&
+                    config->sample_rate <= SAMPLE_RATE_HZ_MAX) {
                 goto non_direct_output;
             }
             return AUDIO_IO_HANDLE_NONE;
         }
-        outputDesc->mSamplingRate = config.sample_rate;
-        outputDesc->mChannelMask = config.channel_mask;
-        outputDesc->mFormat = config.format;
         outputDesc->mRefCount[stream] = 0;
         outputDesc->mStopTime[stream] = 0;
         outputDesc->mDirectOpenCount = 1;
@@ -1067,7 +945,7 @@
 
         addOutput(output, outputDesc);
         mPreviousOutputs = mOutputs;
-        ALOGV("getOutput() returns new direct output %d", output);
+        ALOGV("getOutputForDevice() returns new direct output %d", output);
         mpClientInterface->onAudioPortListUpdate();
         return output;
     }
@@ -1085,17 +963,18 @@
     // open a non direct output
 
     // for non direct outputs, only PCM is supported
-    if (audio_is_linear_pcm(format)) {
+    if (audio_is_linear_pcm(config->format)) {
         // get which output is suitable for the specified stream. The actual
         // routing change will happen when startOutput() will be called
         SortedVector<audio_io_handle_t> outputs = getOutputsForDevice(device, mOutputs);
 
         // at this stage we should ignore the DIRECT flag as no direct output could be found earlier
         flags = (audio_output_flags_t)(flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
-        output = selectOutput(outputs, flags, format);
+        output = selectOutput(outputs, flags, config->format);
     }
-    ALOGW_IF((output == 0), "getOutput() could not find output for stream %d, samplingRate %d,"
-            "format %d, channels %x, flags %x", stream, samplingRate, format, channelMask, flags);
+    ALOGW_IF((output == 0), "getOutputForDevice() could not find output for stream %d, "
+            "sampling rate %d, format %#x, channels %#x, flags %#x",
+            stream, config->sample_rate, config->format, config->channel_mask, flags);
 
     return output;
 }
@@ -1113,21 +992,21 @@
     // 4: the first output in the list
 
     if (outputs.size() == 0) {
-        return 0;
+        return AUDIO_IO_HANDLE_NONE;
     }
     if (outputs.size() == 1) {
         return outputs[0];
     }
 
     int maxCommonFlags = 0;
-    audio_io_handle_t outputForFlags = 0;
-    audio_io_handle_t outputForPrimary = 0;
-    audio_io_handle_t outputForFormat = 0;
+    audio_io_handle_t outputForFlags = AUDIO_IO_HANDLE_NONE;
+    audio_io_handle_t outputForPrimary = AUDIO_IO_HANDLE_NONE;
+    audio_io_handle_t outputForFormat = AUDIO_IO_HANDLE_NONE;
     audio_format_t bestFormat = AUDIO_FORMAT_INVALID;
     audio_format_t bestFormatForFlags = AUDIO_FORMAT_INVALID;
 
-    for (size_t i = 0; i < outputs.size(); i++) {
-        sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(outputs[i]);
+    for (audio_io_handle_t output : outputs) {
+        sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
         if (!outputDesc->isDuplicated()) {
             // if a valid format is specified, skip output if not compatible
             if (format != AUDIO_FORMAT_INVALID) {
@@ -1140,7 +1019,7 @@
                 }
                 if (AudioPort::isBetterFormatMatch(
                         outputDesc->mFormat, bestFormat, format)) {
-                    outputForFormat = outputs[i];
+                    outputForFormat = output;
                     bestFormat = outputDesc->mFormat;
                 }
             }
@@ -1148,31 +1027,32 @@
             int commonFlags = popcount(outputDesc->mProfile->getFlags() & flags);
             if (commonFlags >= maxCommonFlags) {
                 if (commonFlags == maxCommonFlags) {
-                    if (AudioPort::isBetterFormatMatch(
-                            outputDesc->mFormat, bestFormatForFlags, format)) {
-                        outputForFlags = outputs[i];
+                    if (format != AUDIO_FORMAT_INVALID
+                            && AudioPort::isBetterFormatMatch(
+                                    outputDesc->mFormat, bestFormatForFlags, format)) {
+                        outputForFlags = output;
                         bestFormatForFlags = outputDesc->mFormat;
                     }
                 } else {
-                    outputForFlags = outputs[i];
+                    outputForFlags = output;
                     maxCommonFlags = commonFlags;
                     bestFormatForFlags = outputDesc->mFormat;
                 }
-                ALOGV("selectOutput() commonFlags for output %d, %04x", outputs[i], commonFlags);
+                ALOGV("selectOutput() commonFlags for output %d, %04x", output, commonFlags);
             }
             if (outputDesc->mProfile->getFlags() & AUDIO_OUTPUT_FLAG_PRIMARY) {
-                outputForPrimary = outputs[i];
+                outputForPrimary = output;
             }
         }
     }
 
-    if (outputForFlags != 0) {
+    if (outputForFlags != AUDIO_IO_HANDLE_NONE) {
         return outputForFlags;
     }
-    if (outputForFormat != 0) {
+    if (outputForFormat != AUDIO_IO_HANDLE_NONE) {
         return outputForFormat;
     }
-    if (outputForPrimary != 0) {
+    if (outputForPrimary != AUDIO_IO_HANDLE_NONE) {
         return outputForPrimary;
     }
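The fallback order selectOutput() encodes, shown as a standalone sketch now that every "no output" value is the AUDIO_IO_HANDLE_NONE sentinel rather than a bare 0 (illustrative, not part of the patch; per the comment at the top of the function, the final fallback is simply the first output in the list).

    static audio_io_handle_t pickByPriority(audio_io_handle_t outputForFlags,
                                            audio_io_handle_t outputForFormat,
                                            audio_io_handle_t outputForPrimary,
                                            audio_io_handle_t firstOutput) {
        if (outputForFlags != AUDIO_IO_HANDLE_NONE) return outputForFlags;      // 1: flags match
        if (outputForFormat != AUDIO_IO_HANDLE_NONE) return outputForFormat;    // 2: format match
        if (outputForPrimary != AUDIO_IO_HANDLE_NONE) return outputForPrimary;  // 3: primary output
        return firstOutput;                                                     // 4: first in list
    }
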
 
@@ -1193,6 +1073,11 @@
 
     sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(index);
 
+    status_t status = outputDesc->start();
+    if (status != NO_ERROR) {
+        return status;
+    }
+
     // Routing?
     mOutputRoutes.incRouteActivity(session);
 
@@ -1216,10 +1101,11 @@
 
     uint32_t delayMs = 0;
 
-    status_t status = startSource(outputDesc, stream, newDevice, address, &delayMs);
+    status = startSource(outputDesc, stream, newDevice, address, &delayMs);
 
     if (status != NO_ERROR) {
         mOutputRoutes.decRouteActivity(session);
+        outputDesc->stop();
         return status;
     }
     // Automatically enable the remote submix input when output is started on a re routing mix
@@ -1266,6 +1152,12 @@
     bool force = !outputDesc->isActive() &&
             (outputDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE);
 
+    // requiresMuteCheck is false when we can bypass mute strategy.
+    // It covers a common case when there is no materially active audio
+    // and muting would result in unnecessary delay and dropped audio.
+    const uint32_t outputLatencyMs = outputDesc->latency();
+    bool requiresMuteCheck = outputDesc->isActive(outputLatencyMs * 2);  // account for drain
+
     // increment usage count for this stream on the requested output:
     // NOTE that the usage count is the same for duplicated output and hardware output which is
     // necessary for a correct control of hardware output routing by startOutput() and stopOutput()
@@ -1289,29 +1181,44 @@
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<AudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (desc != outputDesc) {
+                // An output has a shared device if
+                // - managed by the same hw module
+                // - supports the currently selected device
+                const bool sharedDevice = outputDesc->sharesHwModuleWith(desc)
+                        && (desc->supportedDevices() & device) != AUDIO_DEVICE_NONE;
+
                 // force a device change if any other output is:
                 // - managed by the same hw module
-                // - has a current device selection that differs from selected device.
                 // - supports currently selected device
+                // - has a current device selection that differs from selected device.
                 // - has an active audio patch
                 // In this case, the audio HAL must receive the new device selection so that it can
-                // change the device currently selected by the other active output.
-                if (outputDesc->sharesHwModuleWith(desc) &&
+                // change the device currently selected by the other output.
+                if (sharedDevice &&
                         desc->device() != device &&
-                        desc->supportedDevices() & device &&
                         desc->getPatchHandle() != AUDIO_PATCH_HANDLE_NONE) {
                     force = true;
                 }
                 // wait for audio on other active outputs to be presented when starting
                 // a notification so that audio focus effect can propagate, or that a mute/unmute
                 // event occurred for beacon
-                uint32_t latency = desc->latency();
-                if (shouldWait && desc->isActive(latency * 2) && (waitMs < latency)) {
-                    waitMs = latency;
+                const uint32_t latencyMs = desc->latency();
+                const bool isActive = desc->isActive(latencyMs * 2);  // account for drain
+
+                if (shouldWait && isActive && (waitMs < latencyMs)) {
+                    waitMs = latencyMs;
                 }
+
+                // Require mute check if another output is on a shared device
+                // and currently active to have proper drain and avoid pops.
+                // Note restoring AudioTracks onto this output needs to invoke
+                // a volume ramp if there is no mute.
+                requiresMuteCheck |= sharedDevice && isActive;
             }
         }
-        uint32_t muteWaitMs = setOutputDevice(outputDesc, device, force, 0, NULL, address);
+
+        const uint32_t muteWaitMs =
+                setOutputDevice(outputDesc, device, force, 0, NULL, address, requiresMuteCheck);
 
         // handle special case for sonification while in call
         if (isInCall()) {
@@ -1336,6 +1243,14 @@
         if (waitMs > muteWaitMs) {
             *delayMs = waitMs - muteWaitMs;
         }
+
+        // FIXME: A device change (muteWaitMs > 0) likely introduces a volume change.
+        // A volume change enacted by APM with 0 delay is not synchronous, as it goes
+        // via AudioCommandThread to AudioFlinger.  Hence it is possible that the volume
+        // change occurs after the MixerThread starts and causes a stream volume
+        // glitch.
+        //
+        // We do not introduce additional delay here.
     }
 
     return NO_ERROR;
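A condensed sketch of the mute-check decision this hunk introduces (illustrative; names mirror the patch). Muting is only forced when this output, or another active output on a shared device, may still be rendering; the 2x latency margin accounts for audio still draining.

    static bool needsMuteCheck(const sp<AudioOutputDescriptor> &thisOutput,
                               const sp<AudioOutputDescriptor> &otherOutput,
                               bool sharedDevice) {
        // look back twice the output latency to account for drain
        bool requiresMuteCheck = thisOutput->isActive(thisOutput->latency() * 2);
        requiresMuteCheck |= sharedDevice && otherOutput->isActive(otherOutput->latency() * 2);
        return requiresMuteCheck;
    }
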
@@ -1379,7 +1294,12 @@
         }
     }
 
-    return stopSource(outputDesc, stream, forceDeviceUpdate);
+    status_t status = stopSource(outputDesc, stream, forceDeviceUpdate);
+
+    if (status == NO_ERROR) {
+        outputDesc->stop();
+    }
+    return status;
 }
 
 status_t AudioPolicyManager::stopSource(const sp<AudioOutputDescriptor>& outputDesc,
@@ -1454,19 +1374,6 @@
         return;
     }
 
-#ifdef AUDIO_POLICY_TEST
-    int testIndex = testOutputIndex(output);
-    if (testIndex != 0) {
-        sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(index);
-        if (outputDesc->isActive()) {
-            mpClientInterface->closeOutput(output);
-            removeOutput(output);
-            mTestOutputs[testIndex] = 0;
-        }
-        return;
-    }
-#endif //AUDIO_POLICY_TEST
-
     // Routing
     mOutputRoutes.removeRoute(session);
 
@@ -1495,7 +1402,7 @@
                                              input_type_t *inputType,
                                              audio_port_handle_t *portId)
 {
-    ALOGV("getInputForAttr() source %d, samplingRate %d, format %d, channelMask %x,"
+    ALOGV("getInputForAttr() source %d, sampling rate %d, format %#x, channel mask %#x,"
             "session %d, flags %#x",
           attr->source, config->sample_rate, config->format, config->channel_mask, session, flags);
 
@@ -1507,15 +1414,14 @@
     AudioMix *policyMix = NULL;
     DeviceVector inputDevices;
 
+    if (inputSource == AUDIO_SOURCE_DEFAULT) {
+        inputSource = AUDIO_SOURCE_MIC;
+    }
+
     // Explicit routing?
     sp<DeviceDescriptor> deviceDesc;
     if (*selectedDeviceId != AUDIO_PORT_HANDLE_NONE) {
-        for (size_t i = 0; i < mAvailableInputDevices.size(); i++) {
-            if (mAvailableInputDevices[i]->getId() == *selectedDeviceId) {
-                deviceDesc = mAvailableInputDevices[i];
-                break;
-            }
-        }
+        deviceDesc = mAvailableInputDevices.getDeviceFromId(*selectedDeviceId);
     }
     mInputRoutes.addRoute(session, SessionRoute::STREAM_TYPE_NA, inputSource, deviceDesc, uid);
 
@@ -1563,9 +1469,6 @@
     *input = AUDIO_IO_HANDLE_NONE;
     *inputType = API_INPUT_INVALID;
 
-    if (inputSource == AUDIO_SOURCE_DEFAULT) {
-        inputSource = AUDIO_SOURCE_MIC;
-    }
     halInputSource = inputSource;
 
     // TODO: check for existing client for this port ID
@@ -1615,7 +1518,7 @@
     }
 
     *input = getInputForDevice(device, address, session, uid, inputSource,
-                               config->sample_rate, config->format, config->channel_mask, flags,
+                               config, flags,
                                policyMix);
     if (*input == AUDIO_IO_HANDLE_NONE) {
         status = INVALID_OPERATION;
@@ -1642,9 +1545,7 @@
                                                         audio_session_t session,
                                                         uid_t uid,
                                                         audio_source_t inputSource,
-                                                        uint32_t samplingRate,
-                                                        audio_format_t format,
-                                                        audio_channel_mask_t channelMask,
+                                                        const audio_config_base_t *config,
                                                         audio_input_flags_t flags,
                                                         AudioMix *policyMix)
 {
@@ -1663,16 +1564,17 @@
             halInputSource = AUDIO_SOURCE_VOICE_RECOGNITION;
         }
     } else if (inputSource == AUDIO_SOURCE_VOICE_COMMUNICATION &&
-               audio_is_linear_pcm(format)) {
+               audio_is_linear_pcm(config->format)) {
         flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_VOIP_TX);
     }
 
     // find a compatible input profile (not necessarily identical in parameters)
     sp<IOProfile> profile;
-    // samplingRate and flags may be updated by getInputProfile
-    uint32_t profileSamplingRate = (samplingRate == 0) ? SAMPLE_RATE_HZ_DEFAULT : samplingRate;
-    audio_format_t profileFormat = format;
-    audio_channel_mask_t profileChannelMask = channelMask;
+    // sampling rate and flags may be updated by getInputProfile
+    uint32_t profileSamplingRate = (config->sample_rate == 0) ?
+            SAMPLE_RATE_HZ_DEFAULT : config->sample_rate;
+    audio_format_t profileFormat = config->format;
+    audio_channel_mask_t profileChannelMask = config->channel_mask;
     audio_input_flags_t profileFlags = flags;
     for (;;) {
         profile = getInputProfile(device, address,
@@ -1685,13 +1587,14 @@
         } else if (profileFlags != AUDIO_INPUT_FLAG_NONE) {
             profileFlags = AUDIO_INPUT_FLAG_NONE; // retry
         } else { // fail
-            ALOGW("getInputForDevice() could not find profile for device 0x%X,"
-                  "samplingRate %u, format %#x, channelMask 0x%X, flags %#x",
-                    device, samplingRate, format, channelMask, flags);
+            ALOGW("getInputForDevice() could not find profile for device 0x%X, "
+                  "sampling rate %u, format %#x, channel mask 0x%X, flags %#x",
+                    device, config->sample_rate, config->format, config->channel_mask, flags);
             return input;
         }
     }
     // Pick input sampling rate if not specified by client
+    uint32_t samplingRate = config->sample_rate;
     if (samplingRate == 0) {
         samplingRate = profileSamplingRate;
     }
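A sketch of the retry shape getInputForDevice() keeps after switching to audio_config_base_t (illustrative, not part of the patch): look for an input profile honoring the requested flags, relax to AUDIO_INPUT_FLAG_NONE if nothing matched, and give up only after the relaxed attempt also fails. findProfile() stands in for getInputProfile() with an abbreviated parameter list.

    static sp<IOProfile> pickInputProfile(audio_devices_t device, audio_input_flags_t flags) {
        audio_input_flags_t profileFlags = flags;
        for (;;) {
            sp<IOProfile> profile = findProfile(device, profileFlags);  // hypothetical helper
            if (profile != nullptr) {
                return profile;                        // success
            }
            if (profileFlags == AUDIO_INPUT_FLAG_NONE) {
                return nullptr;                        // fail: nothing left to relax
            }
            profileFlags = AUDIO_INPUT_FLAG_NONE;      // retry once without flags
        }
    }
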
@@ -1702,14 +1605,14 @@
     }
 
     sp<AudioSession> audioSession = new AudioSession(session,
-                                                              inputSource,
-                                                              format,
-                                                              samplingRate,
-                                                              channelMask,
-                                                              flags,
-                                                              uid,
-                                                              isSoundTrigger,
-                                                              policyMix, mpClientInterface);
+                                                     inputSource,
+                                                     config->format,
+                                                     samplingRate,
+                                                     config->channel_mask,
+                                                     flags,
+                                                     uid,
+                                                     isSoundTrigger,
+                                                     policyMix, mpClientInterface);
 
 // FIXME: disable concurrent capture until UI is ready
 #if 0
@@ -1753,8 +1656,8 @@
             // can be selected.
             if (!isConcurrentSource(inputSource) &&
                     ((desc->mSamplingRate != samplingRate ||
-                    desc->mChannelMask != channelMask ||
-                    !audio_formats_match(desc->mFormat, format)) &&
+                    desc->mChannelMask != config->channel_mask ||
+                    !audio_formats_match(desc->mFormat, config->format)) &&
                     (source_priority(desc->getHighestPrioritySource(false /*activeOnly*/)) <
                      source_priority(inputSource)))) {
                 reusedInputDesc = desc;
@@ -1777,44 +1680,40 @@
     }
 #endif
 
-    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-    config.sample_rate = profileSamplingRate;
-    config.channel_mask = profileChannelMask;
-    config.format = profileFormat;
+    if (!profile->canOpenNewIo()) {
+        return AUDIO_IO_HANDLE_NONE;
+    }
+
+    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(profile, mpClientInterface);
+
+    audio_config_t lConfig = AUDIO_CONFIG_INITIALIZER;
+    lConfig.sample_rate = profileSamplingRate;
+    lConfig.channel_mask = profileChannelMask;
+    lConfig.format = profileFormat;
 
     if (address == "") {
         DeviceVector inputDevices = mAvailableInputDevices.getDevicesFromType(device);
-        //   the inputs vector must be of size 1, but we don't want to crash here
+        // the inputs vector must be of size >= 1, but we don't want to crash here
         address = inputDevices.size() > 0 ? inputDevices.itemAt(0)->mAddress : String8("");
     }
 
-    status_t status = mpClientInterface->openInput(profile->getModuleHandle(),
-                                                   &input,
-                                                   &config,
-                                                   &device,
-                                                   address,
-                                                   halInputSource,
-                                                   profileFlags);
+    status_t status = inputDesc->open(&lConfig, device, address,
+            halInputSource, profileFlags, &input);
 
     // only accept input with the exact requested set of parameters
     if (status != NO_ERROR || input == AUDIO_IO_HANDLE_NONE ||
-        (profileSamplingRate != config.sample_rate) ||
-        !audio_formats_match(profileFormat, config.format) ||
-        (profileChannelMask != config.channel_mask)) {
-        ALOGW("getInputForAttr() failed opening input: samplingRate %d"
-              ", format %d, channelMask %x",
-                samplingRate, format, channelMask);
+        (profileSamplingRate != lConfig.sample_rate) ||
+        !audio_formats_match(profileFormat, lConfig.format) ||
+        (profileChannelMask != lConfig.channel_mask)) {
+        ALOGW("getInputForAttr() failed opening input: sampling rate %d"
+              ", format %#x, channel mask %#x",
+              profileSamplingRate, profileFormat, profileChannelMask);
         if (input != AUDIO_IO_HANDLE_NONE) {
-            mpClientInterface->closeInput(input);
+            inputDesc->close();
         }
         return AUDIO_IO_HANDLE_NONE;
     }
 
-    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(profile);
-    inputDesc->mSamplingRate = profileSamplingRate;
-    inputDesc->mFormat = profileFormat;
-    inputDesc->mChannelMask = profileChannelMask;
-    inputDesc->mDevice = device;
     inputDesc->mPolicyMix = policyMix;
     inputDesc->addAudioSession(session, audioSession);
 
@@ -1855,9 +1754,7 @@
         return true;
     }
 
-    Vector< sp<AudioInputDescriptor> > activeInputs = mInputs.getActiveInputs();
-    for (size_t i = 0; i <  activeInputs.size(); i++) {
-        sp<AudioInputDescriptor> activeInput = activeInputs[i];
+    for (const auto& activeInput : mInputs.getActiveInputs()) {
         if (!isConcurrentSource(activeInput->inputSource(true)) &&
                 !is_virtual_input_device(activeInput->mDevice)) {
             return false;
@@ -1943,9 +1840,7 @@
         }
 
         Vector< sp<AudioInputDescriptor> > activeInputs = mInputs.getActiveInputs();
-        for (size_t i = 0; i < activeInputs.size(); i++) {
-            sp<AudioInputDescriptor> activeDesc = activeInputs[i];
-
+        for (const auto& activeDesc : activeInputs) {
             if (is_virtual_input_device(activeDesc->mDevice)) {
                 continue;
             }
@@ -1985,9 +1880,7 @@
             inputDesc->isSoundTrigger() ? soundTriggerSupportsConcurrentCapture() : false;
 
         // if capture is allowed, preempt currently active HOTWORD captures
-        for (size_t i = 0; i < activeInputs.size(); i++) {
-            sp<AudioInputDescriptor> activeDesc = activeInputs[i];
-
+        for (const auto& activeDesc : activeInputs) {
             if (is_virtual_input_device(activeDesc->mDevice)) {
                 continue;
             }
@@ -2027,6 +1920,13 @@
         audio_devices_t device = getNewInputDevice(inputDesc);
         setInputDevice(input, device, true /* force */);
 
+        status_t status = inputDesc->start();
+        if (status != NO_ERROR) {
+            mInputRoutes.decRouteActivity(session);
+            audioSession->changeActiveCount(-1);
+            return status;
+        }
+
         if (inputDesc->getAudioSessionCount(true/*activeOnly*/) == 1) {
             // if input maps to a dynamic policy with an activity listener, notify of state change
             if ((inputDesc->mPolicyMix != NULL)
@@ -2093,7 +1993,7 @@
     mInputRoutes.decRouteActivity(session);
 
     if (audioSession->activeCount() == 0) {
-
+        inputDesc->stop();
         if (inputDesc->isActive()) {
             setInputDevice(input, getNewInputDevice(inputDesc), false /* force */);
         } else {
@@ -2182,7 +2082,7 @@
 void AudioPolicyManager::closeAllInputs() {
     bool patchRemoved = false;
 
-    for(size_t input_index = 0; input_index < mInputs.size(); input_index++) {
+    for (size_t input_index = 0; input_index < mInputs.size(); input_index++) {
         sp<AudioInputDescriptor> inputDesc = mInputs.valueAt(input_index);
         ssize_t patch_index = mAudioPatches.indexOfKey(inputDesc->getPatchHandle());
         if (patch_index >= 0) {
@@ -2191,7 +2091,7 @@
             mAudioPatches.removeItemsAt(patch_index);
             patchRemoved = true;
         }
-        mpClientInterface->closeInput(mInputs.keyAt(input_index));
+        inputDesc->close();
     }
     mInputs.clear();
     SoundTrigger::setCaptureState(false);
@@ -2346,21 +2246,21 @@
         audio_io_handle_t outputDeepBuffer = AUDIO_IO_HANDLE_NONE;
         audio_io_handle_t outputPrimary = AUDIO_IO_HANDLE_NONE;
 
-        for (size_t i = 0; i < outputs.size(); i++) {
-            sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(outputs[i]);
+        for (audio_io_handle_t output : outputs) {
+            sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
             if (activeOnly && !desc->isStreamActive(AUDIO_STREAM_MUSIC)) {
                 continue;
             }
-            ALOGV("selectOutputForMusicEffects activeOnly %d outputs[%zu] flags 0x%08x",
-                  activeOnly, i, desc->mFlags);
+            ALOGV("selectOutputForMusicEffects activeOnly %d output %d flags 0x%08x",
+                  activeOnly, output, desc->mFlags);
             if ((desc->mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
-                outputOffloaded = outputs[i];
+                outputOffloaded = output;
             }
             if ((desc->mFlags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER) != 0) {
-                outputDeepBuffer = outputs[i];
+                outputDeepBuffer = output;
             }
             if ((desc->mFlags & AUDIO_OUTPUT_FLAG_PRIMARY) != 0) {
-                outputPrimary = outputs[i];
+                outputPrimary = output;
             }
         }
         if (outputOffloaded != AUDIO_IO_HANDLE_NONE) {
@@ -2471,23 +2371,16 @@
             break;
         }
         if ((mixes[i].mRouteFlags & MIX_ROUTE_FLAG_LOOP_BACK) == MIX_ROUTE_FLAG_LOOP_BACK) {
-            // Loop back through "remote submix"
-            if (rSubmixModule == 0) {
-                for (size_t j = 0; i < mHwModules.size(); j++) {
-                    if (strcmp(AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, mHwModules[j]->mName) == 0
-                            && mHwModules[j]->mHandle != 0) {
-                        rSubmixModule = mHwModules[j];
-                        break;
-                    }
-                }
-            }
-
             ALOGV("registerPolicyMixes() mix %zu of %zu is LOOP_BACK", i, mixes.size());
-
             if (rSubmixModule == 0) {
-                ALOGE(" Unable to find audio module for submix, aborting mix %zu registration", i);
-                res = INVALID_OPERATION;
-                break;
+                rSubmixModule = mHwModules.getModuleFromName(
+                        AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX);
+                if (rSubmixModule == 0) {
+                    ALOGE(" Unable to find audio module for submix, aborting mix %zu registration",
+                            i);
+                    res = INVALID_OPERATION;
+                    break;
+                }
             }
 
             String8 address = mixes[i].mDeviceAddress;
@@ -2566,24 +2459,19 @@
     status_t res = NO_ERROR;
     sp<HwModule> rSubmixModule;
     // examine each mix's route type
-    for (size_t i = 0; i < mixes.size(); i++) {
-        if ((mixes[i].mRouteFlags & MIX_ROUTE_FLAG_LOOP_BACK) == MIX_ROUTE_FLAG_LOOP_BACK) {
+    for (const auto& mix : mixes) {
+        if ((mix.mRouteFlags & MIX_ROUTE_FLAG_LOOP_BACK) == MIX_ROUTE_FLAG_LOOP_BACK) {
 
             if (rSubmixModule == 0) {
-                for (size_t j = 0; i < mHwModules.size(); j++) {
-                    if (strcmp(AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, mHwModules[j]->mName) == 0
-                            && mHwModules[j]->mHandle != 0) {
-                        rSubmixModule = mHwModules[j];
-                        break;
-                    }
+                rSubmixModule = mHwModules.getModuleFromName(
+                        AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX);
+                if (rSubmixModule == 0) {
+                    res = INVALID_OPERATION;
+                    continue;
                 }
             }
-            if (rSubmixModule == 0) {
-                res = INVALID_OPERATION;
-                continue;
-            }
 
-            String8 address = mixes[i].mDeviceAddress;
+            String8 address = mix.mDeviceAddress;
 
             if (mPolicyMixes.unregisterMix(address) != NO_ERROR) {
                 res = INVALID_OPERATION;
@@ -2605,8 +2493,8 @@
             rSubmixModule->removeOutputProfile(address);
             rSubmixModule->removeInputProfile(address);
 
-        } if ((mixes[i].mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
-            if (mPolicyMixes.unregisterMix(mixes[i].mDeviceAddress) != NO_ERROR) {
+        } if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
+            if (mPolicyMixes.unregisterMix(mix.mDeviceAddress) != NO_ERROR) {
                 res = INVALID_OPERATION;
                 continue;
             }
@@ -2658,7 +2546,7 @@
 
     mAvailableOutputDevices.dump(fd, String8("Available output"));
     mAvailableInputDevices.dump(fd, String8("Available input"));
-    mHwModules.dump(fd);
+    mHwModulesAll.dump(fd);
     mOutputs.dump(fd);
     mInputs.dump(fd);
     mVolumeCurves->dump(fd);
@@ -2763,23 +2651,23 @@
         // do not report devices with type AUDIO_DEVICE_IN_STUB or AUDIO_DEVICE_OUT_STUB
         // as they are used by stub HALs by convention
         if (role == AUDIO_PORT_ROLE_SINK || role == AUDIO_PORT_ROLE_NONE) {
-            for (size_t i = 0; i < mAvailableOutputDevices.size(); i++) {
-                if (mAvailableOutputDevices[i]->type() == AUDIO_DEVICE_OUT_STUB) {
+            for (const auto& dev : mAvailableOutputDevices) {
+                if (dev->type() == AUDIO_DEVICE_OUT_STUB) {
                     continue;
                 }
                 if (portsWritten < portsMax) {
-                    mAvailableOutputDevices[i]->toAudioPort(&ports[portsWritten++]);
+                    dev->toAudioPort(&ports[portsWritten++]);
                 }
                 (*num_ports)++;
             }
         }
         if (role == AUDIO_PORT_ROLE_SOURCE || role == AUDIO_PORT_ROLE_NONE) {
-            for (size_t i = 0; i < mAvailableInputDevices.size(); i++) {
-                if (mAvailableInputDevices[i]->type() == AUDIO_DEVICE_IN_STUB) {
+            for (const auto& dev : mAvailableInputDevices) {
+                if (dev->type() == AUDIO_DEVICE_IN_STUB) {
                     continue;
                 }
                 if (portsWritten < portsMax) {
-                    mAvailableInputDevices[i]->toAudioPort(&ports[portsWritten++]);
+                    dev->toAudioPort(&ports[portsWritten++]);
                 }
                 (*num_ports)++;
             }
@@ -3274,8 +3162,8 @@
         }
     }
     // reroute outputs if necessary
-    for (size_t i = 0; i < affectedStrategies.size(); i++) {
-        checkStrategyRoute(affectedStrategies[i], AUDIO_IO_HANDLE_NONE);
+    for (const auto& strategy : affectedStrategies) {
+        checkStrategyRoute(strategy, AUDIO_IO_HANDLE_NONE);
     }
 
     // remove input routes associated with this uid
@@ -3297,8 +3185,8 @@
             inputsToClose.add(inputDesc->mIoHandle);
         }
     }
-    for (size_t i = 0; i < inputsToClose.size(); i++) {
-        closeInput(inputsToClose[i]);
+    for (const auto& input : inputsToClose) {
+        closeInput(input);
     }
 }
 
@@ -3401,6 +3289,11 @@
             ALOGV("%s output for device %08x is duplicated", __FUNCTION__, sinkDevice);
             return INVALID_OPERATION;
         }
+        status_t status = outputDesc->start();
+        if (status != NO_ERROR) {
+            return status;
+        }
+
         // create a special patch with no sink and two sources:
         // - the second source indicates to PatchPanel through which output mix this patch should
         // be connected as well as the stream type for volume control
@@ -3411,7 +3304,7 @@
         srcDeviceDesc->toAudioPortConfig(&patch->sources[0], NULL);
         outputDesc->toAudioPortConfig(&patch->sources[1], NULL);
         patch->sources[1].ext.mix.usecase.stream = stream;
-        status_t status = mpClientInterface->createAudioPatch(patch,
+        status = mpClientInterface->createAudioPatch(patch,
                                                               &afPatchHandle,
                                                               0);
         ALOGV("%s patch panel returned %d patchHandle %d", __FUNCTION__,
@@ -3475,8 +3368,8 @@
                 offloaded.push(desc->mIoHandle);
             }
         }
-        for (size_t i = 0; i < offloaded.size(); ++i) {
-            closeOutput(offloaded[i]);
+        for (const auto& handle : offloaded) {
+            closeOutput(handle);
         }
     }
     // update master mono for all remaining outputs
@@ -3513,7 +3406,10 @@
     audio_stream_type_t stream = streamTypefromAttributesInt(&sourceDesc->mAttributes);
     sp<SwAudioOutputDescriptor> swOutputDesc = sourceDesc->mSwOutput.promote();
     if (swOutputDesc != 0) {
-        stopSource(swOutputDesc, stream, false);
+        status_t status = stopSource(swOutputDesc, stream, false);
+        if (status == NO_ERROR) {
+            swOutputDesc->stop();
+        }
         mpClientInterface->releaseAudioPatch(patchDesc->mAfPatchHandle, 0);
     } else {
         sp<HwAudioOutputDescriptor> hwOutputDesc = sourceDesc->mHwOutput.promote();
@@ -3549,7 +3445,7 @@
 // ----------------------------------------------------------------------------
 uint32_t AudioPolicyManager::nextAudioPortGeneration()
 {
-    return android_atomic_inc(&mAudioPortGeneration);
+    return mAudioPortGeneration++;
 }
 
 #ifdef USE_XML_AUDIO_POLICY_CONF
@@ -3579,13 +3475,22 @@
 }
 #endif
 
-AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface)
+AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface,
+                                       bool /*forTesting*/)
     :
-#ifdef AUDIO_POLICY_TEST
-    Thread(false),
-#endif //AUDIO_POLICY_TEST
+    mUidCached(getuid()),
+    mpClientInterface(clientInterface),
     mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f),
     mA2dpSuspended(false),
+#ifdef USE_XML_AUDIO_POLICY_CONF
+    mVolumeCurves(new VolumeCurvesCollection()),
+    mConfig(mHwModulesAll, mAvailableOutputDevices, mAvailableInputDevices,
+            mDefaultOutputDevice, static_cast<VolumeCurvesCollection*>(mVolumeCurves.get())),
+#else
+    mVolumeCurves(new StreamDescriptorCollection()),
+    mConfig(mHwModulesAll, mAvailableOutputDevices, mAvailableInputDevices,
+            mDefaultOutputDevice),
+#endif
     mAudioPortGeneration(1),
     mBeaconMuteRefCount(0),
     mBeaconPlayingRefCount(0),
@@ -3595,70 +3500,72 @@
     mMusicEffectOutput(AUDIO_IO_HANDLE_NONE),
     mHasComputedSoundTriggerSupportsConcurrentCapture(false)
 {
-    mUidCached = getuid();
-    mpClientInterface = clientInterface;
+}
 
-    // TODO: remove when legacy conf file is removed. true on devices that use DRC on the
-    // DEVICE_CATEGORY_SPEAKER path to boost soft sounds, used to adjust volume curves accordingly.
-    // Note: remove also speaker_drc_enabled from global configuration of XML config file.
-    bool speakerDrcEnabled = false;
+AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface)
+        : AudioPolicyManager(clientInterface, false /*forTesting*/)
+{
+    loadConfig();
+    initialize();
+}
 
+void AudioPolicyManager::loadConfig() {
 #ifdef USE_XML_AUDIO_POLICY_CONF
-    mVolumeCurves = new VolumeCurvesCollection();
-    AudioPolicyConfig config(mHwModules, mAvailableOutputDevices, mAvailableInputDevices,
-                             mDefaultOutputDevice, speakerDrcEnabled,
-                             static_cast<VolumeCurvesCollection *>(mVolumeCurves));
-    if (deserializeAudioPolicyXmlConfig(config) != NO_ERROR) {
+    if (deserializeAudioPolicyXmlConfig(getConfig()) != NO_ERROR) {
 #else
-    mVolumeCurves = new StreamDescriptorCollection();
-    AudioPolicyConfig config(mHwModules, mAvailableOutputDevices, mAvailableInputDevices,
-                             mDefaultOutputDevice, speakerDrcEnabled);
-    if ((ConfigParsingUtils::loadConfig(AUDIO_POLICY_VENDOR_CONFIG_FILE, config) != NO_ERROR) &&
-            (ConfigParsingUtils::loadConfig(AUDIO_POLICY_CONFIG_FILE, config) != NO_ERROR)) {
+    if ((ConfigParsingUtils::loadConfig(AUDIO_POLICY_VENDOR_CONFIG_FILE, getConfig()) != NO_ERROR)
+           && (ConfigParsingUtils::loadConfig(AUDIO_POLICY_CONFIG_FILE, getConfig()) != NO_ERROR)) {
 #endif
         ALOGE("could not load audio policy configuration file, setting defaults");
-        config.setDefault();
+        getConfig().setDefault();
     }
-    // must be done after reading the policy (since conditionned by Speaker Drc Enabling)
-    mVolumeCurves->initializeVolumeCurves(speakerDrcEnabled);
+}
+
+status_t AudioPolicyManager::initialize() {
+    mVolumeCurves->initializeVolumeCurves(getConfig().isSpeakerDrcEnabled());
 
     // Once policy config has been parsed, retrieve an instance of the engine and initialize it.
     audio_policy::EngineInstance *engineInstance = audio_policy::EngineInstance::getInstance();
     if (!engineInstance) {
         ALOGE("%s:  Could not get an instance of policy engine", __FUNCTION__);
-        return;
+        return NO_INIT;
     }
     // Retrieve the Policy Manager Interface
     mEngine = engineInstance->queryInterface<AudioPolicyManagerInterface>();
     if (mEngine == NULL) {
         ALOGE("%s: Failed to get Policy Engine Interface", __FUNCTION__);
-        return;
+        return NO_INIT;
     }
     mEngine->setObserver(this);
     status_t status = mEngine->initCheck();
-    (void) status;
-    ALOG_ASSERT(status == NO_ERROR, "Policy engine not initialized(err=%d)", status);
+    if (status != NO_ERROR) {
+        LOG_FATAL("Policy engine not initialized (err=%d)", status);
+        return status;
+    }
 
     // mAvailableOutputDevices and mAvailableInputDevices now contain all attached devices
     // open all output streams needed to access attached devices
     audio_devices_t outputDeviceTypes = mAvailableOutputDevices.types();
     audio_devices_t inputDeviceTypes = mAvailableInputDevices.types() & ~AUDIO_DEVICE_BIT_IN;
-    for (size_t i = 0; i < mHwModules.size(); i++) {
-        mHwModules[i]->mHandle = mpClientInterface->loadHwModule(mHwModules[i]->getName());
-        if (mHwModules[i]->mHandle == 0) {
-            ALOGW("could not open HW module %s", mHwModules[i]->getName());
+    for (const auto& hwModule : mHwModulesAll) {
+        hwModule->setHandle(mpClientInterface->loadHwModule(hwModule->getName()));
+        if (hwModule->getHandle() == AUDIO_MODULE_HANDLE_NONE) {
+            ALOGW("could not open HW module %s", hwModule->getName());
             continue;
         }
+        mHwModules.push_back(hwModule);
         // open all output streams needed to access attached devices
         // except for direct output streams that are only opened when they are actually
         // required by an app.
         // This also validates mAvailableOutputDevices list
-        for (size_t j = 0; j < mHwModules[i]->mOutputProfiles.size(); j++)
-        {
-            const sp<IOProfile> outProfile = mHwModules[i]->mOutputProfiles[j];
-
+        for (const auto& outProfile : hwModule->getOutputProfiles()) {
+            if (!outProfile->canOpenNewIo()) {
+                ALOGE("Invalid Output profile max open count %u for profile %s",
+                      outProfile->maxOpenCount, outProfile->getTagName().c_str());
+                continue;
+            }
             if (!outProfile->hasSupportedDevices()) {
-                ALOGW("Output profile contains no device on module %s", mHwModules[i]->getName());
+                ALOGW("Output profile contains no device on module %s", hwModule->getName());
                 continue;
             }
             if ((outProfile->getFlags() & AUDIO_OUTPUT_FLAG_TTS) != 0) {
@@ -3685,35 +3592,20 @@
             const DeviceVector &devicesForType = supportedDevices.getDevicesFromType(profileType);
             String8 address = devicesForType.size() > 0 ? devicesForType.itemAt(0)->mAddress
                     : String8("");
-
-            outputDesc->mDevice = profileType;
-            audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-            config.sample_rate = outputDesc->mSamplingRate;
-            config.channel_mask = outputDesc->mChannelMask;
-            config.format = outputDesc->mFormat;
             audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-            status_t status = mpClientInterface->openOutput(outProfile->getModuleHandle(),
-                                                            &output,
-                                                            &config,
-                                                            &outputDesc->mDevice,
-                                                            address,
-                                                            &outputDesc->mLatency,
-                                                            outputDesc->mFlags);
+            status_t status = outputDesc->open(nullptr, profileType, address,
+                                           AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
 
             if (status != NO_ERROR) {
                 ALOGW("Cannot open output stream for device %08x on hw module %s",
                       outputDesc->mDevice,
-                      mHwModules[i]->getName());
+                      hwModule->getName());
             } else {
-                outputDesc->mSamplingRate = config.sample_rate;
-                outputDesc->mChannelMask = config.channel_mask;
-                outputDesc->mFormat = config.format;
-
-                for (size_t k = 0; k  < supportedDevices.size(); k++) {
-                    ssize_t index = mAvailableOutputDevices.indexOf(supportedDevices[k]);
+                for (const auto& dev : supportedDevices) {
+                    ssize_t index = mAvailableOutputDevices.indexOf(dev);
                     // give a valid ID to an attached device once confirmed it is reachable
                     if (index >= 0 && !mAvailableOutputDevices[index]->isAttached()) {
-                        mAvailableOutputDevices[index]->attach(mHwModules[i]);
+                        mAvailableOutputDevices[index]->attach(hwModule);
                     }
                 }
                 if (mPrimaryOutput == 0 &&
@@ -3722,21 +3614,23 @@
                 }
                 addOutput(output, outputDesc);
                 setOutputDevice(outputDesc,
-                                outputDesc->mDevice,
+                                profileType,
                                 true,
                                 0,
                                 NULL,
-                                address.string());
+                                address);
             }
         }
         // open input streams needed to access attached devices to validate
         // mAvailableInputDevices list
-        for (size_t j = 0; j < mHwModules[i]->mInputProfiles.size(); j++)
-        {
-            const sp<IOProfile> inProfile = mHwModules[i]->mInputProfiles[j];
-
+        for (const auto& inProfile : hwModule->getInputProfiles()) {
+            if (!inProfile->canOpenNewIo()) {
+                ALOGE("Invalid Input profile max open count %u for profile %s",
+                      inProfile->maxOpenCount, inProfile->getTagName().c_str());
+                continue;
+            }
             if (!inProfile->hasSupportedDevices()) {
-                ALOGW("Input profile contains no device on module %s", mHwModules[i]->getName());
+                ALOGW("Input profile contains no device on module %s", hwModule->getName());
                 continue;
             }
             // chose first device present in profile's SupportedDevices also part of
@@ -3747,49 +3641,40 @@
                 continue;
             }
             sp<AudioInputDescriptor> inputDesc =
-                    new AudioInputDescriptor(inProfile);
+                    new AudioInputDescriptor(inProfile, mpClientInterface);
 
-            inputDesc->mDevice = profileType;
-
-            // find the address
             DeviceVector inputDevices = mAvailableInputDevices.getDevicesFromType(profileType);
-            //   the inputs vector must be of size 1, but we don't want to crash here
+            //   the inputs vector must be of size >= 1, but we don't want to crash here
             String8 address = inputDevices.size() > 0 ? inputDevices.itemAt(0)->mAddress
                     : String8("");
             ALOGV("  for input device 0x%x using address %s", profileType, address.string());
             ALOGE_IF(inputDevices.size() == 0, "Input device list is empty!");
 
-            audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-            config.sample_rate = inputDesc->mSamplingRate;
-            config.channel_mask = inputDesc->mChannelMask;
-            config.format = inputDesc->mFormat;
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-            status_t status = mpClientInterface->openInput(inProfile->getModuleHandle(),
-                                                           &input,
-                                                           &config,
-                                                           &inputDesc->mDevice,
-                                                           address,
-                                                           AUDIO_SOURCE_MIC,
-                                                           AUDIO_INPUT_FLAG_NONE);
+            status_t status = inputDesc->open(nullptr,
+                                              profileType,
+                                              address,
+                                              AUDIO_SOURCE_MIC,
+                                              AUDIO_INPUT_FLAG_NONE,
+                                              &input);
 
             if (status == NO_ERROR) {
-                const DeviceVector &supportedDevices = inProfile->getSupportedDevices();
-                for (size_t k = 0; k  < supportedDevices.size(); k++) {
-                    ssize_t index =  mAvailableInputDevices.indexOf(supportedDevices[k]);
+                for (const auto& dev : inProfile->getSupportedDevices()) {
+                    ssize_t index = mAvailableInputDevices.indexOf(dev);
                     // give a valid ID to an attached device once confirmed it is reachable
                     if (index >= 0) {
                         sp<DeviceDescriptor> devDesc = mAvailableInputDevices[index];
                         if (!devDesc->isAttached()) {
-                            devDesc->attach(mHwModules[i]);
+                            devDesc->attach(hwModule);
                             devDesc->importAudioPort(inProfile, true);
                         }
                     }
                 }
-                mpClientInterface->closeInput(input);
+                inputDesc->close();
             } else {
                 ALOGW("Cannot open input stream for device %08x on hw module %s",
-                      inputDesc->mDevice,
-                      mHwModules[i]->getName());
+                      profileType,
+                      hwModule->getName());
             }
         }
     }
@@ -3819,53 +3704,32 @@
     // make sure default device is reachable
     if (mDefaultOutputDevice == 0 || mAvailableOutputDevices.indexOf(mDefaultOutputDevice) < 0) {
         ALOGE("Default device %08x is unreachable", mDefaultOutputDevice->type());
+        status = NO_INIT;
     }
 
-    ALOGE_IF((mPrimaryOutput == 0), "Failed to open primary output");
+    if (mPrimaryOutput == 0) {
+        ALOGE("Failed to open primary output");
+        status = NO_INIT;
+    }
 
     updateDevicesAndOutputs();
-
-#ifdef AUDIO_POLICY_TEST
-    if (mPrimaryOutput != 0) {
-        AudioParameter outputCmd = AudioParameter();
-        outputCmd.addInt(String8("set_id"), 0);
-        mpClientInterface->setParameters(mPrimaryOutput->mIoHandle, outputCmd.toString());
-
-        mTestDevice = AUDIO_DEVICE_OUT_SPEAKER;
-        mTestSamplingRate = 44100;
-        mTestFormat = AUDIO_FORMAT_PCM_16_BIT;
-        mTestChannels =  AUDIO_CHANNEL_OUT_STEREO;
-        mTestLatencyMs = 0;
-        mCurOutput = 0;
-        mDirectOutput = false;
-        for (int i = 0; i < NUM_TEST_OUTPUTS; i++) {
-            mTestOutputs[i] = 0;
-        }
-
-        const size_t SIZE = 256;
-        char buffer[SIZE];
-        snprintf(buffer, SIZE, "AudioPolicyManagerTest");
-        run(buffer, ANDROID_PRIORITY_AUDIO);
-    }
-#endif //AUDIO_POLICY_TEST
+    return status;
 }
 
 AudioPolicyManager::~AudioPolicyManager()
 {
-#ifdef AUDIO_POLICY_TEST
-    exit();
-#endif //AUDIO_POLICY_TEST
    for (size_t i = 0; i < mOutputs.size(); i++) {
-        mpClientInterface->closeOutput(mOutputs.keyAt(i));
+        mOutputs.valueAt(i)->close();
    }
    for (size_t i = 0; i < mInputs.size(); i++) {
-        mpClientInterface->closeInput(mInputs.keyAt(i));
+        mInputs.valueAt(i)->close();
    }
    mAvailableOutputDevices.clear();
    mAvailableInputDevices.clear();
    mOutputs.clear();
    mInputs.clear();
    mHwModules.clear();
+   mHwModulesAll.clear();
 }
 
 status_t AudioPolicyManager::initCheck()
@@ -3873,169 +3737,10 @@
     return hasPrimaryOutput() ? NO_ERROR : NO_INIT;
 }
 
-#ifdef AUDIO_POLICY_TEST
-bool AudioPolicyManager::threadLoop()
-{
-    ALOGV("entering threadLoop()");
-    while (!exitPending())
-    {
-        String8 command;
-        int valueInt;
-        String8 value;
-
-        Mutex::Autolock _l(mLock);
-        mWaitWorkCV.waitRelative(mLock, milliseconds(50));
-
-        command = mpClientInterface->getParameters(0, String8("test_cmd_policy"));
-        AudioParameter param = AudioParameter(command);
-
-        if (param.getInt(String8("test_cmd_policy"), valueInt) == NO_ERROR &&
-            valueInt != 0) {
-            ALOGV("Test command %s received", command.string());
-            String8 target;
-            if (param.get(String8("target"), target) != NO_ERROR) {
-                target = "Manager";
-            }
-            if (param.getInt(String8("test_cmd_policy_output"), valueInt) == NO_ERROR) {
-                param.remove(String8("test_cmd_policy_output"));
-                mCurOutput = valueInt;
-            }
-            if (param.get(String8("test_cmd_policy_direct"), value) == NO_ERROR) {
-                param.remove(String8("test_cmd_policy_direct"));
-                if (value == "false") {
-                    mDirectOutput = false;
-                } else if (value == "true") {
-                    mDirectOutput = true;
-                }
-            }
-            if (param.getInt(String8("test_cmd_policy_input"), valueInt) == NO_ERROR) {
-                param.remove(String8("test_cmd_policy_input"));
-                mTestInput = valueInt;
-            }
-
-            if (param.get(String8("test_cmd_policy_format"), value) == NO_ERROR) {
-                param.remove(String8("test_cmd_policy_format"));
-                int format = AUDIO_FORMAT_INVALID;
-                if (value == "PCM 16 bits") {
-                    format = AUDIO_FORMAT_PCM_16_BIT;
-                } else if (value == "PCM 8 bits") {
-                    format = AUDIO_FORMAT_PCM_8_BIT;
-                } else if (value == "Compressed MP3") {
-                    format = AUDIO_FORMAT_MP3;
-                }
-                if (format != AUDIO_FORMAT_INVALID) {
-                    if (target == "Manager") {
-                        mTestFormat = format;
-                    } else if (mTestOutputs[mCurOutput] != 0) {
-                        AudioParameter outputParam = AudioParameter();
-                        outputParam.addInt(String8(AudioParameter::keyStreamSupportedFormats), format);
-                        mpClientInterface->setParameters(mTestOutputs[mCurOutput], outputParam.toString());
-                    }
-                }
-            }
-            if (param.get(String8("test_cmd_policy_channels"), value) == NO_ERROR) {
-                param.remove(String8("test_cmd_policy_channels"));
-                int channels = 0;
-
-                if (value == "Channels Stereo") {
-                    channels =  AUDIO_CHANNEL_OUT_STEREO;
-                } else if (value == "Channels Mono") {
-                    channels =  AUDIO_CHANNEL_OUT_MONO;
-                }
-                if (channels != 0) {
-                    if (target == "Manager") {
-                        mTestChannels = channels;
-                    } else if (mTestOutputs[mCurOutput] != 0) {
-                        AudioParameter outputParam = AudioParameter();
-                        outputParam.addInt(String8(AudioParameter::keyStreamSupportedChannels), channels);
-                        mpClientInterface->setParameters(mTestOutputs[mCurOutput], outputParam.toString());
-                    }
-                }
-            }
-            if (param.getInt(String8("test_cmd_policy_sampleRate"), valueInt) == NO_ERROR) {
-                param.remove(String8("test_cmd_policy_sampleRate"));
-                if (valueInt >= 0 && valueInt <= 96000) {
-                    int samplingRate = valueInt;
-                    if (target == "Manager") {
-                        mTestSamplingRate = samplingRate;
-                    } else if (mTestOutputs[mCurOutput] != 0) {
-                        AudioParameter outputParam = AudioParameter();
-                        outputParam.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), samplingRate);
-                        mpClientInterface->setParameters(mTestOutputs[mCurOutput], outputParam.toString());
-                    }
-                }
-            }
-
-            if (param.get(String8("test_cmd_policy_reopen"), value) == NO_ERROR) {
-                param.remove(String8("test_cmd_policy_reopen"));
-
-                mpClientInterface->closeOutput(mpClientInterface->closeOutput(mPrimaryOutput););
-
-                audio_module_handle_t moduleHandle = mPrimaryOutput->getModuleHandle();
-
-                removeOutput(mPrimaryOutput->mIoHandle);
-                sp<SwAudioOutputDescriptor> outputDesc = new AudioOutputDescriptor(NULL,
-                                                                               mpClientInterface);
-                outputDesc->mDevice = AUDIO_DEVICE_OUT_SPEAKER;
-                audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-                config.sample_rate = outputDesc->mSamplingRate;
-                config.channel_mask = outputDesc->mChannelMask;
-                config.format = outputDesc->mFormat;
-                audio_io_handle_t handle;
-                status_t status = mpClientInterface->openOutput(moduleHandle,
-                                                                &handle,
-                                                                &config,
-                                                                &outputDesc->mDevice,
-                                                                String8(""),
-                                                                &outputDesc->mLatency,
-                                                                outputDesc->mFlags);
-                if (status != NO_ERROR) {
-                    ALOGE("Failed to reopen hardware output stream, "
-                        "samplingRate: %d, format %d, channels %d",
-                        outputDesc->mSamplingRate, outputDesc->mFormat, outputDesc->mChannelMask);
-                } else {
-                    outputDesc->mSamplingRate = config.sample_rate;
-                    outputDesc->mChannelMask = config.channel_mask;
-                    outputDesc->mFormat = config.format;
-                    mPrimaryOutput = outputDesc;
-                    AudioParameter outputCmd = AudioParameter();
-                    outputCmd.addInt(String8("set_id"), 0);
-                    mpClientInterface->setParameters(handle, outputCmd.toString());
-                    addOutput(handle, outputDesc);
-                }
-            }
-
-
-            mpClientInterface->setParameters(0, String8("test_cmd_policy="));
-        }
-    }
-    return false;
-}
-
-void AudioPolicyManager::exit()
-{
-    {
-        AutoMutex _l(mLock);
-        requestExit();
-        mWaitWorkCV.signal();
-    }
-    requestExitAndWait();
-}
-
-int AudioPolicyManager::testOutputIndex(audio_io_handle_t output)
-{
-    for (int i = 0; i < NUM_TEST_OUTPUTS; i++) {
-        if (output == mTestOutputs[i]) return i;
-    }
-    return 0;
-}
-#endif //AUDIO_POLICY_TEST
-
 // ---
 
 void AudioPolicyManager::addOutput(audio_io_handle_t output, const sp<SwAudioOutputDescriptor>& outputDesc)
 {
-    outputDesc->setIoHandle(output);
     mOutputs.add(output, outputDesc);
     updateMono(output); // update mono status when adding to output list
     selectOutputForMusicEffects();
@@ -4050,7 +3755,6 @@
 
 void AudioPolicyManager::addInput(audio_io_handle_t input, const sp<AudioInputDescriptor>& inputDesc)
 {
-    inputDesc->setIoHandle(input);
     mInputs.add(input, inputDesc);
     nextAudioPortGeneration();
 }
@@ -4097,19 +3801,15 @@
         }
         // then look for output profiles that can be routed to this device
         SortedVector< sp<IOProfile> > profiles;
-        for (size_t i = 0; i < mHwModules.size(); i++)
-        {
-            if (mHwModules[i]->mHandle == 0) {
-                continue;
-            }
-            for (size_t j = 0; j < mHwModules[i]->mOutputProfiles.size(); j++)
-            {
-                sp<IOProfile> profile = mHwModules[i]->mOutputProfiles[j];
+        for (const auto& hwModule : mHwModules) {
+            for (size_t j = 0; j < hwModule->getOutputProfiles().size(); j++) {
+                sp<IOProfile> profile = hwModule->getOutputProfiles()[j];
                 if (profile->supportDevice(device)) {
                     if (!device_distinguishes_on_address(device) ||
                             profile->supportDeviceAddress(address)) {
                         profiles.add(profile);
-                        ALOGV("checkOutputsForDevice(): adding profile %zu from module %zu", j, i);
+                        ALOGV("checkOutputsForDevice(): adding profile %zu from module %s",
+                                j, hwModule->getName());
                     }
                 }
             }
@@ -4144,30 +3844,20 @@
                 continue;
             }
 
+            if (!profile->canOpenNewIo()) {
+                ALOGW("Max Output number %u already opened for this profile %s",
+                      profile->maxOpenCount, profile->getTagName().c_str());
+                continue;
+            }
+
             ALOGV("opening output for device %08x with params %s profile %p name %s",
                   device, address.string(), profile.get(), profile->getName().string());
             desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
-            desc->mDevice = device;
-            audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-            config.sample_rate = desc->mSamplingRate;
-            config.channel_mask = desc->mChannelMask;
-            config.format = desc->mFormat;
-            config.offload_info.sample_rate = desc->mSamplingRate;
-            config.offload_info.channel_mask = desc->mChannelMask;
-            config.offload_info.format = desc->mFormat;
             audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-            status_t status = mpClientInterface->openOutput(profile->getModuleHandle(),
-                                                            &output,
-                                                            &config,
-                                                            &desc->mDevice,
-                                                            address,
-                                                            &desc->mLatency,
-                                                            desc->mFlags);
-            if (status == NO_ERROR) {
-                desc->mSamplingRate = config.sample_rate;
-                desc->mChannelMask = config.channel_mask;
-                desc->mFormat = config.format;
+            status_t status = desc->open(nullptr, device, address,
+                                         AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
 
+            if (status == NO_ERROR) {
                 // Here is where the out_set_parameters() for card & device gets called
                 if (!address.isEmpty()) {
                     char *param = audio_device_address_to_parameter(device, address);
@@ -4177,27 +3867,21 @@
                 updateAudioProfiles(device, output, profile->getAudioProfiles());
                 if (!profile->hasValidAudioProfile()) {
                     ALOGW("checkOutputsForDevice() missing param");
-                    mpClientInterface->closeOutput(output);
+                    desc->close();
                     output = AUDIO_IO_HANDLE_NONE;
                 } else if (profile->hasDynamicAudioProfile()) {
-                    mpClientInterface->closeOutput(output);
+                    desc->close();
                     output = AUDIO_IO_HANDLE_NONE;
-                    profile->pickAudioProfile(config.sample_rate, config.channel_mask, config.format);
+                    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+                    profile->pickAudioProfile(
+                            config.sample_rate, config.channel_mask, config.format);
                     config.offload_info.sample_rate = config.sample_rate;
                     config.offload_info.channel_mask = config.channel_mask;
                     config.offload_info.format = config.format;
-                    status = mpClientInterface->openOutput(profile->getModuleHandle(),
-                                                           &output,
-                                                           &config,
-                                                           &desc->mDevice,
-                                                           address,
-                                                           &desc->mLatency,
-                                                           desc->mFlags);
-                    if (status == NO_ERROR) {
-                        desc->mSamplingRate = config.sample_rate;
-                        desc->mChannelMask = config.channel_mask;
-                        desc->mFormat = config.format;
-                    } else {
+
+                    status_t status = desc->open(&config, device, address, AUDIO_STREAM_DEFAULT,
+                                                 AUDIO_OUTPUT_FLAG_NONE, &output);
+                    if (status != NO_ERROR) {
                         output = AUDIO_IO_HANDLE_NONE;
                     }
                 }
@@ -4243,7 +3927,7 @@
                         } else {
                             ALOGW("checkOutputsForDevice() could not open dup output for %d and %d",
                                     mPrimaryOutput->mIoHandle, output);
-                            mpClientInterface->closeOutput(output);
+                            desc->close();
                             removeOutput(output);
                             nextAudioPortGeneration();
                             output = AUDIO_IO_HANDLE_NONE;
@@ -4295,17 +3979,13 @@
             }
         }
         // Clear any profiles associated with the disconnected device.
-        for (size_t i = 0; i < mHwModules.size(); i++)
-        {
-            if (mHwModules[i]->mHandle == 0) {
-                continue;
-            }
-            for (size_t j = 0; j < mHwModules[i]->mOutputProfiles.size(); j++)
-            {
-                sp<IOProfile> profile = mHwModules[i]->mOutputProfiles[j];
+        for (const auto& hwModule : mHwModules) {
+            for (size_t j = 0; j < hwModule->getOutputProfiles().size(); j++) {
+                sp<IOProfile> profile = hwModule->getOutputProfiles()[j];
                 if (profile->supportDevice(device)) {
                     ALOGV("checkOutputsForDevice(): "
-                            "clearing direct output profile %zu on module %zu", j, i);
+                            "clearing direct output profile %zu on module %s",
+                            j, hwModule->getName());
                     profile->clearAudioProfiles();
                 }
             }
@@ -4339,23 +4019,18 @@
 
         // then look for input profiles that can be routed to this device
         SortedVector< sp<IOProfile> > profiles;
-        for (size_t module_idx = 0; module_idx < mHwModules.size(); module_idx++)
-        {
-            if (mHwModules[module_idx]->mHandle == 0) {
-                continue;
-            }
+        for (const auto& hwModule : mHwModules) {
             for (size_t profile_index = 0;
-                 profile_index < mHwModules[module_idx]->mInputProfiles.size();
-                 profile_index++)
-            {
-                sp<IOProfile> profile = mHwModules[module_idx]->mInputProfiles[profile_index];
+                 profile_index < hwModule->getInputProfiles().size();
+                 profile_index++) {
+                sp<IOProfile> profile = hwModule->getInputProfiles()[profile_index];
 
                 if (profile->supportDevice(device)) {
                     if (!device_distinguishes_on_address(device) ||
                             profile->supportDeviceAddress(address)) {
                         profiles.add(profile);
-                        ALOGV("checkInputsForDevice(): adding profile %zu from module %zu",
-                              profile_index, module_idx);
+                        ALOGV("checkInputsForDevice(): adding profile %zu from module %s",
+                                profile_index, hwModule->getName());
                     }
                 }
             }
@@ -4371,6 +4046,7 @@
         for (ssize_t profile_index = 0; profile_index < (ssize_t)profiles.size(); profile_index++) {
 
             sp<IOProfile> profile = profiles[profile_index];
+
             // nothing to do if one input is already opened for this profile
             size_t input_index;
             for (input_index = 0; input_index < mInputs.size(); input_index++) {
@@ -4386,31 +4062,22 @@
                 continue;
             }
 
-            ALOGV("opening input for device 0x%X with params %s", device, address.string());
-            desc = new AudioInputDescriptor(profile);
-            desc->mDevice = device;
-            audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-            config.sample_rate = desc->mSamplingRate;
-            config.channel_mask = desc->mChannelMask;
-            config.format = desc->mFormat;
+            if (!profile->canOpenNewIo()) {
+                ALOGW("Max Input number %u already opened for this profile %s",
+                      profile->maxOpenCount, profile->getTagName().c_str());
+                continue;
+            }
+
+            desc = new AudioInputDescriptor(profile, mpClientInterface);
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-
-            ALOGV("opening inputput for device %08x with params %s profile %p name %s",
-                  desc->mDevice, address.string(), profile.get(), profile->getName().string());
-
-            status_t status = mpClientInterface->openInput(profile->getModuleHandle(),
-                                                           &input,
-                                                           &config,
-                                                           &desc->mDevice,
-                                                           address,
-                                                           AUDIO_SOURCE_MIC,
-                                                           AUDIO_INPUT_FLAG_NONE /*FIXME*/);
+            status_t status = desc->open(nullptr,
+                                         device,
+                                         address,
+                                         AUDIO_SOURCE_MIC,
+                                         AUDIO_INPUT_FLAG_NONE,
+                                         &input);
 
             if (status == NO_ERROR) {
-                desc->mSamplingRate = config.sample_rate;
-                desc->mChannelMask = config.channel_mask;
-                desc->mFormat = config.format;
-
                 if (!address.isEmpty()) {
                     char *param = audio_device_address_to_parameter(device, address);
                     mpClientInterface->setParameters(input, String8(param));
@@ -4419,7 +4086,7 @@
                 updateAudioProfiles(device, input, profile->getAudioProfiles());
                 if (!profile->hasValidAudioProfile()) {
                     ALOGW("checkInputsForDevice() direct input missing param");
-                    mpClientInterface->closeInput(input);
+                    desc->close();
                     input = AUDIO_IO_HANDLE_NONE;
                 }
 
@@ -4457,17 +4124,14 @@
             }
         }
         // Clear any profiles associated with the disconnected device.
-        for (size_t module_index = 0; module_index < mHwModules.size(); module_index++) {
-            if (mHwModules[module_index]->mHandle == 0) {
-                continue;
-            }
+        for (const auto& hwModule : mHwModules) {
             for (size_t profile_index = 0;
-                 profile_index < mHwModules[module_index]->mInputProfiles.size();
+                 profile_index < hwModule->getInputProfiles().size();
                  profile_index++) {
-                sp<IOProfile> profile = mHwModules[module_index]->mInputProfiles[profile_index];
+                sp<IOProfile> profile = hwModule->getInputProfiles()[profile_index];
                 if (profile->supportDevice(device)) {
-                    ALOGV("checkInputsForDevice(): clearing direct input profile %zu on module %zu",
-                          profile_index, module_index);
+                    ALOGV("checkInputsForDevice(): clearing direct input profile %zu on module %s",
+                            profile_index, hwModule->getName());
                     profile->clearAudioProfiles();
                 }
             }
@@ -4495,7 +4159,7 @@
         if (dupOutputDesc->isDuplicated() &&
                 (dupOutputDesc->mOutput1 == outputDesc ||
                 dupOutputDesc->mOutput2 == outputDesc)) {
-            sp<AudioOutputDescriptor> outputDesc2;
+            sp<SwAudioOutputDescriptor> outputDesc2;
             if (dupOutputDesc->mOutput1 == outputDesc) {
                 outputDesc2 = dupOutputDesc->mOutput2;
             } else {
@@ -4505,10 +4169,16 @@
             // and as they were also referenced on the other output, the reference
             // count for their stream type must be adjusted accordingly on
             // the other output.
+            bool wasActive = outputDesc2->isActive();
             for (int j = 0; j < AUDIO_STREAM_CNT; j++) {
                 int refCount = dupOutputDesc->mRefCount[j];
                 outputDesc2->changeRefCount((audio_stream_type_t)j,-refCount);
             }
+            // stop() will be a no op if the output is still active but is needed in case all
+            // active streams refcounts where cleared above
+            if (wasActive) {
+                outputDesc2->stop();
+            }
             audio_io_handle_t duplicatedOutput = mOutputs.keyAt(i);
             ALOGV("closeOutput() closing also duplicated output %d", duplicatedOutput);
 
@@ -4527,11 +4197,8 @@
         mpClientInterface->onAudioPatchListUpdate();
     }
 
-    AudioParameter param;
-    param.add(String8("closing"), String8("true"));
-    mpClientInterface->setParameters(output, param.toString());
+    outputDesc->close();
 
-    mpClientInterface->closeOutput(output);
     removeOutput(output);
     mPreviousOutputs = mOutputs;
 }
@@ -4556,7 +4223,7 @@
         mpClientInterface->onAudioPatchListUpdate();
     }
 
-    mpClientInterface->closeInput(input);
+    inputDesc->close();
     mInputs.removeItem(input);
 }
 
@@ -4622,14 +4289,14 @@
         ALOGV("checkOutputForStrategy() strategy %d, moving from output %d to output %d",
               strategy, srcOutputs[0], dstOutputs[0]);
         // mute strategy while moving tracks from one output to another
-        for (size_t i = 0; i < srcOutputs.size(); i++) {
-            sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(srcOutputs[i]);
+        for (audio_io_handle_t srcOut : srcOutputs) {
+            sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(srcOut);
             if (isStrategyActive(desc, strategy)) {
                 setStrategyMute(strategy, true, desc);
                 setStrategyMute(strategy, false, desc, MUTE_TIME_MS, newDevice);
             }
             sp<AudioSourceDescriptor> source =
-                    getSourceForStrategyOnOutput(srcOutputs[i], strategy);
+                    getSourceForStrategyOnOutput(srcOut, strategy);
             if (source != 0){
                 connectAudioSource(source);
             }
@@ -4823,9 +4490,8 @@
         routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
         audio_devices_t curDevices =
                 getDeviceForStrategy((routing_strategy)curStrategy, false /*fromCache*/);
-        SortedVector<audio_io_handle_t> outputs = getOutputsForDevice(curDevices, mOutputs);
-        for (size_t i = 0; i < outputs.size(); i++) {
-            sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(outputs[i]);
+        for (audio_io_handle_t output : getOutputsForDevice(curDevices, mOutputs)) {
+            sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
             if (outputDesc->isStreamActive((audio_stream_type_t)curStream)) {
                 curDevices |= outputDesc->device();
             }
@@ -5055,21 +4721,24 @@
                                              bool force,
                                              int delayMs,
                                              audio_patch_handle_t *patchHandle,
-                                             const char* address)
+                                             const char *address,
+                                             bool requiresMuteCheck)
 {
     ALOGV("setOutputDevice() device %04x delayMs %d", device, delayMs);
     AudioParameter param;
     uint32_t muteWaitMs;
 
     if (outputDesc->isDuplicated()) {
-        muteWaitMs = setOutputDevice(outputDesc->subOutput1(), device, force, delayMs);
-        muteWaitMs += setOutputDevice(outputDesc->subOutput2(), device, force, delayMs);
+        muteWaitMs = setOutputDevice(outputDesc->subOutput1(), device, force, delayMs,
+                nullptr /* patchHandle */, nullptr /* address */, requiresMuteCheck);
+        muteWaitMs += setOutputDevice(outputDesc->subOutput2(), device, force, delayMs,
+                nullptr /* patchHandle */, nullptr /* address */, requiresMuteCheck);
         return muteWaitMs;
     }
     // no need to proceed if new device is not AUDIO_DEVICE_NONE and not supported by current
     // output profile
     if ((device != AUDIO_DEVICE_NONE) &&
-            ((device & outputDesc->supportedDevices()) == 0)) {
+            ((device & outputDesc->supportedDevices()) == AUDIO_DEVICE_NONE)) {
         return 0;
     }
 
@@ -5083,7 +4752,14 @@
     if (device != AUDIO_DEVICE_NONE) {
         outputDesc->mDevice = device;
     }
-    muteWaitMs = checkDeviceMuteStrategies(outputDesc, prevDevice, delayMs);
+
+    // if the outputs are not materially active, there is no need to mute.
+    if (requiresMuteCheck) {
+        muteWaitMs = checkDeviceMuteStrategies(outputDesc, prevDevice, delayMs);
+    } else {
+        ALOGV("%s: suppressing checkDeviceMuteStrategies", __func__);
+        muteWaitMs = 0;
+    }
 
     // Do not change the routing if:
     //      the requested device is AUDIO_DEVICE_NONE
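For reference, a minimal sketch (not part of the change) of how a caller inside AudioPolicyManager can use the new requiresMuteCheck argument once it already knows the output has no active clients; the output handle and device value here are hypothetical:

    // Hedged sketch, assuming the surrounding AudioPolicyManager member context.
    sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);  // some open output
    uint32_t muteWaitMs = setOutputDevice(desc,
                                          AUDIO_DEVICE_OUT_SPEAKER /* hypothetical */,
                                          true /*force*/,
                                          0 /*delayMs*/,
                                          nullptr /*patchHandle*/,
                                          nullptr /*address*/,
                                          false /*requiresMuteCheck*/);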
@@ -5299,14 +4975,8 @@
     // TODO: perhaps isCompatibleProfile should return a "matching" score so we can return
     // the best matching profile, not the first one.
 
-    for (size_t i = 0; i < mHwModules.size(); i++)
-    {
-        if (mHwModules[i]->mHandle == 0) {
-            continue;
-        }
-        for (size_t j = 0; j < mHwModules[i]->mInputProfiles.size(); j++)
-        {
-            sp<IOProfile> profile = mHwModules[i]->mInputProfiles[j];
+    for (const auto& hwModule : mHwModules) {
+        for (const auto& profile : hwModule->getInputProfiles()) {
             // profile->log();
             if (profile->isCompatibleProfile(device, address, samplingRate,
                                              &samplingRate /*updatedSamplingRate*/,
@@ -5786,7 +5456,7 @@
     bool supportsAC3 = false;
     bool supportsOtherSurround = false;
     bool supportsIEC61937 = false;
-    for (size_t formatIndex = 0; formatIndex < formats.size(); formatIndex++) {
+    for (ssize_t formatIndex = 0; formatIndex < (ssize_t)formats.size(); formatIndex++) {
         audio_format_t format = formats[formatIndex];
         switch (format) {
             case AUDIO_FORMAT_AC3:
@@ -5795,6 +5465,11 @@
             case AUDIO_FORMAT_E_AC3:
             case AUDIO_FORMAT_DTS:
             case AUDIO_FORMAT_DTS_HD:
+                // If ALWAYS, remove all other surround formats here since we will add them later.
+                if (forceUse == AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS) {
+                    formats.removeAt(formatIndex);
+                    formatIndex--;
+                }
                 supportsOtherSurround = true;
                 break;
             case AUDIO_FORMAT_IEC61937:
@@ -5840,8 +5515,7 @@
         // If ALWAYS, add support for raw surround formats if all are missing.
         // This assumes that if any of these formats are reported by the HAL
         // then the report is valid and should not be modified.
-        if ((forceUse == AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS)
-                && !supportsOtherSurround) {
+        if (forceUse == AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS) {
             formats.add(AUDIO_FORMAT_E_AC3);
             formats.add(AUDIO_FORMAT_DTS);
             formats.add(AUDIO_FORMAT_DTS_HD);
@@ -5878,8 +5552,7 @@
     } else if (forceUse == AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS) {
         bool supports5dot1 = false;
         // Are there any channel masks that can be considered "surround"?
-        for (size_t maskIndex = 0; maskIndex < channelMasks.size(); maskIndex++) {
-            audio_channel_mask_t channelMask = channelMasks[maskIndex];
+        for (audio_channel_mask_t channelMask : channelMasks) {
             if ((channelMask & AUDIO_CHANNEL_OUT_5POINT1) == AUDIO_CHANNEL_OUT_5POINT1) {
                 supports5dot1 = true;
                 break;
@@ -5916,10 +5589,8 @@
         }
         profiles.setFormats(formats);
     }
-    const FormatVector &supportedFormats = profiles.getSupportedFormats();
 
-    for (size_t formatIndex = 0; formatIndex < supportedFormats.size(); formatIndex++) {
-        audio_format_t format = supportedFormats[formatIndex];
+    for (audio_format_t format : profiles.getSupportedFormats()) {
         ChannelsVector channelMasks;
         SampleRateVector samplingRates;
         AudioParameter requestedParameters;
@@ -5955,4 +5626,4 @@
     }
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 82c4c35..611edec 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -16,6 +16,9 @@
 
 #pragma once
 
+#include <atomic>
+#include <memory>
+
 #include <stdint.h>
 #include <sys/types.h>
 #include <cutils/config_utils.h>
@@ -31,6 +34,7 @@
 #include <AudioPolicyManagerInterface.h>
 #include <AudioPolicyManagerObserver.h>
 #include <AudioGain.h>
+#include <AudioPolicyConfig.h>
 #include <AudioPort.h>
 #include <AudioPatch.h>
 #include <DeviceDescriptor.h>
@@ -76,10 +80,6 @@
 // ----------------------------------------------------------------------------
 
 class AudioPolicyManager : public AudioPolicyInterface, public AudioPolicyManagerObserver
-
-#ifdef AUDIO_POLICY_TEST
-    , public Thread
-#endif //AUDIO_POLICY_TEST
 {
 
 public:
@@ -103,12 +103,7 @@
 
         virtual void setSystemProperty(const char* property, const char* value);
         virtual status_t initCheck();
-        virtual audio_io_handle_t getOutput(audio_stream_type_t stream,
-                                            uint32_t samplingRate,
-                                            audio_format_t format,
-                                            audio_channel_mask_t channelMask,
-                                            audio_output_flags_t flags,
-                                            const audio_offload_info_t *offloadInfo);
+        virtual audio_io_handle_t getOutput(audio_stream_type_t stream);
         virtual status_t getOutputForAttr(const audio_attributes_t *attr,
                                           audio_io_handle_t *output,
                                           audio_session_t session,
@@ -241,6 +236,21 @@
         routing_strategy getStrategy(audio_stream_type_t stream) const;
 
 protected:
+        // A constructor that allows more fine-grained control over the initialization process,
+        // used in automated tests.
+        AudioPolicyManager(AudioPolicyClientInterface *clientInterface, bool forTesting);
+
+        // These methods should be used when finer control over APM initialization
+        // is needed, e.g. in tests. They must be used in conjunction with the constructor
+        // that only performs field initialization. The public constructor comprises
+        // these steps in the following sequence:
+        //   - field initializing constructor;
+        //   - loadConfig;
+        //   - initialize.
+        AudioPolicyConfig& getConfig() { return mConfig; }
+        void loadConfig();
+        status_t initialize();
+
         // From AudioPolicyManagerObserver
         virtual const AudioPatchCollection &getAudioPatches() const
         {
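The sequence described in the comment above can be driven from a test roughly as follows (a minimal sketch that mirrors the AudioPolicyTestManager and default-config helpers added later in this change):

    // Hedged sketch of the test-only initialization sequence.
    AudioPolicyTestClient client;              // stub client that opens nothing
    AudioPolicyTestManager manager(&client);   // field-initializing ctor (forTesting)
    manager.getConfig().setDefault();          // stands in for loadConfig()
    status_t status = manager.initialize();    // completes what the public ctor does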
@@ -275,7 +285,7 @@
         {
             return mDefaultOutputDevice;
         }
-protected:
+
         void addOutput(audio_io_handle_t output, const sp<SwAudioOutputDescriptor>& outputDesc);
         void removeOutput(audio_io_handle_t output);
         void addInput(audio_io_handle_t input, const sp<AudioInputDescriptor>& inputDesc);
@@ -304,7 +314,8 @@
                              bool force = false,
                              int delayMs = 0,
                              audio_patch_handle_t *patchHandle = NULL,
-                             const char* address = NULL);
+                             const char *address = nullptr,
+                             bool requiresMuteCheck = true);
         status_t resetOutputDevice(const sp<AudioOutputDescriptor>& outputDesc,
                                    int delayMs = 0,
                                    audio_patch_handle_t *patchHandle = NULL);
@@ -419,11 +430,6 @@
         {
             return mEffects.getMaxEffectsMemory();
         }
-#ifdef AUDIO_POLICY_TEST
-        virtual     bool        threadLoop();
-                    void        exit();
-        int testOutputIndex(audio_io_handle_t output);
-#endif //AUDIO_POLICY_TEST
 
         SortedVector<audio_io_handle_t> getOutputsForDevice(audio_devices_t device,
                                                             const SwAudioOutputCollection& openOutputs);
@@ -481,6 +487,9 @@
         }
 
         uint32_t updateCallRouting(audio_devices_t rxDevice, uint32_t delayMs = 0);
+        sp<AudioPatch> createTelephonyPatch(bool isRx, audio_devices_t device, uint32_t delayMs);
+        sp<DeviceDescriptor> fillAudioPortConfigForDevice(
+                const DeviceVector& devices, audio_devices_t device, audio_port_config *config);
 
         // if argument "device" is different from AUDIO_DEVICE_NONE,  startSource() will force
         // the re-evaluation of the output device.
@@ -534,18 +543,20 @@
         SessionRouteMap mOutputRoutes = SessionRouteMap(SessionRouteMap::MAPTYPE_OUTPUT);
         SessionRouteMap mInputRoutes = SessionRouteMap(SessionRouteMap::MAPTYPE_INPUT);
 
-        IVolumeCurvesCollection *mVolumeCurves; // Volume Curves per use case and device category
-
         bool    mLimitRingtoneVolume;        // limit ringtone volume to music volume if headset connected
         audio_devices_t mDeviceForStrategy[NUM_STRATEGIES];
         float   mLastVoiceVolume;            // last voice volume value sent to audio HAL
-
-        EffectDescriptorCollection mEffects;  // list of registered audio effects
         bool    mA2dpSuspended;  // true if A2DP output is suspended
-        sp<DeviceDescriptor> mDefaultOutputDevice; // output device selected by default at boot time
-        HwModuleCollection mHwModules;
 
-        volatile int32_t mAudioPortGeneration;
+        std::unique_ptr<IVolumeCurvesCollection> mVolumeCurves; // Volume Curves per use case and device category
+        EffectDescriptorCollection mEffects;  // list of registered audio effects
+        sp<DeviceDescriptor> mDefaultOutputDevice; // output device selected by default at boot time
+        HwModuleCollection mHwModules; // contains only modules that have been loaded successfully
+        HwModuleCollection mHwModulesAll; // normally not needed, used during construction and for
+                                          // dumps
+        AudioPolicyConfig mConfig;
+
+        std::atomic<uint32_t> mAudioPortGeneration;
 
         AudioPatchCollection mAudioPatches;
 
@@ -574,22 +585,6 @@
         AudioPolicyMixCollection mPolicyMixes; // list of registered mixes
         audio_io_handle_t mMusicEffectOutput;     // output selected for music effects
 
-
-#ifdef AUDIO_POLICY_TEST
-        Mutex   mLock;
-        Condition mWaitWorkCV;
-
-        int             mCurOutput;
-        bool            mDirectOutput;
-        audio_io_handle_t mTestOutputs[NUM_TEST_OUTPUTS];
-        int             mTestInput;
-        uint32_t        mTestDevice;
-        uint32_t        mTestSamplingRate;
-        uint32_t        mTestFormat;
-        uint32_t        mTestChannels;
-        uint32_t        mTestLatencyMs;
-#endif //AUDIO_POLICY_TEST
-
         uint32_t nextAudioPortGeneration();
 
         // Audio Policy Engine Interface.
@@ -630,20 +625,15 @@
                 audio_devices_t device,
                 audio_session_t session,
                 audio_stream_type_t stream,
-                uint32_t samplingRate,
-                audio_format_t format,
-                audio_channel_mask_t channelMask,
-                audio_output_flags_t flags,
-                const audio_offload_info_t *offloadInfo);
+                const audio_config_t *config,
+                audio_output_flags_t flags);
         // internal method to return the input handle for the given device and format
         audio_io_handle_t getInputForDevice(audio_devices_t device,
                 String8 address,
                 audio_session_t session,
                 uid_t uid,
                 audio_source_t inputSource,
-                uint32_t samplingRate,
-                audio_format_t format,
-                audio_channel_mask_t channelMask,
+                const audio_config_base_t *config,
                 audio_input_flags_t flags,
                 AudioMix *policyMix);
 
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 31c9575..b064f8c 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -233,4 +233,4 @@
     return AudioSystem::newAudioUniqueId(use);
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 84b1073..7fe363d 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -744,4 +744,4 @@
 }
 
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 59d5d14..623180e 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -192,6 +192,6 @@
     KeyedVector< audio_session_t, EffectVector* > mOutputSessions;
 };
 
-}; // namespace android
+} // namespace android
 
 #endif // ANDROID_AUDIOPOLICYEFFECTS_H
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index b7bce55..7dd6d70 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -141,12 +141,7 @@
     return mAudioPolicyManager->getForceUse(usage);
 }
 
-audio_io_handle_t AudioPolicyService::getOutput(audio_stream_type_t stream,
-                                    uint32_t samplingRate,
-                                    audio_format_t format,
-                                    audio_channel_mask_t channelMask,
-                                    audio_output_flags_t flags,
-                                    const audio_offload_info_t *offloadInfo)
+audio_io_handle_t AudioPolicyService::getOutput(audio_stream_type_t stream)
 {
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
         return AUDIO_IO_HANDLE_NONE;
@@ -156,8 +151,7 @@
     }
     ALOGV("getOutput()");
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->getOutput(stream, samplingRate,
-                                    format, channelMask, flags, offloadInfo);
+    return mAudioPolicyManager->getOutput(stream);
 }
 
 status_t AudioPolicyService::getOutputForAttr(const audio_attributes_t *attr,
@@ -173,7 +167,7 @@
     if (mAudioPolicyManager == NULL) {
         return NO_INIT;
     }
-    ALOGV("getOutput()");
+    ALOGV("getOutputForAttr()");
     Mutex::Autolock _l(mLock);
 
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -284,8 +278,8 @@
         return NO_INIT;
     }
     // already checked by client, but double-check in case the client wrapper is bypassed
-    if (attr->source >= AUDIO_SOURCE_CNT && attr->source != AUDIO_SOURCE_HOTWORD &&
-        attr->source != AUDIO_SOURCE_FM_TUNER) {
+    if (attr->source < AUDIO_SOURCE_DEFAULT || (attr->source >= AUDIO_SOURCE_CNT &&
+            attr->source != AUDIO_SOURCE_HOTWORD && attr->source != AUDIO_SOURCE_FM_TUNER)) {
         return BAD_VALUE;
     }
 
@@ -784,4 +778,4 @@
 }
 
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 7af2e74..af0c823 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -1146,4 +1146,4 @@
 int aps_set_voice_volume(void *service, float volume, int delay_ms);
 };
 
-}; // namespace android
+} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 38d4b17..833a230 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -68,13 +68,7 @@
     virtual status_t setPhoneState(audio_mode_t state);
     virtual status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
     virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
-    virtual audio_io_handle_t getOutput(audio_stream_type_t stream,
-                                        uint32_t samplingRate = 0,
-                                        audio_format_t format = AUDIO_FORMAT_DEFAULT,
-                                        audio_channel_mask_t channelMask = 0,
-                                        audio_output_flags_t flags =
-                                                AUDIO_OUTPUT_FLAG_NONE,
-                                        const audio_offload_info_t *offloadInfo = NULL);
+    virtual audio_io_handle_t getOutput(audio_stream_type_t stream);
     virtual status_t getOutputForAttr(const audio_attributes_t *attr,
                                       audio_io_handle_t *output,
                                       audio_session_t session,
@@ -583,6 +577,6 @@
     audio_mode_t mPhoneState;
 };
 
-}; // namespace android
+} // namespace android
 
 #endif // ANDROID_AUDIOPOLICYSERVICE_H
diff --git a/services/audiopolicy/tests/Android.mk b/services/audiopolicy/tests/Android.mk
new file mode 100644
index 0000000..a43daea
--- /dev/null
+++ b/services/audiopolicy/tests/Android.mk
@@ -0,0 +1,32 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_C_INCLUDES := \
+  frameworks/av/services/audiopolicy \
+  frameworks/av/services/audiopolicy/common/include \
+  frameworks/av/services/audiopolicy/engine/interface \
+  frameworks/av/services/audiopolicy/utilities
+
+LOCAL_SHARED_LIBRARIES := \
+  libaudiopolicymanagerdefault \
+  libbase \
+  liblog \
+  libmedia_helper \
+  libutils \
+
+LOCAL_STATIC_LIBRARIES := \
+  libaudiopolicycomponents \
+
+LOCAL_SRC_FILES := \
+  audiopolicymanager_tests.cpp \
+
+LOCAL_MODULE := audiopolicy_tests
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_CFLAGS := -Werror -Wall
+
+LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
+
+include $(BUILD_NATIVE_TEST)
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
new file mode 100644
index 0000000..eb8222c
--- /dev/null
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include "AudioPolicyInterface.h"
+
+namespace android {
+
+class AudioPolicyTestClient : public AudioPolicyClientInterface
+{
+public:
+    virtual ~AudioPolicyTestClient() = default;
+
+    // AudioPolicyClientInterface Implementation
+    audio_module_handle_t loadHwModule(const char* /*name*/) override {
+        return AUDIO_MODULE_HANDLE_NONE;
+    }
+    status_t openOutput(audio_module_handle_t /*module*/,
+                        audio_io_handle_t* /*output*/,
+                        audio_config_t* /*config*/,
+                        audio_devices_t* /*devices*/,
+                        const String8& /*address*/,
+                        uint32_t* /*latencyMs*/,
+                        audio_output_flags_t /*flags*/) override { return NO_INIT; }
+    audio_io_handle_t openDuplicateOutput(audio_io_handle_t /*output1*/,
+                                          audio_io_handle_t /*output2*/) override {
+        return AUDIO_IO_HANDLE_NONE;
+    }
+    status_t closeOutput(audio_io_handle_t /*output*/) override { return NO_INIT; }
+    status_t suspendOutput(audio_io_handle_t /*output*/) override { return NO_INIT; }
+    status_t restoreOutput(audio_io_handle_t /*output*/) override { return NO_INIT; }
+    status_t openInput(audio_module_handle_t /*module*/,
+                       audio_io_handle_t* /*input*/,
+                       audio_config_t* /*config*/,
+                       audio_devices_t* /*device*/,
+                       const String8& /*address*/,
+                       audio_source_t /*source*/,
+                       audio_input_flags_t /*flags*/) override { return NO_INIT; }
+    status_t closeInput(audio_io_handle_t /*input*/) override { return NO_INIT; }
+    status_t setStreamVolume(audio_stream_type_t /*stream*/,
+                             float /*volume*/,
+                             audio_io_handle_t /*output*/,
+                             int /*delayMs*/) override { return NO_INIT; }
+    status_t invalidateStream(audio_stream_type_t /*stream*/) override { return NO_INIT; }
+    void setParameters(audio_io_handle_t /*ioHandle*/,
+                       const String8& /*keyValuePairs*/,
+                       int /*delayMs*/) override { }
+    String8 getParameters(audio_io_handle_t /*ioHandle*/,
+                          const String8& /*keys*/) override { return String8(); }
+    status_t startTone(audio_policy_tone_t /*tone*/,
+                       audio_stream_type_t /*stream*/) override { return NO_INIT; }
+    status_t stopTone() override { return NO_INIT; }
+    status_t setVoiceVolume(float /*volume*/, int /*delayMs*/) override { return NO_INIT; }
+    status_t moveEffects(audio_session_t /*session*/,
+                         audio_io_handle_t /*srcOutput*/,
+                         audio_io_handle_t /*dstOutput*/) override { return NO_INIT; }
+    status_t createAudioPatch(const struct audio_patch* /*patch*/,
+                              audio_patch_handle_t* /*handle*/,
+                              int /*delayMs*/) override { return NO_INIT; }
+    status_t releaseAudioPatch(audio_patch_handle_t /*handle*/,
+                               int /*delayMs*/) override { return NO_INIT; }
+    status_t setAudioPortConfig(const struct audio_port_config* /*config*/,
+                                int /*delayMs*/) override { return NO_INIT; }
+    void onAudioPortListUpdate() override { }
+    void onAudioPatchListUpdate() override { }
+    audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t /*use*/) override { return 0; }
+    void onDynamicPolicyMixStateUpdate(String8 /*regId*/, int32_t /*state*/) override { }
+    void onRecordingConfigurationUpdate(int /*event*/,
+                                        const record_client_info_t* /*clientInfo*/,
+                                        const struct audio_config_base* /*clientConfig*/,
+                                        const struct audio_config_base* /*deviceConfig*/,
+                                        audio_patch_handle_t /*patchHandle*/) override { }
+};
+
+} // namespace android
diff --git a/include/media/AudioClient.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
similarity index 61%
copy from include/media/AudioClient.h
copy to services/audiopolicy/tests/AudioPolicyTestManager.h
index 9efd76d..fe543a6 100644
--- a/include/media/AudioClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -13,26 +13,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#pragma once
 
-
-#ifndef ANDROID_AUDIO_CLIENT_H
-#define ANDROID_AUDIO_CLIENT_H
-
-#include <system/audio.h>
-#include <utils/String16.h>
+#include "managerdefault/AudioPolicyManager.h"
 
 namespace android {
 
-class AudioClient {
- public:
-    AudioClient() :
-        clientUid(-1), clientPid(-1), packageName("") {}
-
-    uid_t clientUid;
-    pid_t clientPid;
-    String16 packageName;
+class AudioPolicyTestManager : public AudioPolicyManager {
+  public:
+    explicit AudioPolicyTestManager(AudioPolicyClientInterface *clientInterface)
+            : AudioPolicyManager(clientInterface, true /*forTesting*/) { }
+    using AudioPolicyManager::getConfig;
+    using AudioPolicyManager::initialize;
 };
 
-}; // namespace android
-
-#endif  // ANDROID_AUDIO_CLIENT_H
+}  // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
new file mode 100644
index 0000000..a9593b8
--- /dev/null
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -0,0 +1,196 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <memory>
+#include <set>
+
+#include <gtest/gtest.h>
+
+#include "AudioPolicyTestClient.h"
+#include "AudioPolicyTestManager.h"
+
+using namespace android;
+
+TEST(AudioPolicyManagerTestInit, Failure) {
+    AudioPolicyTestClient client;
+    AudioPolicyTestManager manager(&client);
+    manager.getConfig().setDefault();
+    // Since the default client fails to open anything,
+    // APM should indicate that the initialization didn't succeed.
+    ASSERT_EQ(NO_INIT, manager.initialize());
+    ASSERT_EQ(NO_INIT, manager.initCheck());
+}
+
+
+class AudioPolicyManagerTestClient : public AudioPolicyTestClient {
+  public:
+    // AudioPolicyClientInterface implementation
+    audio_module_handle_t loadHwModule(const char* /*name*/) override {
+        return mNextModuleHandle++;
+    }
+
+    status_t openOutput(audio_module_handle_t module,
+                        audio_io_handle_t* output,
+                        audio_config_t* /*config*/,
+                        audio_devices_t* /*devices*/,
+                        const String8& /*address*/,
+                        uint32_t* /*latencyMs*/,
+                        audio_output_flags_t /*flags*/) override {
+        if (module >= mNextModuleHandle) {
+            ALOGE("%s: Module handle %d has not been allocated yet (next is %d)",
+                    __func__, module, mNextModuleHandle);
+            return BAD_VALUE;
+        }
+        *output = mNextIoHandle++;
+        return NO_ERROR;
+    }
+
+    status_t openInput(audio_module_handle_t module,
+                       audio_io_handle_t* input,
+                       audio_config_t* /*config*/,
+                       audio_devices_t* /*device*/,
+                       const String8& /*address*/,
+                       audio_source_t /*source*/,
+                       audio_input_flags_t /*flags*/) override {
+        if (module >= mNextModuleHandle) {
+            ALOGE("%s: Module handle %d has not been allocated yet (next is %d)",
+                    __func__, module, mNextModuleHandle);
+            return BAD_VALUE;
+        }
+        *input = mNextIoHandle++;
+        return NO_ERROR;
+    }
+
+    status_t createAudioPatch(const struct audio_patch* /*patch*/,
+                              audio_patch_handle_t* handle,
+                              int /*delayMs*/) override {
+        *handle = mNextPatchHandle++;
+        mActivePatches.insert(*handle);
+        return NO_ERROR;
+    }
+
+    status_t releaseAudioPatch(audio_patch_handle_t handle,
+                               int /*delayMs*/) override {
+        if (mActivePatches.erase(handle) != 1) {
+            if (handle >= mNextPatchHandle) {
+                ALOGE("%s: Patch handle %d has not been allocated yet (next is %d)",
+                        __func__, handle, mNextPatchHandle);
+            } else {
+                ALOGE("%s: Attempt to release patch %d twice", __func__, handle);
+            }
+            return BAD_VALUE;
+        }
+        return NO_ERROR;
+    }
+
+    // Helper methods for tests
+    size_t getActivePatchesCount() const { return mActivePatches.size(); }
+
+  private:
+    audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
+    audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
+    audio_patch_handle_t mNextPatchHandle = AUDIO_PATCH_HANDLE_NONE + 1;
+    std::set<audio_patch_handle_t> mActivePatches;
+};
+
+class AudioPolicyManagerTest : public testing::Test {
+  protected:
+    virtual void SetUp();
+    virtual void TearDown();
+
+    std::unique_ptr<AudioPolicyManagerTestClient> mClient;
+    std::unique_ptr<AudioPolicyTestManager> mManager;
+};
+
+void AudioPolicyManagerTest::SetUp() {
+    mClient.reset(new AudioPolicyManagerTestClient);
+    mManager.reset(new AudioPolicyTestManager(mClient.get()));
+    mManager->getConfig().setDefault();
+    ASSERT_EQ(NO_ERROR, mManager->initialize());
+    ASSERT_EQ(NO_ERROR, mManager->initCheck());
+}
+
+void AudioPolicyManagerTest::TearDown() {
+    mManager.reset();
+    mClient.reset();
+}
+
+TEST_F(AudioPolicyManagerTest, InitSuccess) {
+    // SetUp must finish with no assertions.
+}
+
+TEST_F(AudioPolicyManagerTest, CreateAudioPatchFailure) {
+    audio_patch patch{};
+    audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+    const size_t patchCountBefore = mClient->getActivePatchesCount();
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(nullptr, &handle, 0));
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, nullptr, 0));
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, &handle, 0));
+    patch.num_sources = AUDIO_PATCH_PORTS_MAX + 1;
+    patch.num_sinks = 1;
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, &handle, 0));
+    patch.num_sources = 1;
+    patch.num_sinks = AUDIO_PATCH_PORTS_MAX + 1;
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, &handle, 0));
+    patch.num_sources = 2;
+    patch.num_sinks = 1;
+    ASSERT_EQ(INVALID_OPERATION, mManager->createAudioPatch(&patch, &handle, 0));
+    patch = {};
+    patch.num_sources = 1;
+    patch.sources[0].role = AUDIO_PORT_ROLE_SINK;
+    patch.num_sinks = 1;
+    patch.sinks[0].role = AUDIO_PORT_ROLE_SINK;
+    ASSERT_EQ(INVALID_OPERATION, mManager->createAudioPatch(&patch, &handle, 0));
+    patch = {};
+    patch.num_sources = 1;
+    patch.sources[0].role = AUDIO_PORT_ROLE_SOURCE;
+    patch.num_sinks = 1;
+    patch.sinks[0].role = AUDIO_PORT_ROLE_SOURCE;
+    ASSERT_EQ(INVALID_OPERATION, mManager->createAudioPatch(&patch, &handle, 0));
+    // Verify that the handle is left unchanged.
+    ASSERT_EQ(AUDIO_PATCH_HANDLE_NONE, handle);
+    ASSERT_EQ(patchCountBefore, mClient->getActivePatchesCount());
+}
+
+TEST_F(AudioPolicyManagerTest, CreateAudioPatchFromMix) {
+    audio_patch patch{};
+    audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+    uid_t uid = 42;
+    const size_t patchCountBefore = mClient->getActivePatchesCount();
+    patch.num_sources = 1;
+    {
+        auto& src = patch.sources[0];
+        src.role = AUDIO_PORT_ROLE_SOURCE;
+        src.type = AUDIO_PORT_TYPE_MIX;
+        src.id = mManager->getConfig().getAvailableInputDevices()[0]->getId();
+        // Note: these are the parameters of the output device.
+        src.sample_rate = 44100;
+        src.format = AUDIO_FORMAT_PCM_16_BIT;
+        src.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    }
+    patch.num_sinks = 1;
+    {
+        auto& sink = patch.sinks[0];
+        sink.role = AUDIO_PORT_ROLE_SINK;
+        sink.type = AUDIO_PORT_TYPE_DEVICE;
+        sink.id = mManager->getConfig().getDefaultOutputDevice()->getId();
+    }
+    ASSERT_EQ(NO_ERROR, mManager->createAudioPatch(&patch, &handle, uid));
+    ASSERT_NE(AUDIO_PATCH_HANDLE_NONE, handle);
+    ASSERT_EQ(patchCountBefore + 1, mClient->getActivePatchesCount());
+}
+
+// TODO: Add patch creation tests that involve already existing patch
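A related negative test that could accompany the TODO above, sketched under the assumption that AudioPolicyManager::releaseAudioPatch(handle, uid) keeps its current signature and rejects unknown handles with BAD_VALUE:

    // Hedged sketch only: releasing a handle that was never created should fail
    // and leave the client-side patch bookkeeping untouched.
    TEST_F(AudioPolicyManagerTest, ReleaseUnknownAudioPatchFailure) {
        const size_t patchCountBefore = mClient->getActivePatchesCount();
        const audio_patch_handle_t bogusHandle = static_cast<audio_patch_handle_t>(0x1001);
        ASSERT_EQ(BAD_VALUE, mManager->releaseAudioPatch(bogusHandle, 0 /*uid*/));
        ASSERT_EQ(patchCountBefore, mClient->getActivePatchesCount());
    }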
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 1fbba58..aeaca48 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -78,7 +78,8 @@
     android.hardware.camera.provider@2.4 \
     android.hardware.camera.device@1.0 \
     android.hardware.camera.device@3.2 \
-    android.hardware.camera.device@3.3
+    android.hardware.camera.device@3.3 \
+    android.hardware.camera.device@3.4
 
 LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := libbinder libcamera_client libfmq
 
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
old mode 100644
new mode 100755
index d8b7af2..d1bbdaf
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -395,7 +395,7 @@
     }
 
     // Read JFIF segment markers, skip over segment data
-    size = 0;
+    size = MARKER_LENGTH; // skip over the leading SOI marker
     while (size <= maxSize - MARKER_LENGTH) {
         segment_t *segment = (segment_t*)(jpegBuffer + size);
         uint8_t type = checkJpegMarker(segment->marker);
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 09c2d82..5cbc158 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -121,6 +121,34 @@
     return submitRequestList(requestList, streaming, submitInfo);
 }
 
+binder::Status CameraDeviceClient::insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
+        SurfaceMap* outSurfaceMap,
+        Vector<int32_t>* outputStreamIds) {
+    int idx = mStreamMap.indexOfKey(IInterface::asBinder(gbp));
+
+    // Trying to submit request with surface that wasn't created
+    if (idx == NAME_NOT_FOUND) {
+        ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
+                " we have not called createStream on",
+                __FUNCTION__, mCameraIdStr.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "Request targets Surface that is not part of current capture session");
+    }
+
+    const StreamSurfaceId& streamSurfaceId = mStreamMap.valueAt(idx);
+    if (outSurfaceMap->find(streamSurfaceId.streamId()) == outSurfaceMap->end()) {
+        (*outSurfaceMap)[streamSurfaceId.streamId()] = std::vector<size_t>();
+        outputStreamIds->push_back(streamSurfaceId.streamId());
+    }
+    (*outSurfaceMap)[streamSurfaceId.streamId()].push_back(streamSurfaceId.surfaceId());
+
+    ALOGV("%s: Camera %s: Appending output stream %d surface %d to request",
+            __FUNCTION__, mCameraIdStr.string(), streamSurfaceId.streamId(),
+            streamSurfaceId.surfaceId());
+
+    return binder::Status::ok();
+}
+
 binder::Status CameraDeviceClient::submitRequestList(
         const std::vector<hardware::camera2::CaptureRequest>& requests,
         bool streaming,
@@ -174,7 +202,7 @@
                    __FUNCTION__, mCameraIdStr.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                     "Request settings are empty");
-        } else if (request.mSurfaceList.isEmpty()) {
+        } else if (request.mSurfaceList.isEmpty() && request.mStreamIdxList.size() == 0) {
             ALOGE("%s: Camera %s: Requests must have at least one surface target. "
                     "Rejecting request.", __FUNCTION__, mCameraIdStr.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
@@ -193,31 +221,44 @@
          */
         SurfaceMap surfaceMap;
         Vector<int32_t> outputStreamIds;
-        for (sp<Surface> surface : request.mSurfaceList) {
-            if (surface == 0) continue;
+        if (request.mSurfaceList.size() > 0) {
+            for (sp<Surface> surface : request.mSurfaceList) {
+                if (surface == 0) continue;
 
-            sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();
-            int idx = mStreamMap.indexOfKey(IInterface::asBinder(gbp));
-
-            // Trying to submit request with surface that wasn't created
-            if (idx == NAME_NOT_FOUND) {
-                ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
-                        " we have not called createStream on",
-                        __FUNCTION__, mCameraIdStr.string());
-                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                        "Request targets Surface that is not part of current capture session");
+                sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();
+                res = insertGbpLocked(gbp, &surfaceMap, &outputStreamIds);
+                if (!res.isOk()) {
+                    return res;
+                }
             }
+        } else {
+            for (size_t i = 0; i < request.mStreamIdxList.size(); i++) {
+                int streamId = request.mStreamIdxList.itemAt(i);
+                int surfaceIdx = request.mSurfaceIdxList.itemAt(i);
 
-            const StreamSurfaceId& streamSurfaceId = mStreamMap.valueAt(idx);
-            if (surfaceMap.find(streamSurfaceId.streamId()) == surfaceMap.end()) {
-                surfaceMap[streamSurfaceId.streamId()] = std::vector<size_t>();
-                outputStreamIds.push_back(streamSurfaceId.streamId());
+                ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
+                if (index < 0) {
+                    ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
+                            " we have not called createStream on: stream %d",
+                            __FUNCTION__, mCameraIdStr.string(), streamId);
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                            "Request targets Surface that is not part of current capture session");
+                }
+
+                const auto& gbps = mConfiguredOutputs.valueAt(index).getGraphicBufferProducers();
+                if ((size_t)surfaceIdx >= gbps.size()) {
+                    ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
+                            " we have not called createStream on: stream %d, surfaceIdx %d",
+                            __FUNCTION__, mCameraIdStr.string(), streamId, surfaceIdx);
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                            "Request targets Surface has invalid surface index");
+                }
+
+                res = insertGbpLocked(gbps[surfaceIdx], &surfaceMap, &outputStreamIds);
+                if (!res.isOk()) {
+                    return res;
+                }
             }
-            surfaceMap[streamSurfaceId.streamId()].push_back(streamSurfaceId.surfaceId());
-
-            ALOGV("%s: Camera %s: Appending output stream %d surface %d to request",
-                    __FUNCTION__, mCameraIdStr.string(), streamSurfaceId.streamId(),
-                    streamSurfaceId.surfaceId());
         }
 
         metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
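Both branches above funnel into insertGbpLocked(), which builds the same per-stream SurfaceMap whether targets arrive as Surface objects or as (stream index, surface index) pairs. A hedged sketch of the new index-based addressing from the request side, with illustrative indices only:

    // Hedged sketch: a capture request that targets outputs by stream/surface index
    // instead of carrying Surface objects, which exercises the new branch above.
    hardware::camera2::CaptureRequest request;
    request.mSurfaceList.clear();           // empty surface list selects the index path
    request.mStreamIdxList.push_back(5);    // stream 5, surface 0
    request.mSurfaceIdxList.push_back(0);
    request.mStreamIdxList.push_back(5);    // stream 5, surface 2 (a shared output)
    request.mSurfaceIdxList.push_back(2);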
@@ -318,7 +359,8 @@
     return binder::Status::ok();
 }
 
-binder::Status CameraDeviceClient::endConfigure(int operatingMode) {
+binder::Status CameraDeviceClient::endConfigure(int operatingMode,
+        const hardware::camera2::impl::CameraMetadataNative& sessionParams) {
     ATRACE_CALL();
     ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
             __FUNCTION__, mInputStream.configured ? 1 : 0,
@@ -364,7 +406,7 @@
         }
     }
 
-    status_t err = mDevice->configureStreams(operatingMode);
+    status_t err = mDevice->configureStreams(sessionParams, operatingMode);
     if (err == BAD_VALUE) {
         String8 msg = String8::format("Camera %s: Unsupported set of inputs/outputs provided",
                 mCameraIdStr.string());
@@ -439,6 +481,8 @@
                 mStreamMap.removeItem(surface);
             }
 
+            mConfiguredOutputs.removeItem(streamId);
+
             if (dIndex != NAME_NOT_FOUND) {
                 mDeferredStreams.removeItemsAt(dIndex);
             }
@@ -530,10 +574,11 @@
     }
 
     int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
+    std::vector<int> surfaceIds;
     err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
             streamInfo.height, streamInfo.format, streamInfo.dataSpace,
             static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
-            &streamId, outputConfiguration.getSurfaceSetID(), isShared);
+            &streamId, &surfaceIds, outputConfiguration.getSurfaceSetID(), isShared);
 
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -545,9 +590,11 @@
         for (auto& binder : binders) {
             ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %d",
                     __FUNCTION__, binder.get(), streamId, i);
-            mStreamMap.add(binder, StreamSurfaceId(streamId, i++));
+            mStreamMap.add(binder, StreamSurfaceId(streamId, surfaceIds[i]));
+            i++;
         }
 
+        mConfiguredOutputs.add(streamId, outputConfiguration);
         mStreamInfoMap[streamId] = streamInfo;
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for output surface"
@@ -592,10 +639,12 @@
     }
     int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
     std::vector<sp<Surface>> noSurface;
+    std::vector<int> surfaceIds;
     err = mDevice->createStream(noSurface, /*hasDeferredConsumer*/true, width,
             height, format, dataSpace,
             static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
-            &streamId, outputConfiguration.getSurfaceSetID(), isShared, consumerUsage);
+            &streamId, &surfaceIds, outputConfiguration.getSurfaceSetID(), isShared,
+            consumerUsage);
 
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -721,6 +770,132 @@
     return res;
 }
 
+binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,
+        const hardware::camera2::params::OutputConfiguration &outputConfiguration) {
+    ATRACE_CALL();
+
+    binder::Status res;
+    if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
+
+    Mutex::Autolock icl(mBinderSerializationLock);
+
+    if (!mDevice.get()) {
+        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
+    }
+
+    const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
+            outputConfiguration.getGraphicBufferProducers();
+    auto producerCount = bufferProducers.size();
+    if (producerCount == 0) {
+        ALOGE("%s: bufferProducers must not be empty", __FUNCTION__);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "bufferProducers must not be empty");
+    }
+
+    // The first output is the one associated with the output configuration.
+    // It should always be present and valid, and the corresponding stream id should match.
+    sp<IBinder> binder = IInterface::asBinder(bufferProducers[0]);
+    ssize_t index = mStreamMap.indexOfKey(binder);
+    if (index == NAME_NOT_FOUND) {
+        ALOGE("%s: Outputconfiguration is invalid", __FUNCTION__);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "OutputConfiguration is invalid");
+    }
+    if (mStreamMap.valueFor(binder).streamId() != streamId) {
+        ALOGE("%s: Stream Id: %d provided doesn't match the id: %d in the stream map",
+                __FUNCTION__, streamId, mStreamMap.valueFor(binder).streamId());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "Stream id is invalid");
+    }
+
+    std::vector<size_t> removedSurfaceIds;
+    std::vector<sp<IBinder>> removedOutputs;
+    std::vector<sp<Surface>> newOutputs;
+    std::vector<OutputStreamInfo> streamInfos;
+    KeyedVector<sp<IBinder>, sp<IGraphicBufferProducer>> newOutputsMap;
+    for (auto &it : bufferProducers) {
+        newOutputsMap.add(IInterface::asBinder(it), it);
+    }
+
+    for (size_t i = 0; i < mStreamMap.size(); i++) {
+        ssize_t idx = newOutputsMap.indexOfKey(mStreamMap.keyAt(i));
+        if (idx == NAME_NOT_FOUND) {
+            if (mStreamMap[i].streamId() == streamId) {
+                removedSurfaceIds.push_back(mStreamMap[i].surfaceId());
+                removedOutputs.push_back(mStreamMap.keyAt(i));
+            }
+        } else {
+            if (mStreamMap[i].streamId() != streamId) {
+                ALOGE("%s: Output surface already part of a different stream", __FUNCTION__);
+                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                        "Target Surface is invalid");
+            }
+            newOutputsMap.removeItemsAt(idx);
+        }
+    }
+
+    for (size_t i = 0; i < newOutputsMap.size(); i++) {
+        OutputStreamInfo outInfo;
+        sp<Surface> surface;
+        res = createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false, surface,
+                newOutputsMap.valueAt(i));
+        if (!res.isOk())
+            return res;
+
+        // Stream sharing is only supported for IMPLEMENTATION_DEFINED
+        // formats.
+        if (outInfo.format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            String8 msg = String8::format("Camera %s: Stream sharing is only supported for "
+                    "IMPLEMENTATION_DEFINED format", mCameraIdStr.string());
+            ALOGW("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+        streamInfos.push_back(outInfo);
+        newOutputs.push_back(surface);
+    }
+
+    // Trivial case: no changes required
+    if (removedSurfaceIds.empty() && newOutputs.empty()) {
+        return binder::Status::ok();
+    }
+
+    KeyedVector<sp<Surface>, size_t> outputMap;
+    auto ret = mDevice->updateStream(streamId, newOutputs, streamInfos, removedSurfaceIds,
+            &outputMap);
+    if (ret != OK) {
+        switch (ret) {
+            case NAME_NOT_FOUND:
+            case BAD_VALUE:
+            case -EBUSY:
+                res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                        "Camera %s: Error updating stream: %s (%d)",
+                        mCameraIdStr.string(), strerror(ret), ret);
+                break;
+            default:
+                res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
+                        "Camera %s: Error updating stream: %s (%d)",
+                        mCameraIdStr.string(), strerror(ret), ret);
+                break;
+        }
+    } else {
+        for (const auto &it : removedOutputs) {
+            mStreamMap.removeItem(it);
+        }
+
+        for (size_t i = 0; i < outputMap.size(); i++) {
+            mStreamMap.add(IInterface::asBinder(outputMap.keyAt(i)->getIGraphicBufferProducer()),
+                    StreamSurfaceId(streamId, outputMap.valueAt(i)));
+        }
+
+        mConfiguredOutputs.replaceValueFor(streamId, outputConfiguration);
+
+        ALOGV("%s: Camera %s: Successful stream ID %d update",
+                  __FUNCTION__, mCameraIdStr.string(), streamId);
+    }
+
+    return res;
+}
+
 bool CameraDeviceClient::isPublicFormat(int32_t format)
 {
     switch(format) {
@@ -1242,15 +1417,12 @@
     }
 
     std::vector<sp<Surface>> consumerSurfaces;
-    std::vector<size_t> consumerSurfaceIds;
-    size_t surfaceId = 0;
     for (auto& bufferProducer : bufferProducers) {
         // Don't create multiple streams for the same target surface
         ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
         if (index != NAME_NOT_FOUND) {
             ALOGV("Camera %s: Surface already has a stream created "
                     " for it (ID %zd)", mCameraIdStr.string(), index);
-            surfaceId++;
             continue;
         }
 
@@ -1262,8 +1434,6 @@
             return res;
 
         consumerSurfaces.push_back(surface);
-        consumerSurfaceIds.push_back(surfaceId);
-        surfaceId++;
     }
 
     // Gracefully handle case where finalizeOutputConfigurations is called
@@ -1275,12 +1445,13 @@
 
     // Finish the deferred stream configuration with the surface.
     status_t err;
-    err = mDevice->setConsumerSurfaces(streamId, consumerSurfaces);
+    std::vector<int> consumerSurfaceIds;
+    err = mDevice->setConsumerSurfaces(streamId, consumerSurfaces, &consumerSurfaceIds);
     if (err == OK) {
         for (size_t i = 0; i < consumerSurfaces.size(); i++) {
             sp<IBinder> binder = IInterface::asBinder(
                     consumerSurfaces[i]->getIGraphicBufferProducer());
-            ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %zu", __FUNCTION__,
+            ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %d", __FUNCTION__,
                     binder.get(), streamId, consumerSurfaceIds[i]);
             mStreamMap.add(binder, StreamSurfaceId(streamId, consumerSurfaceIds[i]));
         }
@@ -1288,6 +1459,7 @@
             mDeferredStreams.removeItemsAt(deferredStreamIndex);
         }
         mStreamInfoMap[streamId].finalized = true;
+        mConfiguredOutputs.replaceValueFor(streamId, outputConfiguration);
     } else if (err == NO_INIT) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Camera %s: Deferred surface is invalid: %s (%d)",
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 50661cb..4086c72 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -26,6 +26,8 @@
 #include "common/FrameProcessorBase.h"
 #include "common/Camera2ClientBase.h"
 
+using android::camera3::OutputStreamInfo;
+
 namespace android {
 
 struct CameraDeviceClientBase :
@@ -83,7 +85,8 @@
 
     virtual binder::Status beginConfigure() override;
 
-    virtual binder::Status endConfigure(int operatingMode) override;
+    virtual binder::Status endConfigure(int operatingMode,
+            const hardware::camera2::impl::CameraMetadataNative& sessionParams) override;
 
     // Returns -EBUSY if device is not idle or in error state
     virtual binder::Status deleteStream(int streamId) override;
@@ -131,6 +134,10 @@
     // Prepare stream by preallocating up to maxCount of its buffers
     virtual binder::Status prepare2(int32_t maxCount, int32_t streamId) override;
 
+    // Update an output configuration
+    virtual binder::Status updateOutputConfiguration(int streamId,
+            const hardware::camera2::params::OutputConfiguration &outputConfiguration) override;
+
     // Finalize the output configurations with surfaces not added before.
     virtual binder::Status finalizeOutputConfigurations(int32_t streamId,
             const hardware::camera2::params::OutputConfiguration &outputConfiguration) override;
@@ -206,24 +213,6 @@
 
     }; // class StreamSurfaceId
 
-    // OutputStreamInfo describes the property of a camera stream.
-    class OutputStreamInfo {
-    public:
-        int width;
-        int height;
-        int format;
-        android_dataspace dataSpace;
-        uint64_t consumerUsage;
-        bool finalized = false;
-        OutputStreamInfo() :
-                width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
-                consumerUsage(0) {}
-        OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
-                uint64_t _consumerUsage) :
-                    width(_width), height(_height), format(_format),
-                    dataSpace(_dataSpace), consumerUsage(_consumerUsage) {}
-    };
-
 private:
     /** ICameraDeviceUser interface-related private members */
 
@@ -267,9 +256,18 @@
     binder::Status createSurfaceFromGbp(OutputStreamInfo& streamInfo, bool isStreamInfoValid,
             sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp);
 
+
+    // Utility method to insert the surface into SurfaceMap
+    binder::Status insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
+            /*out*/SurfaceMap* surfaceMap,
+            /*out*/Vector<int32_t>* streamIds);
+
     // IGraphicsBufferProducer binder -> Stream ID + Surface ID for output streams
     KeyedVector<sp<IBinder>, StreamSurfaceId> mStreamMap;
 
+    // Stream ID -> OutputConfiguration. Used for looking up Surface by stream/surface index
+    KeyedVector<int32_t, hardware::camera2::params::OutputConfiguration> mConfiguredOutputs;
+
     struct InputStreamConfiguration {
         bool configured;
         int32_t width;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 51ef160..4ce82dc 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -156,13 +156,14 @@
     result = "  Device dump:\n";
     write(fd, result.string(), result.size());
 
-    if (!mDevice.get()) {
+    sp<CameraDeviceBase> device = mDevice;
+    if (!device.get()) {
         result = "  *** Device is detached\n";
         write(fd, result.string(), result.size());
         return NO_ERROR;
     }
 
-    status_t res = mDevice->dump(fd, args);
+    status_t res = device->dump(fd, args);
     if (res != OK) {
         result = String8::format("   Error dumping device: %s (%d)",
                 strerror(-res), res);
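The dump path now copies the device pointer into a local strong reference before using it, so a concurrent detach cannot pull the object away mid-dump. A small sketch of the same pattern with std::shared_ptr standing in for android::sp (types are stand-ins):

#include <cstdio>
#include <memory>

struct Device {
    int dump() const { std::puts("  Device dump: ..."); return 0; }
};

int dumpClient(const std::shared_ptr<Device>& member) {
    std::shared_ptr<Device> device = member;   // grab a strong reference first
    if (!device) {
        std::puts("  *** Device is detached");
        return 0;
    }
    return device->dump();                     // the local copy keeps the device alive
}

int main() {
    auto dev = std::make_shared<Device>();
    return dumpClient(dev);
}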
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index a90050e..3fd6921 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -23,6 +23,7 @@
 #include <utils/String8.h>
 #include <utils/String16.h>
 #include <utils/Vector.h>
+#include <utils/KeyedVector.h>
 #include <utils/Timers.h>
 #include <utils/List.h>
 
@@ -118,6 +119,7 @@
     virtual status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+            std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, uint64_t consumerUsage = 0) = 0;
 
@@ -131,6 +133,7 @@
     virtual status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+            std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, uint64_t consumerUsage = 0) = 0;
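The added surfaceIds argument is an optional out-parameter that defaults to nullptr, so existing call sites keep compiling while new ones can ask for the per-surface ids. A compact sketch of that calling convention (createFakeStream and its values are hypothetical):

#include <cstdio>
#include <vector>

int createFakeStream(int* id, std::vector<int>* surfaceIds = nullptr) {
    *id = 3;                                   // pretend stream id
    if (surfaceIds != nullptr) {               // only filled in when the caller asks
        surfaceIds->push_back(0);
        surfaceIds->push_back(1);
    }
    return 0;
}

int main() {
    int id = -1;
    createFakeStream(&id);                     // legacy call site, no surface ids
    std::vector<int> ids;
    createFakeStream(&id, &ids);               // new call site receives per-surface ids
    std::printf("stream %d with %zu surfaces\n", id, ids.size());
    return 0;
}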
 
@@ -209,7 +212,8 @@
      * - BAD_VALUE if the set of streams was invalid (e.g. fmts or sizes)
      * - INVALID_OPERATION if the device was in the wrong state
      */
-    virtual status_t configureStreams(int operatingMode = 0) = 0;
+    virtual status_t configureStreams(const CameraMetadata& sessionParams,
+            int operatingMode = 0) = 0;
 
     // get the buffer producer of the input stream
     virtual status_t getInputBufferProducer(
@@ -347,7 +351,15 @@
      * Set the deferred consumer surface and finish the rest of the stream configuration.
      */
     virtual status_t setConsumerSurfaces(int streamId,
-            const std::vector<sp<Surface>>& consumers) = 0;
+            const std::vector<sp<Surface>>& consumers, std::vector<int> *surfaceIds /*out*/) = 0;
+
+    /**
+     * Update a given stream.
+     */
+    virtual status_t updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
+            const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
+            const std::vector<size_t> &removedSurfaceIds,
+            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/) = 0;
 
     /**
      * Drop buffers for stream of streamId if dropping is true. If dropping is false, do not
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index ae3bbc1..2ff200d 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -643,6 +643,14 @@
             dprintf(fd, "  API2 camera characteristics:\n");
             info2.dump(fd, /*verbosity*/ 2, /*indentation*/ 4);
         }
+
+        dprintf(fd, "== Camera HAL device %s (v%d.%d) dumpState: ==\n", device->mName.c_str(),
+                device->mVersion.get_major(), device->mVersion.get_minor());
+        res = device->dumpState(fd);
+        if (res != OK) {
+            dprintf(fd, "   <Error dumping device %s state: %s (%d)>\n",
+                    device->mName.c_str(), strerror(-res), res);
+        }
     }
     return OK;
 }
@@ -908,6 +916,17 @@
     return OK;
 }
 
+status_t CameraProviderManager::ProviderInfo::DeviceInfo1::dumpState(int fd) const {
+    native_handle_t* handle = native_handle_create(1,0);
+    handle->data[0] = fd;
+    hardware::Return<Status> s = mInterface->dumpState(handle);
+    native_handle_delete(handle);
+    if (!s.isOk()) {
+        return INVALID_OPERATION;
+    }
+    return mapToStatusT(s);
+}
+
 CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
         const metadata_vendor_id_t tagId, const std::string &id,
         uint16_t minorVersion,
@@ -1011,6 +1030,17 @@
     return isBackwardCompatible;
 }
 
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::dumpState(int fd) const {
+    native_handle_t* handle = native_handle_create(1,0);
+    handle->data[0] = fd;
+    auto ret = mInterface->dumpState(handle);
+    native_handle_delete(handle);
+    if (!ret.isOk()) {
+        return INVALID_OPERATION;
+    }
+    return OK;
+}
+
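Both dumpState() overrides wrap the target file descriptor in a one-fd native_handle before handing it to the HAL. A sketch of that wrapping, assuming the usual libcutils native_handle API; the dump callback below stands in for the HIDL dumpState() call:

#include <cstdio>
#include <functional>
#include <cutils/native_handle.h>

int dumpViaHandle(int fd, const std::function<int(const native_handle_t*)>& dumpFn) {
    native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
    if (handle == nullptr) {
        return -1;                        // allocation failed
    }
    handle->data[0] = fd;                 // the only payload is the target fd
    int res = dumpFn(handle);             // HAL writes its dump through this fd
    native_handle_delete(handle);         // frees the struct; it does not close the fd
    return res;
}

int main() {
    auto fakeDump = [](const native_handle_t* h) {
        std::printf("pretend HAL dump written through fd %d\n", h->data[0]);
        return 0;
    };
    return dumpViaHandle(/*fd*/ 1, fakeDump);
}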
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
         CameraMetadata *characteristics) const {
     if (characteristics == nullptr) return BAD_VALUE;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index e82282f..0f1f07b 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -293,6 +293,7 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
+            virtual status_t dumpState(int fd) const = 0;
             virtual status_t getCameraCharacteristics(CameraMetadata *characteristics) const {
                 (void) characteristics;
                 return INVALID_OPERATION;
@@ -326,6 +327,7 @@
             virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
             //In case of Device1Info assume that we are always API1 compatible
             virtual bool isAPI1Compatible() const override { return true; }
+            virtual status_t dumpState(int fd) const override;
             DeviceInfo1(const std::string& name, const metadata_vendor_id_t tagId,
                     const std::string &id, uint16_t minorVersion,
                     const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
@@ -343,6 +345,7 @@
             virtual status_t setTorchMode(bool enabled) override;
             virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
             virtual bool isAPI1Compatible() const override;
+            virtual status_t dumpState(int fd) const override;
             virtual status_t getCameraCharacteristics(
                     CameraMetadata *characteristics) const override;
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index a980cde..d99fc1d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -55,6 +55,8 @@
 #include "device3/Camera3SharedOutputStream.h"
 #include "CameraService.h"
 
+#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+
 using namespace android::camera3;
 using namespace android::hardware::camera;
 using namespace android::hardware::camera::device::V3_2;
@@ -1102,7 +1104,7 @@
     if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) {
         // This point should only be reached via API1 (API2 must explicitly call configureStreams)
         // so unilaterally select normal operating mode.
-        res = configureStreamsLocked(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE);
+        res = configureStreamsLocked(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE, mSessionParams);
         // Stream configuration failed. Client might try other configurations.
         if (res != OK) {
             CLOGE("Can't set up streams: %s (%d)", strerror(-res), res);
@@ -1205,8 +1207,8 @@
     // Continue captures if active at start
     if (wasActive) {
         ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
-        // Reuse current operating mode for new stream config
-        res = configureStreamsLocked(mOperatingMode);
+        // Reuse current operating mode and session parameters for new stream config
+        res = configureStreamsLocked(mOperatingMode, mSessionParams);
         if (res != OK) {
             ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)",
                     __FUNCTION__, mNextStreamId, strerror(-res), res);
@@ -1222,7 +1224,7 @@
 status_t Camera3Device::createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
-            int streamSetId, bool isShared, uint64_t consumerUsage) {
+            std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
     ATRACE_CALL();
 
     if (consumer == nullptr) {
@@ -1234,14 +1236,15 @@
     consumers.push_back(consumer);
 
     return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
-            format, dataSpace, rotation, id, streamSetId, isShared, consumerUsage);
+            format, dataSpace, rotation, id, surfaceIds, streamSetId, isShared, consumerUsage);
 }
 
 status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
-        int streamSetId, bool isShared, uint64_t consumerUsage) {
+        std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
     ATRACE_CALL();
+
     Mutex::Autolock il(mInterfaceLock);
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     Mutex::Autolock l(mLock);
@@ -1330,6 +1333,19 @@
                 width, height, format, dataSpace, rotation,
                 mTimestampOffset, streamSetId);
     }
+
+    size_t consumerCount = consumers.size();
+    for (size_t i = 0; i < consumerCount; i++) {
+        ssize_t surfaceId = newStream->getSurfaceId(consumers[i]);
+        if (surfaceId < 0) {
+            SET_ERR_L("Invalid surface id");
+            return BAD_VALUE;
+        }
+        if (surfaceIds != nullptr) {
+            surfaceIds->push_back(surfaceId);
+        }
+    }
+
     newStream->setStatusTracker(mStatusTracker);
 
     newStream->setBufferManager(mBufferManager);
@@ -1346,8 +1362,8 @@
     // Continue captures if active at start
     if (wasActive) {
         ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
-        // Reuse current operating mode for new stream config
-        res = configureStreamsLocked(mOperatingMode);
+        // Reuse current operating mode and session parameters for new stream config
+        res = configureStreamsLocked(mOperatingMode, mSessionParams);
         if (res != OK) {
             CLOGE("Can't reconfigure device for new stream %d: %s (%d)",
                     mNextStreamId, strerror(-res), res);
@@ -1485,14 +1501,29 @@
     return res;
 }
 
-status_t Camera3Device::configureStreams(int operatingMode) {
+status_t Camera3Device::configureStreams(const CameraMetadata& sessionParams, int operatingMode) {
     ATRACE_CALL();
     ALOGV("%s: E", __FUNCTION__);
 
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
-    return configureStreamsLocked(operatingMode);
+    // Filter the incoming session parameters against the advertised session keys
+    const CameraMetadata params(sessionParams);
+    CameraMetadata filteredParams;
+    camera_metadata_entry_t availableSessionKeys = mDeviceInfo.find(
+            ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
+    if (availableSessionKeys.count > 0) {
+        for (size_t i = 0; i < availableSessionKeys.count; i++) {
+            camera_metadata_ro_entry entry = params.find(
+                    availableSessionKeys.data.i32[i]);
+            if (entry.count > 0) {
+                filteredParams.update(entry);
+            }
+        }
+    }
+
+    return configureStreamsLocked(operatingMode, filteredParams);
 }
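configureStreams() keeps only the keys the HAL advertises in ANDROID_REQUEST_AVAILABLE_SESSION_KEYS and drops everything else. A self-contained sketch of the same whitelist filtering, with std::map standing in for CameraMetadata (keys and values below are made up):

#include <cstdint>
#include <cstdio>
#include <map>
#include <vector>

std::map<uint32_t, int> filterSessionParams(const std::map<uint32_t, int>& params,
                                            const std::vector<uint32_t>& availableSessionKeys) {
    std::map<uint32_t, int> filtered;
    for (uint32_t key : availableSessionKeys) {
        auto it = params.find(key);
        if (it != params.end()) {
            filtered.emplace(it->first, it->second);   // copy only whitelisted entries
        }
    }
    return filtered;
}

int main() {
    std::map<uint32_t, int> incoming = {{0x1001, 30}, {0x2002, 1}, {0x3003, 7}};
    std::vector<uint32_t> sessionKeys = {0x1001, 0x3003};
    auto filtered = filterSessionParams(incoming, sessionKeys);
    std::printf("kept %zu of %zu entries\n", filtered.size(), incoming.size());
    return 0;
}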
 
 status_t Camera3Device::getInputBufferProducer(
@@ -1936,10 +1967,15 @@
 }
 
 status_t Camera3Device::setConsumerSurfaces(int streamId,
-        const std::vector<sp<Surface>>& consumers) {
+        const std::vector<sp<Surface>>& consumers, std::vector<int> *surfaceIds) {
     ATRACE_CALL();
     ALOGV("%s: Camera %s: set consumer surface for stream %d",
             __FUNCTION__, mId.string(), streamId);
+
+    if (surfaceIds == nullptr) {
+        return BAD_VALUE;
+    }
+
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
@@ -1960,6 +1996,15 @@
         return res;
     }
 
+    for (auto &consumer : consumers) {
+        int id = stream->getSurfaceId(consumer);
+        if (id < 0) {
+            CLOGE("Invalid surface id!");
+            return BAD_VALUE;
+        }
+        surfaceIds->push_back(id);
+    }
+
     if (stream->isConsumerConfigurationDeferred()) {
         if (!stream->isConfiguring()) {
             CLOGE("Stream %d was already fully configured.", streamId);
@@ -1977,6 +2022,40 @@
     return OK;
 }
 
+status_t Camera3Device::updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
+        const std::vector<OutputStreamInfo> &outputInfo,
+        const std::vector<size_t> &removedSurfaceIds, KeyedVector<sp<Surface>, size_t> *outputMap) {
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    ssize_t idx = mOutputStreams.indexOfKey(streamId);
+    if (idx == NAME_NOT_FOUND) {
+        CLOGE("Stream %d is unknown", streamId);
+        return idx;
+    }
+
+    for (const auto &it : removedSurfaceIds) {
+        if (mRequestThread->isOutputSurfacePending(streamId, it)) {
+            CLOGE("Shared surface still part of a pending request!");
+            return -EBUSY;
+        }
+    }
+
+    sp<Camera3OutputStreamInterface> stream = mOutputStreams[idx];
+    status_t res = stream->updateStream(newSurfaces, outputInfo, removedSurfaceIds, outputMap);
+    if (res != OK) {
+        CLOGE("Stream %d failed to update stream (error %d %s) ",
+              streamId, res, strerror(-res));
+        if (res == UNKNOWN_ERROR) {
+            SET_ERR_L("%s: Stream update failed to revert to previous output configuration!",
+                    __FUNCTION__);
+        }
+        return res;
+    }
+
+    return res;
+}
+
 status_t Camera3Device::dropStreamBuffers(bool dropping, int streamId) {
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
@@ -2126,7 +2205,8 @@
     mNeedConfig = true;
 }
 
-status_t Camera3Device::configureStreamsLocked(int operatingMode) {
+status_t Camera3Device::configureStreamsLocked(int operatingMode,
+        const CameraMetadata& sessionParams) {
     ATRACE_CALL();
     status_t res;
 
@@ -2210,7 +2290,9 @@
     // Do the HAL configuration; will potentially touch stream
     // max_buffers, usage, priv fields.
 
-    res = mInterface->configureStreams(&config);
+    const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
+    res = mInterface->configureStreams(sessionBuffer, &config);
+    sessionParams.unlock(sessionBuffer);
 
     if (res == BAD_VALUE) {
         // HAL rejected this set of streams as unsupported, clean up config
@@ -2275,6 +2357,14 @@
     }
 
     // Update device state
+    const camera_metadata_t *newSessionParams = sessionParams.getAndLock();
+    const camera_metadata_t *currentSessionParams = mSessionParams.getAndLock();
+    bool updateSessionParams = (newSessionParams != currentSessionParams);
+    sessionParams.unlock(newSessionParams);
+    mSessionParams.unlock(currentSessionParams);
+    if (updateSessionParams) {
+        mSessionParams = sessionParams;
+    }
 
     mNeedConfig = false;
 
@@ -3162,17 +3252,18 @@
     return res;
 }
 
-status_t Camera3Device::HalInterface::configureStreams(camera3_stream_configuration *config) {
+status_t Camera3Device::HalInterface::configureStreams(const camera_metadata_t *sessionParams,
+        camera3_stream_configuration *config) {
     ATRACE_NAME("CameraHal::configureStreams");
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
 
     // Convert stream config to HIDL
     std::set<int> activeStreams;
-    StreamConfiguration requestedConfiguration;
-    requestedConfiguration.streams.resize(config->num_streams);
+    device::V3_4::StreamConfiguration requestedConfiguration;
+    requestedConfiguration.v3_2.streams.resize(config->num_streams);
     for (size_t i = 0; i < config->num_streams; i++) {
-        Stream &dst = requestedConfiguration.streams[i];
+        Stream &dst = requestedConfiguration.v3_2.streams[i];
         camera3_stream_t *src = config->streams[i];
 
         Camera3Stream* cam3stream = Camera3Stream::cast(src);
@@ -3219,29 +3310,50 @@
 
     res = mapToStreamConfigurationMode(
             (camera3_stream_configuration_mode_t) config->operation_mode,
-            /*out*/ &requestedConfiguration.operationMode);
+            /*out*/ &requestedConfiguration.v3_2.operationMode);
     if (res != OK) {
         return res;
     }
 
+    requestedConfiguration.sessionParams.setToExternal(
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
+            get_camera_metadata_size(sessionParams));
+
     // Invoke configureStreams
 
     device::V3_3::HalStreamConfiguration finalConfiguration;
     common::V1_0::Status status;
 
-    // See if we have v3.3 HAL
+    // See if we have v3.4 or v3.3 HAL
+    sp<device::V3_4::ICameraDeviceSession> hidlSession_3_4;
     sp<device::V3_3::ICameraDeviceSession> hidlSession_3_3;
-    auto castResult = device::V3_3::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult.isOk()) {
-        hidlSession_3_3 = castResult;
+    auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_4.isOk()) {
+        hidlSession_3_4 = castResult_3_4;
     } else {
-        ALOGE("%s: Transaction error when casting ICameraDeviceSession: %s", __FUNCTION__,
-                castResult.description().c_str());
+        auto castResult_3_3 = device::V3_3::ICameraDeviceSession::castFrom(mHidlSession);
+        if (castResult_3_3.isOk()) {
+            hidlSession_3_3 = castResult_3_3;
+        }
     }
-    if (hidlSession_3_3 != nullptr) {
+
+    if (hidlSession_3_4 != nullptr) {
+        // We do; use v3.4 for the call
+        ALOGV("%s: v3.4 device found", __FUNCTION__);
+        auto err = hidlSession_3_4->configureStreams_3_4(requestedConfiguration,
+            [&status, &finalConfiguration]
+            (common::V1_0::Status s, const device::V3_3::HalStreamConfiguration& halConfiguration) {
+                finalConfiguration = halConfiguration;
+                status = s;
+            });
+        if (!err.isOk()) {
+            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+            return DEAD_OBJECT;
+        }
+    } else if (hidlSession_3_3 != nullptr) {
         // We do; use v3.3 for the call
         ALOGV("%s: v3.3 device found", __FUNCTION__);
-        auto err = hidlSession_3_3->configureStreams_3_3(requestedConfiguration,
+        auto err = hidlSession_3_3->configureStreams_3_3(requestedConfiguration.v3_2,
             [&status, &finalConfiguration]
             (common::V1_0::Status s, const device::V3_3::HalStreamConfiguration& halConfiguration) {
                 finalConfiguration = halConfiguration;
@@ -3255,7 +3367,7 @@
         // We don't; use v3.2 call and construct a v3.3 HalStreamConfiguration
         ALOGV("%s: v3.2 device found", __FUNCTION__);
         HalStreamConfiguration finalConfiguration_3_2;
-        auto err = mHidlSession->configureStreams(requestedConfiguration,
+        auto err = mHidlSession->configureStreams(requestedConfiguration.v3_2,
                 [&status, &finalConfiguration_3_2]
                 (common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
                     finalConfiguration_3_2 = halConfiguration;
@@ -3269,7 +3381,7 @@
         for (size_t i = 0; i < finalConfiguration_3_2.streams.size(); i++) {
             finalConfiguration.streams[i].v3_2 = finalConfiguration_3_2.streams[i];
             finalConfiguration.streams[i].overrideDataSpace =
-                    requestedConfiguration.streams[i].dataSpace;
+                    requestedConfiguration.v3_2.streams[i].dataSpace;
         }
     }
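The session is probed for the newest interface first: v3.4 (which accepts session parameters), then v3.3, and finally the plain v3.2 call. A sketch of that newest-cast-wins cascade, using dynamic_cast on hypothetical classes as a stand-in for the HIDL castFrom() helpers:

#include <cstdio>
#include <memory>

struct SessionV32 { virtual ~SessionV32() = default; virtual const char* configure() { return "v3.2"; } };
struct SessionV33 : SessionV32 { const char* configure() override { return "v3.3"; } };
struct SessionV34 : SessionV33 { const char* configure() override { return "v3.4 (session params)"; } };

const char* configureStreams(SessionV32* session) {
    if (auto* s34 = dynamic_cast<SessionV34*>(session)) {
        return s34->configure();               // newest path: carries session parameters
    }
    if (auto* s33 = dynamic_cast<SessionV33*>(session)) {
        return s33->configure();               // fall back to v3.3
    }
    return session->configure();               // base v3.2 path
}

int main() {
    auto s = std::make_unique<SessionV33>();
    std::printf("using %s\n", configureStreams(s.get()));
    return 0;
}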
 
@@ -4356,6 +4468,46 @@
     return false;
 }
 
+bool Camera3Device::RequestThread::isOutputSurfacePending(int streamId, size_t surfaceId) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mRequestLock);
+
+    for (const auto& nextRequest : mNextRequests) {
+        for (const auto& s : nextRequest.captureRequest->mOutputSurfaces) {
+            if (s.first == streamId) {
+                const auto &it = std::find(s.second.begin(), s.second.end(), surfaceId);
+                if (it != s.second.end()) {
+                    return true;
+                }
+            }
+        }
+    }
+
+    for (const auto& request : mRequestQueue) {
+        for (const auto& s : request->mOutputSurfaces) {
+            if (s.first == streamId) {
+                const auto &it = std::find(s.second.begin(), s.second.end(), surfaceId);
+                if (it != s.second.end()) {
+                    return true;
+                }
+            }
+        }
+    }
+
+    for (const auto& request : mRepeatingRequests) {
+        for (const auto& s : request->mOutputSurfaces) {
+            if (s.first == streamId) {
+                const auto &it = std::find(s.second.begin(), s.second.end(), surfaceId);
+                if (it != s.second.end()) {
+                    return true;
+                }
+            }
+        }
+    }
+
+    return false;
+}
+
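isOutputSurfacePending() walks the in-flight, queued and repeating request lists looking for a (stream id, surface id) pair. A condensed sketch of that lookup over plain containers (FakeRequest and the queue layout are hypothetical):

#include <algorithm>
#include <cstdio>
#include <list>
#include <utility>
#include <vector>

struct FakeRequest {
    // stream id -> surface ids targeted by this request
    std::vector<std::pair<int, std::vector<size_t>>> outputSurfaces;
};

bool isOutputSurfacePending(const std::list<FakeRequest>& queue, int streamId, size_t surfaceId) {
    for (const auto& request : queue) {
        for (const auto& s : request.outputSurfaces) {
            if (s.first == streamId &&
                std::find(s.second.begin(), s.second.end(), surfaceId) != s.second.end()) {
                return true;
            }
        }
    }
    return false;
}

int main() {
    std::list<FakeRequest> repeating;
    FakeRequest r;
    r.outputSurfaces.push_back({2, {0, 1}});
    repeating.push_back(r);
    std::printf("pending: %s\n", isOutputSurfacePending(repeating, 2, 1) ? "yes" : "no");
    return 0;
}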
 nsecs_t Camera3Device::getExpectedInFlightDuration() {
     ATRACE_CALL();
     Mutex::Autolock al(mInFlightLock);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 081af19..cc7eb35 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -44,6 +44,8 @@
 #include "utils/LatencyHistogram.h"
 #include <camera_metadata_hidden.h>
 
+using android::camera3::OutputStreamInfo;
+
 /**
  * Function pointer types with C calling convention to
  * use for HAL callback functions.
@@ -117,11 +119,13 @@
     status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+            std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, uint64_t consumerUsage = 0) override;
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+            std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, uint64_t consumerUsage = 0) override;
 
@@ -134,7 +138,8 @@
 
     status_t deleteStream(int id) override;
 
-    status_t configureStreams(int operatingMode =
+    status_t configureStreams(const CameraMetadata& sessionParams,
+            int operatingMode =
             static_cast<int>(hardware::camera::device::V3_2::StreamConfigurationMode::NORMAL_MODE))
             override;
     status_t getInputBufferProducer(
@@ -176,7 +181,17 @@
      * Set the deferred consumer surfaces to the output stream and finish the deferred
      * consumer configuration.
      */
-    status_t setConsumerSurfaces(int streamId, const std::vector<sp<Surface>>& consumers) override;
+    status_t setConsumerSurfaces(
+            int streamId, const std::vector<sp<Surface>>& consumers,
+            std::vector<int> *surfaceIds /*out*/) override;
+
+    /**
+     * Update a given stream.
+     */
+    status_t updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
+            const std::vector<OutputStreamInfo> &outputInfo,
+            const std::vector<size_t> &removedSurfaceIds,
+            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
 
     /**
      * Drop buffers for stream of streamId if dropping is true. If dropping is false, do not
@@ -222,6 +237,9 @@
 
     // Current stream configuration mode;
     int                        mOperatingMode;
+    // Current session wide parameters
+    hardware::camera2::impl::CameraMetadataNative mSessionParams;
+
     // Constant to use for no set operating mode
     static const int           NO_MODE = -1;
 
@@ -258,7 +276,8 @@
         // Caller takes ownership of requestTemplate
         status_t constructDefaultRequestSettings(camera3_request_template_t templateId,
                 /*out*/ camera_metadata_t **requestTemplate);
-        status_t configureStreams(/*inout*/ camera3_stream_configuration *config);
+        status_t configureStreams(const camera_metadata_t *sessionParams,
+                /*inout*/ camera3_stream_configuration *config);
         status_t processCaptureRequest(camera3_capture_request_t *request);
         status_t processBatchCaptureRequests(
                 std::vector<camera3_capture_request_t*>& requests,
@@ -536,7 +555,8 @@
      * Take the currently-defined set of streams and configure the HAL to use
      * them. This is a long-running operation (may be several hundred ms).
      */
-    status_t           configureStreamsLocked(int operatingMode);
+    status_t           configureStreamsLocked(int operatingMode,
+            const CameraMetadata& sessionParams);
 
     /**
      * Cancel stream configuration that did not finish successfully.
@@ -711,6 +731,12 @@
          */
         bool isStreamPending(sp<camera3::Camera3StreamInterface>& stream);
 
+        /**
+         * Returns true if the surface is a target of any queued or repeating
+         * capture request
+         */
+        bool isOutputSurfacePending(int streamId, size_t surfaceId);
+
         // dump processCaptureRequest latency
         void dumpCaptureRequestLatency(int fd, const char* name) {
             mRequestLatency.dump(fd, name);
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index 84fb890..0a245c4 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -117,6 +117,15 @@
             __FUNCTION__, mId);
     return INVALID_OPERATION;
 }
+
+status_t Camera3DummyStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
+            const std::vector<OutputStreamInfo> &/*outputInfo*/,
+            const std::vector<size_t> &/*removedSurfaceIds*/,
+            KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
+    ALOGE("%s: this method is not supported!", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+
 }; // namespace camera3
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index 9710765..684f4b0 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -77,6 +77,19 @@
      */
     virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
 
+    /**
+     * Query the output surface id.
+     */
+    virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }
+
+    /**
+     * Update the stream output surfaces.
+     */
+    virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+            const std::vector<OutputStreamInfo> &outputInfo,
+            const std::vector<size_t> &removedSurfaceIds,
+            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
+
   protected:
 
     /**
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 8460c34..e79eecc 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -700,6 +700,14 @@
     return OK;
 }
 
+status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
+            const std::vector<OutputStreamInfo> &/*outputInfo*/,
+            const std::vector<size_t> &/*removedSurfaceIds*/,
+            KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
+    ALOGE("%s: this method is not supported!", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+
 void Camera3OutputStream::BufferReleasedListener::onBufferReleased() {
     sp<Camera3OutputStream> stream = mParent.promote();
     if (stream == nullptr) {
@@ -737,7 +745,7 @@
         const std::vector<sp<GraphicBuffer>>& removedBuffers) {
     sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
     if (callback != nullptr) {
-        for (auto gb : removedBuffers) {
+        for (const auto& gb : removedBuffers) {
             callback->onBufferFreed(mId, gb->handle);
         }
     }
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 4865be7..18b1901 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -177,6 +177,19 @@
      */
     status_t setBufferManager(sp<Camera3BufferManager> bufferManager);
 
+    /**
+     * Query the output surface id.
+     */
+    virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }
+
+    /**
+     * Update the stream output surfaces.
+     */
+    virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+            const std::vector<OutputStreamInfo> &outputInfo,
+            const std::vector<size_t> &removedSurfaceIds,
+            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
+
   protected:
     Camera3OutputStream(int id, camera3_stream_type_t type,
             uint32_t width, uint32_t height, int format,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 1719d74..08fcf38 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -18,6 +18,7 @@
 #define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H
 
 #include "Camera3StreamInterface.h"
+#include <utils/KeyedVector.h>
 
 namespace android {
 
@@ -61,6 +62,19 @@
     virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) = 0;
 
     /**
+     * Query the surface id.
+     */
+    virtual ssize_t getSurfaceId(const sp<Surface> &surface) = 0;
+
+    /**
+     * Update the stream output surfaces.
+     */
+    virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+            const std::vector<OutputStreamInfo> &outputInfo,
+            const std::vector<size_t> &removedSurfaceIds,
+            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/) = 0;
+
+    /**
      * Drop buffers if dropping is true. If dropping is false, do not drop buffers.
      */
     virtual status_t dropBuffers(bool /*dropping*/) = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 5051711..1c9417b 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -20,6 +20,8 @@
 
 namespace camera3 {
 
+const size_t Camera3SharedOutputStream::kMaxOutputs;
+
 Camera3SharedOutputStream::Camera3SharedOutputStream(int id,
         const std::vector<sp<Surface>>& surfaces,
         uint32_t width, uint32_t height, int format,
@@ -28,8 +30,14 @@
         nsecs_t timestampOffset, int setId) :
         Camera3OutputStream(id, CAMERA3_STREAM_OUTPUT, width, height,
                             format, dataSpace, rotation, consumerUsage,
-                            timestampOffset, setId),
-        mSurfaces(surfaces) {
+                            timestampOffset, setId) {
+    size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
+    if (surfaces.size() > consumerCount) {
+        ALOGE("%s: Trying to add more consumers than the maximum", __FUNCTION__);
+    }
+    for (size_t i = 0; i < consumerCount; i++) {
+        mSurfaces[i] = surfaces[i];
+    }
 }
 
 Camera3SharedOutputStream::~Camera3SharedOutputStream() {
@@ -44,7 +52,16 @@
     uint64_t usage;
     getEndpointUsage(&usage);
 
-    res = mStreamSplitter->connect(mSurfaces, usage, camera3_stream::max_buffers, &mConsumer);
+    std::unordered_map<size_t, sp<Surface>> initialSurfaces;
+    for (size_t i = 0; i < kMaxOutputs; i++) {
+        if (mSurfaces[i] != nullptr) {
+            initialSurfaces.emplace(i, mSurfaces[i]);
+        }
+    }
+
+    android::PixelFormat format = isFormatOverridden() ? getOriginalFormat() : getFormat();
+    res = mStreamSplitter->connect(initialSurfaces, usage, mUsage, camera3_stream::max_buffers,
+            getWidth(), getHeight(), format, &mConsumer);
     if (res != OK) {
         ALOGE("%s: Failed to connect to stream splitter: %s(%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -68,7 +85,11 @@
 
 bool Camera3SharedOutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
     Mutex::Autolock l(mLock);
-    return (surface_id >= mSurfaces.size());
+    if (surface_id >= kMaxOutputs) {
+        return true;
+    }
+
+    return (mSurfaces[surface_id] == nullptr);
 }
 
 status_t Camera3SharedOutputStream::setConsumers(const std::vector<sp<Surface>>& surfaces) {
@@ -85,11 +106,17 @@
             return INVALID_OPERATION;
         }
 
-        mSurfaces.push_back(surface);
+        ssize_t id = getNextSurfaceIdLocked();
+        if (id < 0) {
+            ALOGE("%s: No surface ids available!", __func__);
+            return NO_MEMORY;
+        }
+
+        mSurfaces[id] = surface;
 
         // Only call addOutput if the splitter has been connected.
         if (mStreamSplitter != nullptr) {
-            ret = mStreamSplitter->addOutput(surface);
+            ret = mStreamSplitter->addOutput(id, surface);
             if (ret != OK) {
                 ALOGE("%s: addOutput failed with error code %d", __FUNCTION__, ret);
                 return ret;
@@ -200,9 +227,9 @@
         // Called before shared buffer queue is constructed.
         *usage = getPresetConsumerUsage();
 
-        for (auto surface : mSurfaces) {
-            if (surface != nullptr) {
-                res = getEndpointUsageForSurface(&u, surface);
+        for (size_t id = 0; id < kMaxOutputs; id++) {
+            if (mSurfaces[id] != nullptr) {
+                res = getEndpointUsageForSurface(&u, mSurfaces[id]);
                 *usage |= u;
             }
         }
@@ -215,6 +242,140 @@
     return res;
 }
 
+ssize_t Camera3SharedOutputStream::getNextSurfaceIdLocked() {
+    ssize_t id = -1;
+    for (size_t i = 0; i < kMaxOutputs; i++) {
+        if (mSurfaces[i] == nullptr) {
+            id = i;
+            break;
+        }
+    }
+
+    return id;
+}
+
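The shared stream now keeps its consumers in a fixed table of kMaxOutputs slots and hands out the first free index as the surface id. A tiny stand-alone sketch of that allocation (types are stand-ins):

#include <array>
#include <cstddef>
#include <cstdio>
#include <memory>

constexpr size_t kMaxOutputs = 4;
using FakeSurface = std::shared_ptr<int>;

int nextFreeSlot(const std::array<FakeSurface, kMaxOutputs>& slots) {
    for (size_t i = 0; i < kMaxOutputs; i++) {
        if (slots[i] == nullptr) {
            return static_cast<int>(i);   // first unused surface id
        }
    }
    return -1;                            // every slot is taken
}

int main() {
    std::array<FakeSurface, kMaxOutputs> slots{};   // all slots start empty
    slots[0] = std::make_shared<int>(0);
    std::printf("next free surface id: %d\n", nextFreeSlot(slots));
    return 0;
}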
+ssize_t Camera3SharedOutputStream::getSurfaceId(const sp<Surface> &surface) {
+    Mutex::Autolock l(mLock);
+    ssize_t id = -1;
+    for (size_t i = 0; i < kMaxOutputs; i++) {
+        if (mSurfaces[i] == surface) {
+            id = i;
+            break;
+        }
+    }
+
+    return id;
+}
+
+status_t Camera3SharedOutputStream::revertPartialUpdateLocked(
+        const KeyedVector<sp<Surface>, size_t> &removedSurfaces,
+        const KeyedVector<sp<Surface>, size_t> &attachedSurfaces) {
+    status_t ret = OK;
+
+    for (size_t i = 0; i < attachedSurfaces.size(); i++) {
+        size_t index = attachedSurfaces.valueAt(i);
+        if (mStreamSplitter != nullptr) {
+            ret = mStreamSplitter->removeOutput(index);
+            if (ret != OK) {
+                return UNKNOWN_ERROR;
+            }
+        }
+        mSurfaces[index] = nullptr;
+    }
+
+    for (size_t i = 0; i < removedSurfaces.size(); i++) {
+        size_t index = removedSurfaces.valueAt(i);
+        if (mStreamSplitter != nullptr) {
+            ret = mStreamSplitter->addOutput(index, removedSurfaces.keyAt(i));
+            if (ret != OK) {
+                return UNKNOWN_ERROR;
+            }
+        }
+        mSurfaces[index] = removedSurfaces.keyAt(i);
+    }
+
+    return ret;
+}
+
+status_t Camera3SharedOutputStream::updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+        const std::vector<OutputStreamInfo> &outputInfo,
+        const std::vector<size_t> &removedSurfaceIds,
+        KeyedVector<sp<Surface>, size_t> *outputMap) {
+    status_t ret = OK;
+    Mutex::Autolock l(mLock);
+
+    if ((outputMap == nullptr) || (outputInfo.size() != outputSurfaces.size()) ||
+            (outputSurfaces.size() > kMaxOutputs)) {
+        return BAD_VALUE;
+    }
+
+    uint64_t usage;
+    getEndpointUsage(&usage);
+    KeyedVector<sp<Surface>, size_t> removedSurfaces;
+    //Check whether the new surfaces are compatible.
+    for (const auto &infoIt : outputInfo) {
+        bool imgReaderUsage = (infoIt.consumerUsage & GRALLOC_USAGE_SW_READ_OFTEN) != 0;
+        bool sizeMismatch = (static_cast<uint32_t>(infoIt.width) != getWidth()) ||
+                (static_cast<uint32_t>(infoIt.height) != getHeight());
+        if ((imgReaderUsage && sizeMismatch) ||
+                (infoIt.format != getOriginalFormat() && infoIt.format != getFormat()) ||
+                (infoIt.dataSpace != getDataSpace() &&
+                 infoIt.dataSpace != getOriginalDataSpace())) {
+            ALOGE("%s: Shared surface parameters format: 0x%x dataSpace: 0x%x "
+                    " don't match source stream format: 0x%x  dataSpace: 0x%x", __FUNCTION__,
+                    infoIt.format, infoIt.dataSpace, getFormat(), getDataSpace());
+            return BAD_VALUE;
+        }
+    }
+
+    //First remove all absent outputs
+    for (const auto &it : removedSurfaceIds) {
+        if (mStreamSplitter != nullptr) {
+            ret = mStreamSplitter->removeOutput(it);
+            if (ret != OK) {
+                ALOGE("%s: failed with error code %d", __FUNCTION__, ret);
+                status_t res = revertPartialUpdateLocked(removedSurfaces, *outputMap);
+                if (res != OK) {
+                    return res;
+                }
+                return ret;
+            }
+        }
+        removedSurfaces.add(mSurfaces[it], it);
+        mSurfaces[it] = nullptr;
+    }
+
+    //Next add the new outputs
+    for (const auto &it : outputSurfaces) {
+        ssize_t surfaceId = getNextSurfaceIdLocked();
+        if (surfaceId < 0) {
+            ALOGE("%s: No more available output slots!", __FUNCTION__);
+            status_t res = revertPartialUpdateLocked(removedSurfaces, *outputMap);
+            if (res != OK) {
+                return res;
+            }
+            return NO_MEMORY;
+        }
+        if (mStreamSplitter != nullptr) {
+            ret = mStreamSplitter->addOutput(surfaceId, it);
+            if (ret != OK) {
+                ALOGE("%s: failed with error code %d", __FUNCTION__, ret);
+                status_t res = revertPartialUpdateLocked(removedSurfaces, *outputMap);
+                if (res != OK) {
+                    return res;
+                }
+                return ret;
+            }
+        }
+        mSurfaces[surfaceId] = it;
+        outputMap->add(it, surfaceId);
+    }
+
+    return ret;
+}
+
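updateStream() first detaches the removed outputs, then attaches the new ones, and rolls the partial changes back through revertPartialUpdateLocked() if any step fails. A compact sketch of that remove/add/rollback shape over a plain slot table; the full-copy backup below is a cheap stand-in for the real revert bookkeeping:

#include <cstdio>
#include <map>
#include <string>
#include <vector>

using Slots = std::map<int, std::string>;   // surface id -> surface name

bool updateOutputs(Slots& slots, const std::vector<int>& removedIds,
                   const std::vector<std::string>& newSurfaces, size_t maxOutputs) {
    Slots backup = slots;                    // stand-in for the revert bookkeeping
    for (int id : removedIds) {
        slots.erase(id);
    }
    for (const auto& surface : newSurfaces) {
        if (slots.size() >= maxOutputs) {    // no free slot: roll back everything
            slots = backup;
            return false;
        }
        int id = 0;
        while (slots.count(id)) id++;        // first free surface id
        slots.emplace(id, surface);
    }
    return true;
}

int main() {
    Slots slots = {{0, "preview"}, {1, "recorder"}};
    bool ok = updateOutputs(slots, /*removedIds*/ {1}, /*newSurfaces*/ {"analysis"}, /*maxOutputs*/ 4);
    std::printf("update %s, %zu outputs\n", ok ? "succeeded" : "rolled back", slots.size());
    return 0;
}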
 } // namespace camera3
 
 } // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 22bb2fc..6eab8bd 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -46,9 +46,24 @@
 
     virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
 
+    virtual ssize_t getSurfaceId(const sp<Surface> &surface);
+
+    virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+            const std::vector<OutputStreamInfo> &outputInfo,
+            const std::vector<size_t> &removedSurfaceIds,
+            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
+
 private:
-    // Surfaces passed in constructor from app
-    std::vector<sp<Surface> > mSurfaces;
+
+    static const size_t kMaxOutputs = 4;
+
+    // Map surfaceId -> output surfaces
+    sp<Surface> mSurfaces[kMaxOutputs];
+
+    ssize_t getNextSurfaceIdLocked();
+
+    status_t revertPartialUpdateLocked(const KeyedVector<sp<Surface>, size_t> &removedSurfaces,
+            const KeyedVector<sp<Surface>, size_t> &attachedSurfaces);
 
     /**
      * The Camera3StreamSplitter object this stream uses for stream
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index cc9bf8e..9ed7184 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -43,6 +43,24 @@
 
 class StatusTracker;
 
+// OutputStreamInfo describes the property of a camera stream.
+class OutputStreamInfo {
+    public:
+        int width;
+        int height;
+        int format;
+        android_dataspace dataSpace;
+        uint64_t consumerUsage;
+        bool finalized = false;
+        OutputStreamInfo() :
+            width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
+            consumerUsage(0) {}
+        OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
+                uint64_t _consumerUsage) :
+            width(_width), height(_height), format(_format),
+            dataSpace(_dataSpace), consumerUsage(_consumerUsage) {}
+};
+
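OutputStreamInfo carries the properties a new shared surface has to match before it can join an already configured stream. A simplified, self-contained sketch of that compatibility check (the fields and the usage bit below are stand-ins, not the real gralloc constants):

#include <cstdint>
#include <cstdio>

struct StreamInfo {
    int width;
    int height;
    int format;
    int dataSpace;
    uint64_t consumerUsage;
};

bool isCompatible(const StreamInfo& configured, const StreamInfo& candidate) {
    // Size must match only for CPU readers; format and dataspace must always match.
    const uint64_t kSwReadOften = 1u << 3;   // stand-in usage bit for CPU-read consumers
    bool cpuReader   = (candidate.consumerUsage & kSwReadOften) != 0;
    bool sizeDiffers = candidate.width != configured.width ||
                       candidate.height != configured.height;
    if (cpuReader && sizeDiffers) return false;
    return candidate.format == configured.format &&
           candidate.dataSpace == configured.dataSpace;
}

int main() {
    StreamInfo configured{1920, 1080, 0x22, 0, 0};
    StreamInfo candidate{1920, 1080, 0x22, 0, 0};
    std::printf("compatible: %d\n", isCompatible(configured, candidate) ? 1 : 0);
    return 0;
}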
 /**
  * An interface for managing a single stream of input and/or output data from
  * the camera device.
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index a0a50c2..bc12f39 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -38,8 +38,9 @@
 
 namespace android {
 
-status_t Camera3StreamSplitter::connect(const std::vector<sp<Surface> >& surfaces,
-        uint64_t consumerUsage, size_t halMaxBuffers, sp<Surface>* consumer) {
+status_t Camera3StreamSplitter::connect(const std::unordered_map<size_t, sp<Surface>> &surfaces,
+        uint64_t consumerUsage, uint64_t producerUsage, size_t halMaxBuffers, uint32_t width,
+        uint32_t height, android::PixelFormat format, sp<Surface>* consumer) {
     ATRACE_CALL();
     if (consumer == nullptr) {
         SP_LOGE("%s: consumer pointer is NULL", __FUNCTION__);
@@ -62,12 +63,12 @@
     mConsumerName = getUniqueConsumerName();
     // Add output surfaces. This has to be before creating internal buffer queue
     // in order to get max consumer side buffers.
-    for (size_t i = 0; i < surfaces.size(); i++) {
-        if (surfaces[i] == nullptr) {
+    for (auto &it : surfaces) {
+        if (it.second == nullptr) {
             SP_LOGE("%s: Fatal: surface is NULL", __FUNCTION__);
             return BAD_VALUE;
         }
-        res = addOutputLocked(surfaces[i]);
+        res = addOutputLocked(it.first, it.second);
         if (res != OK) {
             SP_LOGE("%s: Failed to add output surface: %s(%d)",
                     __FUNCTION__, strerror(-res), res);
@@ -94,8 +95,20 @@
         return NO_MEMORY;
     }
 
+    res = mProducer->setAsyncMode(true);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to enable input queue async mode: %s(%d)", __FUNCTION__,
+                strerror(-res), res);
+        return res;
+    }
+
     res = mConsumer->consumerConnect(this, /* controlledByApp */ false);
 
+    mWidth = width;
+    mHeight = height;
+    mFormat = format;
+    mProducerUsage = producerUsage;
+
     SP_LOGV("%s: connected", __FUNCTION__);
     return res;
 }
@@ -117,10 +130,13 @@
     mNotifiers.clear();
 
     for (auto& output : mOutputs) {
-        output->disconnect(NATIVE_WINDOW_API_CAMERA);
+        if (output.second != nullptr) {
+            output.second->disconnect(NATIVE_WINDOW_API_CAMERA);
+        }
     }
     mOutputs.clear();
     mOutputSlots.clear();
+    mConsumerBufferCount.clear();
 
     mConsumer->consumerDisconnect();
 
@@ -139,10 +155,10 @@
     disconnect();
 }
 
-status_t Camera3StreamSplitter::addOutput(const sp<Surface>& outputQueue) {
+status_t Camera3StreamSplitter::addOutput(size_t surfaceId, const sp<Surface>& outputQueue) {
     ATRACE_CALL();
     Mutex::Autolock lock(mMutex);
-    status_t res = addOutputLocked(outputQueue);
+    status_t res = addOutputLocked(surfaceId, outputQueue);
 
     if (res != OK) {
         SP_LOGE("%s: addOutputLocked failed %d", __FUNCTION__, res);
@@ -154,18 +170,30 @@
     return res;
 }
 
-status_t Camera3StreamSplitter::addOutputLocked(const sp<Surface>& outputQueue) {
+status_t Camera3StreamSplitter::addOutputLocked(size_t surfaceId, const sp<Surface>& outputQueue) {
     ATRACE_CALL();
     if (outputQueue == nullptr) {
         SP_LOGE("addOutput: outputQueue must not be NULL");
         return BAD_VALUE;
     }
 
+    if (mOutputs[surfaceId] != nullptr) {
+        SP_LOGE("%s: surfaceId: %u already taken!", __FUNCTION__, (unsigned) surfaceId);
+        return BAD_VALUE;
+    }
+
+    status_t res = native_window_set_buffers_dimensions(outputQueue.get(),
+            mWidth, mHeight);
+    if (res != NO_ERROR) {
+        SP_LOGE("addOutput: failed to set buffer dimensions (%d)", res);
+        return res;
+    }
+
     sp<IGraphicBufferProducer> gbp = outputQueue->getIGraphicBufferProducer();
     // Connect to the buffer producer
     sp<OutputListener> listener(new OutputListener(this, gbp));
     IInterface::asBinder(gbp)->linkToDeath(listener);
-    status_t res = outputQueue->connect(NATIVE_WINDOW_API_CAMERA, listener);
+    res = outputQueue->connect(NATIVE_WINDOW_API_CAMERA, listener);
     if (res != NO_ERROR) {
         SP_LOGE("addOutput: failed to connect (%d)", res);
         return res;
@@ -208,7 +236,8 @@
     }
 
     // Add new entry into mOutputs
-    mOutputs.push_back(gbp);
+    mOutputs[surfaceId] = gbp;
+    mConsumerBufferCount[surfaceId] = maxConsumerBuffers;
     mNotifiers[gbp] = listener;
     mOutputSlots[gbp] = std::make_unique<OutputSlots>(totalBufferCount);
 
@@ -216,8 +245,80 @@
     return NO_ERROR;
 }
 
+status_t Camera3StreamSplitter::removeOutput(size_t surfaceId) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mMutex);
+
+    status_t res = removeOutputLocked(surfaceId);
+    if (res != OK) {
+        SP_LOGE("%s: removeOutputLocked failed %d", __FUNCTION__, res);
+        return res;
+    }
+
+    res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers+1);
+    if (res != OK) {
+        SP_LOGE("%s: setMaxAcquiredBufferCount failed %d", __FUNCTION__, res);
+        return res;
+    }
+
+    return res;
+}
+
+status_t Camera3StreamSplitter::removeOutputLocked(size_t surfaceId) {
+    if (mOutputs[surfaceId] == nullptr) {
+        SP_LOGE("%s: output surface is not present!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    sp<IGraphicBufferProducer> gbp = mOutputs[surfaceId];
+    //Search and decrement the ref. count of any buffers that are
+    //still attached to the removed surface.
+    std::vector<uint64_t> pendingBufferIds;
+    auto& outputSlots = *mOutputSlots[gbp];
+    for (size_t i = 0; i < outputSlots.size(); i++) {
+        if (outputSlots[i] != nullptr) {
+            pendingBufferIds.push_back(outputSlots[i]->getId());
+            auto rc = gbp->detachBuffer(i);
+            if (rc != NO_ERROR) {
+                //Buffers that fail to detach here will be scheduled for detach in the
+                //input buffer queue and the rest of the registered outputs instead.
+                //This helps ensure that the camera stops accessing buffers that can
+                //still be referenced by the disconnected output.
+                mDetachedBuffers.emplace(outputSlots[i]->getId());
+            }
+        }
+    }
+    mOutputs[surfaceId] = nullptr;
+    mOutputSlots[gbp] = nullptr;
+    for (const auto &id : pendingBufferIds) {
+        decrementBufRefCountLocked(id, surfaceId);
+    }
+
+    auto res = IInterface::asBinder(gbp)->unlinkToDeath(mNotifiers[gbp]);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to unlink producer death listener: %d ", __FUNCTION__, res);
+        return res;
+    }
+
+    res = gbp->disconnect(NATIVE_WINDOW_API_CAMERA);
+    if (res != OK) {
+        SP_LOGE("%s: Unable to disconnect from producer interface: %d ", __FUNCTION__, res);
+        return res;
+    }
+
+    mNotifiers[gbp] = nullptr;
+    if (mConsumerBufferCount[surfaceId] < mMaxHalBuffers) {
+        mMaxConsumerBuffers -= mConsumerBufferCount[surfaceId];
+    } else {
+        SP_LOGE("%s: Cached consumer buffer count mismatch!", __FUNCTION__);
+    }
+    mConsumerBufferCount[surfaceId] = 0;
+
+    return res;
+}
+
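Each attached output contributes its consumer-side buffer count to the splitter's aggregate, and removeOutputLocked() subtracts that share again before the max acquired buffer count is lowered. A small sketch of the accounting (names are stand-ins):

#include <cstdio>
#include <map>

struct BufferAccounting {
    std::map<size_t, size_t> perOutput;   // surface id -> buffers that output needs
    size_t total = 0;                     // aggregate consumer-side buffer demand

    void addOutput(size_t surfaceId, size_t consumerBuffers) {
        perOutput[surfaceId] = consumerBuffers;
        total += consumerBuffers;
    }
    void removeOutput(size_t surfaceId) {
        total -= perOutput[surfaceId];    // release this output's share
        perOutput[surfaceId] = 0;
    }
};

int main() {
    BufferAccounting acct;
    acct.addOutput(0, 3);
    acct.addOutput(1, 2);
    acct.removeOutput(1);
    std::printf("aggregate consumer buffers: %zu\n", acct.total);
    return 0;
}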
 status_t Camera3StreamSplitter::outputBufferLocked(const sp<IGraphicBufferProducer>& output,
-        const BufferItem& bufferItem) {
+        const BufferItem& bufferItem, size_t surfaceId) {
     ATRACE_CALL();
     status_t res;
     IGraphicBufferProducer::QueueBufferInput queueInput(
@@ -242,6 +343,11 @@
 
     SP_LOGV("%s: Queuing buffer to buffer queue %p slot %d returns %d",
             __FUNCTION__, output.get(), slot, res);
+    // 'mMutex' is not held while the buffer is queued, which makes removal of
+    // "output" possible. Check whether this is the case and return.
+    if (mOutputSlots[output] == nullptr) {
+        return res;
+    }
     if (res != OK) {
         if (res != NO_INIT && res != DEAD_OBJECT) {
             SP_LOGE("Queuing buffer to output failed (%d)", res);
@@ -250,7 +356,7 @@
         // that, increment the release count so that we still release this
         // buffer eventually, and move on to the next output
         onAbandonedLocked();
-        decrementBufRefCountLocked(bufferItem.mGraphicBuffer->getId(), output);
+        decrementBufRefCountLocked(bufferItem.mGraphicBuffer->getId(), surfaceId);
         return res;
     }
 
@@ -258,7 +364,7 @@
     // queue, no onBufferReleased is called by the buffer queue.
     // Proactively trigger the callback to avoid buffer loss.
     if (queueOutput.bufferReplaced) {
-        onBufferReleasedByOutputLocked(output);
+        onBufferReleasedByOutputLocked(output, surfaceId);
     }
 
     return res;
@@ -271,7 +377,6 @@
 
 status_t Camera3StreamSplitter::notifyBufferReleased(const sp<GraphicBuffer>& buffer) {
     ATRACE_CALL();
-    status_t res = OK;
 
     Mutex::Autolock lock(mMutex);
 
@@ -279,17 +384,7 @@
     std::unique_ptr<BufferTracker> tracker_ptr = std::move(mBuffers[bufferId]);
     mBuffers.erase(bufferId);
 
-    for (const auto surface : tracker_ptr->requestedSurfaces()) {
-        sp<IGraphicBufferProducer>& gbp = mOutputs[surface];
-        OutputSlots& outputSlots = *(mOutputSlots[gbp]);
-        int slot = getSlotForOutputLocked(gbp, buffer);
-        if (slot != BufferItem::INVALID_BUFFER_SLOT) {
-             gbp->detachBuffer(slot);
-             outputSlots[slot].clear();
-        }
-    }
-
-    return res;
+    return OK;
 }
 
 status_t Camera3StreamSplitter::attachBufferToOutputs(ANativeWindowBuffer* anb,
@@ -307,7 +402,15 @@
 
     for (auto& surface_id : surface_ids) {
         sp<IGraphicBufferProducer>& gbp = mOutputs[surface_id];
-        int slot = BufferItem::INVALID_BUFFER_SLOT;
+        if (gbp.get() == nullptr) {
+            // Output surface was likely removed by the client.
+            continue;
+        }
+        int slot = getSlotForOutputLocked(gbp, gb);
+        if (slot != BufferItem::INVALID_BUFFER_SLOT) {
+            //Buffer is already attached to this output surface.
+            continue;
+        }
         //Temporarly Unlock the mutex when trying to attachBuffer to the output
         //queue, because attachBuffer could block in case of a slow consumer. If
         //we block while holding the lock, onFrameAvailable and onBufferReleased
@@ -320,12 +423,17 @@
                     __FUNCTION__, gbp.get(), strerror(-res), res);
             return res;
         }
+        // 'mMutex' is not held while the buffer is attached, which makes removal of
+        // "gbp" possible. Check whether this is the case and continue.
+        if (mOutputSlots[gbp] == nullptr) {
+            continue;
+        }
         auto& outputSlots = *mOutputSlots[gbp];
         if (outputSlots[slot] != nullptr) {
             // If the buffer is attached to a slot which already contains a buffer,
             // the previous buffer will be removed from the output queue. Decrement
             // the reference count accordingly.
-            decrementBufRefCountLocked(outputSlots[slot]->getId(), gbp);
+            decrementBufRefCountLocked(outputSlots[slot]->getId(), surface_id);
         }
         SP_LOGV("%s: Attached buffer %p to slot %d on output %p.",__FUNCTION__, gb.get(),
                 slot, gbp.get());
@@ -349,7 +457,21 @@
         mOnFrameAvailableRes.store(res);
         return;
     }
-    if (mBuffers.find(bufferItem.mGraphicBuffer->getId()) == mBuffers.end()) {
+
+    uint64_t bufferId;
+    if (bufferItem.mGraphicBuffer != nullptr) {
+        mInputSlots[bufferItem.mSlot] = bufferItem;
+    } else if (bufferItem.mAcquireCalled) {
+        bufferItem.mGraphicBuffer = mInputSlots[bufferItem.mSlot].mGraphicBuffer;
+        mInputSlots[bufferItem.mSlot].mFrameNumber = bufferItem.mFrameNumber;
+    } else {
+        SP_LOGE("%s: Invalid input graphic buffer!", __FUNCTION__);
+        res = BAD_VALUE;
+        mOnFrameAvailableRes.store(res);
+        return;
+    }
+    bufferId = bufferItem.mGraphicBuffer->getId();
+
+    if (mBuffers.find(bufferId) == mBuffers.end()) {
         SP_LOGE("%s: Acquired buffer doesn't exist in attached buffer map",
                 __FUNCTION__);
         mOnFrameAvailableRes.store(INVALID_OPERATION);
@@ -359,24 +481,19 @@
     SP_LOGV("acquired buffer %" PRId64 " from input at slot %d",
             bufferItem.mGraphicBuffer->getId(), bufferItem.mSlot);
 
-    res = mConsumer->detachBuffer(bufferItem.mSlot);
-    if (res != NO_ERROR) {
-        SP_LOGE("%s: detaching buffer from input failed (%d)", __FUNCTION__, res);
-        mOnFrameAvailableRes.store(res);
-        return;
-    }
-
     // Attach and queue the buffer to each of the outputs
-    BufferTracker& tracker = *(mBuffers[bufferItem.mGraphicBuffer->getId()]);
+    BufferTracker& tracker = *(mBuffers[bufferId]);
 
     SP_LOGV("%s: BufferTracker for buffer %" PRId64 ", number of requests %zu",
            __FUNCTION__, bufferItem.mGraphicBuffer->getId(), tracker.requestedSurfaces().size());
     for (const auto id : tracker.requestedSurfaces()) {
 
-        LOG_ALWAYS_FATAL_IF(id >= mOutputs.size(),
-                "requested surface id exceeding max registered ids");
+        if (mOutputs[id] == nullptr) {
+            // Output surface was likely removed by the client.
+            continue;
+        }
 
-        res = outputBufferLocked(mOutputs[id], bufferItem);
+        res = outputBufferLocked(mOutputs[id], bufferItem, id);
         if (res != OK) {
             SP_LOGE("%s: outputBufferLocked failed %d", __FUNCTION__, res);
             mOnFrameAvailableRes.store(res);
@@ -389,12 +506,14 @@
     mOnFrameAvailableRes.store(res);
 }
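onFrameAvailable() above caches the BufferItem per input slot so that a later acquisition arriving without a GraphicBuffer pointer can still be resolved. A sketch of that slot cache over plain types; FakeItem is a hypothetical stand-in for BufferItem:

#include <cstdint>
#include <cstdio>
#include <map>

struct FakeItem {
    int slot;
    uint64_t bufferId;       // 0 plays the role of a null mGraphicBuffer
    uint64_t frameNumber;
};

// Returns the resolved buffer id, or 0 if the slot was never populated.
uint64_t resolveBuffer(std::map<int, FakeItem>& inputSlots, FakeItem item) {
    if (item.bufferId != 0) {
        inputSlots[item.slot] = item;                      // first sight of this slot
    } else if (inputSlots.count(item.slot)) {
        item.bufferId = inputSlots[item.slot].bufferId;    // reuse the cached buffer
        inputSlots[item.slot].frameNumber = item.frameNumber;
    }
    return item.bufferId;
}

int main() {
    std::map<int, FakeItem> inputSlots;
    resolveBuffer(inputSlots, {2, 0xabcd, 10});            // buffer carried explicitly
    uint64_t id = resolveBuffer(inputSlots, {2, 0, 11});   // later frame, no buffer pointer
    std::printf("resolved buffer id: 0x%llx\n", static_cast<unsigned long long>(id));
    return 0;
}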
 
-void Camera3StreamSplitter::decrementBufRefCountLocked(uint64_t id,
-        const sp<IGraphicBufferProducer>& from) {
+void Camera3StreamSplitter::decrementBufRefCountLocked(uint64_t id, size_t surfaceId) {
     ATRACE_CALL();
-    size_t referenceCount = mBuffers[id]->decrementReferenceCountLocked();
 
-    removeSlotForOutputLocked(from, mBuffers[id]->getBuffer());
+    if (mBuffers[id] == nullptr) {
+        return;
+    }
+
+    size_t referenceCount = mBuffers[id]->decrementReferenceCountLocked(surfaceId);
     if (referenceCount > 0) {
         return;
     }
@@ -407,14 +526,28 @@
     std::unique_ptr<BufferTracker> tracker_ptr = std::move(mBuffers[id]);
     mBuffers.erase(id);
 
-    // Attach and release the buffer back to the input
-    int consumerSlot = BufferItem::INVALID_BUFFER_SLOT;
-    status_t res = mConsumer->attachBuffer(&consumerSlot, tracker_ptr->getBuffer());
-    if (res != NO_ERROR) {
-        SP_LOGE("%s: attaching buffer to input failed (%d)", __FUNCTION__, res);
+    uint64_t bufferId = tracker_ptr->getBuffer()->getId();
+    int consumerSlot = -1;
+    uint64_t frameNumber = 0;
+    auto inputSlot = mInputSlots.begin();
+    for (; inputSlot != mInputSlots.end(); inputSlot++) {
+        if (inputSlot->second.mGraphicBuffer->getId() == bufferId) {
+            consumerSlot = inputSlot->second.mSlot;
+            frameNumber = inputSlot->second.mFrameNumber;
+            break;
+        }
+    }
+    if (consumerSlot == -1) {
+        SP_LOGE("%s: Buffer missing inside input slots!", __FUNCTION__);
         return;
     }
 
+    auto detachBuffer = mDetachedBuffers.find(bufferId);
+    bool detach = (detachBuffer != mDetachedBuffers.end());
+    if (detach) {
+        mDetachedBuffers.erase(detachBuffer);
+        mInputSlots.erase(inputSlot);
+    }
     // Temporarily unlock mutex to avoid circular lock:
     // 1. This function holds splitter lock, calls releaseBuffer which triggers
     // onBufferReleased in Camera3OutputStream. onBufferReleased waits on the
@@ -424,16 +557,25 @@
     // splitter lock.
     sp<IGraphicBufferConsumer> consumer(mConsumer);
     mMutex.unlock();
+    int res = NO_ERROR;
     if (consumer != nullptr) {
-        res = consumer->releaseBuffer(consumerSlot, /* frameNumber */ 0,
-                EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, tracker_ptr->getMergedFence());
+        if (detach) {
+            res = consumer->detachBuffer(consumerSlot);
+        } else {
+            res = consumer->releaseBuffer(consumerSlot, frameNumber,
+                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, tracker_ptr->getMergedFence());
+        }
     } else {
         SP_LOGE("%s: consumer has become null!", __FUNCTION__);
     }
     mMutex.lock();
-    // If the producer of this queue is disconnected, -22 error will occur
+
     if (res != NO_ERROR) {
-        SP_LOGE("%s: releaseBuffer returns %d", __FUNCTION__, res);
+        if (detach) {
+            SP_LOGE("%s: detachBuffer returns %d", __FUNCTION__, res);
+        } else {
+            SP_LOGE("%s: releaseBuffer returns %d", __FUNCTION__, res);
+        }
     }
 }
 
@@ -442,27 +584,61 @@
     ATRACE_CALL();
     Mutex::Autolock lock(mMutex);
 
-    onBufferReleasedByOutputLocked(from);
+    size_t surfaceId = 0;
+    bool found = false;
+    for (const auto& it : mOutputs) {
+        if (it.second == from) {
+            found = true;
+            surfaceId = it.first;
+            break;
+        }
+    }
+    if (!found) {
+        SP_LOGV("%s: output surface not registered anymore!", __FUNCTION__);
+        return;
+    }
+
+    onBufferReleasedByOutputLocked(from, surfaceId);
 }
 
 void Camera3StreamSplitter::onBufferReleasedByOutputLocked(
-        const sp<IGraphicBufferProducer>& from) {
+        const sp<IGraphicBufferProducer>& from, size_t surfaceId) {
     ATRACE_CALL();
     sp<GraphicBuffer> buffer;
     sp<Fence> fence;
-    status_t res = from->detachNextBuffer(&buffer, &fence);
+    if (mOutputSlots[from] == nullptr) {
+        // Output surface was likely removed by the client.
+        return;
+    }
+    auto outputSlots = *mOutputSlots[from];
+
+    int slot = BufferItem::INVALID_BUFFER_SLOT;
+    auto res = from->dequeueBuffer(&slot, &fence, mWidth, mHeight, mFormat, mProducerUsage,
+            nullptr, nullptr);
     if (res == NO_INIT) {
         // If we just discovered that this output has been abandoned, note that,
         // but we can't do anything else, since buffer is invalid
         onAbandonedLocked();
         return;
-    } else if (res == NO_MEMORY) {
-        SP_LOGV("%s: No free buffers", __FUNCTION__);
+    } else if (res == IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION) {
+        SP_LOGE("%s: Producer needs to re-allocate buffer!", __FUNCTION__);
+        SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
         return;
-    } else if (res != OK) {
-        SP_LOGE("%s: detaching buffer from output failed (%d)", __FUNCTION__, res);
+    } else if (res == IGraphicBufferProducer::RELEASE_ALL_BUFFERS) {
+        SP_LOGE("%s: All slot->buffer mapping should be released!", __FUNCTION__);
+        SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
+        return;
+    } else if (res == NO_MEMORY) {
+        SP_LOGE("%s: No free buffers", __FUNCTION__);
+        return;
+    } else if (res == WOULD_BLOCK) {
+        SP_LOGE("%s: Dequeue call will block", __FUNCTION__);
+        return;
+    } else if (res != OK || (slot == BufferItem::INVALID_BUFFER_SLOT)) {
+        SP_LOGE("%s: dequeue buffer from output failed (%d)", __FUNCTION__, res);
         return;
     }
+    buffer = outputSlots[slot];
 
     BufferTracker& tracker = *(mBuffers[buffer->getId()]);
     // Merge the release fence of the incoming buffer so that the fence we send
@@ -470,11 +646,22 @@
     if (fence != nullptr && fence->isValid()) {
         tracker.mergeFence(fence);
     }
-    SP_LOGV("detached buffer %" PRId64 " %p from output %p",
+    SP_LOGV("%s: dequeued buffer %" PRId64 " %p from output %p", __FUNCTION__,
             buffer->getId(), buffer.get(), from.get());
 
+    auto detachBuffer = mDetachedBuffers.find(buffer->getId());
+    bool detach = (detachBuffer != mDetachedBuffers.end());
+    if (detach) {
+        res = from->detachBuffer(slot);
+        if (res == NO_ERROR) {
+            outputSlots[slot] = nullptr;
+        } else {
+            SP_LOGE("%s: detach buffer from output failed (%d)", __FUNCTION__, res);
+        }
+    }
+
     // Check to see if this is the last outstanding reference to this buffer
-    decrementBufRefCountLocked(buffer->getId(), from);
+    decrementBufRefCountLocked(buffer->getId(), surfaceId);
 }
 
 void Camera3StreamSplitter::onAbandonedLocked() {
@@ -501,27 +688,11 @@
         }
     }
 
-    SP_LOGE("%s: Cannot find slot for gb %p on output %p", __FUNCTION__, gb.get(),
+    SP_LOGV("%s: Cannot find slot for gb %p on output %p", __FUNCTION__, gb.get(),
             gbp.get());
     return BufferItem::INVALID_BUFFER_SLOT;
 }
 
-status_t Camera3StreamSplitter::removeSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
-        const sp<GraphicBuffer>& gb) {
-    auto& outputSlots = *mOutputSlots[gbp];
-
-    for (size_t i = 0; i < outputSlots.size(); i++) {
-        if (outputSlots[i] == gb) {
-           outputSlots[i].clear();
-           return NO_ERROR;
-        }
-    }
-
-    SP_LOGE("%s: Cannot find slot for gb %p on output %p", __FUNCTION__, gb.get(),
-            gbp.get());
-    return BAD_VALUE;
-}
-
 Camera3StreamSplitter::OutputListener::OutputListener(
         wp<Camera3StreamSplitter> splitter,
         wp<IGraphicBufferProducer> output)
@@ -553,7 +724,14 @@
     mMergedFence = Fence::merge(String8("Camera3StreamSplitter"), mMergedFence, with);
 }
 
-size_t Camera3StreamSplitter::BufferTracker::decrementReferenceCountLocked() {
+size_t Camera3StreamSplitter::BufferTracker::decrementReferenceCountLocked(size_t surfaceId) {
+    const auto& it = std::find(mRequestedSurfaces.begin(), mRequestedSurfaces.end(), surfaceId);
+    if (it == mRequestedSurfaces.end()) {
+        return mReferenceCount;
+    } else {
+        mRequestedSurfaces.erase(it);
+    }
+
     if (mReferenceCount > 0)
         --mReferenceCount;
     return mReferenceCount;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index 3b8839e..76a5b7d 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_SERVERS_STREAMSPLITTER_H
 #define ANDROID_SERVERS_STREAMSPLITTER_H
 
+#include <unordered_set>
+
 #include <gui/IConsumerListener.h>
 #include <gui/IProducerListener.h>
 #include <gui/BufferItemConsumer.h>
@@ -51,22 +53,25 @@
 
     // Connect to the stream splitter by creating buffer queue and connecting it
     // with output surfaces.
-    status_t connect(const std::vector<sp<Surface> >& surfaces,
-            uint64_t consumerUsage, size_t halMaxBuffers,
-            sp<Surface>* consumer);
+    status_t connect(const std::unordered_map<size_t, sp<Surface>> &surfaces,
+            uint64_t consumerUsage, uint64_t producerUsage, size_t halMaxBuffers, uint32_t width,
+            uint32_t height, android::PixelFormat format, sp<Surface>* consumer);
 
     // addOutput adds an output BufferQueue to the splitter. The splitter
     // connects to outputQueue as a CPU producer, and any buffers queued
-    // to the input will be queued to each output. It is assumed that all of the
-    // outputs are added before any buffers are queued on the input. If any
-    // output is abandoned by its consumer, the splitter will abandon its input
-    // queue (see onAbandoned).
+    // to the input will be queued to each output. If any output is abandoned
+    // by its consumer, the splitter will abandon its input queue (see onAbandoned).
     //
     // A return value other than NO_ERROR means that an error has occurred and
     // outputQueue has not been added to the splitter. BAD_VALUE is returned if
     // outputQueue is NULL. See IGraphicBufferProducer::connect for explanations
     // of other error codes.
-    status_t addOutput(const sp<Surface>& outputQueue);
+    status_t addOutput(size_t surfaceId, const sp<Surface>& outputQueue);
+
+    // removeOutput will remove a BufferQueue that was previously added to
+    // the splitter outputs. Any pending buffers in the BufferQueue will be
+    // reclaimed.
+    status_t removeOutput(size_t surfaceId);
 
     // Notification that the graphic buffer has been released to the input
     // BufferQueue. The buffer should be reused by the camera device instead of
@@ -120,7 +125,7 @@
     // This is the implementation of onBufferReleasedByOutput without the mutex locked.
     // It could either be called from onBufferReleasedByOutput or from
     // onFrameAvailable when a buffer in the async buffer queue is overwritten.
-    void onBufferReleasedByOutputLocked(const sp<IGraphicBufferProducer>& from);
+    void onBufferReleasedByOutputLocked(const sp<IGraphicBufferProducer>& from, size_t surfaceId);
 
     // When this is called, the splitter disconnects from (i.e., abandons) its
     // input queue and signals any waiting onFrameAvailable calls to wake up.
@@ -131,7 +136,7 @@
 
     // Decrement the buffer's reference count. Once the reference count becomes
     // 0, return the buffer back to the input BufferQueue.
-    void decrementBufRefCountLocked(uint64_t id, const sp<IGraphicBufferProducer>& from);
+    void decrementBufRefCountLocked(uint64_t id, size_t surfaceId);
 
     // This is a thin wrapper class that lets us determine which BufferQueue
     // the IProducerListener::onBufferReleased callback is associated with. We
@@ -168,7 +173,7 @@
 
         // Returns the new value
         // Only called while mMutex is held
-        size_t decrementReferenceCountLocked();
+        size_t decrementReferenceCountLocked(size_t surfaceId);
 
         const std::vector<size_t> requestedSurfaces() const { return mRequestedSurfaces; }
 
@@ -191,13 +196,15 @@
     // Must be accessed through RefBase
     virtual ~Camera3StreamSplitter();
 
-    status_t addOutputLocked(const sp<Surface>& outputQueue);
+    status_t addOutputLocked(size_t surfaceId, const sp<Surface>& outputQueue);
+
+    status_t removeOutputLocked(size_t surfaceId);
 
     // Send a buffer to particular output, and increment the reference count
     // of the buffer. If this output is abandoned, the buffer's reference count
     // won't be incremented.
     status_t outputBufferLocked(const sp<IGraphicBufferProducer>& output,
-            const BufferItem& bufferItem);
+            const BufferItem& bufferItem, size_t surfaceId);
 
     // Get unique name for the buffer queue consumer
     String8 getUniqueConsumerName();
@@ -205,14 +212,14 @@
     // Helper function to get the BufferQueue slot where a particular buffer is attached to.
     int getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
             const sp<GraphicBuffer>& gb);
-    // Helper function to remove the buffer from the BufferQueue slot
-    status_t removeSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
-            const sp<GraphicBuffer>& gb);
-
 
     // Sum of max consumer buffers for all outputs
     size_t mMaxConsumerBuffers = 0;
     size_t mMaxHalBuffers = 0;
+    uint32_t mWidth = 0;
+    uint32_t mHeight = 0;
+    android::PixelFormat mFormat = android::PIXEL_FORMAT_NONE;
+    uint64_t mProducerUsage = 0;
 
     static const nsecs_t kDequeueBufferTimeout   = s2ns(1); // 1 sec
 
@@ -223,7 +230,15 @@
     sp<BufferItemConsumer> mBufferItemConsumer;
     sp<Surface> mSurface;
 
-    std::vector<sp<IGraphicBufferProducer> > mOutputs;
+    // Map of input buffer queue slots -> buffer items
+    std::unordered_map<uint64_t, BufferItem> mInputSlots;
+
+    // Map of surface ids -> GBP outputs
+    std::unordered_map<int, sp<IGraphicBufferProducer> > mOutputs;
+
+    // Map of surface ids -> consumer buffer counts
+    std::unordered_map<int, size_t > mConsumerBufferCount;
+
     // Map of GraphicBuffer IDs (GraphicBuffer::getId()) to buffer tracking
     // objects (which are mostly for counting how many outputs have released the
     // buffer, but also contain merged release fences).
@@ -242,6 +257,10 @@
     std::unordered_map<sp<IGraphicBufferProducer>, std::unique_ptr<OutputSlots>,
             GBPHash> mOutputSlots;
 
+    // A set of buffers that could potentially stay in some of the outputs after removal
+    // and therefore should be detached from the input queue.
+    std::unordered_set<uint64_t> mDetachedBuffers;
+
     // Latest onFrameAvailable return value
     std::atomic<status_t> mOnFrameAvailableRes{0};
 
diff --git a/services/mediaanalytics/MediaAnalyticsService.cpp b/services/mediaanalytics/MediaAnalyticsService.cpp
index c7f9270..7f42b1b 100644
--- a/services/mediaanalytics/MediaAnalyticsService.cpp
+++ b/services/mediaanalytics/MediaAnalyticsService.cpp
@@ -159,7 +159,8 @@
           mMaxRecordAgeNs(kMaxRecordAgeNs),
           mMaxRecordSets(kMaxRecordSets),
           mNewSetInterval(kNewSetIntervalNs),
-          mDumpProto(MediaAnalyticsItem::PROTO_V0) {
+          mDumpProto(MediaAnalyticsItem::PROTO_V1),
+          mDumpProtoDefault(MediaAnalyticsItem::PROTO_V1) {
 
     ALOGD("MediaAnalyticsService created");
     // clear our queues
@@ -271,7 +272,7 @@
     }
 
     ALOGV("given uid %d; sanitized uid: %d sanitized pkg: %s "
-          "sanitized pkg version: %d",
+          "sanitized pkg version: %"  PRId64,
           uid_given, item->getUid(),
           item->getPkgName().c_str(),
           item->getPkgVersionCode());
@@ -299,6 +300,8 @@
 
     bool finalizing = item->getFinalized();
 
+    Mutex::Autolock _l(mLock);
+
     // if finalizing, we'll remove it
     MediaAnalyticsItem *oitem = findItem(mOpen, item, finalizing | forcenew);
     if (oitem != NULL) {
@@ -316,6 +319,7 @@
                 saveItem(mFinalized, oitem, 0);
             }
             // new record could itself be marked finalized...
+            id = item->getSessionID();
             if (finalizing) {
                 summarize(item);
                 saveItem(mFinalized, item, 0);
@@ -323,16 +327,15 @@
             } else {
                 saveItem(mOpen, item, 1);
             }
-            id = item->getSessionID();
         } else {
             // combine the records, send it to finalized if appropriate
             oitem->merge(item);
+            id = oitem->getSessionID();
             if (finalizing) {
                 summarize(oitem);
                 saveItem(mFinalized, oitem, 0);
                 mItemsFinalized++;
             }
-            id = oitem->getSessionID();
 
             // we're responsible for disposing of the dead record
             delete item;
@@ -379,6 +382,7 @@
     String16 summaryOption("-summary");
     bool summary = false;
     String16 protoOption("-proto");
+    int chosenProto = mDumpProtoDefault;
     String16 clearOption("-clear");
     bool clear = false;
     String16 sinceOption("-since");
@@ -398,7 +402,7 @@
             i++;
             if (i < n) {
                 String8 value(args[i]);
-                int proto = MediaAnalyticsItem::PROTO_V0;       // default to original
+                int proto = MediaAnalyticsItem::PROTO_V0;
                 char *endp;
                 const char *p = value.string();
                 proto = strtol(p, &endp, 10);
@@ -408,8 +412,12 @@
                     } else if (proto > MediaAnalyticsItem::PROTO_LAST) {
                         proto = MediaAnalyticsItem::PROTO_LAST;
                     }
-                    mDumpProto = proto;
+                    chosenProto = proto;
+                } else {
+                    result.append("unable to parse value for -proto\n\n");
                 }
+            } else {
+                result.append("missing value for -proto\n\n");
             }
         } else if (args[i] == sinceOption) {
             i++;
@@ -435,7 +443,7 @@
         } else if (args[i] == helpOption) {
             result.append("Recognized parameters:\n");
             result.append("-help        this help message\n");
-            result.append("-proto X     dump using protocol X (defaults to 1)");
+            result.append("-proto #     dump using protocol #");
             result.append("-summary     show summary info\n");
             result.append("-clear       clears out saved records\n");
             result.append("-only X      process records for component X\n");
@@ -448,6 +456,8 @@
 
     Mutex::Autolock _l(mLock);
 
+    mDumpProto = chosenProto;
+
     // we ALWAYS dump this piece
     snprintf(buffer, SIZE, "Dump of the %s process:\n", kServiceName);
     result.append(buffer);
@@ -609,21 +619,31 @@
 // XXX: rewrite this to manage persistence, etc.
 
 // insert appropriately into queue
+// caller should hold mLock
 void MediaAnalyticsService::saveItem(List<MediaAnalyticsItem *> *l, MediaAnalyticsItem * item, int front) {
 
-    Mutex::Autolock _l(mLock);
-
-    // adding at back of queue (fifo order)
     if (front)  {
+        // for non-finalized stuff, since we expect to reference it again soon,
+        // make it quicker to find (nearer the front of our list)
         l->push_front(item);
     } else {
+        // for finalized records, which we want to dump 'in sequence order'
         l->push_back(item);
     }
 
+    // our reclaim process is for oldest-first queues
+    if (front) {
+        return;
+    }
+
     // keep removing old records from the front until we're in-bounds (count)
     if (mMaxRecords > 0) {
         while (l->size() > (size_t) mMaxRecords) {
             MediaAnalyticsItem * oitem = *(l->begin());
+            if (oitem == item) {
+                break;
+            }
             l->erase(l->begin());
             delete oitem;
             mItemsDiscarded++;
@@ -637,6 +657,9 @@
         while (l->size() > 0) {
             MediaAnalyticsItem * oitem = *(l->begin());
             nsecs_t when = oitem->getTimestamp();
+            if (oitem == item) {
+                break;
+            }
             // careful about timejumps too
             if ((now > when) && (now-when) <= mMaxRecordAgeNs) {
                 // this (and the rest) are recent enough to keep
@@ -682,6 +705,7 @@
 }
 
 // find the incomplete record that this will overlay
+// caller should hold mLock
 MediaAnalyticsItem *MediaAnalyticsService::findItem(List<MediaAnalyticsItem*> *theList, MediaAnalyticsItem *nitem, bool removeit) {
     if (nitem == NULL) {
         return NULL;
@@ -689,8 +713,6 @@
 
     MediaAnalyticsItem *item = NULL;
 
-    Mutex::Autolock _l(mLock);
-
     for (List<MediaAnalyticsItem *>::iterator it = theList->begin();
         it != theList->end(); it++) {
         MediaAnalyticsItem *tmp = (*it);
@@ -711,10 +733,9 @@
 
 
 // delete the indicated record
+// caller should hold mLock
 void MediaAnalyticsService::deleteItem(List<MediaAnalyticsItem *> *l, MediaAnalyticsItem *item) {
 
-    Mutex::Autolock _l(mLock);
-
     for (List<MediaAnalyticsItem *>::iterator it = l->begin();
         it != l->end(); it++) {
         if ((*it)->getSessionID() != item->getSessionID())
@@ -835,7 +856,7 @@
     } else {
         AString pkg;
         std::string installer = "";
-        int32_t versionCode = 0;
+        int64_t versionCode = 0;
 
         struct passwd *pw = getpwuid(uid);
         if (pw) {
@@ -905,7 +926,7 @@
                 }
 
 
-                ALOGV("package '%s' installed by '%s' versioncode %d / %08x",
+                ALOGV("package '%s' installed by '%s' versioncode %"  PRId64 " / %" PRIx64,
                       pkg.c_str(), installer.c_str(), versionCode, versionCode);
 
                 if (strncmp(installer.c_str(), "com.android.", 12) == 0) {
diff --git a/services/mediaanalytics/MediaAnalyticsService.h b/services/mediaanalytics/MediaAnalyticsService.h
index 52e4631..fce7d08 100644
--- a/services/mediaanalytics/MediaAnalyticsService.h
+++ b/services/mediaanalytics/MediaAnalyticsService.h
@@ -125,6 +125,7 @@
 
     // support for generating output
     int mDumpProto;
+    int mDumpProtoDefault;
     String8 dumpQueue(List<MediaAnalyticsItem*> *);
     String8 dumpQueue(List<MediaAnalyticsItem*> *, nsecs_t, const char *only);
 
@@ -137,7 +138,7 @@
         uid_t uid;
         AString pkg;
         AString installer;
-        int32_t versionCode;
+        int64_t versionCode;
         nsecs_t expiration;
     };
 
diff --git a/services/mediaanalytics/OWNERS b/services/mediaanalytics/OWNERS
new file mode 100644
index 0000000..9af258b
--- /dev/null
+++ b/services/mediaanalytics/OWNERS
@@ -0,0 +1 @@
+essick@google.com
diff --git a/services/mediacodec/Android.mk b/services/mediacodec/Android.mk
index 1ead944..8e5b260 100644
--- a/services/mediacodec/Android.mk
+++ b/services/mediacodec/Android.mk
@@ -1,21 +1,5 @@
 LOCAL_PATH := $(call my-dir)
 
-# service library
-include $(CLEAR_VARS)
-LOCAL_SRC_FILES := MediaCodecService.cpp
-LOCAL_SHARED_LIBRARIES := \
-    libmedia_omx \
-    libbinder \
-    libgui \
-    libutils \
-    liblog \
-    libstagefright_omx \
-    libstagefright_xmlparser
-LOCAL_MODULE:= libmediacodecservice
-LOCAL_VENDOR_MODULE := true
-LOCAL_32_BIT_ONLY := true
-include $(BUILD_SHARED_LIBRARY)
-
 # service executable
 include $(CLEAR_VARS)
 # seccomp is not required for coverage build.
@@ -26,7 +10,6 @@
 LOCAL_SRC_FILES := main_codecservice.cpp
 LOCAL_SHARED_LIBRARIES := \
     libmedia_omx \
-    libmediacodecservice \
     libbinder \
     libutils \
     liblog \
@@ -45,6 +28,7 @@
 LOCAL_VENDOR_MODULE := true
 LOCAL_32_BIT_ONLY := true
 LOCAL_INIT_RC := android.hardware.media.omx@1.0-service.rc
+
 include $(BUILD_EXECUTABLE)
 
 # service seccomp policy
diff --git a/services/mediacodec/MediaCodecService.cpp b/services/mediacodec/MediaCodecService.cpp
deleted file mode 100644
index 6b510c6..0000000
--- a/services/mediacodec/MediaCodecService.cpp
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "MediaCodecService"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include "MediaCodecService.h"
-
-namespace android {
-
-sp<IOMX> MediaCodecService::getOMX() {
-
-    Mutex::Autolock autoLock(mOMXLock);
-
-    if (mOMX.get() == NULL) {
-        mOMX = new OMX();
-    }
-
-    return mOMX;
-}
-
-sp<IOMXStore> MediaCodecService::getOMXStore() {
-
-    Mutex::Autolock autoLock(mOMXStoreLock);
-
-    if (mOMXStore.get() == NULL) {
-        mOMXStore = new OMXStore();
-    }
-
-    return mOMXStore;
-}
-
-status_t MediaCodecService::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
-        uint32_t flags)
-{
-    return BnMediaCodecService::onTransact(code, data, reply, flags);
-}
-
-}   // namespace android
diff --git a/services/mediacodec/MediaCodecService.h b/services/mediacodec/MediaCodecService.h
deleted file mode 100644
index 9301135..0000000
--- a/services/mediacodec/MediaCodecService.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_MEDIA_CODEC_SERVICE_H
-#define ANDROID_MEDIA_CODEC_SERVICE_H
-
-#include <binder/BinderService.h>
-#include <media/IMediaCodecService.h>
-#include <media/stagefright/omx/OMX.h>
-#include <media/stagefright/omx/OMXStore.h>
-
-namespace android {
-
-class MediaCodecService : public BinderService<MediaCodecService>,
-        public BnMediaCodecService
-{
-    friend class BinderService<MediaCodecService>;    // for MediaCodecService()
-public:
-    MediaCodecService() : BnMediaCodecService() { }
-    virtual ~MediaCodecService() { }
-    virtual void onFirstRef() { }
-
-    static const char*    getServiceName() { return "media.codec"; }
-
-    virtual sp<IOMX>      getOMX();
-
-    virtual sp<IOMXStore> getOMXStore();
-
-    virtual status_t      onTransact(uint32_t code, const Parcel& data,
-                                     Parcel* reply, uint32_t flags);
-
-private:
-    Mutex                 mOMXLock;
-    sp<IOMX>              mOMX;
-    Mutex                 mOMXStoreLock;
-    sp<IOMXStore>         mOMXStore;
-};
-
-}   // namespace android
-
-#endif  // ANDROID_MEDIA_CODEC_SERVICE_H
diff --git a/services/mediacodec/android.hardware.media.omx@1.0-service.rc b/services/mediacodec/android.hardware.media.omx@1.0-service.rc
index ec51d65..3ef9a85 100644
--- a/services/mediacodec/android.hardware.media.omx@1.0-service.rc
+++ b/services/mediacodec/android.hardware.media.omx@1.0-service.rc
@@ -1,4 +1,4 @@
-service mediacodec /vendor/bin/hw/android.hardware.media.omx@1.0-service
+service vendor.media.omx /vendor/bin/hw/android.hardware.media.omx@1.0-service
     class main
     user mediacodec
     group camera drmrpc mediadrm
diff --git a/services/mediacodec/main_codecservice.cpp b/services/mediacodec/main_codecservice.cpp
index 79d6da5..701ca6e 100644
--- a/services/mediacodec/main_codecservice.cpp
+++ b/services/mediacodec/main_codecservice.cpp
@@ -15,22 +15,12 @@
 ** limitations under the License.
 */
 
-#include <fcntl.h>
-#include <sys/prctl.h>
-#include <sys/wait.h>
-#include <binder/IPCThreadState.h>
-#include <binder/ProcessState.h>
-#include <binder/IServiceManager.h>
-#include <cutils/properties.h>
-
-#include <string>
-
 #include <android-base/logging.h>
 
 // from LOCAL_C_INCLUDES
-#include "MediaCodecService.h"
 #include "minijail.h"
 
+#include <binder/ProcessState.h>
 #include <hidl/HidlTransportSupport.h>
 #include <media/stagefright/omx/1.0/Omx.h>
 #include <media/stagefright/omx/1.0/OmxStore.h>
@@ -45,41 +35,31 @@
 
 int main(int argc __unused, char** argv)
 {
+    strcpy(argv[0], "media.codec");
     LOG(INFO) << "mediacodecservice starting";
-    bool treble = property_get_bool("persist.media.treble_omx", true);
-    if (treble) {
-      android::ProcessState::initWithDriver("/dev/vndbinder");
-    }
-
     signal(SIGPIPE, SIG_IGN);
     SetUpMinijail(kSystemSeccompPolicyPath, kVendorSeccompPolicyPath);
 
-    strcpy(argv[0], "media.codec");
+    android::ProcessState::initWithDriver("/dev/vndbinder");
+    android::ProcessState::self()->startThreadPool();
 
     ::android::hardware::configureRpcThreadpool(64, false);
-    sp<ProcessState> proc(ProcessState::self());
 
-    if (treble) {
-        using namespace ::android::hardware::media::omx::V1_0;
-        sp<IOmxStore> omxStore = new implementation::OmxStore();
-        if (omxStore == nullptr) {
-            LOG(ERROR) << "Cannot create IOmxStore HAL service.";
-        } else if (omxStore->registerAsService() != OK) {
-            LOG(ERROR) << "Cannot register IOmxStore HAL service.";
-        }
-        sp<IOmx> omx = new implementation::Omx();
-        if (omx == nullptr) {
-            LOG(ERROR) << "Cannot create IOmx HAL service.";
-        } else if (omx->registerAsService() != OK) {
-            LOG(ERROR) << "Cannot register IOmx HAL service.";
-        } else {
-            LOG(INFO) << "Treble OMX service created.";
-        }
+    using namespace ::android::hardware::media::omx::V1_0;
+    sp<IOmxStore> omxStore = new implementation::OmxStore();
+    if (omxStore == nullptr) {
+        LOG(ERROR) << "Cannot create IOmxStore HAL service.";
+    } else if (omxStore->registerAsService() != OK) {
+        LOG(ERROR) << "Cannot register IOmxStore HAL service.";
+    }
+    sp<IOmx> omx = new implementation::Omx();
+    if (omx == nullptr) {
+        LOG(ERROR) << "Cannot create IOmx HAL service.";
+    } else if (omx->registerAsService() != OK) {
+        LOG(ERROR) << "Cannot register IOmx HAL service.";
     } else {
-        MediaCodecService::instantiate();
-        LOG(INFO) << "Non-Treble OMX service created.";
+        LOG(INFO) << "IOmx HAL service created.";
     }
 
-    ProcessState::self()->startThreadPool();
-    IPCThreadState::self()->joinThreadPool();
+    ::android::hardware::joinRpcThreadpool();
 }
diff --git a/services/mediaextractor/Android.mk b/services/mediaextractor/Android.mk
index d41da39..cd086f9 100644
--- a/services/mediaextractor/Android.mk
+++ b/services/mediaextractor/Android.mk
@@ -15,6 +15,20 @@
 LOCAL_REQUIRED_MODULES_arm := mediaextractor.policy
 LOCAL_REQUIRED_MODULES_arm64 := mediaextractor.policy
 LOCAL_REQUIRED_MODULES_x86 := mediaextractor.policy
+
+# extractor libraries
+LOCAL_REQUIRED_MODULES := \
+    libaacextractor \
+    libamrextractor \
+    libflacextractor \
+    libmidiextractor \
+    libmkvextractor \
+    libmp3extractor \
+    libmp4extractor \
+    libmpeg2extractor \
+    liboggextractor \
+    libwavextractor \
+
 LOCAL_SRC_FILES := main_extractorservice.cpp
 LOCAL_SHARED_LIBRARIES := libmedia libmediaextractorservice libbinder libutils \
     liblog libbase libicuuc libavservices_minijail
diff --git a/services/mediaextractor/MediaExtractorService.cpp b/services/mediaextractor/MediaExtractorService.cpp
index 08cbef6..f09d7cf 100644
--- a/services/mediaextractor/MediaExtractorService.cpp
+++ b/services/mediaextractor/MediaExtractorService.cpp
@@ -20,8 +20,11 @@
 
 #include <utils/Vector.h>
 
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaExtractor.h>
+#include <media/DataSource.h>
+#include <media/MediaExtractor.h>
+#include <media/stagefright/DataSourceFactory.h>
+#include <media/stagefright/InterfaceUtils.h>
+#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/RemoteDataSource.h>
 #include "MediaExtractorService.h"
 
@@ -31,25 +34,26 @@
         const sp<IDataSource> &remoteSource, const char *mime) {
     ALOGV("@@@ MediaExtractorService::makeExtractor for %s", mime);
 
-    sp<DataSource> localSource = DataSource::CreateFromIDataSource(remoteSource);
+    sp<DataSource> localSource = CreateDataSourceFromIDataSource(remoteSource);
 
-    sp<IMediaExtractor> ret = MediaExtractor::CreateFromService(localSource, mime);
+    sp<MediaExtractor> extractor = MediaExtractorFactory::CreateFromService(localSource, mime);
 
     ALOGV("extractor service created %p (%s)",
-            ret.get(),
-            ret == NULL ? "" : ret->name());
+            extractor.get(),
+            extractor == nullptr ? "" : extractor->name());
 
-    if (ret != NULL) {
+    if (extractor != nullptr) {
+        sp<IMediaExtractor> ret = CreateIMediaExtractorFromMediaExtractor(extractor);
         registerMediaExtractor(ret, localSource, mime);
+        return ret;
     }
-
-    return ret;
+    return nullptr;
 }
 
 sp<IDataSource> MediaExtractorService::makeIDataSource(int fd, int64_t offset, int64_t length)
 {
-    sp<DataSource> source = DataSource::CreateFromFd(fd, offset, length);
-    return source.get() != nullptr ? source->asIDataSource() : nullptr;
+    sp<DataSource> source = DataSourceFactory::CreateFromFd(fd, offset, length);
+    return CreateIDataSourceFromDataSource(source);
 }
 
 status_t MediaExtractorService::dump(int fd, const Vector<String16>& args) {
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy b/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy
index 4fa69d7..a9ee98d 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy
@@ -41,6 +41,9 @@
 nanosleep: 1
 getrandom: 1
 
+# for dynamically loading extractors
+pread64: 1
+
 # for FileSource
 readlinkat: 1
 _llseek: 1
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy b/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy
index eed804a..dd71ed7 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy
@@ -34,6 +34,12 @@
 # for FileSource
 readlinkat: 1
 
+# for dynamically loading extractors
+getdents64: 1
+readlinkat: 1
+pread64: 1
+mremap: 1
+
 # for attaching to debuggerd on process crash
 tgkill: 1
 rt_sigprocmask: 1
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy b/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy
index 3b37f92..ede108e 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy
@@ -39,6 +39,12 @@
 nanosleep: 1
 getrandom: 1
 
+# for dynamically loading extractors
+getdents64: 1
+readlinkat: 1
+pread64: 1
+mremap: 1
+
 # for FileSource
 readlinkat: 1
 _llseek: 1
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index 1f811d3..29e6dfc 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -11,6 +11,7 @@
         "libbinder",
         "liblog",
         "libnbaio",
+        "libnblog",
         "libutils",
     ],
 
diff --git a/services/medialog/MediaLogService.cpp b/services/medialog/MediaLogService.cpp
index a5512e1..1be5544 100644
--- a/services/medialog/MediaLogService.cpp
+++ b/services/medialog/MediaLogService.cpp
@@ -20,19 +20,25 @@
 #include <sys/mman.h>
 #include <utils/Log.h>
 #include <binder/PermissionCache.h>
-#include <media/nbaio/NBLog.h>
+#include <media/nblog/NBLog.h>
 #include <private/android_filesystem_config.h>
 #include "MediaLogService.h"
 
 namespace android {
 
- static const char kDeadlockedString[] = "MediaLogService may be deadlocked\n";
+static const char kDeadlockedString[] = "MediaLogService may be deadlocked\n";
+
+// mMerger, mMergeReader, and mMergeThread all point to the same location in memory,
+// mMergerShared. This is the local memory FIFO containing data merged from all
+// individual thread FIFOs in shared memory. mMergeThread is used to periodically
+// call NBLog::Merger::merge() to collect the data and write it to the FIFO, and call
+// NBLog::MergeReader::getAndProcessSnapshot to process the merged data.
 MediaLogService::MediaLogService() :
     BnMediaLogService(),
     mMergerShared((NBLog::Shared*) malloc(NBLog::Timeline::sharedSize(kMergeBufferSize))),
     mMerger(mMergerShared, kMergeBufferSize),
     mMergeReader(mMergerShared, kMergeBufferSize, mMerger),
-    mMergeThread(new NBLog::MergeThread(mMerger))
+    mMergeThread(new NBLog::MergeThread(mMerger, mMergeReader))
 {
     mMergeThread->run("MergeThread");
 }
@@ -123,15 +129,10 @@
                 } else {
                     ALOGI("%s:", namedReader.name());
                 }
-                // TODO This code is for testing, remove it when done
-                // namedReader.reader()->dump(fd, 0 /*indent*/);
             }
-
             mLock.unlock();
         }
     }
-
-    // FIXME request merge to make sure log is up to date
     mMergeReader.dump(fd);
     return NO_ERROR;
 }
diff --git a/services/medialog/MediaLogService.h b/services/medialog/MediaLogService.h
index 39d9cc0..c945d1f 100644
--- a/services/medialog/MediaLogService.h
+++ b/services/medialog/MediaLogService.h
@@ -19,7 +19,7 @@
 
 #include <binder/BinderService.h>
 #include <media/IMediaLogService.h>
-#include <media/nbaio/NBLog.h>
+#include <media/nblog/NBLog.h>
 
 namespace android {
 
diff --git a/services/medialog/OWNERS b/services/medialog/OWNERS
index fb8b8ee..21723ba 100644
--- a/services/medialog/OWNERS
+++ b/services/medialog/OWNERS
@@ -1,3 +1,3 @@
 elaurent@google.com
-gkasten@android.com
+gkasten@google.com
 hunga@google.com
diff --git a/services/minijail/OWNERS b/services/minijail/OWNERS
new file mode 100644
index 0000000..19f4f9f
--- /dev/null
+++ b/services/minijail/OWNERS
@@ -0,0 +1,2 @@
+jorgelo@google.com
+marcone@google.com
diff --git a/services/oboeservice/AAudioClientTracker.cpp b/services/oboeservice/AAudioClientTracker.cpp
index 75392bd..549a4e9 100644
--- a/services/oboeservice/AAudioClientTracker.cpp
+++ b/services/oboeservice/AAudioClientTracker.cpp
@@ -15,7 +15,7 @@
  */
 
 
-#define LOG_TAG "AAudioService"
+#define LOG_TAG "AAudioClientTracker"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -64,8 +64,7 @@
 // Create a tracker for the client.
 aaudio_result_t AAudioClientTracker::registerClient(pid_t pid,
                                          const sp<IAAudioClient>& client) {
-    ALOGV("AAudioClientTracker::registerClient(), calling pid = %d, getpid() = %d\n",
-          pid, getpid());
+    ALOGV("registerClient(), calling pid = %d, getpid() = %d\n", pid, getpid());
 
     std::lock_guard<std::mutex> lock(mLock);
     if (mNotificationClients.count(pid) == 0) {
@@ -74,18 +73,16 @@
 
         sp<IBinder> binder = IInterface::asBinder(client);
         status_t status = binder->linkToDeath(notificationClient);
-        ALOGW_IF(status != NO_ERROR,
-                 "AAudioClientTracker::registerClient() linkToDeath = %d\n", status);
+        ALOGW_IF(status != NO_ERROR, "registerClient() linkToDeath = %d\n", status);
         return AAudioConvert_androidToAAudioResult(status);
     } else {
-        ALOGW("AAudioClientTracker::registerClient(%d) already registered!", pid);
+        ALOGW("registerClient(%d) already registered!", pid);
         return AAUDIO_OK; // TODO should this be considered an error
     }
 }
 
 void AAudioClientTracker::unregisterClient(pid_t pid) {
-    ALOGV("AAudioClientTracker::unregisterClient(), calling pid = %d, getpid() = %d\n",
-          pid, getpid());
+    ALOGV("unregisterClient(), calling pid = %d, getpid() = %d\n", pid, getpid());
     std::lock_guard<std::mutex> lock(mLock);
     mNotificationClients.erase(pid);
 }
@@ -103,12 +100,12 @@
 aaudio_result_t
 AAudioClientTracker::registerClientStream(pid_t pid, sp<AAudioServiceStreamBase> serviceStream) {
     aaudio_result_t result = AAUDIO_OK;
-    ALOGV("AAudioClientTracker::registerClientStream(%d, %p)\n", pid, serviceStream.get());
+    ALOGV("registerClientStream(%d, %p)\n", pid, serviceStream.get());
     std::lock_guard<std::mutex> lock(mLock);
     sp<NotificationClient> notificationClient = mNotificationClients[pid];
     if (notificationClient == 0) {
         // This will get called the first time the audio server registers an internal stream.
-        ALOGV("AAudioClientTracker::registerClientStream(%d,) unrecognized pid\n", pid);
+        ALOGV("registerClientStream(%d,) unrecognized pid\n", pid);
         notificationClient = new NotificationClient(pid);
         mNotificationClients[pid] = notificationClient;
     }
@@ -120,15 +117,15 @@
 aaudio_result_t
 AAudioClientTracker::unregisterClientStream(pid_t pid,
                                             sp<AAudioServiceStreamBase> serviceStream) {
-    ALOGV("AAudioClientTracker::unregisterClientStream(%d, %p)\n", pid, serviceStream.get());
+    ALOGV("unregisterClientStream(%d, %p)\n", pid, serviceStream.get());
     std::lock_guard<std::mutex> lock(mLock);
     auto it = mNotificationClients.find(pid);
     if (it != mNotificationClients.end()) {
-        ALOGV("AAudioClientTracker::unregisterClientStream(%d, %p) found NotificationClient\n",
+        ALOGV("unregisterClientStream(%d, %p) found NotificationClient\n",
               pid, serviceStream.get());
         it->second->unregisterClientStream(serviceStream);
     } else {
-        ALOGE("AAudioClientTracker::unregisterClientStream(%d, %p) missing NotificationClient\n",
+        ALOGE("unregisterClientStream(%d, %p) missing NotificationClient\n",
               pid, serviceStream.get());
     }
     return AAUDIO_OK;
@@ -136,11 +133,11 @@
 
 AAudioClientTracker::NotificationClient::NotificationClient(pid_t pid)
         : mProcessId(pid) {
-    //ALOGD("AAudioClientTracker::NotificationClient(%d) created %p\n", pid, this);
+    //ALOGD("NotificationClient(%d) created %p\n", pid, this);
 }
 
 AAudioClientTracker::NotificationClient::~NotificationClient() {
-    //ALOGD("AAudioClientTracker::~NotificationClient() destroyed %p\n", this);
+    //ALOGD("~NotificationClient() destroyed %p\n", this);
 }
 
 int32_t AAudioClientTracker::NotificationClient::getStreamCount() {
@@ -172,14 +169,14 @@
 
         {
             std::lock_guard<std::mutex> lock(mLock);
-            for (auto serviceStream : mStreams) {
+            for (const auto& serviceStream : mStreams) {
                 streamsToClose.insert(serviceStream);
             }
         }
 
-        for (auto serviceStream : streamsToClose) {
+        for (const auto& serviceStream : streamsToClose) {
             aaudio_handle_t handle = serviceStream->getHandle();
-            ALOGW("AAudioClientTracker::binderDied() close abandoned stream 0x%08X\n", handle);
+            ALOGW("binderDied() close abandoned stream 0x%08X\n", handle);
             aaudioService->closeStream(handle);
         }
         // mStreams should be empty now
@@ -200,7 +197,7 @@
     }
 
     result << "  client: pid = " << mProcessId << " has " << mStreams.size() << " streams\n";
-    for (auto serviceStream : mStreams) {
+    for (const auto& serviceStream : mStreams) {
         result << "     stream: 0x" << std::hex << serviceStream->getHandle() << std::dec << "\n";
     }
 
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index f996f74..b0c3771 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -102,7 +102,7 @@
         }
     }
 
-    ALOGV("AAudioEndpointManager.findExclusiveEndpoint_l(), found %p for device = %d",
+    ALOGV("findExclusiveEndpoint_l(), found %p for device = %d",
           endpoint.get(), configuration.getDeviceId());
     return endpoint;
 }
@@ -118,7 +118,7 @@
         }
     }
 
-    ALOGV("AAudioEndpointManager.findSharedEndpoint_l(), found %p for device = %d",
+    ALOGV("findSharedEndpoint_l(), found %p for device = %d",
           endpoint.get(), configuration.getDeviceId());
     return endpoint;
 }
@@ -146,23 +146,23 @@
 
     // If we find an existing one then this one cannot be exclusive.
     if (endpoint.get() != nullptr) {
-        ALOGE("AAudioEndpointManager.openExclusiveEndpoint() already in use");
+        ALOGE("openExclusiveEndpoint() already in use");
         // Already open so do not allow a second stream.
         return nullptr;
     } else {
         sp<AAudioServiceEndpointMMAP> endpointMMap = new AAudioServiceEndpointMMAP();
-        ALOGE("AAudioEndpointManager.openEndpoint(),created MMAP %p", endpointMMap.get());
+        ALOGD("openEndpoint(),created MMAP %p", endpointMMap.get());
         endpoint = endpointMMap;
 
         aaudio_result_t result = endpoint->open(request);
         if (result != AAUDIO_OK) {
-            ALOGE("AAudioEndpointManager.openEndpoint(), open failed");
+            ALOGE("openEndpoint(), open failed");
             endpoint.clear();
         } else {
             mExclusiveStreams.push_back(endpointMMap);
         }
 
-        ALOGD("AAudioEndpointManager.openEndpoint(), created %p for device = %d",
+        ALOGD("openEndpoint(), created %p for device = %d",
               endpoint.get(), configuration.getDeviceId());
     }
 
@@ -203,13 +203,13 @@
         if (endpoint.get() != nullptr) {
             aaudio_result_t result = endpoint->open(request);
             if (result != AAUDIO_OK) {
-                ALOGE("AAudioEndpointManager.openEndpoint(), open failed");
+                ALOGE("openSharedEndpoint(), open failed");
                 endpoint.clear();
             } else {
                 mSharedStreams.push_back(endpoint);
             }
         }
-        ALOGD("AAudioEndpointManager.openSharedEndpoint(), created %p for device = %d, dir = %d",
+        ALOGD("openSharedEndpoint(), created %p for device = %d, dir = %d",
               endpoint.get(), configuration.getDeviceId(), (int)direction);
         IPCThreadState::self()->restoreCallingIdentity(token);
     }
@@ -239,14 +239,14 @@
     int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
     serviceEndpoint->setOpenCount(newRefCount);
 
-    // If no longer in use then close and delete it.
+    // If no longer in use then actually close it.
     if (newRefCount <= 0) {
         mExclusiveStreams.erase(
                 std::remove(mExclusiveStreams.begin(), mExclusiveStreams.end(), serviceEndpoint),
                 mExclusiveStreams.end());
 
         serviceEndpoint->close();
-        ALOGD("AAudioEndpointManager::closeExclusiveEndpoint() %p for device %d",
+        ALOGD("closeExclusiveEndpoint() %p for device %d",
               serviceEndpoint.get(), serviceEndpoint->getDeviceId());
     }
 }
@@ -261,14 +261,14 @@
     int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
     serviceEndpoint->setOpenCount(newRefCount);
 
-    // If no longer in use then close and delete it.
+    // If no longer in use then actually close it.
     if (newRefCount <= 0) {
         mSharedStreams.erase(
                 std::remove(mSharedStreams.begin(), mSharedStreams.end(), serviceEndpoint),
                 mSharedStreams.end());
 
         serviceEndpoint->close();
-        ALOGD("AAudioEndpointManager::closeSharedEndpoint() %p for device %d",
+        ALOGD("closeSharedEndpoint() %p for device %d",
               serviceEndpoint.get(), serviceEndpoint->getDeviceId());
     }
 }
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index 952aa82..b031888 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudioService"
+#define LOG_TAG "AAudioMixer"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -49,10 +49,9 @@
     memset(mOutputBuffer, 0, mBufferSizeInBytes);
 }
 
-bool AAudioMixer::mix(int trackIndex, FifoBuffer *fifo, float volume) {
+int32_t AAudioMixer::mix(int streamIndex, FifoBuffer *fifo, bool allowUnderflow) {
     WrappingBuffer wrappingBuffer;
     float *destination = mOutputBuffer;
-    fifo_frames_t framesLeft = mFramesPerBurst;
 
 #if AAUDIO_MIXER_ATRACE_ENABLED
     ATRACE_BEGIN("aaMix");
@@ -63,48 +62,57 @@
 #if AAUDIO_MIXER_ATRACE_ENABLED
     if (ATRACE_ENABLED()) {
         char rdyText[] = "aaMixRdy#";
-        char letter = 'A' + (trackIndex % 26);
+        char letter = 'A' + (streamIndex % 26);
         rdyText[sizeof(rdyText) - 2] = letter;
         ATRACE_INT(rdyText, fullFrames);
     }
 #else /* MIXER_ATRACE_ENABLED */
-    (void) trackIndex;
+    (void) streamIndex;
-    (void) fullFrames;
 #endif /* AAUDIO_MIXER_ATRACE_ENABLED */
 
+    // If allowUnderflow is true, always advance by one burst even if we do not have the data.
+    // Otherwise the stream timing will drift whenever there is an underflow.
+    // The actual underflow can then be detected by the client for XRun counting.
+    //
+    // Generally, allowUnderflow will be false when stopping a stream and we want to
+    // use up whatever data is in the queue.
+    fifo_frames_t framesDesired = mFramesPerBurst;
+    if (!allowUnderflow && fullFrames < framesDesired) {
+        framesDesired = fullFrames; // just use what is available then stop
+    }
+
     // Mix data in one or two parts.
     int partIndex = 0;
+    int32_t framesLeft = framesDesired;
     while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
-        fifo_frames_t framesToMix = framesLeft;
-        fifo_frames_t framesAvailable = wrappingBuffer.numFrames[partIndex];
-        if (framesAvailable > 0) {
-            if (framesToMix > framesAvailable) {
-                framesToMix = framesAvailable;
+        fifo_frames_t framesToMixFromPart = framesLeft;
+        fifo_frames_t framesAvailableFromPart = wrappingBuffer.numFrames[partIndex];
+        if (framesAvailableFromPart > 0) {
+            if (framesToMixFromPart > framesAvailableFromPart) {
+                framesToMixFromPart = framesAvailableFromPart;
             }
-            mixPart(destination, (float *)wrappingBuffer.data[partIndex], framesToMix, volume);
+            mixPart(destination, (float *)wrappingBuffer.data[partIndex],
+                    framesToMixFromPart);
 
-            destination += framesToMix * mSamplesPerFrame;
-            framesLeft -= framesToMix;
+            destination += framesToMixFromPart * mSamplesPerFrame;
+            framesLeft -= framesToMixFromPart;
         }
         partIndex++;
     }
-    // Always advance by one burst even if we do not have the data.
-    // Otherwise the stream timing will drift whenever there is an underflow.
-    // This actual underflow can then be detected by the client for XRun counting.
-    fifo->getFifoControllerBase()->advanceReadIndex(mFramesPerBurst);
+    fifo->getFifoControllerBase()->advanceReadIndex(framesDesired);
 
 #if AAUDIO_MIXER_ATRACE_ENABLED
     ATRACE_END();
 #endif /* AAUDIO_MIXER_ATRACE_ENABLED */
 
-    return (framesLeft > 0); // did not get all the frames we needed, ie. "underflow"
+    return (framesDesired - framesLeft); // framesRead
 }
 
-void AAudioMixer::mixPart(float *destination, float *source, int32_t numFrames, float volume) {
+void AAudioMixer::mixPart(float *destination, float *source, int32_t numFrames) {
     int32_t numSamples = numFrames * mSamplesPerFrame;
     // TODO maybe optimize using SIMD
     for (int sampleIndex = 0; sampleIndex < numSamples; sampleIndex++) {
-        *destination++ += *source++ * volume;
+        *destination++ += *source++;
     }
 }
 
diff --git a/services/oboeservice/AAudioMixer.h b/services/oboeservice/AAudioMixer.h
index a8090bc..d5abc5b 100644
--- a/services/oboeservice/AAudioMixer.h
+++ b/services/oboeservice/AAudioMixer.h
@@ -33,22 +33,24 @@
 
     /**
      * Mix from this FIFO
-     * @param fifo
-     * @param volume
-     * @return true if underflowed
+     * @param streamIndex for marking stream variables in systrace
+     * @param fifo to read from
+     * @param allowUnderflow if true then allow mixer to advance read index past the write index
+     * @return frames read from this stream
      */
-    bool mix(int trackIndex, android::FifoBuffer *fifo, float volume);
-
-    void mixPart(float *destination, float *source, int32_t numFrames, float volume);
+    int32_t mix(int streamIndex, android::FifoBuffer *fifo, bool allowUnderflow);
 
     float *getOutputBuffer();
 
+    int32_t getFramesPerBurst() const { return mFramesPerBurst; }
+
 private:
+    void mixPart(float *destination, float *source, int32_t numFrames);
+
     float   *mOutputBuffer = nullptr;
     int32_t  mSamplesPerFrame = 0;
     int32_t  mFramesPerBurst = 0;
     int32_t  mBufferSizeInBytes = 0;
 };
 
-
 #endif //AAUDIO_AAUDIO_MIXER_H
diff --git a/services/oboeservice/AAudioService.cpp b/services/oboeservice/AAudioService.cpp
index 5a3488d..ac3202b 100644
--- a/services/oboeservice/AAudioService.cpp
+++ b/services/oboeservice/AAudioService.cpp
@@ -92,14 +92,14 @@
     if (pid != mAudioClient.clientPid) {
         int32_t count = AAudioClientTracker::getInstance().getStreamCount(pid);
         if (count >= MAX_STREAMS_PER_PROCESS) {
-            ALOGE("AAudioService::openStream(): exceeded max streams per process %d >= %d",
+            ALOGE("openStream(): exceeded max streams per process %d >= %d",
                   count,  MAX_STREAMS_PER_PROCESS);
             return AAUDIO_ERROR_UNAVAILABLE;
         }
     }
 
     if (sharingMode != AAUDIO_SHARING_MODE_EXCLUSIVE && sharingMode != AAUDIO_SHARING_MODE_SHARED) {
-        ALOGE("AAudioService::openStream(): unrecognized sharing mode = %d", sharingMode);
+        ALOGE("openStream(): unrecognized sharing mode = %d", sharingMode);
         return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
     }
 
@@ -114,7 +114,7 @@
         result = serviceStream->open(request);
         if (result != AAUDIO_OK) {
             // Clear it so we can possibly fall back to using a shared stream.
-            ALOGW("AAudioService::openStream(), could not open in EXCLUSIVE mode");
+            ALOGW("openStream(), could not open in EXCLUSIVE mode");
             serviceStream.clear();
         }
     }
@@ -128,12 +128,12 @@
 
     if (result != AAUDIO_OK) {
         serviceStream.clear();
-        ALOGE("AAudioService::openStream(): failed, return %d = %s",
+        ALOGE("openStream(): failed, return %d = %s",
               result, AAudio_convertResultToText(result));
         return result;
     } else {
         aaudio_handle_t handle = mStreamTracker.addStreamForHandle(serviceStream.get());
-        ALOGD("AAudioService::openStream(): handle = 0x%08X", handle);
+        ALOGD("openStream(): handle = 0x%08X", handle);
         serviceStream->setHandle(handle);
         pid_t pid = request.getProcessId();
         AAudioClientTracker::getInstance().registerClientStream(pid, serviceStream);
@@ -142,31 +142,46 @@
     }
 }
 
+// If a close request is pending then close the stream
+bool AAudioService::releaseStream(const sp<AAudioServiceStreamBase> &serviceStream) {
+    bool closed = false;
+    if ((serviceStream->decrementServiceReferenceCount() == 0) && serviceStream->isCloseNeeded()) {
+        // removeStreamByHandle() uses a lock so that if there are two simultaneous closes
+        // then only one will get the pointer and do the close.
+        sp<AAudioServiceStreamBase> foundStream = mStreamTracker.removeStreamByHandle(serviceStream->getHandle());
+        if (foundStream.get() != nullptr) {
+            foundStream->close();
+            pid_t pid = foundStream->getOwnerProcessId();
+            AAudioClientTracker::getInstance().unregisterClientStream(pid, foundStream);
+        }
+        closed = true;
+    }
+    return closed;
+}
+
+aaudio_result_t AAudioService::checkForPendingClose(
+        const sp<AAudioServiceStreamBase> &serviceStream,
+        aaudio_result_t defaultResult) {
+    return releaseStream(serviceStream) ? AAUDIO_ERROR_INVALID_STATE : defaultResult;
+}
+
 aaudio_result_t AAudioService::closeStream(aaudio_handle_t streamHandle) {
+    ALOGD("closeStream(0x%08X)", streamHandle);
     // Check permission and ownership first.
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::closeStream(0x%0x), illegal stream handle", streamHandle);
+        ALOGE("closeStream(0x%0x), illegal stream handle", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
 
-    ALOGD("AAudioService.closeStream(0x%08X)", streamHandle);
-    // Remove handle from tracker so that we cannot look up the raw address any more.
-    // removeStreamByHandle() uses a lock so that if there are two simultaneous closes
-    // then only one will get the pointer and do the close.
-    serviceStream = mStreamTracker.removeStreamByHandle(streamHandle);
-    if (serviceStream.get() != nullptr) {
-        serviceStream->close();
-        pid_t pid = serviceStream->getOwnerProcessId();
-        AAudioClientTracker::getInstance().unregisterClientStream(pid, serviceStream);
-        return AAUDIO_OK;
-    } else {
-        ALOGW("AAudioService::closeStream(0x%0x) being handled by another thread", streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
+    pid_t pid = serviceStream->getOwnerProcessId();
+    AAudioClientTracker::getInstance().unregisterClientStream(pid, serviceStream);
+
+    serviceStream->setCloseNeeded(true);
+    (void) releaseStream(serviceStream);
+    return AAUDIO_OK;
 }
 
-
 sp<AAudioServiceStreamBase> AAudioService::convertHandleToServiceStream(
         aaudio_handle_t streamHandle) {
     sp<AAudioServiceStreamBase> serviceStream = mStreamTracker.getStreamByHandle(streamHandle);
@@ -181,7 +196,9 @@
         if (!allowed) {
             ALOGE("AAudioService: calling uid %d cannot access stream 0x%08X owned by %d",
                   callingUserId, streamHandle, ownerUserId);
-            serviceStream = nullptr;
+            serviceStream.clear();
+        } else {
+            serviceStream->incrementServiceReferenceCount();
         }
     }
     return serviceStream;
@@ -192,94 +209,97 @@
                 aaudio::AudioEndpointParcelable &parcelable) {
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
+        ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
 
     aaudio_result_t result = serviceStream->getDescription(parcelable);
     // parcelable.dump();
-    return result;
+    return checkForPendingClose(serviceStream, result);
 }
 
 aaudio_result_t AAudioService::startStream(aaudio_handle_t streamHandle) {
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::startStream(), illegal stream handle = 0x%0x", streamHandle);
+        ALOGE("startStream(), illegal stream handle = 0x%0x", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
 
-    return serviceStream->start();
+    aaudio_result_t result = serviceStream->start();
+    return checkForPendingClose(serviceStream, result);
 }
 
 aaudio_result_t AAudioService::pauseStream(aaudio_handle_t streamHandle) {
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::pauseStream(), illegal stream handle = 0x%0x", streamHandle);
+        ALOGE("pauseStream(), illegal stream handle = 0x%0x", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
     aaudio_result_t result = serviceStream->pause();
-    return result;
+    return checkForPendingClose(serviceStream, result);
 }
 
 aaudio_result_t AAudioService::stopStream(aaudio_handle_t streamHandle) {
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::stopStream(), illegal stream handle = 0x%0x", streamHandle);
+        ALOGE("stopStream(), illegal stream handle = 0x%0x", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
     aaudio_result_t result = serviceStream->stop();
-    return result;
+    return checkForPendingClose(serviceStream, result);
 }
 
 aaudio_result_t AAudioService::flushStream(aaudio_handle_t streamHandle) {
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::flushStream(), illegal stream handle = 0x%0x", streamHandle);
+        ALOGE("flushStream(), illegal stream handle = 0x%0x", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
-    return serviceStream->flush();
+    aaudio_result_t result = serviceStream->flush();
+    return checkForPendingClose(serviceStream, result);
 }
 
 aaudio_result_t AAudioService::registerAudioThread(aaudio_handle_t streamHandle,
                                                    pid_t clientThreadId,
                                                    int64_t periodNanoseconds) {
+    aaudio_result_t result = AAUDIO_OK;
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::registerAudioThread(), illegal stream handle = 0x%0x", streamHandle);
+        ALOGE("registerAudioThread(), illegal stream handle = 0x%0x", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
     if (serviceStream->getRegisteredThread() != AAudioServiceStreamBase::ILLEGAL_THREAD_ID) {
         ALOGE("AAudioService::registerAudioThread(), thread already registered");
-        return AAUDIO_ERROR_INVALID_STATE;
-    }
-
-    const pid_t ownerPid = IPCThreadState::self()->getCallingPid(); // TODO review
-    serviceStream->setRegisteredThread(clientThreadId);
-    int err = android::requestPriority(ownerPid, clientThreadId,
-                                       DEFAULT_AUDIO_PRIORITY, true /* isForApp */);
-    if (err != 0){
-        ALOGE("AAudioService::registerAudioThread(%d) failed, errno = %d, priority = %d",
-              clientThreadId, errno, DEFAULT_AUDIO_PRIORITY);
-        return AAUDIO_ERROR_INTERNAL;
+        result = AAUDIO_ERROR_INVALID_STATE;
     } else {
-        return AAUDIO_OK;
+        const pid_t ownerPid = IPCThreadState::self()->getCallingPid(); // TODO review
+        serviceStream->setRegisteredThread(clientThreadId);
+        int err = android::requestPriority(ownerPid, clientThreadId,
+                                           DEFAULT_AUDIO_PRIORITY, true /* isForApp */);
+        if (err != 0) {
+            ALOGE("AAudioService::registerAudioThread(%d) failed, errno = %d, priority = %d",
+                  clientThreadId, errno, DEFAULT_AUDIO_PRIORITY);
+            result = AAUDIO_ERROR_INTERNAL;
+        }
     }
+    return checkForPendingClose(serviceStream, result);
 }
 
 aaudio_result_t AAudioService::unregisterAudioThread(aaudio_handle_t streamHandle,
                                                      pid_t clientThreadId) {
+    aaudio_result_t result = AAUDIO_OK;
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::unregisterAudioThread(), illegal stream handle = 0x%0x",
-              streamHandle);
+        ALOGE("unregisterAudioThread(), illegal stream handle = 0x%0x", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
     if (serviceStream->getRegisteredThread() != clientThreadId) {
         ALOGE("AAudioService::unregisterAudioThread(), wrong thread");
-        return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+        result = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+    } else {
+        serviceStream->setRegisteredThread(0);
     }
-    serviceStream->setRegisteredThread(0);
-    return AAUDIO_OK;
+    return checkForPendingClose(serviceStream, result);
 }
 
 aaudio_result_t AAudioService::startClient(aaudio_handle_t streamHandle,
@@ -287,20 +307,20 @@
                                   audio_port_handle_t *clientHandle) {
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::startClient(), illegal stream handle = 0x%0x",
-              streamHandle);
+        ALOGE("startClient(), illegal stream handle = 0x%0x", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
-    return serviceStream->startClient(client, clientHandle);
+    aaudio_result_t result = serviceStream->startClient(client, clientHandle);
+    return checkForPendingClose(serviceStream, result);
 }
 
 aaudio_result_t AAudioService::stopClient(aaudio_handle_t streamHandle,
                                           audio_port_handle_t clientHandle) {
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
-        ALOGE("AAudioService::stopClient(), illegal stream handle = 0x%0x",
-              streamHandle);
+        ALOGE("stopClient(), illegal stream handle = 0x%0x", streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
-    return serviceStream->stopClient(clientHandle);
+    aaudio_result_t result = serviceStream->stopClient(clientHandle);
+    return checkForPendingClose(serviceStream, result);
 }
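
The convertHandleToServiceStream() / checkForPendingClose() pair above implements a simple deferred-close protocol: every binder call that successfully resolves a stream handle increments a per-stream service reference count, closeStream() only marks the stream as needing to close, and whichever call drops the count back to zero performs the actual close. A minimal standalone sketch of that idea, using illustrative names rather than the real AAudio types:

    #include <atomic>
    #include <cstdint>
    #include <memory>
    #include <mutex>

    // Illustrative only: a stream that must not be closed while any service
    // call still holds a reference to it.
    class StreamSketch {
    public:
        int32_t acquire() {                       // called when a handle is resolved
            std::lock_guard<std::mutex> lock(mLock);
            return ++mRefCount;
        }
        int32_t release() {                       // called when the service call returns
            std::lock_guard<std::mutex> lock(mLock);
            return --mRefCount;
        }
        void requestClose()        { mCloseNeeded.store(true); }
        bool isCloseNeeded() const { return mCloseNeeded.load(); }
        void close()               { /* free the audio resources here */ }

    private:
        std::mutex        mLock;
        int32_t           mRefCount = 0;
        std::atomic<bool> mCloseNeeded{false};
    };

    // Run at the end of every service call that resolved the handle.
    // Returns true if this particular call performed the actual close.
    bool releaseAndMaybeClose(const std::shared_ptr<StreamSketch>& stream) {
        if (stream->release() == 0 && stream->isCloseNeeded()) {
            stream->close();
            return true;
        }
        return false;
    }

Performing the close on whichever caller drops the count to zero means one binder thread can never free the stream while another thread is still executing a method on it, which is what the decrementServiceReferenceCount() / isCloseNeeded() check above achieves.
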
diff --git a/services/oboeservice/AAudioService.h b/services/oboeservice/AAudioService.h
index eef0824..bdd9e0b 100644
--- a/services/oboeservice/AAudioService.h
+++ b/services/oboeservice/AAudioService.h
@@ -94,9 +94,15 @@
             aaudio::aaudio_handle_t streamHandle);
 
 
-    android::AudioClient mAudioClient;
 
-    aaudio::AAudioStreamTracker                 mStreamTracker;
+    bool releaseStream(const sp<aaudio::AAudioServiceStreamBase> &serviceStream);
+
+    aaudio_result_t checkForPendingClose(const sp<aaudio::AAudioServiceStreamBase> &serviceStream,
+                                         aaudio_result_t defaultResult);
+
+    android::AudioClient            mAudioClient;
+
+    aaudio::AAudioStreamTracker     mStreamTracker;
 
     enum constants {
         DEFAULT_AUDIO_PRIORITY = 2
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index 3095bc9..f917675 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -60,6 +60,7 @@
     result << "    Reference Count:      " << mOpenCount << "\n";
     result << "    Requested Device Id:  " << mRequestedDeviceId << "\n";
     result << "    Device Id:            " << getDeviceId() << "\n";
+    result << "    Connected:            " << mConnected.load() << "\n";
     result << "    Registered Streams:" << "\n";
     result << AAudioServiceStreamShared::dumpHeader() << "\n";
     for (const auto stream : mRegisteredStreams) {
@@ -74,7 +75,9 @@
 
 void AAudioServiceEndpoint::disconnectRegisteredStreams() {
     std::lock_guard<std::mutex> lock(mLockStreams);
+    mConnected.store(false);
     for (const auto stream : mRegisteredStreams) {
+        ALOGD("disconnectRegisteredStreams() stop and disconnect %p", stream.get());
         stream->stop();
         stream->disconnect();
     }
@@ -96,6 +99,9 @@
 }
 
 bool AAudioServiceEndpoint::matches(const AAudioStreamConfiguration& configuration) {
+    if (!mConnected.load()) {
+        return false; // Only use an endpoint if it is connected to a device.
+    }
     if (configuration.getDirection() != getDirection()) {
         return false;
     }
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index 2ef6234..6312c51 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -97,6 +97,10 @@
         mOpenCount = count;
     }
 
+    bool isConnected() const {
+        return mConnected;
+    }
+
 protected:
     void                     disconnectRegisteredStreams();
 
@@ -111,6 +115,8 @@
     int32_t                  mOpenCount = 0;
     int32_t                  mRequestedDeviceId = 0;
 
+    std::atomic<bool>        mConnected{true};
+
 };
 
 } /* namespace aaudio */
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index c7d9b8e..efac788 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudioService"
+#define LOG_TAG "AAudioServiceEndpointCapture"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -57,8 +57,7 @@
 
 // Read data from the shared MMAP stream and then distribute it to the client streams.
 void *AAudioServiceEndpointCapture::callbackLoop() {
-    ALOGD("AAudioServiceEndpointCapture(): callbackLoop() entering");
-    int32_t underflowCount = 0;
+    ALOGD("callbackLoop() entering");
     aaudio_result_t result = AAUDIO_OK;
     int64_t timeoutNanos = getStreamInternal()->calculateReasonableTimeout();
 
@@ -73,7 +72,7 @@
             disconnectRegisteredStreams();
             break;
         } else if (result != getFramesPerBurst()) {
-            ALOGW("AAudioServiceEndpointCapture(): callbackLoop() read %d / %d",
+            ALOGW("callbackLoop() read %d / %d",
                   result, getFramesPerBurst());
             break;
         }
@@ -102,9 +101,10 @@
                             int64_t positionOffset = mmapFramesRead - clientFramesWritten;
                             streamShared->setTimestampPositionOffset(positionOffset);
 
+                            // Is the buffer too full to write a burst?
                             if (fifo->getFifoControllerBase()->getEmptyFramesAvailable() <
-                                getFramesPerBurst()) {
-                                underflowCount++;
+                                    getFramesPerBurst()) {
+                                streamShared->incrementXRunCount();
                             } else {
                                 fifo->write(mDistributionBuffer, getFramesPerBurst());
                             }
@@ -125,6 +125,6 @@
         }
     }
 
-    ALOGD("AAudioServiceEndpointCapture(): callbackLoop() exiting, %d underflows", underflowCount);
+    ALOGD("callbackLoop() exiting");
     return NULL; // TODO review
 }
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 7e6e247..8db1761 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -72,13 +72,6 @@
 
 aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAUDIO_OK;
-    const audio_attributes_t attributes = {
-            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
-            .usage = AUDIO_USAGE_MEDIA,
-            .source = AUDIO_SOURCE_VOICE_RECOGNITION,
-            .flags = AUDIO_FLAG_LOW_LATENCY,
-            .tags = ""
-    };
     audio_config_base_t config;
     audio_port_handle_t deviceId;
 
@@ -87,6 +80,24 @@
 
     copyFrom(request.getConstantConfiguration());
 
+    aaudio_direction_t direction = getDirection();
+
+    const audio_content_type_t contentType =
+            AAudioConvert_contentTypeToInternal(getContentType());
+    const audio_usage_t usage = (direction == AAUDIO_DIRECTION_OUTPUT)
+            ? AAudioConvert_usageToInternal(getUsage())
+            : AUDIO_USAGE_UNKNOWN;
+    const audio_source_t source = (direction == AAUDIO_DIRECTION_INPUT)
+            ? AAudioConvert_inputPresetToAudioSource(getInputPreset())
+            : AUDIO_SOURCE_DEFAULT;
+
+    const audio_attributes_t attributes = {
+            .content_type = contentType,
+            .usage = usage,
+            .source = source,
+            .flags = AUDIO_FLAG_LOW_LATENCY,
+            .tags = ""
+    };
     mMmapClient.clientUid = request.getUserId();
     mMmapClient.clientPid = request.getProcessId();
     mMmapClient.packageName.setTo(String16(""));
@@ -108,7 +119,6 @@
 
     int32_t aaudioSamplesPerFrame = getSamplesPerFrame();
 
-    aaudio_direction_t direction = getDirection();
     if (direction == AAUDIO_DIRECTION_OUTPUT) {
         config.channel_mask = (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
                               ? AUDIO_CHANNEL_OUT_STEREO
@@ -140,7 +150,7 @@
                                                           this, // callback
                                                           mMmapStream,
                                                           &mPortHandle);
-    ALOGD("AAudioServiceEndpointMMAP::open() mMapClient.uid = %d, pid = %d => portHandle = %d\n",
+    ALOGD("open() mMapClient.uid = %d, pid = %d => portHandle = %d\n",
           mMmapClient.clientUid,  mMmapClient.clientPid, mPortHandle);
     if (status != OK) {
         ALOGE("openMmapStream returned status %d", status);
@@ -148,7 +158,7 @@
     }
 
     if (deviceId == AAUDIO_UNSPECIFIED) {
-        ALOGW("AAudioServiceEndpointMMAP::open() - openMmapStream() failed to set deviceId");
+        ALOGW("open() - openMmapStream() failed to set deviceId");
     }
     setDeviceId(deviceId);
 
@@ -159,7 +169,7 @@
     }
     status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
     if (status != OK) {
-        ALOGE("AAudioServiceEndpointMMAP::open() - createMmapBuffer() failed with status %d %s",
+        ALOGE("open() - createMmapBuffer() failed with status %d %s",
               status, strerror(-status));
         result = AAUDIO_ERROR_UNAVAILABLE;
         goto error;
@@ -186,7 +196,7 @@
             // Fallback is handled by caller but indicate what is possible in case
             // this is used in the future
             setSharingMode(AAUDIO_SHARING_MODE_SHARED);
-            ALOGW("AAudioServiceEndpointMMAP::open() - exclusive FD cannot be used by client");
+            ALOGW("open() - exclusive FD cannot be used by client");
             result = AAUDIO_ERROR_UNAVAILABLE;
             goto error;
         }
@@ -201,7 +211,7 @@
     // Assume that AudioFlinger will close the original shared_memory_fd.
     mAudioDataFileDescriptor.reset(dup(mMmapBufferinfo.shared_memory_fd));
     if (mAudioDataFileDescriptor.get() == -1) {
-        ALOGE("AAudioServiceEndpointMMAP::open() - could not dup shared_memory_fd");
+        ALOGE("open() - could not dup shared_memory_fd");
         result = AAUDIO_ERROR_INTERNAL;
         goto error;
     }
@@ -219,10 +229,10 @@
         burstMicros = mFramesPerBurst * static_cast<int64_t>(1000000) / getSampleRate();
     } while (burstMicros < burstMinMicros);
 
-    ALOGD("AAudioServiceEndpointMMAP::open() original burst = %d, minMicros = %d, to burst = %d\n",
+    ALOGD("open() original burst = %d, minMicros = %d, to burst = %d\n",
           mMmapBufferinfo.burst_size_frames, burstMinMicros, mFramesPerBurst);
 
-    ALOGD("AAudioServiceEndpointMMAP::open() actual rate = %d, channels = %d"
+    ALOGD("open() actual rate = %d, channels = %d"
           ", deviceId = %d, capacity = %d\n",
           getSampleRate(), getSamplesPerFrame(), deviceId, getBufferCapacity());
 
@@ -236,7 +246,7 @@
 aaudio_result_t AAudioServiceEndpointMMAP::close() {
 
     if (mMmapStream != 0) {
-        ALOGD("AAudioServiceEndpointMMAP::close() clear() endpoint");
+        ALOGD("close() clear() endpoint");
         // Needs to be explicitly cleared or CTS will fail but it is not clear why.
         mMmapStream.clear();
         // Apparently the above close is asynchronous. An attempt to open a new device
@@ -271,12 +281,12 @@
 aaudio_result_t AAudioServiceEndpointMMAP::startClient(const android::AudioClient& client,
                                                        audio_port_handle_t *clientHandle) {
     if (mMmapStream == nullptr) return AAUDIO_ERROR_NULL;
-    ALOGD("AAudioServiceEndpointMMAP::startClient(%p(uid=%d, pid=%d))",
+    ALOGD("startClient(%p(uid=%d, pid=%d))",
           &client, client.clientUid, client.clientPid);
     audio_port_handle_t originalHandle =  *clientHandle;
     status_t status = mMmapStream->start(client, clientHandle);
     aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
-    ALOGD("AAudioServiceEndpointMMAP::startClient() , %d => %d returns %d",
+    ALOGD("startClient() , %d => %d returns %d",
           originalHandle, *clientHandle, result);
     return result;
 }
@@ -284,7 +294,7 @@
 aaudio_result_t AAudioServiceEndpointMMAP::stopClient(audio_port_handle_t clientHandle) {
     if (mMmapStream == nullptr) return AAUDIO_ERROR_NULL;
     aaudio_result_t result = AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
-    ALOGD("AAudioServiceEndpointMMAP::stopClient(%d) returns %d", clientHandle, result);
+    ALOGD("stopClient(%d) returns %d", clientHandle, result);
     return result;
 }
 
@@ -296,7 +306,7 @@
         return AAUDIO_ERROR_NULL;
     }
     status_t status = mMmapStream->getMmapPosition(&position);
-    ALOGV("AAudioServiceEndpointMMAP::getFreeRunningPosition() status= %d, pos = %d, nanos = %lld\n",
+    ALOGV("getFreeRunningPosition() status= %d, pos = %d, nanos = %lld\n",
           status, position.position_frames, (long long) position.time_nanoseconds);
     aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
     if (result == AAUDIO_ERROR_UNAVAILABLE) {
@@ -319,7 +329,7 @@
 
 
 void AAudioServiceEndpointMMAP::onTearDown() {
-    ALOGD("AAudioServiceEndpointMMAP::onTearDown() called");
+    ALOGD("onTearDown() called");
     disconnectRegisteredStreams();
 };
 
@@ -327,7 +337,7 @@
                                               android::Vector<float> values) {
     // TODO do we really need a different volume for each channel?
     float volume = values[0];
-    ALOGD("AAudioServiceEndpointMMAP::onVolumeChanged() volume[0] = %f", volume);
+    ALOGD("onVolumeChanged() volume[0] = %f", volume);
     std::lock_guard<std::mutex> lock(mLockStreams);
     for(const auto stream : mRegisteredStreams) {
         stream->onVolumeChanged(volume);
@@ -335,7 +345,7 @@
 };
 
 void AAudioServiceEndpointMMAP::onRoutingChanged(audio_port_handle_t deviceId) {
-    ALOGD("AAudioServiceEndpointMMAP::onRoutingChanged() called with %d, old = %d",
+    ALOGD("onRoutingChanged() called with dev %d, old = %d",
           deviceId, getDeviceId());
     if (getDeviceId() != AUDIO_PORT_HANDLE_NONE  && getDeviceId() != deviceId) {
         disconnectRegisteredStreams();
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index 9b1833a..2601f3f 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -34,6 +34,7 @@
 #include "AAudioServiceStreamShared.h"
 #include "AAudioServiceEndpointPlay.h"
 #include "AAudioServiceEndpointShared.h"
+#include "AAudioServiceStreamBase.h"
 
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
@@ -82,9 +83,13 @@
             std::lock_guard <std::mutex> lock(mLockStreams);
             for (const auto clientStream : mRegisteredStreams) {
                 int64_t clientFramesRead = 0;
+                bool allowUnderflow = true;
 
-                if (!clientStream->isRunning()) {
-                    continue;
+                aaudio_stream_state_t state = clientStream->getState();
+                if (state == AAUDIO_STREAM_STATE_STOPPING) {
+                    allowUnderflow = false; // just read what is already in the FIFO
+                } else if (state != AAUDIO_STREAM_STATE_STARTED) {
+                    continue; // this stream is not running so skip it.
                 }
 
                 sp<AAudioServiceStreamShared> streamShared =
@@ -104,10 +109,19 @@
                         int64_t positionOffset = mmapFramesWritten - clientFramesRead;
                         streamShared->setTimestampPositionOffset(positionOffset);
 
-                        float volume = 1.0; // to match legacy volume
-                        bool underflowed = mMixer.mix(index, fifo, volume);
-                        if (underflowed) {
-                            streamShared->incrementXRunCount();
+                        int32_t framesMixed = mMixer.mix(index, fifo, allowUnderflow);
+
+                        if (streamShared->isFlowing()) {
+                            // Consider it an underflow if we got less than a burst
+                            // after the data started flowing.
+                            bool underflowed = allowUnderflow
+                                               && framesMixed < mMixer.getFramesPerBurst();
+                            if (underflowed) {
+                                streamShared->incrementXRunCount();
+                            }
+                        } else if (framesMixed > 0) {
+                            // Mark beginning of data flow after a start.
+                            streamShared->setFlowing(true);
                         }
                         clientFramesRead = fifo->getReadCounter();
                     }
@@ -132,7 +146,7 @@
             AAudioServiceEndpointShared::disconnectRegisteredStreams();
             break;
         } else if (result != getFramesPerBurst()) {
-            ALOGW("AAudioServiceEndpoint(): callbackLoop() wrote %d / %d",
+            ALOGW("callbackLoop() wrote %d / %d",
                   result, getFramesPerBurst());
             break;
         }
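
The mixer loop above only reports an underflow after data has actually started flowing, and never for a stream that is merely draining in the STOPPING state. A minimal sketch of that decision, assuming framesMixed is whatever the mixer pulled from the client FIFO (the types and names below are illustrative, not the real AAudio classes):

    #include <cstdint>

    // Illustrative only: decide whether a short read from a client FIFO should
    // count as an underflow. Clients that are neither STARTED nor STOPPING are
    // skipped entirely in the real loop.
    enum class StreamState { Started, Stopping, Other };

    struct ClientSketch {
        StreamState state     = StreamState::Started;
        bool        flowing   = false;  // set once the first frames have been mixed
        int32_t     xRunCount = 0;
    };

    void accountForMix(ClientSketch &client, int32_t framesMixed, int32_t framesPerBurst) {
        // A stopping stream is only being drained, so a short read is expected.
        const bool allowUnderflow = (client.state != StreamState::Stopping);
        if (client.flowing) {
            if (allowUnderflow && framesMixed < framesPerBurst) {
                client.xRunCount++;      // the client did not keep its FIFO filled
            }
        } else if (framesMixed > 0) {
            client.flowing = true;       // data has started flowing after start()
        }
    }
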
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index cd40066..6af9e7e 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -47,6 +47,7 @@
            << std::setfill('0') << std::setw(8)
            << std::hex << mStreamInternal->getServiceHandle()
            << std::dec << std::setfill(' ');
+    result << ", XRuns = " << mStreamInternal->getXRunCount();
     result << "\n";
     result << "    Running Stream Count: " << mRunningStreamCount << "\n";
 
@@ -91,7 +92,12 @@
 static void *aaudio_endpoint_thread_proc(void *context) {
     AAudioServiceEndpointShared *endpoint = (AAudioServiceEndpointShared *) context;
     if (endpoint != NULL) {
-        return endpoint->callbackLoop();
+        void *result = endpoint->callbackLoop();
+        // Close now so that the HW resource is freed and we can open a new device.
+        if (!endpoint->isConnected()) {
+            endpoint->close();
+        }
+        return result;
     } else {
         return NULL;
     }
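
Together with the mConnected flag added to AAudioServiceEndpoint, the lifecycle is: a routing change or teardown marks the endpoint disconnected, matches() stops offering it to new streams, and the endpoint's own callback thread closes it on exit so the hardware stream is released before a new device is opened. A standalone sketch of that flow (EndpointSketch and its members are illustrative):

    #include <atomic>

    // Illustrative only: an endpoint that refuses reuse once its device is gone
    // and is closed by its own callback thread when the loop exits.
    class EndpointSketch {
    public:
        bool matches(/* requested stream configuration */) const {
            return mConnected.load();     // never hand out a disconnected endpoint
        }
        void disconnect()        { mConnected.store(false); }
        bool isConnected() const { return mConnected.load(); }
        void close()             { /* release the underlying HW stream here */ }

        void callbackLoop() {
            while (isConnected()) {
                // ... move one burst of audio data ...
            }
        }

    private:
        std::atomic<bool> mConnected{true};
    };

    // Thread entry, analogous to aaudio_endpoint_thread_proc() above.
    void endpointThreadProc(EndpointSketch *endpoint) {
        endpoint->callbackLoop();
        if (!endpoint->isConnected()) {
            endpoint->close();            // free the HW so a new device can be opened
        }
    }
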
diff --git a/services/oboeservice/AAudioServiceEndpointShared.h b/services/oboeservice/AAudioServiceEndpointShared.h
index e3bd2c1..74cd817 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.h
+++ b/services/oboeservice/AAudioServiceEndpointShared.h
@@ -30,8 +30,7 @@
 namespace aaudio {
 
 /**
- * This Service class corresponds to a Client stream that shares an MMAP device through a mixer
- * or an input distributor.
+ * This manages an internal stream that is shared by multiple Client streams.
  */
 class AAudioServiceEndpointShared : public AAudioServiceEndpoint {
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index ff0b037..53d2860 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -51,7 +51,7 @@
 }
 
 AAudioServiceStreamBase::~AAudioServiceStreamBase() {
-    ALOGD("AAudioServiceStreamBase::~AAudioServiceStreamBase() destroying %p", this);
+    ALOGD("~AAudioServiceStreamBase() destroying %p", this);
     // If the stream is deleted when OPEN or in use then audio resources will leak.
     // This would indicate an internal error. So we want to find this ASAP.
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
@@ -93,7 +93,7 @@
     {
         std::lock_guard<std::mutex> lock(mUpMessageQueueLock);
         if (mUpMessageQueue != nullptr) {
-            ALOGE("AAudioServiceStreamBase::open() called twice");
+            ALOGE("open() called twice");
             return AAUDIO_ERROR_INVALID_STATE;
         }
 
@@ -108,7 +108,7 @@
                                                          request,
                                                          sharingMode);
         if (mServiceEndpoint == nullptr) {
-            ALOGE("AAudioServiceStreamBase::open() openEndpoint() failed");
+            ALOGE("open() openEndpoint() failed");
             result = AAUDIO_ERROR_UNAVAILABLE;
             goto error;
         }
@@ -167,11 +167,13 @@
     }
 
     if (mServiceEndpoint == nullptr) {
-        ALOGE("AAudioServiceStreamBase::start() missing endpoint");
+        ALOGE("start() missing endpoint");
         result = AAUDIO_ERROR_INVALID_STATE;
         goto error;
     }
 
+    setFlowing(false);
+
     // Start with fresh presentation timestamps.
     mAtomicTimestamp.clear();
 
@@ -199,7 +201,7 @@
         return result;
     }
     if (mServiceEndpoint == nullptr) {
-        ALOGE("AAudioServiceStreamShared::pause() missing endpoint");
+        ALOGE("pause() missing endpoint");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
@@ -215,7 +217,7 @@
 
     result = mServiceEndpoint->stopStream(this, mClientHandle);
     if (result != AAUDIO_OK) {
-        ALOGE("AAudioServiceStreamShared::pause() mServiceEndpoint returned %d", result);
+        ALOGE("pause() mServiceEndpoint returned %d", result);
         disconnect(); // TODO should we return or pause Base first?
     }
 
@@ -231,10 +233,12 @@
     }
 
     if (mServiceEndpoint == nullptr) {
-        ALOGE("AAudioServiceStreamShared::stop() missing endpoint");
+        ALOGE("stop() missing endpoint");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    setState(AAUDIO_STREAM_STATE_STOPPING);
+
     // Send it now because the timestamp gets rounded up when stopStream() is called below.
     // Also we don't need the timestamps while we are shutting down.
     sendCurrentTimestamp(); // warning - this calls a virtual function
@@ -247,7 +251,7 @@
     // TODO wait for data to be played out
     result = mServiceEndpoint->stopStream(this, mClientHandle);
     if (result != AAUDIO_OK) {
-        ALOGE("AAudioServiceStreamShared::stop() mServiceEndpoint returned %d", result);
+        ALOGE("stop() mServiceEndpoint returned %d", result);
         disconnect();
         // TODO what to do with result here?
     }
@@ -268,7 +272,7 @@
 
 aaudio_result_t AAudioServiceStreamBase::flush() {
     if (getState() != AAUDIO_STREAM_STATE_PAUSED) {
-        ALOGE("AAudioServiceStreamBase::flush() stream not paused, state = %s",
+        ALOGE("flush() stream not paused, state = %s",
               AAudio_convertStreamStateToText(mState));
         return AAUDIO_ERROR_INVALID_STATE;
     }
@@ -280,7 +284,7 @@
 
 // implement Runnable, periodically send timestamps to client
 void AAudioServiceStreamBase::run() {
-    ALOGD("AAudioServiceStreamBase::run() entering ----------------");
+    ALOGD("run() entering ----------------");
     TimestampScheduler timestampScheduler;
     timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
     timestampScheduler.start(AudioClock::getNanoseconds());
@@ -298,7 +302,7 @@
             AudioClock::sleepUntilNanoTime(nextTime);
         }
     }
-    ALOGD("AAudioServiceStreamBase::run() exiting ----------------");
+    ALOGD("run() exiting ----------------");
 }
 
 void AAudioServiceStreamBase::disconnect() {
@@ -309,12 +313,19 @@
 }
 
 aaudio_result_t AAudioServiceStreamBase::sendServiceEvent(aaudio_service_event_t event,
-                                               double  dataDouble,
-                                               int64_t dataLong) {
+                                                          double  dataDouble) {
     AAudioServiceMessage command;
     command.what = AAudioServiceMessage::code::EVENT;
     command.event.event = event;
     command.event.dataDouble = dataDouble;
+    return writeUpMessageQueue(&command);
+}
+
+aaudio_result_t AAudioServiceStreamBase::sendServiceEvent(aaudio_service_event_t event,
+                                                          int64_t dataLong) {
+    AAudioServiceMessage command;
+    command.what = AAudioServiceMessage::code::EVENT;
+    command.event.event = event;
     command.event.dataLong = dataLong;
     return writeUpMessageQueue(&command);
 }
@@ -334,6 +345,10 @@
     }
 }
 
+aaudio_result_t AAudioServiceStreamBase::sendXRunCount(int32_t xRunCount) {
+    return sendServiceEvent(AAUDIO_SERVICE_EVENT_XRUN, (int64_t) xRunCount);
+}
+
 aaudio_result_t AAudioServiceStreamBase::sendCurrentTimestamp() {
     AAudioServiceMessage command;
     // Send a timestamp for the clock model.
@@ -387,3 +402,13 @@
 void AAudioServiceStreamBase::onVolumeChanged(float volume) {
     sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
 }
+
+int32_t AAudioServiceStreamBase::incrementServiceReferenceCount() {
+    std::lock_guard<std::mutex> lock(mCallingCountLock);
+    return ++mCallingCount;
+}
+
+int32_t AAudioServiceStreamBase::decrementServiceReferenceCount() {
+    std::lock_guard<std::mutex> lock(mCallingCountLock);
+    return --mCallingCount;
+}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index af435b4..5f5bb98 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -129,11 +129,15 @@
     // -------------------------------------------------------------------
 
     /**
-     * Send a message to the client.
+     * Send a message to the client with an int64_t data value.
      */
     aaudio_result_t sendServiceEvent(aaudio_service_event_t event,
-                                     double  dataDouble = 0.0,
                                      int64_t dataLong = 0);
+    /**
+     * Send a message to the client with a double data value.
+     */
+    aaudio_result_t sendServiceEvent(aaudio_service_event_t event,
+                                     double  dataDouble);
 
     /**
      * Fill in a parcelable description of stream.
@@ -182,6 +186,39 @@
 
     void onVolumeChanged(float volume);
 
+    /**
+     * Set false when the stream is started.
+     * Set true when data is first read from the stream.
+     * @param b
+     */
+    void setFlowing(bool b) {
+        mFlowing = b;
+    }
+
+    bool isFlowing() const {
+        return mFlowing;
+    }
+
+    /**
+     * Atomically increment the number of active references to the stream by AAudioService.
+     * @return value after the increment
+     */
+    int32_t incrementServiceReferenceCount();
+
+    /**
+     * Atomically decrement the number of active references to the stream by AAudioService.
+     * @return value after the decrement
+     */
+    int32_t decrementServiceReferenceCount();
+
+    bool isCloseNeeded() const {
+        return mCloseNeeded.load();
+    }
+
+    void setCloseNeeded(bool needed) {
+        mCloseNeeded.store(needed);
+    }
+
 protected:
 
     /**
@@ -204,6 +241,8 @@
 
     aaudio_result_t sendCurrentTimestamp();
 
+    aaudio_result_t sendXRunCount(int32_t xRunCount);
+
     /**
      * @param positionFrames
      * @param timeNanos
@@ -237,6 +276,11 @@
 
 private:
     aaudio_handle_t         mHandle = -1;
+    bool                    mFlowing = false;
+
+    std::mutex              mCallingCountLock;
+    std::atomic<int32_t>    mCallingCount{0};
+    std::atomic<bool>       mCloseNeeded{false};
 };
 
 } /* namespace aaudio */
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 44ba1ca..34ddb4b 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -81,9 +81,7 @@
     return AAUDIO_OK;
 }
 
-/**
- * Start the flow of data.
- */
+// Start the flow of data.
 aaudio_result_t AAudioServiceStreamMMAP::startDevice() {
     aaudio_result_t result = AAudioServiceStreamBase::startDevice();
     if (!mInService && result == AAUDIO_OK) {
@@ -93,9 +91,7 @@
     return result;
 }
 
-/**
- * Stop the flow of data such that start() can resume with loss of data.
- */
+// Stop the flow of data such that start() can resume with loss of data.
 aaudio_result_t AAudioServiceStreamMMAP::pause() {
     if (!isRunning()) {
         return AAUDIO_OK;
@@ -165,9 +161,7 @@
     }
 }
 
-/**
- * Get an immutable description of the data queue from the HAL.
- */
+// Get an immutable description of the data queue from the HAL.
 aaudio_result_t AAudioServiceStreamMMAP::getAudioDataDescription(
         AudioEndpointParcelable &parcelable)
 {
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index 084f996..75d88cf 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -74,7 +74,7 @@
                                                            int32_t framesPerBurst) {
 
     if (requestedCapacityFrames > MAX_FRAMES_PER_BUFFER) {
-        ALOGE("AAudioServiceStreamShared::calculateBufferCapacity() requested capacity %d > max %d",
+        ALOGE("calculateBufferCapacity() requested capacity %d > max %d",
               requestedCapacityFrames, MAX_FRAMES_PER_BUFFER);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
@@ -99,7 +99,7 @@
     }
     // Check for numeric overflow.
     if (numBursts > 0x8000 || framesPerBurst > 0x8000) {
-        ALOGE("AAudioServiceStreamShared::calculateBufferCapacity() overflow, capacity = %d * %d",
+        ALOGE("calculateBufferCapacity() overflow, capacity = %d * %d",
               numBursts, framesPerBurst);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
@@ -107,11 +107,11 @@
 
     // Final sanity check.
     if (capacityInFrames > MAX_FRAMES_PER_BUFFER) {
-        ALOGE("AAudioServiceStreamShared::calculateBufferCapacity() calc capacity %d > max %d",
+        ALOGE("calculateBufferCapacity() calc capacity %d > max %d",
               capacityInFrames, MAX_FRAMES_PER_BUFFER);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
-    ALOGD("AAudioServiceStreamShared::calculateBufferCapacity() requested %d frames, actual = %d",
+    ALOGD("calculateBufferCapacity() requested %d frames, actual = %d",
           requestedCapacityFrames, capacityInFrames);
     return capacityInFrames;
 }
@@ -122,7 +122,7 @@
 
     aaudio_result_t result = AAudioServiceStreamBase::open(request, AAUDIO_SHARING_MODE_SHARED);
     if (result != AAUDIO_OK) {
-        ALOGE("AAudioServiceStreamBase open() returned %d", result);
+        ALOGE("open() returned %d", result);
         return result;
     }
 
@@ -134,7 +134,7 @@
     if (getFormat() == AAUDIO_FORMAT_UNSPECIFIED) {
         setFormat(AAUDIO_FORMAT_PCM_FLOAT);
     } else if (getFormat() != AAUDIO_FORMAT_PCM_FLOAT) {
-        ALOGE("AAudioServiceStreamShared::open() mAudioFormat = %d, need FLOAT", getFormat());
+        ALOGE("open() mAudioFormat = %d, need FLOAT", getFormat());
         result = AAUDIO_ERROR_INVALID_FORMAT;
         goto error;
     }
@@ -143,7 +143,7 @@
     if (getSampleRate() == AAUDIO_UNSPECIFIED) {
         setSampleRate(mServiceEndpoint->getSampleRate());
     } else if (getSampleRate() != mServiceEndpoint->getSampleRate()) {
-        ALOGE("AAudioServiceStreamShared::open() mSampleRate = %d, need %d",
+        ALOGE("open() mSampleRate = %d, need %d",
               getSampleRate(), mServiceEndpoint->getSampleRate());
         result = AAUDIO_ERROR_INVALID_RATE;
         goto error;
@@ -153,7 +153,7 @@
     if (getSamplesPerFrame() == AAUDIO_UNSPECIFIED) {
         setSamplesPerFrame(mServiceEndpoint->getSamplesPerFrame());
     } else if (getSamplesPerFrame() != mServiceEndpoint->getSamplesPerFrame()) {
-        ALOGE("AAudioServiceStreamShared::open() mSamplesPerFrame = %d, need %d",
+        ALOGE("open() mSamplesPerFrame = %d, need %d",
               getSamplesPerFrame(), mServiceEndpoint->getSamplesPerFrame());
         result = AAUDIO_ERROR_OUT_OF_RANGE;
         goto error;
@@ -173,14 +173,14 @@
         mAudioDataQueue = new SharedRingBuffer();
         result = mAudioDataQueue->allocate(calculateBytesPerFrame(), getBufferCapacity());
         if (result != AAUDIO_OK) {
-            ALOGE("AAudioServiceStreamShared::open() could not allocate FIFO with %d frames",
+            ALOGE("open() could not allocate FIFO with %d frames",
                   getBufferCapacity());
             result = AAUDIO_ERROR_NO_MEMORY;
             goto error;
         }
     }
 
-    ALOGD("AAudioServiceStreamShared::open() actual rate = %d, channels = %d, deviceId = %d",
+    ALOGD("open() actual rate = %d, channels = %d, deviceId = %d",
           getSampleRate(), getSamplesPerFrame(), mServiceEndpoint->getDeviceId());
 
     result = mServiceEndpoint->registerStream(keep);
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 8499ea5..3b12e61 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -1,4 +1,4 @@
-/*
+ /*
  * Copyright (C) 2017 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -80,7 +80,7 @@
     }
 
     void incrementXRunCount() {
-        mXRunCount++;
+        sendXRunCount(++mXRunCount);
     }
 
     int32_t getXRunCount() const {
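
With this change an xrun is no longer just counted locally; each increment is also forwarded to the client through the stream's up message queue (sendXRunCount() in AAudioServiceStreamBase). A small sketch of that pattern, with hypothetical names standing in for the real message queue types:

    #include <cstdint>
    #include <queue>

    // Illustrative only: push each new xrun total to the client as an event
    // instead of waiting for the client to poll the counter.
    struct EventSketch { int32_t what; int64_t data; };
    constexpr int32_t EVENT_XRUN = 1;

    class SharedStreamSketch {
    public:
        void incrementXRunCount() {
            sendEvent(EVENT_XRUN, ++mXRunCount);    // mirrors sendXRunCount() above
        }
        int32_t getXRunCount() const { return mXRunCount; }

    private:
        void sendEvent(int32_t what, int64_t data) {
            mUpQueue.push(EventSketch{what, data}); // stands in for the up message queue
        }
        int32_t mXRunCount = 0;
        std::queue<EventSketch> mUpQueue;
    };
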
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index c6fb57d..fbb0da4 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudioService"
+#define LOG_TAG "AAudioThread"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -53,7 +53,7 @@
 
 aaudio_result_t AAudioThread::start(Runnable *runnable) {
     if (mHasThread) {
-        ALOGE("AAudioThread::start() - mHasThread already true");
+        ALOGE("start() - mHasThread already true");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     // mRunnable will be read by the new thread when it starts.
@@ -61,7 +61,7 @@
     mRunnable = runnable;
     int err = pthread_create(&mThread, nullptr, AAudioThread_internalThreadProc, this);
     if (err != 0) {
-        ALOGE("AAudioThread::start() - pthread_create() returned %d %s", err, strerror(err));
+        ALOGE("start() - pthread_create() returned %d %s", err, strerror(err));
         return AAudioConvert_androidToAAudioResult(-err);
     } else {
         mHasThread = true;
@@ -71,13 +71,13 @@
 
 aaudio_result_t AAudioThread::stop() {
     if (!mHasThread) {
-        ALOGE("AAudioThread::stop() but no thread running");
+        ALOGE("stop() but no thread running");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     int err = pthread_join(mThread, nullptr);
     mHasThread = false;
     if (err != 0) {
-        ALOGE("AAudioThread::stop() - pthread_join() returned %d %s", err, strerror(err));
+        ALOGE("stop() - pthread_join() returned %d %s", err, strerror(err));
         return AAudioConvert_androidToAAudioResult(-err);
     } else {
         return AAUDIO_OK;
diff --git a/services/oboeservice/SharedMemoryProxy.cpp b/services/oboeservice/SharedMemoryProxy.cpp
index fb991bb..c43ed22 100644
--- a/services/oboeservice/SharedMemoryProxy.cpp
+++ b/services/oboeservice/SharedMemoryProxy.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudioService"
+#define LOG_TAG "SharedMemoryProxy"
 //#define LOG_NDEBUG 0
 #include <log/log.h>
 
@@ -45,12 +45,12 @@
 
     mProxyFileDescriptor = ashmem_create_region("AAudioProxyDataBuffer", mSharedMemorySizeInBytes);
     if (mProxyFileDescriptor < 0) {
-        ALOGE("SharedMemoryProxy::open() ashmem_create_region() failed %d", errno);
+        ALOGE("open() ashmem_create_region() failed %d", errno);
         return AAUDIO_ERROR_INTERNAL;
     }
     int err = ashmem_set_prot_region(mProxyFileDescriptor, PROT_READ|PROT_WRITE);
     if (err < 0) {
-        ALOGE("SharedMemoryProxy::open() ashmem_set_prot_region() failed %d", errno);
+        ALOGE("open() ashmem_set_prot_region() failed %d", errno);
         close(mProxyFileDescriptor);
         mProxyFileDescriptor = -1;
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
@@ -62,7 +62,7 @@
                          MAP_SHARED,
                          mOriginalFileDescriptor, 0);
     if (mOriginalSharedMemory == MAP_FAILED) {
-        ALOGE("SharedMemoryProxy::open() original mmap(%d) failed %d (%s)",
+        ALOGE("open() original mmap(%d) failed %d (%s)",
                 mOriginalFileDescriptor, errno, strerror(errno));
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
     }
@@ -73,7 +73,7 @@
                          MAP_SHARED,
                          mProxyFileDescriptor, 0);
     if (mProxySharedMemory != mOriginalSharedMemory) {
-        ALOGE("SharedMemoryProxy::open() proxy mmap(%d) failed %d", mProxyFileDescriptor, errno);
+        ALOGE("open() proxy mmap(%d) failed %d", mProxyFileDescriptor, errno);
         munmap(mOriginalSharedMemory, mSharedMemorySizeInBytes);
         mOriginalSharedMemory = nullptr;
         close(mProxyFileDescriptor);
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index 83b25b3..2454446 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AAudioService"
+#define LOG_TAG "SharedRingBuffer"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
@@ -46,14 +46,14 @@
     mSharedMemorySizeInBytes = mDataMemorySizeInBytes + (2 * (sizeof(fifo_counter_t)));
     mFileDescriptor.reset(ashmem_create_region("AAudioSharedRingBuffer", mSharedMemorySizeInBytes));
     if (mFileDescriptor.get() == -1) {
-        ALOGE("SharedRingBuffer::allocate() ashmem_create_region() failed %d", errno);
+        ALOGE("allocate() ashmem_create_region() failed %d", errno);
         return AAUDIO_ERROR_INTERNAL;
     }
-    ALOGV("SharedRingBuffer::allocate() mFileDescriptor = %d\n", mFileDescriptor.get());
+    ALOGV("allocate() mFileDescriptor = %d\n", mFileDescriptor.get());
 
     int err = ashmem_set_prot_region(mFileDescriptor.get(), PROT_READ|PROT_WRITE); // TODO error handling?
     if (err < 0) {
-        ALOGE("SharedRingBuffer::allocate() ashmem_set_prot_region() failed %d", errno);
+        ALOGE("allocate() ashmem_set_prot_region() failed %d", errno);
         mFileDescriptor.reset();
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
     }
@@ -64,7 +64,7 @@
                          MAP_SHARED,
                          mFileDescriptor.get(), 0);
     if (mSharedMemory == MAP_FAILED) {
-        ALOGE("SharedRingBuffer::allocate() mmap() failed %d", errno);
+        ALOGE("allocate() mmap() failed %d", errno);
         mFileDescriptor.reset();
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
     }
diff --git a/services/soundtrigger/SoundTriggerHwService.cpp b/services/soundtrigger/SoundTriggerHwService.cpp
index 22519a3..a7d6e83 100644
--- a/services/soundtrigger/SoundTriggerHwService.cpp
+++ b/services/soundtrigger/SoundTriggerHwService.cpp
@@ -206,9 +206,10 @@
     service->sendRecognitionEvent(event, module);
 }
 
-sp<IMemory> SoundTriggerHwService::prepareRecognitionEvent_l(
+sp<IMemory> SoundTriggerHwService::prepareRecognitionEvent(
                                                     struct sound_trigger_recognition_event *event)
 {
+    AutoMutex lock(mMemoryDealerLock);
     sp<IMemory> eventMemory;
 
     //sanitize event
@@ -216,21 +217,21 @@
     case SOUND_MODEL_TYPE_KEYPHRASE:
         ALOGW_IF(event->data_size != 0 && event->data_offset !=
                     sizeof(struct sound_trigger_phrase_recognition_event),
-                    "prepareRecognitionEvent_l(): invalid data offset %u for keyphrase event type",
+                    "prepareRecognitionEvent(): invalid data offset %u for keyphrase event type",
                     event->data_offset);
         event->data_offset = sizeof(struct sound_trigger_phrase_recognition_event);
         break;
     case SOUND_MODEL_TYPE_GENERIC:
         ALOGW_IF(event->data_size != 0 && event->data_offset !=
                     sizeof(struct sound_trigger_generic_recognition_event),
-                    "prepareRecognitionEvent_l(): invalid data offset %u for generic event type",
+                    "prepareRecognitionEvent(): invalid data offset %u for generic event type",
                     event->data_offset);
         event->data_offset = sizeof(struct sound_trigger_generic_recognition_event);
         break;
     case SOUND_MODEL_TYPE_UNKNOWN:
         ALOGW_IF(event->data_size != 0 && event->data_offset !=
                     sizeof(struct sound_trigger_recognition_event),
-                    "prepareRecognitionEvent_l(): invalid data offset %u for unknown event type",
+                    "prepareRecognitionEvent(): invalid data offset %u for unknown event type",
                     event->data_offset);
         event->data_offset = sizeof(struct sound_trigger_recognition_event);
         break;
@@ -251,30 +252,19 @@
 
 void SoundTriggerHwService::sendRecognitionEvent(struct sound_trigger_recognition_event *event,
                                                  Module *module)
- {
-     AutoMutex lock(mServiceLock);
-     if (module == NULL) {
-         return;
-     }
-     sp<IMemory> eventMemory = prepareRecognitionEvent_l(event);
-     if (eventMemory == 0) {
-         return;
-     }
-     sp<Module> strongModule;
-     for (size_t i = 0; i < mModules.size(); i++) {
-         if (mModules.valueAt(i).get() == module) {
-             strongModule = mModules.valueAt(i);
-             break;
-         }
-     }
-     if (strongModule == 0) {
-         return;
-     }
+{
+    if (module == NULL) {
+        return;
+    }
+    sp<IMemory> eventMemory = prepareRecognitionEvent(event);
+    if (eventMemory == 0) {
+        return;
+    }
 
     sp<CallbackEvent> callbackEvent = new CallbackEvent(CallbackEvent::TYPE_RECOGNITION,
                                                         eventMemory);
-    callbackEvent->setModule(strongModule);
-    sendCallbackEvent_l(callbackEvent);
+    callbackEvent->setModule(module);
+    sendCallbackEvent(callbackEvent);
 }
 
 // static
@@ -293,8 +283,9 @@
     service->sendSoundModelEvent(event, module);
 }
 
-sp<IMemory> SoundTriggerHwService::prepareSoundModelEvent_l(struct sound_trigger_model_event *event)
+sp<IMemory> SoundTriggerHwService::prepareSoundModelEvent(struct sound_trigger_model_event *event)
 {
+    AutoMutex lock(mMemoryDealerLock);
     sp<IMemory> eventMemory;
 
     size_t size = event->data_offset + event->data_size;
@@ -311,30 +302,20 @@
 void SoundTriggerHwService::sendSoundModelEvent(struct sound_trigger_model_event *event,
                                                 Module *module)
 {
-    AutoMutex lock(mServiceLock);
-    sp<IMemory> eventMemory = prepareSoundModelEvent_l(event);
+    sp<IMemory> eventMemory = prepareSoundModelEvent(event);
     if (eventMemory == 0) {
         return;
     }
-    sp<Module> strongModule;
-    for (size_t i = 0; i < mModules.size(); i++) {
-        if (mModules.valueAt(i).get() == module) {
-            strongModule = mModules.valueAt(i);
-            break;
-        }
-    }
-    if (strongModule == 0) {
-        return;
-    }
     sp<CallbackEvent> callbackEvent = new CallbackEvent(CallbackEvent::TYPE_SOUNDMODEL,
                                                         eventMemory);
-    callbackEvent->setModule(strongModule);
-    sendCallbackEvent_l(callbackEvent);
+    callbackEvent->setModule(module);
+    sendCallbackEvent(callbackEvent);
 }
 
 
-sp<IMemory> SoundTriggerHwService::prepareServiceStateEvent_l(sound_trigger_service_state_t state)
+sp<IMemory> SoundTriggerHwService::prepareServiceStateEvent(sound_trigger_service_state_t state)
 {
+    AutoMutex lock(mMemoryDealerLock);
     sp<IMemory> eventMemory;
 
     size_t size = sizeof(sound_trigger_service_state_t);
@@ -347,45 +328,33 @@
     return eventMemory;
 }
 
-// call with mServiceLock held
-void SoundTriggerHwService::sendServiceStateEvent_l(sound_trigger_service_state_t state,
+void SoundTriggerHwService::sendServiceStateEvent(sound_trigger_service_state_t state,
                                                   Module *module)
 {
-    sp<IMemory> eventMemory = prepareServiceStateEvent_l(state);
+    sp<IMemory> eventMemory = prepareServiceStateEvent(state);
     if (eventMemory == 0) {
         return;
     }
-    sp<Module> strongModule;
-    for (size_t i = 0; i < mModules.size(); i++) {
-        if (mModules.valueAt(i).get() == module) {
-            strongModule = mModules.valueAt(i);
-            break;
-        }
-    }
-    if (strongModule == 0) {
-        return;
-    }
     sp<CallbackEvent> callbackEvent = new CallbackEvent(CallbackEvent::TYPE_SERVICE_STATE,
                                                         eventMemory);
-    callbackEvent->setModule(strongModule);
-    sendCallbackEvent_l(callbackEvent);
+    callbackEvent->setModule(module);
+    sendCallbackEvent(callbackEvent);
 }
 
-void SoundTriggerHwService::sendServiceStateEvent_l(sound_trigger_service_state_t state,
-                                                    ModuleClient *moduleClient)
+void SoundTriggerHwService::sendServiceStateEvent(sound_trigger_service_state_t state,
+                                                  ModuleClient *moduleClient)
 {
-    sp<IMemory> eventMemory = prepareServiceStateEvent_l(state);
+    sp<IMemory> eventMemory = prepareServiceStateEvent(state);
     if (eventMemory == 0) {
         return;
     }
     sp<CallbackEvent> callbackEvent = new CallbackEvent(CallbackEvent::TYPE_SERVICE_STATE,
                                                         eventMemory);
     callbackEvent->setModuleClient(moduleClient);
-    sendCallbackEvent_l(callbackEvent);
+    sendCallbackEvent(callbackEvent);
 }
 
-// call with mServiceLock held
-void SoundTriggerHwService::sendCallbackEvent_l(const sp<CallbackEvent>& event)
+void SoundTriggerHwService::sendCallbackEvent(const sp<CallbackEvent>& event)
 {
     mCallbackThread->sendCallbackEvent(event);
 }
@@ -404,6 +373,19 @@
             if (moduleClient == 0) {
                 return;
             }
+        } else {
+            // Sanity check on this being a Module we know about.
+            bool foundModule = false;
+            for (size_t i = 0; i < mModules.size(); i++) {
+                if (mModules.valueAt(i).get() == module.get()) {
+                    foundModule = true;
+                    break;
+                }
+            }
+            if (!foundModule) {
+                ALOGE("onCallbackEvent for unknown module");
+                return;
+            }
         }
     }
     if (module != 0) {
@@ -757,11 +739,12 @@
         return;
     }
 
+    Vector< sp<ModuleClient> > clients;
+
     switch (event->mType) {
     case CallbackEvent::TYPE_RECOGNITION: {
         struct sound_trigger_recognition_event *recognitionEvent =
                 (struct sound_trigger_recognition_event *)eventMemory->pointer();
-        sp<ISoundTriggerClient> client;
         {
             AutoMutex lock(mLock);
             sp<Model> model = getModel(recognitionEvent->model);
@@ -776,16 +759,12 @@
 
             recognitionEvent->capture_session = model->mCaptureSession;
             model->mState = Model::STATE_IDLE;
-            client = model->mModuleClient->client();
-        }
-        if (client != 0) {
-            client->onRecognitionEvent(eventMemory);
+            clients.add(model->mModuleClient);
         }
     } break;
     case CallbackEvent::TYPE_SOUNDMODEL: {
         struct sound_trigger_model_event *soundmodelEvent =
                 (struct sound_trigger_model_event *)eventMemory->pointer();
-        sp<ISoundTriggerClient> client;
         {
             AutoMutex lock(mLock);
             sp<Model> model = getModel(soundmodelEvent->model);
@@ -793,29 +772,26 @@
                 ALOGW("%s model == 0", __func__);
                 return;
             }
-            client = model->mModuleClient->client();
-        }
-        if (client != 0) {
-            client->onSoundModelEvent(eventMemory);
+            clients.add(model->mModuleClient);
         }
     } break;
     case CallbackEvent::TYPE_SERVICE_STATE: {
-        Vector< sp<ISoundTriggerClient> > clients;
         {
             AutoMutex lock(mLock);
             for (size_t i = 0; i < mModuleClients.size(); i++) {
                 if (mModuleClients[i] != 0) {
-                    clients.add(mModuleClients[i]->client());
+                    clients.add(mModuleClients[i]);
                 }
             }
         }
-        for (size_t i = 0; i < clients.size(); i++) {
-            clients[i]->onServiceStateChange(eventMemory);
-        }
     } break;
     default:
         LOG_ALWAYS_FATAL("onCallbackEvent unknown event type %d", event->mType);
     }
+
+    for (size_t i = 0; i < clients.size(); i++) {
+        clients[i]->onCallbackEvent(event);
+    }
 }
 
 sp<SoundTriggerHwService::Model> SoundTriggerHwService::Module::getModel(
@@ -878,7 +854,7 @@
                     event.common.type = model->mType;
                     event.common.model = model->mHandle;
                     event.common.data_size = 0;
-                    sp<IMemory> eventMemory = service->prepareRecognitionEvent_l(&event.common);
+                    sp<IMemory> eventMemory = service->prepareRecognitionEvent(&event.common);
                     if (eventMemory != 0) {
                         events.add(eventMemory);
                     }
@@ -889,7 +865,7 @@
                     event.common.type = model->mType;
                     event.common.model = model->mHandle;
                     event.common.data_size = 0;
-                    sp<IMemory> eventMemory = service->prepareRecognitionEvent_l(&event.common);
+                    sp<IMemory> eventMemory = service->prepareRecognitionEvent(&event.common);
                     if (eventMemory != 0) {
                         events.add(eventMemory);
                     }
@@ -900,7 +876,7 @@
                     event.common.type = model->mType;
                     event.common.model = model->mHandle;
                     event.common.data_size = 0;
-                    sp<IMemory> eventMemory = service->prepareRecognitionEvent_l(&event.common);
+                    sp<IMemory> eventMemory = service->prepareRecognitionEvent(&event.common);
                     if (eventMemory != 0) {
                         events.add(eventMemory);
                     }
@@ -915,11 +891,11 @@
         sp<CallbackEvent> callbackEvent = new CallbackEvent(CallbackEvent::TYPE_RECOGNITION,
                                                             events[i]);
         callbackEvent->setModule(this);
-        service->sendCallbackEvent_l(callbackEvent);
+        service->sendCallbackEvent(callbackEvent);
     }
 
 exit:
-    service->sendServiceStateEvent_l(state, this);
+    service->sendServiceStateEvent(state, this);
 }
 
 
@@ -1064,7 +1040,7 @@
             return;
         }
     }
-    service->sendServiceStateEvent_l(state, this);
+    service->sendServiceStateEvent(state, this);
 }
 
 void SoundTriggerHwService::ModuleClient::onCallbackEvent(const sp<CallbackEvent>& event)
@@ -1077,19 +1053,26 @@
         return;
     }
 
-    switch (event->mType) {
-    case CallbackEvent::TYPE_SERVICE_STATE: {
-        sp<ISoundTriggerClient> client;
-        {
-            AutoMutex lock(mLock);
-            client = mClient;
-        }
-        if (client !=0 ) {
+    sp<ISoundTriggerClient> client;
+    {
+        AutoMutex lock(mLock);
+        client = mClient;
+    }
+
+    if (client != 0) {
+        switch (event->mType) {
+        case CallbackEvent::TYPE_RECOGNITION: {
+            client->onRecognitionEvent(eventMemory);
+        } break;
+        case CallbackEvent::TYPE_SOUNDMODEL: {
+            client->onSoundModelEvent(eventMemory);
+        } break;
+        case CallbackEvent::TYPE_SERVICE_STATE: {
             client->onServiceStateChange(eventMemory);
+        } break;
+        default:
+            LOG_ALWAYS_FATAL("onCallbackEvent unknown event type %d", event->mType);
         }
-    } break;
-    default:
-        LOG_ALWAYS_FATAL("onCallbackEvent unknown event type %d", event->mType);
     }
 }
 
diff --git a/services/soundtrigger/SoundTriggerHwService.h b/services/soundtrigger/SoundTriggerHwService.h
index 95efc4b..708fc98 100644
--- a/services/soundtrigger/SoundTriggerHwService.h
+++ b/services/soundtrigger/SoundTriggerHwService.h
@@ -214,19 +214,19 @@
     };
 
     static void recognitionCallback(struct sound_trigger_recognition_event *event, void *cookie);
-           sp<IMemory> prepareRecognitionEvent_l(struct sound_trigger_recognition_event *event);
+           sp<IMemory> prepareRecognitionEvent(struct sound_trigger_recognition_event *event);
            void sendRecognitionEvent(struct sound_trigger_recognition_event *event, Module *module);
 
     static void soundModelCallback(struct sound_trigger_model_event *event, void *cookie);
-           sp<IMemory> prepareSoundModelEvent_l(struct sound_trigger_model_event *event);
+           sp<IMemory> prepareSoundModelEvent(struct sound_trigger_model_event *event);
            void sendSoundModelEvent(struct sound_trigger_model_event *event, Module *module);
 
-           sp<IMemory> prepareServiceStateEvent_l(sound_trigger_service_state_t state);
-           void sendServiceStateEvent_l(sound_trigger_service_state_t state, Module *module);
-           void sendServiceStateEvent_l(sound_trigger_service_state_t state,
-                                        ModuleClient *moduleClient);
+           sp<IMemory> prepareServiceStateEvent(sound_trigger_service_state_t state);
+           void sendServiceStateEvent(sound_trigger_service_state_t state, Module *module);
+           void sendServiceStateEvent(sound_trigger_service_state_t state,
+                                      ModuleClient *moduleClient);
 
-           void sendCallbackEvent_l(const sp<CallbackEvent>& event);
+           void sendCallbackEvent(const sp<CallbackEvent>& event);
            void onCallbackEvent(const sp<CallbackEvent>& event);
 
 private:
@@ -238,6 +238,7 @@
     DefaultKeyedVector< sound_trigger_module_handle_t, sp<Module> >     mModules;
     sp<CallbackThread>  mCallbackThread;
     sp<MemoryDealer>    mMemoryDealer;
+    Mutex               mMemoryDealerLock;
     bool                mCaptureState;
 };
 
diff --git a/tools/OWNERS b/tools/OWNERS
index 6dcb035..f9cb567 100644
--- a/tools/OWNERS
+++ b/tools/OWNERS
@@ -1 +1 @@
-gkasten@android.com
+gkasten@google.com
diff --git a/tools/resampler_tools/OWNERS b/tools/resampler_tools/OWNERS
new file mode 100644
index 0000000..b4a6798
--- /dev/null
+++ b/tools/resampler_tools/OWNERS
@@ -0,0 +1 @@
+hunga@google.com