Merge "Implement getPropertyByteArray and setPropertyByteArray."
diff --git a/camera/Android.bp b/camera/Android.bp
index c76ae50..24b3918 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -29,12 +29,7 @@
         // AIDL files for camera interfaces
         // The headers for these interfaces will be available to any modules that
         // include libcamera_client, at the path "aidl/package/path/BnFoo.h"
-        "aidl/android/hardware/ICameraService.aidl",
-        "aidl/android/hardware/ICameraServiceListener.aidl",
-        "aidl/android/hardware/ICameraServiceProxy.aidl",
-        "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
-        "aidl/android/hardware/camera2/ICameraDeviceUser.aidl",
-
+        ":libcamera_client_aidl",
 
         // Source for camera interface parcelables, and manually-written interfaces
         "Camera.cpp",
@@ -81,3 +76,25 @@
     ],
 
 }
+
+// AIDL interface between camera clients and the camera service.
+filegroup {
+    name: "libcamera_client_aidl",
+    srcs: [
+        "aidl/android/hardware/ICameraService.aidl",
+        "aidl/android/hardware/ICameraServiceListener.aidl",
+        "aidl/android/hardware/ICameraServiceProxy.aidl",
+        "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
+        "aidl/android/hardware/camera2/ICameraDeviceUser.aidl",
+    ],
+}
+
+// Extra AIDL files that are used by framework.jar but not libcamera_client
+// because they have hand-written native implementations.
+filegroup {
+    name: "libcamera_client_framework_aidl",
+    srcs: [
+        "aidl/android/hardware/ICamera.aidl",
+        "aidl/android/hardware/ICameraClient.aidl",
+    ],
+}
diff --git a/camera/include/camera/camera2/CaptureRequest.h b/camera/include/camera/camera2/CaptureRequest.h
index 0180183..e39dfcf 100644
--- a/camera/include/camera/camera2/CaptureRequest.h
+++ b/camera/include/camera/camera2/CaptureRequest.h
@@ -43,6 +43,7 @@
     CameraMetadata          mMetadata;
     Vector<sp<Surface> >    mSurfaceList;
     bool                    mIsReprocess;
+    void*                   mContext; // arbitrary user context from NDK apps, null for Java apps
 
     /**
      * Keep impl up-to-date with CaptureRequest.java in frameworks/base
diff --git a/camera/ndk/NdkCaptureRequest.cpp b/camera/ndk/NdkCaptureRequest.cpp
index 5b4c180..ac1856b 100644
--- a/camera/ndk/NdkCaptureRequest.cpp
+++ b/camera/ndk/NdkCaptureRequest.cpp
@@ -142,3 +142,40 @@
     delete request;
     return;
 }
+
+EXPORT
+camera_status_t ACaptureRequest_setUserContext(
+        ACaptureRequest* request, void* context) {
+    if (request == nullptr) {
+        ALOGE("%s: invalid argument! request is NULL", __FUNCTION__);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return request->setContext(context);
+}
+
+EXPORT
+camera_status_t ACaptureRequest_getUserContext(
+        const ACaptureRequest* request, /*out*/void** context) {
+    if (request == nullptr || context == nullptr) {
+        ALOGE("%s: invalid argument! request %p, context %p",
+                __FUNCTION__, request, context);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return request->getContext(context);
+}
+
+EXPORT
+ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src) {
+    ATRACE_CALL();
+    if (src == nullptr) {
+        ALOGE("%s: src is null!", __FUNCTION__);
+        return nullptr;
+    }
+
+    ACaptureRequest* pRequest = new ACaptureRequest();
+    pRequest->settings = new ACameraMetadata(*(src->settings));
+    pRequest->targets  = new ACameraOutputTargets();
+    *(pRequest->targets)  = *(src->targets);
+    pRequest->context = src->context;
+    return pRequest;
+}
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 9ce0ac8..45fa28e 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -372,6 +372,7 @@
     sp<CaptureRequest> req(new CaptureRequest());
     req->mMetadata = request->settings->getInternalData();
     req->mIsReprocess = false; // NDK does not support reprocessing yet
+    req->mContext = request->context;
 
     for (auto outputTarget : request->targets->mOutputs) {
         ANativeWindow* anw = outputTarget.mWindow;
@@ -398,6 +399,7 @@
         ACameraOutputTarget outputTarget(anw);
         pRequest->targets->mOutputs.insert(outputTarget);
     }
+    pRequest->context = req->mContext;
     return pRequest;
 }
 
diff --git a/camera/ndk/impl/ACaptureRequest.h b/camera/ndk/impl/ACaptureRequest.h
index e5b453e..06b2cc3 100644
--- a/camera/ndk/impl/ACaptureRequest.h
+++ b/camera/ndk/impl/ACaptureRequest.h
@@ -45,8 +45,19 @@
 };
 
 struct ACaptureRequest {
+    camera_status_t setContext(void* ctx) {
+        context = ctx;
+        return ACAMERA_OK;
+    }
+
+    camera_status_t getContext(void** ctx) const {
+        *ctx = context;
+        return ACAMERA_OK;
+    }
+
     ACameraMetadata*      settings;
     ACameraOutputTargets* targets;
+    void*                 context;
 };
 
 #endif // _ACAPTURE_REQUEST_H
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index c62ba2c..4961ce3 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -305,6 +305,58 @@
 
 #endif /* __ANDROID_API__ >= 24 */
 
+#if __ANDROID_API__ >= 28
+
+/**
+ * Associate an arbitrary user context pointer with the {@link ACaptureRequest}.
+ *
+ * This method is useful for the application to identify the capture request in capture session
+ * callbacks. The context is NULL for a newly created request.
+ * {@link ACaptureRequest_free} will not free the context. Calling this method twice will not
+ * cause the previously set context to be freed.
+ * Also note that calling this method after the request has been sent to the capture session will
+ * not change the context pointer in the capture callbacks.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param context the user context pointer to be associated with this capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL.</li></ul>
+ */
+camera_status_t ACaptureRequest_setUserContext(
+        ACaptureRequest* request, void* context);
+
+/**
+ * Get the user context pointer of the {@link ACaptureRequest}
+ *
+ * This method is useful for the application to identify the capture request in capture session
+ * callbacks. The context is NULL for a newly created request.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param context the user context pointer of this capture request.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL.</li></ul>
+ */
+camera_status_t ACaptureRequest_getUserContext(
+        const ACaptureRequest* request, /*out*/void** context);
+
+/**
+ * Create a copy of input {@link ACaptureRequest}.
+ *
+ * <p>The returned ACaptureRequest must be freed by the application with {@link ACaptureRequest_free}
+ * after the application is done using it.</p>
+ *
+ * @param src the input {@link ACaptureRequest} to be copied.
+ *
+ * @return a valid ACaptureRequest pointer or NULL if the input request cannot be copied.
+ */
+ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src);
+
+#endif /* __ANDROID_API__ >= 28 */
+
 __END_DECLS
 
 #endif /* _NDK_CAPTURE_REQUEST_H */
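A minimal usage sketch of the user-context and copy APIs declared above; this is illustrative only
and not part of the patch. The RequestState struct and tagAndDuplicate() helper are hypothetical
names, and the request is assumed to come from ACameraDevice_createCaptureRequest():

    #include <camera/NdkCaptureRequest.h>

    // Hypothetical per-request bookkeeping attached via the new user context.
    struct RequestState {
        int sequenceId;
    };

    void tagAndDuplicate(ACaptureRequest* request, RequestState* state) {
        // Attach arbitrary user data; the request does not own or free it.
        if (ACaptureRequest_setUserContext(request, state) != ACAMERA_OK) {
            return;
        }

        // Later (e.g. in a capture-session callback) the pointer can be recovered.
        void* context = nullptr;
        if (ACaptureRequest_getUserContext(request, &context) == ACAMERA_OK) {
            RequestState* recovered = static_cast<RequestState*>(context);
            (void) recovered;
        }

        // The copy gets its own settings and targets but carries the same context
        // pointer, and must be released with ACaptureRequest_free().
        ACaptureRequest* clone = ACaptureRequest_copy(request);
        if (clone != nullptr) {
            ACaptureRequest_free(clone);
        }
    }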
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 0a8a6e9..58d239b 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -26,9 +26,11 @@
     ACameraOutputTarget_create;
     ACameraOutputTarget_free;
     ACaptureRequest_addTarget;
+    ACaptureRequest_copy;
     ACaptureRequest_free;
     ACaptureRequest_getAllTags;
     ACaptureRequest_getConstEntry;
+    ACaptureRequest_getUserContext;
     ACaptureRequest_removeTarget;
     ACaptureRequest_setEntry_double;
     ACaptureRequest_setEntry_float;
@@ -36,6 +38,7 @@
     ACaptureRequest_setEntry_i64;
     ACaptureRequest_setEntry_rational;
     ACaptureRequest_setEntry_u8;
+    ACaptureRequest_setUserContext;
     ACaptureSessionOutputContainer_add;
     ACaptureSessionOutputContainer_create;
     ACaptureSessionOutputContainer_free;
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index ddc4b16..44ed034 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -80,6 +80,7 @@
 static bool gDisplayHistogram;
 static bool showProgress = true;
 static String8 gWriteMP4Filename;
+static String8 gComponentNameOverride;
 
 static sp<ANativeWindow> gSurface;
 
@@ -193,7 +194,10 @@
             CHECK(!gPreferSoftwareCodec);
             flags |= MediaCodecList::kHardwareCodecsOnly;
         }
-        rawSource = SimpleDecodingSource::Create(source, flags, gSurface);
+        rawSource = SimpleDecodingSource::Create(
+                source, flags, gSurface,
+                gComponentNameOverride.isEmpty() ? nullptr : gComponentNameOverride.c_str(),
+                !gComponentNameOverride.isEmpty());
         if (rawSource == NULL) {
             return;
         }
@@ -618,6 +622,7 @@
     fprintf(stderr, "       -o playback audio\n");
     fprintf(stderr, "       -w(rite) filename (write to .mp4 file)\n");
     fprintf(stderr, "       -k seek test\n");
+    fprintf(stderr, "       -O(verride) name of the component\n");
     fprintf(stderr, "       -x display a histogram of decoding times/fps "
                     "(video only)\n");
     fprintf(stderr, "       -q don't show progress indicator\n");
@@ -703,7 +708,7 @@
     sp<ALooper> looper;
 
     int res;
-    while ((res = getopt(argc, argv, "haqn:lm:b:ptsrow:kxSTd:D:")) >= 0) {
+    while ((res = getopt(argc, argv, "haqn:lm:b:ptsrow:kO:xSTd:D:")) >= 0) {
         switch (res) {
             case 'a':
             {
@@ -732,6 +737,12 @@
                 break;
             }
 
+            case 'O':
+            {
+                gComponentNameOverride.setTo(optarg);
+                break;
+            }
+
             case 'l':
             {
                 listComponents = true;
@@ -1073,7 +1084,7 @@
                             i, MediaExtractor::kIncludeExtensiveMetaData);
 
                     if (meta == NULL) {
-                        break;
+                        continue;
                     }
                     const char *mime;
                     meta->findCString(kKeyMIMEType, &mime);
diff --git a/include/media/AudioClient.h b/include/media/AudioClient.h
deleted file mode 100644
index 9efd76d..0000000
--- a/include/media/AudioClient.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-#ifndef ANDROID_AUDIO_CLIENT_H
-#define ANDROID_AUDIO_CLIENT_H
-
-#include <system/audio.h>
-#include <utils/String16.h>
-
-namespace android {
-
-class AudioClient {
- public:
-    AudioClient() :
-        clientUid(-1), clientPid(-1), packageName("") {}
-
-    uid_t clientUid;
-    pid_t clientPid;
-    String16 packageName;
-};
-
-}; // namespace android
-
-#endif  // ANDROID_AUDIO_CLIENT_H
diff --git a/include/media/AudioClient.h b/include/media/AudioClient.h
new file mode 120000
index 0000000..feac9b9
--- /dev/null
+++ b/include/media/AudioClient.h
@@ -0,0 +1 @@
+media/libaudioclient/include/media/AudioClient.h
\ No newline at end of file
diff --git a/media/extractors/mp3/MP3Extractor.cpp b/media/extractors/mp3/MP3Extractor.cpp
index 2731f0f..f26ed25 100644
--- a/media/extractors/mp3/MP3Extractor.cpp
+++ b/media/extractors/mp3/MP3Extractor.cpp
@@ -678,6 +678,15 @@
     off64_t pos = 0;
     off64_t post_id3_pos;
     uint32_t header;
+    uint8_t mpeg_header[5];
+    if (source->readAt(0, mpeg_header, sizeof(mpeg_header)) < (ssize_t)sizeof(mpeg_header)) {
+        return NULL;
+    }
+
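+    // 0x000001BA is the MPEG program stream pack start code; an upper nibble of
+    // 0b0010 in the byte that follows marks the stream as MPEG-1 PS rather than MPEG-2.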
+    if (!memcmp("\x00\x00\x01\xba", mpeg_header, 4) && (mpeg_header[4] >> 4) == 2) {
+        ALOGV("MPEG1PS container is not supported!");
+        return NULL;
+    }
     if (!Resync(source, 0, &pos, &post_id3_pos, &header)) {
         return NULL;
     }
diff --git a/media/extractors/mp4/ItemTable.cpp b/media/extractors/mp4/ItemTable.cpp
index 9a6cb64..85c66b2 100644
--- a/media/extractors/mp4/ItemTable.cpp
+++ b/media/extractors/mp4/ItemTable.cpp
@@ -1425,7 +1425,7 @@
     meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
 
     if (image->itemId == mPrimaryItemId) {
-        meta->setInt32(kKeyIsPrimaryImage, 1);
+        meta->setInt32(kKeyTrackIsDefault, 1);
     }
 
     ALOGV("image[%u]: size %dx%d", imageIndex, image->width, image->height);
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 6671956..b411125 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -347,7 +347,7 @@
       mHeaderTimescale(0),
       mIsQT(false),
       mIsHeif(false),
-      mIsHeifSequence(false),
+      mHasMoovBox(false),
       mPreferHeif(mime != NULL && !strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_HEIF)),
       mFirstTrack(NULL),
       mLastTrack(NULL),
@@ -563,9 +563,9 @@
     status_t err;
     bool sawMoovOrSidx = false;
 
-    while (!((!mIsHeif && sawMoovOrSidx && (mMdatFound || mMoofFound)) ||
-             (mIsHeif && (mPreferHeif || !mIsHeifSequence)
-                     && (mItemTable != NULL) && mItemTable->isValid()))) {
+    while (!((mHasMoovBox && sawMoovOrSidx && (mMdatFound || mMoofFound)) ||
+             (mIsHeif && (mPreferHeif || !mHasMoovBox) &&
+                     (mItemTable != NULL) && mItemTable->isValid()))) {
         off64_t orig_offset = offset;
         err = parseChunk(&offset, 0);
 
@@ -582,34 +582,30 @@
         }
     }
 
-    if (mIsHeif) {
-        uint32_t imageCount = mItemTable->countImages();
-        if (imageCount == 0) {
-            ALOGE("found no image in heif!");
-        } else {
-            for (uint32_t imageIndex = 0; imageIndex < imageCount; imageIndex++) {
-                sp<MetaData> meta = mItemTable->getImageMeta(imageIndex);
-                if (meta == NULL) {
-                    ALOGE("heif image %u has no meta!", imageIndex);
-                    continue;
-                }
-
-                ALOGV("adding HEIF image track %u", imageIndex);
-                Track *track = new Track;
-                track->next = NULL;
-                if (mLastTrack != NULL) {
-                    mLastTrack->next = track;
-                } else {
-                    mFirstTrack = track;
-                }
-                mLastTrack = track;
-
-                track->meta = meta;
-                track->meta->setInt32(kKeyTrackID, imageIndex);
-                track->includes_expensive_metadata = false;
-                track->skipTrack = false;
-                track->timescale = 0;
+    if (mIsHeif && (mItemTable != NULL) && (mItemTable->countImages() > 0)) {
+        for (uint32_t imageIndex = 0;
+                imageIndex < mItemTable->countImages(); imageIndex++) {
+            sp<MetaData> meta = mItemTable->getImageMeta(imageIndex);
+            if (meta == NULL) {
+                ALOGE("heif image %u has no meta!", imageIndex);
+                continue;
             }
+
+            ALOGV("adding HEIF image track %u", imageIndex);
+            Track *track = new Track;
+            track->next = NULL;
+            if (mLastTrack != NULL) {
+                mLastTrack->next = track;
+            } else {
+                mFirstTrack = track;
+            }
+            mLastTrack = track;
+
+            track->meta = meta;
+            track->meta->setInt32(kKeyTrackID, imageIndex);
+            track->includes_expensive_metadata = false;
+            track->skipTrack = false;
+            track->timescale = 0;
         }
     }
 
@@ -2512,13 +2508,18 @@
             } else {
                 if (brandSet.count(FOURCC('m', 'i', 'f', '1')) > 0
                  && brandSet.count(FOURCC('h', 'e', 'i', 'c')) > 0) {
-                    mIsHeif = true;
                     ALOGV("identified HEIF image");
+
+                    mIsHeif = true;
+                    brandSet.erase(FOURCC('m', 'i', 'f', '1'));
+                    brandSet.erase(FOURCC('h', 'e', 'i', 'c'));
                 }
-                if (brandSet.count(FOURCC('m', 's', 'f', '1')) > 0
-                 && brandSet.count(FOURCC('h', 'e', 'v', 'c')) > 0) {
-                    mIsHeifSequence = true;
-                    ALOGV("identified HEIF image sequence");
+
+                if (!brandSet.empty()) {
+                    // This means that the file should have a moov box.
+                    // It could be any ISO-based file (mp4, heif, etc.).
+                    mHasMoovBox = true;
+                    ALOGV("identified HEIF image with other tracks");
                 }
             }
 
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index d4f17e3..76b549d 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -104,7 +104,7 @@
     uint32_t mHeaderTimescale;
     bool mIsQT;
     bool mIsHeif;
-    bool mIsHeifSequence;
+    bool mHasMoovBox;
     bool mPreferHeif;
 
     Track *mFirstTrack, *mLastTrack;
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
index 2c3418d..d8c5843 100644
--- a/media/libaaudio/examples/input_monitor/Android.bp
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -2,6 +2,7 @@
     name: "input_monitor",
     gtest: false,
     srcs: ["src/input_monitor.cpp"],
+    cflags: ["-Wall", "-Werror"],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
@@ -10,6 +11,7 @@
     name: "input_monitor_callback",
     gtest: false,
     srcs: ["src/input_monitor_callback.cpp"],
+    cflags: ["-Wall", "-Werror"],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index 2b624a8..fa8fdc9 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -2,6 +2,7 @@
     name: "aaudio_loopback",
     gtest: false,
     srcs: ["src/loopback.cpp"],
+    cflags: ["-Wall", "-Werror"],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/examples/utils/AAudioSimplePlayer.h b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
index 1061e42..3fafecf 100644
--- a/media/libaaudio/examples/utils/AAudioSimplePlayer.h
+++ b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
@@ -30,8 +30,8 @@
 #define SHARING_MODE  AAUDIO_SHARING_MODE_SHARED
 #define PERFORMANCE_MODE AAUDIO_PERFORMANCE_MODE_NONE
 
-// Arbitrary period for glitches, once per second at 48000 Hz.
-#define FORCED_UNDERRUN_PERIOD_FRAMES    48000
+// Arbitrary period for glitches
+#define FORCED_UNDERRUN_PERIOD_FRAMES    (2 * 48000)
 // How long to sleep in a callback to cause an intentional glitch. For testing.
 #define FORCED_UNDERRUN_SLEEP_MICROS     (10 * 1000)
 
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
index f162e85..aa25e67 100644
--- a/media/libaaudio/examples/write_sine/Android.bp
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -1,6 +1,7 @@
 cc_test {
     name: "write_sine",
     srcs: ["src/write_sine.cpp"],
+    cflags: ["-Wall", "-Werror"],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
@@ -8,6 +9,7 @@
 cc_test {
     name: "write_sine_callback",
     srcs: ["src/write_sine_callback.cpp"],
+    cflags: ["-Wall", "-Werror"],
     shared_libs: ["libaaudio"],
     header_libs: ["libaaudio_example_utils"],
 }
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index c2dd7af..5d41fd0 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -28,6 +28,7 @@
 #include <aaudio/AAudio.h>
 #include "AAudioExampleUtils.h"
 #include "AAudioSimplePlayer.h"
+#include "AAudioArgsParser.h"
 
 /**
  * Open stream, play some sine waves, then close the stream.
@@ -37,7 +38,8 @@
  */
 static aaudio_result_t testOpenPlayClose(AAudioArgsParser &argParser,
                                          int32_t loopCount,
-                                         int32_t prefixToneMsec)
+                                         int32_t prefixToneMsec,
+                                         bool forceUnderruns)
 {
     SineThreadedData_t myData;
     AAudioSimplePlayer &player = myData.simplePlayer;
@@ -49,8 +51,7 @@
     printf("----------------------- run complete test --------------------------\n");
     myData.schedulerChecked = false;
     myData.callbackCount = 0;
-    // TODO add a command line option for the forceUnderruns
-    myData.forceUnderruns = false; // set true to test AAudioStream_getXRunCount()
+    myData.forceUnderruns = forceUnderruns; // test AAudioStream_getXRunCount()
 
     result = player.open(argParser,
                          SimplePlayerDataCallbackProc, SimplePlayerErrorCallbackProc, &myData);
@@ -202,7 +203,8 @@
 static void usage() {
     AAudioArgsParser::usage();
     printf("      -l{count} loopCount start/stop, every other one is silent\n");
-    printf("      -t{msec} play a high pitched tone at the beginning\n");
+    printf("      -t{msec}  play a high pitched tone at the beginning\n");
+    printf("      -u        force periodic Underruns by sleeping in callback\n");
 }
 
 int main(int argc, const char **argv)
@@ -211,6 +213,7 @@
     aaudio_result_t    result;
     int32_t            loopCount = 1;
     int32_t            prefixToneMsec = 0;
+    bool               forceUnderruns = false;
 
     // Make printf print immediately so that debug info is not stuck
     // in a buffer if we hang or crash.
@@ -231,6 +234,9 @@
                     case 't':
                         prefixToneMsec = atoi(&arg[2]);
                         break;
+                    case 'u':
+                        forceUnderruns = true;
+                        break;
                     default:
                         usage();
                         exit(EXIT_FAILURE);
@@ -245,7 +251,7 @@
     }
 
     // Keep looping until we can complete the test without disconnecting.
-    while((result = testOpenPlayClose(argParser, loopCount, prefixToneMsec))
+    while((result = testOpenPlayClose(argParser, loopCount, prefixToneMsec, forceUnderruns))
             == AAUDIO_ERROR_DISCONNECTED);
 
     return (result) ? EXIT_FAILURE : EXIT_SUCCESS;
diff --git a/media/libaaudio/src/binding/AAudioServiceMessage.h b/media/libaaudio/src/binding/AAudioServiceMessage.h
index 54e8001..9779f24 100644
--- a/media/libaaudio/src/binding/AAudioServiceMessage.h
+++ b/media/libaaudio/src/binding/AAudioServiceMessage.h
@@ -38,13 +38,16 @@
     AAUDIO_SERVICE_EVENT_FLUSHED,
     AAUDIO_SERVICE_EVENT_CLOSED,
     AAUDIO_SERVICE_EVENT_DISCONNECTED,
-    AAUDIO_SERVICE_EVENT_VOLUME
+    AAUDIO_SERVICE_EVENT_VOLUME,
+    AAUDIO_SERVICE_EVENT_XRUN
 } aaudio_service_event_t;
 
 struct AAudioMessageEvent {
     aaudio_service_event_t event;
-    double                 dataDouble;
-    int64_t                dataLong;
+    union {
+        double  dataDouble;
+        int64_t dataLong;
+    };
 };
 
 typedef struct AAudioServiceMessage_s {
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 1944d5b..b7b4b5c 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -61,7 +61,6 @@
         , mClockModel()
         , mAudioEndpoint()
         , mServiceStreamHandle(AAUDIO_HANDLE_INVALID)
-        , mFramesPerBurst(16)
         , mInService(inService)
         , mServiceInterface(serviceInterface)
         , mAtomicTimestamp()
@@ -79,6 +78,7 @@
 
     aaudio_result_t result = AAUDIO_OK;
     int32_t capacity;
+    int32_t framesPerBurst;
     AAudioStreamRequest request;
     AAudioStreamConfiguration configurationOutput;
 
@@ -151,16 +151,18 @@
         goto error;
     }
 
-    mFramesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
-    capacity = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
 
     // Validate result from server.
-    if (mFramesPerBurst < 16 || mFramesPerBurst > 16 * 1024) {
-        ALOGE("%s - framesPerBurst out of range = %d", __func__, mFramesPerBurst);
+    framesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+    if (framesPerBurst < MIN_FRAMES_PER_BURST || framesPerBurst > MAX_FRAMES_PER_BURST) {
+        ALOGE("%s - framesPerBurst out of range = %d", __func__, framesPerBurst);
         result = AAUDIO_ERROR_OUT_OF_RANGE;
         goto error;
     }
-    if (capacity < mFramesPerBurst || capacity > 32 * 1024) {
+    mFramesPerBurst = framesPerBurst; // only save good value
+
+    capacity = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
+    if (capacity < mFramesPerBurst || capacity > MAX_BUFFER_CAPACITY_IN_FRAMES) {
         ALOGE("%s - bufferCapacity out of range = %d", __func__, capacity);
         result = AAUDIO_ERROR_OUT_OF_RANGE;
         goto error;
@@ -490,6 +492,9 @@
             doSetVolume();
             ALOGD("%s - AAUDIO_SERVICE_EVENT_VOLUME %lf", __func__, message->event.dataDouble);
             break;
+        case AAUDIO_SERVICE_EVENT_XRUN:
+            mXRunCount = static_cast<int32_t>(message->event.dataLong);
+            break;
         default:
             ALOGE("%s - Unrecognized event = %d", __func__, (int) message->event.event);
             break;
@@ -649,14 +654,29 @@
 }
 
 aaudio_result_t AudioStreamInternal::setBufferSize(int32_t requestedFrames) {
+    int32_t adjustedFrames = requestedFrames;
     int32_t actualFrames = 0;
-    // Round to the next highest burst size.
-    if (getFramesPerBurst() > 0) {
-        int32_t numBursts = (requestedFrames + getFramesPerBurst() - 1) / getFramesPerBurst();
-        requestedFrames = numBursts * getFramesPerBurst();
+    int32_t maximumSize = getBufferCapacity();
+
+    // Clip to minimum size so that rounding up will work better.
+    if (adjustedFrames < 1) {
+        adjustedFrames = 1;
     }
 
-    aaudio_result_t result = mAudioEndpoint.setBufferSizeInFrames(requestedFrames, &actualFrames);
+    if (adjustedFrames > maximumSize) {
+        // Clip to maximum size.
+        adjustedFrames = maximumSize;
+    } else {
+        // Round to the next highest burst size.
+        int32_t numBursts = (adjustedFrames + mFramesPerBurst - 1) / mFramesPerBurst;
+        adjustedFrames = numBursts * mFramesPerBurst;
+        // Rounding may have gone above maximum.
+        if (adjustedFrames > maximumSize) {
+            adjustedFrames = maximumSize;
+        }
+    }
+
+    aaudio_result_t result = mAudioEndpoint.setBufferSizeInFrames(adjustedFrames, &actualFrames);
     ALOGD("setBufferSize() req = %d => %d", requestedFrames, actualFrames);
     if (result < 0) {
         return result;
@@ -674,7 +694,7 @@
 }
 
 int32_t AudioStreamInternal::getFramesPerBurst() const {
-    return mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+    return mFramesPerBurst;
 }
 
 aaudio_result_t AudioStreamInternal::joinThread(void** returnArg) {
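For reference, the clamp-and-round policy implemented by the new setBufferSize() above boils down
to the following standalone helper; this is a sketch only, not code from the patch, and the
function name plus the example numbers (192-frame burst, 3072-frame capacity) are made up for
illustration:

    #include <cstdint>

    // Clamp a requested size to [1, capacity], then round up to a whole number of
    // bursts without exceeding the capacity (same arithmetic as setBufferSize()).
    int32_t roundBufferSizeToBurst(int32_t requested, int32_t framesPerBurst,
                                   int32_t capacity) {
        int32_t adjusted = requested < 1 ? 1 : requested;
        if (adjusted > capacity) {
            return capacity;
        }
        int32_t numBursts = (adjusted + framesPerBurst - 1) / framesPerBurst;
        adjusted = numBursts * framesPerBurst;
        return adjusted > capacity ? capacity : adjusted;
    }
    // e.g. roundBufferSizeToBurst(0, 192, 3072) == 192
    //      roundBufferSizeToBurst(3071, 192, 3072) == 3072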
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 47024c0..117756d 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -34,6 +34,12 @@
 
 namespace aaudio {
 
+    // These are intended to be outside the range of what is normally encountered.
+    // TODO MAXes should probably be much bigger.
+    constexpr int32_t MIN_FRAMES_PER_BURST = 16; // arbitrary
+    constexpr int32_t MAX_FRAMES_PER_BURST = 16 * 1024;  // arbitrary
+    constexpr int32_t MAX_BUFFER_CAPACITY_IN_FRAMES = 32 * 1024;  // arbitrary
+
 // A stream that talks to the AAudioService or directly to a HAL.
 class AudioStreamInternal : public AudioStream {
 
@@ -141,7 +147,7 @@
     AudioEndpoint            mAudioEndpoint;   // source for reads or sink for writes
     aaudio_handle_t          mServiceStreamHandle; // opaque handle returned from service
 
-    int32_t                  mFramesPerBurst;     // frames per HAL transfer
+    int32_t                  mFramesPerBurst = MIN_FRAMES_PER_BURST; // frames per HAL transfer
     int32_t                  mXRunCount = 0;      // how many underrun events?
 
     // Offset from underlying frame position.
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 77a481b..3e82a88 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -102,7 +102,8 @@
     }
 
     // If the write index passed the read index then consider it an overrun.
-    if (mAudioEndpoint.getEmptyFramesAvailable() < 0) {
+    // For shared streams, the xRunCount is passed up from the service.
+    if (mAudioEndpoint.isFreeRunning() && mAudioEndpoint.getEmptyFramesAvailable() < 0) {
         mXRunCount++;
         if (ATRACE_ENABLED()) {
             ATRACE_INT("aaOverRuns", mXRunCount);
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 1cf2c72..b49e08c 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -140,7 +140,8 @@
     }
 
     // If the read index passed the write index then consider it an underrun.
-    if (mAudioEndpoint.getFullFramesAvailable() < 0) {
+    // For shared streams, the xRunCount is passed up from the service.
+    if (mAudioEndpoint.isFreeRunning() && mAudioEndpoint.getFullFramesAvailable() < 0) {
         mXRunCount++;
         if (ATRACE_ENABLED()) {
             ATRACE_INT("aaUnderRuns", mXRunCount);
@@ -219,8 +220,7 @@
             // Data conversion.
             float levelFrom;
             float levelTo;
-            bool ramping = mVolumeRamp.nextSegment(framesToWrite * getSamplesPerFrame(),
-                                                   &levelFrom, &levelTo);
+            bool ramping = mVolumeRamp.nextSegment(framesToWrite, &levelFrom, &levelTo);
             // The formats are validated when the stream is opened so we do not have to
             // check for illegal combinations here.
             // TODO factor this out into a utility function
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 2450920..fc5830a 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -25,6 +25,7 @@
 
 #include "aaudio/AAudio.h"
 #include <aaudio/AAudioTesting.h>
+#include <math.h>
 
 #include "utility/AAudioUtilities.h"
 
@@ -50,44 +51,10 @@
     return size;
 }
 
-
 // TODO expose and call clamp16_from_float function in primitives.h
 static inline int16_t clamp16_from_float(float f) {
-    /* Offset is used to expand the valid range of [-1.0, 1.0) into the 16 lsbs of the
-     * floating point significand. The normal shift is 3<<22, but the -15 offset
-     * is used to multiply by 32768.
-     */
-    static const float offset = (float)(3 << (22 - 15));
-    /* zero = (0x10f << 22) =  0x43c00000 (not directly used) */
-    static const int32_t limneg = (0x10f << 22) /*zero*/ - 32768; /* 0x43bf8000 */
-    static const int32_t limpos = (0x10f << 22) /*zero*/ + 32767; /* 0x43c07fff */
-
-    union {
-        float f;
-        int32_t i;
-    } u;
-
-    u.f = f + offset; /* recenter valid range */
-    /* Now the valid range is represented as integers between [limneg, limpos].
-     * Clamp using the fact that float representation (as an integer) is an ordered set.
-     */
-    if (u.i < limneg)
-        u.i = -32768;
-    else if (u.i > limpos)
-        u.i = 32767;
-    return u.i; /* Return lower 16 bits, the part of interest in the significand. */
-}
-
-// Same but without clipping.
-// Convert -1.0f to +1.0f to -32768 to +32767
-static inline int16_t floatToInt16(float f) {
-    static const float offset = (float)(3 << (22 - 15));
-    union {
-        float f;
-        int32_t i;
-    } u;
-    u.f = f + offset; /* recenter valid range */
-    return u.i; /* Return lower 16 bits, the part of interest in the significand. */
+    static const float scale = 1 << 15;
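+    // e.g. 1.0f (and above) -> 32767, 0.5f -> 16384, -1.0f (and below) -> -32768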
+    return (int16_t) roundf(fmaxf(fminf(f * scale, scale - 1.f), -scale));
 }
 
 static float clipAndClampFloatToPcm16(float sample, float scaler) {
@@ -188,13 +155,14 @@
                        int32_t samplesPerFrame,
                        float amplitude1,
                        float amplitude2) {
-    float scaler = amplitude1 / SHORT_SCALE;
-    float delta = (amplitude2 - amplitude1) / (SHORT_SCALE * (float) numFrames);
+    // Because we are converting from int16 to int16, we do not have to scale by 1/32768.
+    float scaler = amplitude1;
+    float delta = (amplitude2 - amplitude1) / numFrames;
     for (int frameIndex = 0; frameIndex < numFrames; frameIndex++) {
         for (int sampleIndex = 0; sampleIndex < samplesPerFrame; sampleIndex++) {
             // No need to clip because int16_t range is inherently limited.
             float sample =  *source++ * scaler;
-            *destination++ =  floatToInt16(sample);
+            *destination++ = (int16_t) roundf(sample);
         }
         scaler += delta;
     }
diff --git a/media/libaaudio/src/utility/LinearRamp.h b/media/libaaudio/src/utility/LinearRamp.h
index ff09dce..2b1b8e0 100644
--- a/media/libaaudio/src/utility/LinearRamp.h
+++ b/media/libaaudio/src/utility/LinearRamp.h
@@ -87,7 +87,7 @@
 
     std::atomic<float>   mTarget;
 
-    int32_t mLengthInFrames  = 48000 / 50; // 20 msec at 48000 Hz
+    int32_t mLengthInFrames  = 48000 / 100; // 10 msec at 48000 Hz
     int32_t mRemaining       = 0;
     float   mLevelFrom       = 0.0f;
     float   mLevelTo         = 0.0f;
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 87a4273..9f80695 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -99,3 +99,28 @@
         "libutils",
     ],
 }
+
+cc_test {
+    name: "test_various",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_various.cpp"],
+    shared_libs: [
+        "libaaudio",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
+
+cc_test {
+    name: "test_aaudio_monkey",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_aaudio_monkey.cpp"],
+    header_libs: ["libaaudio_example_utils"],
+    shared_libs: [
+        "libaaudio",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
diff --git a/media/libaaudio/tests/test_aaudio_monkey.cpp b/media/libaaudio/tests/test_aaudio_monkey.cpp
new file mode 100644
index 0000000..be54835
--- /dev/null
+++ b/media/libaaudio/tests/test_aaudio_monkey.cpp
@@ -0,0 +1,307 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Try to trigger bugs by playing randomly on multiple streams.
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <vector>
+
+#include <aaudio/AAudio.h>
+#include "AAudioArgsParser.h"
+#include "AAudioExampleUtils.h"
+#include "AAudioSimplePlayer.h"
+#include "SineGenerator.h"
+
+#define DEFAULT_TIMEOUT_NANOS  (1 * NANOS_PER_SECOND)
+
+#define NUM_LOOPS          1000
+#define MAX_MICROS_DELAY   (2 * 1000 * 1000)
+
+// TODO Consider adding an input stream.
+#define PROB_START   (0.20)
+#define PROB_PAUSE   (PROB_START + 0.10)
+#define PROB_FLUSH   (PROB_PAUSE + 0.10)
+#define PROB_STOP    (PROB_FLUSH + 0.10)
+#define PROB_CLOSE   (PROB_STOP + 0.10)
+static_assert(PROB_CLOSE < 0.9, "Probability sum too high.");
+
+aaudio_data_callback_result_t AAudioMonkeyDataCallback(
+        AAudioStream *stream,
+        void *userData,
+        void *audioData,
+        int32_t numFrames);
+
+void AAudioMonkeyErrorCallbackProc(
+        AAudioStream *stream __unused,
+        void *userData __unused,
+        aaudio_result_t error) {
+    printf("Error Callback, error: %d\n",(int)error);
+}
+
+// This function is not thread safe. Only use this from a single thread.
+double nextRandomDouble() {
+    return drand48();
+}
+
+class AAudioMonkey : public AAudioSimplePlayer {
+public:
+
+    AAudioMonkey(int index, AAudioArgsParser *argParser)
+            : mArgParser(argParser)
+            , mIndex(index) {}
+
+    aaudio_result_t open() {
+        printf("Monkey # %d ---------------------------------------------- OPEN\n", mIndex);
+        double offset = mIndex * 50;
+        mSine1.setup(440.0, 48000);
+        mSine1.setSweep(300.0 + offset, 600.0 + offset, 5.0);
+        mSine2.setup(660.0, 48000);
+        mSine2.setSweep(350.0 + offset, 900.0 + offset, 7.0);
+
+        aaudio_result_t result = AAudioSimplePlayer::open(*mArgParser,
+                                      AAudioMonkeyDataCallback,
+                                      AAudioMonkeyErrorCallbackProc,
+                                      this);
+        if (result != AAUDIO_OK) {
+            printf("ERROR -  player.open() returned %d\n", result);
+        }
+
+        mArgParser->compareWithStream(getStream());
+        return result;
+    }
+
+    bool isOpen() {
+        return (getStream() != nullptr);
+    }
+
+    /**
+     *
+     * @return true if stream passes tests
+     */
+    bool validate() {
+        if (!isOpen()) return true; // closed is OK
+
+        // update and query stream state
+        aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+        aaudio_result_t result = AAudioStream_waitForStateChange(getStream(),
+            AAUDIO_STREAM_STATE_UNKNOWN, &state, 0);
+        if (result != AAUDIO_OK) {
+            printf("ERROR - AAudioStream_waitForStateChange returned %d\n", result);
+            return false;
+        }
+
+        int64_t framesRead = AAudioStream_getFramesRead(getStream());
+        int64_t framesWritten = AAudioStream_getFramesWritten(getStream());
+        int32_t xRuns = AAudioStream_getXRunCount(getStream());
+        // Print status
+        printf("%30s, framesWritten = %8lld, framesRead = %8lld, xRuns = %d\n",
+               AAudio_convertStreamStateToText(state),
+               (unsigned long long) framesWritten,
+               (unsigned long long) framesRead,
+               xRuns);
+
+        if (framesWritten < framesRead) {
+            printf("WARNING - UNDERFLOW - diff = %d !!!!!!!!!!!!\n",
+                   (int) (framesWritten - framesRead));
+        }
+        return true;
+    }
+
+    aaudio_result_t invoke() {
+        aaudio_result_t result = AAUDIO_OK;
+        if (!isOpen()) {
+            result = open();
+            if (result != AAUDIO_OK) return result;
+        }
+
+        if (!validate()) {
+            return -1;
+        }
+
+        double dice = nextRandomDouble();
+        // Select an action based on a weighted probability.
+        if (dice < PROB_START) {
+            printf("start\n");
+            result = AAudioStream_requestStart(getStream());
+        } else if (dice < PROB_PAUSE) {
+            printf("pause\n");
+            result = AAudioStream_requestPause(getStream());
+        } else if (dice < PROB_FLUSH) {
+            printf("flush\n");
+            result = AAudioStream_requestFlush(getStream());
+        } else if (dice < PROB_STOP) {
+            printf("stop\n");
+            result = AAudioStream_requestStop(getStream());
+        } else if (dice < PROB_CLOSE) {
+            printf("close\n");
+            result = close();
+        } else {
+            printf("do nothing\n");
+        }
+
+        if (result == AAUDIO_ERROR_INVALID_STATE) {
+            printf("    got AAUDIO_ERROR_INVALID_STATE - expected from a monkey\n");
+            result = AAUDIO_OK;
+        }
+        if (result == AAUDIO_OK && isOpen()) {
+            if (!validate()) {
+                result = -1;
+            }
+        }
+        return result;
+    }
+
+    aaudio_data_callback_result_t renderAudio(
+            AAudioStream *stream,
+            void *audioData,
+            int32_t numFrames) {
+
+        int32_t samplesPerFrame = AAudioStream_getChannelCount(stream);
+        // This code only plays on the first one or two channels.
+        // TODO Support arbitrary number of channels.
+        switch (AAudioStream_getFormat(stream)) {
+            case AAUDIO_FORMAT_PCM_I16: {
+                int16_t *audioBuffer = (int16_t *) audioData;
+                // Render sine waves as shorts to first channel.
+                mSine1.render(&audioBuffer[0], samplesPerFrame, numFrames);
+                // Render sine waves to second channel if there is one.
+                if (samplesPerFrame > 1) {
+                    mSine2.render(&audioBuffer[1], samplesPerFrame, numFrames);
+                }
+            }
+                break;
+            case AAUDIO_FORMAT_PCM_FLOAT: {
+                float *audioBuffer = (float *) audioData;
+                // Render sine waves as floats to first channel.
+                mSine1.render(&audioBuffer[0], samplesPerFrame, numFrames);
+                // Render sine waves to second channel if there is one.
+                if (samplesPerFrame > 1) {
+                    mSine2.render(&audioBuffer[1], samplesPerFrame, numFrames);
+                }
+            }
+                break;
+            default:
+                return AAUDIO_CALLBACK_RESULT_STOP;
+        }
+        return AAUDIO_CALLBACK_RESULT_CONTINUE;
+    }
+
+private:
+    const AAudioArgsParser  *mArgParser;
+    const int                mIndex;
+    SineGenerator            mSine1;
+    SineGenerator            mSine2;
+};
+
+// Callback function that fills the audio output buffer.
+aaudio_data_callback_result_t AAudioMonkeyDataCallback(
+        AAudioStream *stream,
+        void *userData,
+        void *audioData,
+        int32_t numFrames
+) {
+    // should not happen but just in case...
+    if (userData == nullptr) {
+        printf("ERROR - AAudioMonkeyDataCallback needs userData\n");
+        return AAUDIO_CALLBACK_RESULT_STOP;
+    }
+    AAudioMonkey *monkey = (AAudioMonkey *) userData;
+    return monkey->renderAudio(stream, audioData, numFrames);
+}
+
+
+static void usage() {
+    AAudioArgsParser::usage();
+    printf("      -i{seed}  Initial random seed\n");
+    printf("      -t{count} number of monkeys in the Troop\n");
+}
+
+int main(int argc, const char **argv) {
+    AAudioArgsParser argParser;
+    std::vector<AAudioMonkey> monkeys;
+    aaudio_result_t result;
+    int numMonkeys = 1;
+
+    // Make printf print immediately so that debug info is not stuck
+    // in a buffer if we hang or crash.
+    setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+    printf("%s - Monkeys\n", argv[0]);
+
+    long int seed = (long int)getNanoseconds();  // different every time by default
+
+    for (int i = 1; i < argc; i++) {
+        const char *arg = argv[i];
+        if (argParser.parseArg(arg)) {
+            // Handle options that are not handled by the ArgParser
+            if (arg[0] == '-') {
+                char option = arg[1];
+                switch (option) {
+                    case 'i':
+                        seed = atol(&arg[2]);
+                        break;
+                    case 't':
+                        numMonkeys = atoi(&arg[2]);
+                        break;
+                    default:
+                        usage();
+                        exit(EXIT_FAILURE);
+                        break;
+                }
+            } else {
+                usage();
+                exit(EXIT_FAILURE);
+                break;
+            }
+        }
+    }
+
+    srand48(seed);
+    printf("seed = %ld, nextRandomDouble() = %f\n", seed, nextRandomDouble());
+
+    for (int m = 0; m < numMonkeys; m++) {
+        monkeys.emplace_back(m, &argParser);
+    }
+
+    for (int i = 0; i < NUM_LOOPS; i++) {
+        // pick a random monkey and invoke it
+        double dice = nextRandomDouble();
+        int monkeyIndex = floor(dice * numMonkeys);
+        printf("----------- Monkey #%d\n", monkeyIndex);
+        result = monkeys[monkeyIndex].invoke();
+        if (result != AAUDIO_OK) {
+            goto error;
+        }
+
+        // sleep some random time
+        dice = nextRandomDouble();
+        dice = dice * dice * dice; // skew towards smaller delays
+        int micros = (int) (dice * MAX_MICROS_DELAY);
+        usleep(micros);
+
+        // TODO consider making this multi-threaded, one thread per monkey, to catch more bugs
+    }
+
+    printf("PASS\n");
+    return EXIT_SUCCESS;
+
+error:
+    printf("FAIL - AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
+    usleep(1000 * 1000); // give me time to stop the logcat
+    return EXIT_FAILURE;
+}
+
diff --git a/media/libaaudio/tests/test_linear_ramp.cpp b/media/libaaudio/tests/test_linear_ramp.cpp
index 5c53982..93226ba 100644
--- a/media/libaaudio/tests/test_linear_ramp.cpp
+++ b/media/libaaudio/tests/test_linear_ramp.cpp
@@ -15,13 +15,13 @@
  */
 
 #include <iostream>
+#include <math.h>
 
 #include <gtest/gtest.h>
 
 #include "utility/AAudioUtilities.h"
 #include "utility/LinearRamp.h"
 
-
 TEST(test_linear_ramp, linear_ramp_segments) {
     LinearRamp ramp;
     const float source[4] = {1.0f, 1.0f, 1.0f, 1.0f };
@@ -32,40 +32,40 @@
     ramp.setLengthInFrames(8);
     ramp.setTarget(8.0f);
 
-    ASSERT_EQ(8, ramp.getLengthInFrames());
+    EXPECT_EQ(8, ramp.getLengthInFrames());
 
     bool ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(1, ramping);
-    ASSERT_EQ(0.0f, levelFrom);
-    ASSERT_EQ(4.0f, levelTo);
+    EXPECT_EQ(1, ramping);
+    EXPECT_EQ(0.0f, levelFrom);
+    EXPECT_EQ(4.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(0.0f, destination[0]);
-    ASSERT_EQ(1.0f, destination[1]);
-    ASSERT_EQ(2.0f, destination[2]);
-    ASSERT_EQ(3.0f, destination[3]);
+    EXPECT_EQ(0.0f, destination[0]);
+    EXPECT_EQ(1.0f, destination[1]);
+    EXPECT_EQ(2.0f, destination[2]);
+    EXPECT_EQ(3.0f, destination[3]);
 
     ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(1, ramping);
-    ASSERT_EQ(4.0f, levelFrom);
-    ASSERT_EQ(8.0f, levelTo);
+    EXPECT_EQ(1, ramping);
+    EXPECT_EQ(4.0f, levelFrom);
+    EXPECT_EQ(8.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(4.0f, destination[0]);
-    ASSERT_EQ(5.0f, destination[1]);
-    ASSERT_EQ(6.0f, destination[2]);
-    ASSERT_EQ(7.0f, destination[3]);
+    EXPECT_EQ(4.0f, destination[0]);
+    EXPECT_EQ(5.0f, destination[1]);
+    EXPECT_EQ(6.0f, destination[2]);
+    EXPECT_EQ(7.0f, destination[3]);
 
     ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(0, ramping);
-    ASSERT_EQ(8.0f, levelFrom);
-    ASSERT_EQ(8.0f, levelTo);
+    EXPECT_EQ(0, ramping);
+    EXPECT_EQ(8.0f, levelFrom);
+    EXPECT_EQ(8.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(8.0f, destination[0]);
-    ASSERT_EQ(8.0f, destination[1]);
-    ASSERT_EQ(8.0f, destination[2]);
-    ASSERT_EQ(8.0f, destination[3]);
+    EXPECT_EQ(8.0f, destination[0]);
+    EXPECT_EQ(8.0f, destination[1]);
+    EXPECT_EQ(8.0f, destination[2]);
+    EXPECT_EQ(8.0f, destination[3]);
 
 };
 
@@ -80,29 +80,101 @@
     ramp.setLengthInFrames(4);
     ramp.setTarget(8.0f);
     ramp.forceCurrent(4.0f);
-    ASSERT_EQ(4.0f, ramp.getCurrent());
+    EXPECT_EQ(4.0f, ramp.getCurrent());
 
     bool ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(1, ramping);
-    ASSERT_EQ(4.0f, levelFrom);
-    ASSERT_EQ(8.0f, levelTo);
+    EXPECT_EQ(1, ramping);
+    EXPECT_EQ(4.0f, levelFrom);
+    EXPECT_EQ(8.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(4.0f, destination[0]);
-    ASSERT_EQ(5.0f, destination[1]);
-    ASSERT_EQ(6.0f, destination[2]);
-    ASSERT_EQ(7.0f, destination[3]);
+    EXPECT_EQ(4.0f, destination[0]);
+    EXPECT_EQ(5.0f, destination[1]);
+    EXPECT_EQ(6.0f, destination[2]);
+    EXPECT_EQ(7.0f, destination[3]);
 
     ramping = ramp.nextSegment(4, &levelFrom, &levelTo);
-    ASSERT_EQ(0, ramping);
-    ASSERT_EQ(8.0f, levelFrom);
-    ASSERT_EQ(8.0f, levelTo);
+    EXPECT_EQ(0, ramping);
+    EXPECT_EQ(8.0f, levelFrom);
+    EXPECT_EQ(8.0f, levelTo);
 
     AAudio_linearRamp(source, destination, 4, 1, levelFrom, levelTo);
-    ASSERT_EQ(8.0f, destination[0]);
-    ASSERT_EQ(8.0f, destination[1]);
-    ASSERT_EQ(8.0f, destination[2]);
-    ASSERT_EQ(8.0f, destination[3]);
+    EXPECT_EQ(8.0f, destination[0]);
+    EXPECT_EQ(8.0f, destination[1]);
+    EXPECT_EQ(8.0f, destination[2]);
+    EXPECT_EQ(8.0f, destination[3]);
 
 };
 
+constexpr int16_t kMaxI16 = INT16_MAX;
+constexpr int16_t kMinI16 = INT16_MIN;
+constexpr int16_t kHalfI16 = 16384;
+constexpr int16_t kTenthI16 = 3277;
+
+//void AAudioConvert_floatToPcm16(const float *source,
+//                                int16_t *destination,
+//                                int32_t numSamples,
+//                                float amplitude);
+TEST(test_linear_ramp, float_to_i16) {
+    const float source[] = {12345.6f, 1.0f, 0.5f, 0.1f, 0.0f, -0.1f, -0.5f, -1.0f, -12345.6f};
+    constexpr size_t count = sizeof(source) / sizeof(source[0]);
+    int16_t destination[count];
+    const int16_t expected[count] = {kMaxI16, kMaxI16, kHalfI16, kTenthI16, 0,
+                                     -kTenthI16, -kHalfI16, kMinI16, kMinI16};
+
+    AAudioConvert_floatToPcm16(source, destination, count, 1.0f);
+    for (size_t i = 0; i < count; i++) {
+        EXPECT_EQ(expected[i], destination[i]);
+    }
+
+}
+
+//void AAudioConvert_pcm16ToFloat(const int16_t *source,
+//                                float *destination,
+//                                int32_t numSamples,
+//                                float amplitude);
+TEST(test_linear_ramp, i16_to_float) {
+    const int16_t source[] = {kMaxI16, kHalfI16, kTenthI16, 0,
+                              -kTenthI16, -kHalfI16, kMinI16};
+    constexpr size_t count = sizeof(source) / sizeof(source[0]);
+    float destination[count];
+    const float expected[count] = {(32767.0f / 32768.0f), 0.5f, 0.1f, 0.0f, -0.1f, -0.5f, -1.0f};
+
+    AAudioConvert_pcm16ToFloat(source, destination, count, 1.0f);
+    for (size_t i = 0; i < count; i++) {
+        EXPECT_NEAR(expected[i], destination[i], 0.0001f);
+    }
+
+}
+
+//void AAudio_linearRamp(const int16_t *source,
+//                       int16_t *destination,
+//                       int32_t numFrames,
+//                       int32_t samplesPerFrame,
+//                       float amplitude1,
+//                       float amplitude2);
+TEST(test_linear_ramp, ramp_i16_to_i16) {
+    const int16_t source[] = {1, 1, 1, 1, 1, 1, 1, 1};
+    constexpr size_t count = sizeof(source) / sizeof(source[0]);
+    int16_t destination[count];
+    // Ramp will sweep from -1 to almost +1
+    const int16_t expected[count] = {
+            -1, // from -1.00
+            -1, // from -0.75
+            -1, // from -0.50, round away from zero
+            0,  // from -0.25, round up to zero
+            0,  // from  0.00
+            0,  // from  0.25, round down to zero
+            1,  // from  0.50, round away from zero
+            1   // from  0.75
+    };
+
+    // sweep across zero to test symmetry
+    constexpr float amplitude1 = -1.0;
+    constexpr float amplitude2 = 1.0;
+    AAudio_linearRamp(source, destination, count, 1, amplitude1, amplitude2);
+    for (size_t i = 0; i < count; i++) {
+        EXPECT_EQ(expected[i], destination[i]);
+    }
+
+}
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
new file mode 100644
index 0000000..de386da
--- /dev/null
+++ b/media/libaaudio/tests/test_various.cpp
@@ -0,0 +1,172 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Test various AAudio features including AAudioStream_setBufferSizeInFrames().
+
+#include <stdio.h>
+//#include <stdlib.h>
+//#include <math.h>
+
+#include <android-base/macros.h>
+#include <aaudio/AAudio.h>
+
+#include <gtest/gtest.h>
+
+// Callback function that does nothing.
+aaudio_data_callback_result_t MyDataCallbackProc(
+        AAudioStream *stream,
+        void *userData,
+        void *audioData,
+        int32_t numFrames
+) {
+    (void) stream;
+    (void) userData;
+    (void) audioData;
+    (void) numFrames;
+    return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+// Test AAudioStream_setBufferSizeInFrames()
+
+constexpr int64_t NANOS_PER_MILLISECOND = 1000 * 1000;
+
+//int foo() { // To fix Android Studio formatting when editing.
+TEST(test_various, aaudio_stop_when_open) {
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+// Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+// Request stream properties.
+    AAudioStreamBuilder_setDataCallback(aaudioBuilder, MyDataCallbackProc, nullptr);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+// Create an AAudioStream using the Builder.
+    EXPECT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+
+
+    aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+                                                         AAUDIO_STREAM_STATE_UNKNOWN, &state,
+                                                         1000 * NANOS_PER_MILLISECOND));
+    EXPECT_EQ(AAUDIO_STREAM_STATE_OPEN, state);
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
+
+    state = AAUDIO_STREAM_STATE_UNKNOWN;
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+                                                         AAUDIO_STREAM_STATE_UNKNOWN, &state, 0));
+    EXPECT_EQ(AAUDIO_STREAM_STATE_OPEN, state);
+
+    AAudioStream_close(aaudioStream);
+    AAudioStreamBuilder_delete(aaudioBuilder);
+}
+
+//int boo() { // To fix Android Studio formatting when editing.
+TEST(test_various, aaudio_flush_when_started) {
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+    // Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+    // Request stream properties.
+    AAudioStreamBuilder_setDataCallback(aaudioBuilder, MyDataCallbackProc, nullptr);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+    // Create an AAudioStream using the Builder.
+    EXPECT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
+
+    aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+                                                         AAUDIO_STREAM_STATE_STARTING, &state,
+                                                         1000 * NANOS_PER_MILLISECOND));
+    EXPECT_EQ(AAUDIO_STREAM_STATE_STARTED, state);
+
+    EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestFlush(aaudioStream));
+
+    state = AAUDIO_STREAM_STATE_UNKNOWN;
+    EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+                                                         AAUDIO_STREAM_STATE_UNKNOWN, &state, 0));
+    EXPECT_EQ(AAUDIO_STREAM_STATE_STARTED, state);
+
+    AAudioStream_close(aaudioStream);
+    AAudioStreamBuilder_delete(aaudioBuilder);
+}
+
+//int main() { // To fix Android Studio formatting when editing.
+TEST(test_various, aaudio_set_buffer_size) {
+    int32_t bufferCapacity;
+    int32_t framesPerBurst = 0;
+    int32_t actualSize = 0;
+
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+    // Use an AAudioStreamBuilder to contain requested parameters.
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+    // Request stream properties.
+    AAudioStreamBuilder_setDataCallback(aaudioBuilder, MyDataCallbackProc, nullptr);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+    // Create an AAudioStream using the Builder.
+    EXPECT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+
+    // This is the number of frames that are read in one chunk by a DMA controller
+    // or a DSP or a mixer.
+    framesPerBurst = AAudioStream_getFramesPerBurst(aaudioStream);
+    bufferCapacity = AAudioStream_getBufferCapacityInFrames(aaudioStream);
+    printf("          bufferCapacity = %d, remainder = %d\n",
+           bufferCapacity, bufferCapacity % framesPerBurst);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 0);
+    EXPECT_GT(actualSize, 0);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 2 * framesPerBurst);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, bufferCapacity - 1);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, bufferCapacity);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, bufferCapacity + 1);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 1234567);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, INT32_MAX);
+    EXPECT_GT(actualSize, framesPerBurst);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, INT32_MIN);
+    EXPECT_GT(actualSize, 0);
+    EXPECT_LE(actualSize, bufferCapacity);
+
+    AAudioStream_close(aaudioStream);
+    AAudioStreamBuilder_delete(aaudioBuilder);
+}
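The aaudio_set_buffer_size test above checks the clamping contract of AAudioStream_setBufferSizeInFrames(): whatever is requested (0, a burst multiple, capacity + 1, INT32_MAX, INT32_MIN), the returned effective size stays positive and never exceeds the buffer capacity. A minimal usage sketch of that contract from an application's point of view, using only the AAudio calls that appear in the test (error handling elided):

    #include <stdio.h>
    #include <aaudio/AAudio.h>

    // Request a double-buffered size and accept whatever the stream clamps it to.
    static void configureDoubleBuffering(AAudioStream *stream) {
        const int32_t burst    = AAudioStream_getFramesPerBurst(stream);
        const int32_t capacity = AAudioStream_getBufferCapacityInFrames(stream);
        // The return value is the size actually in effect, clamped by the implementation.
        const int32_t actual   = AAudioStream_setBufferSizeInFrames(stream, 2 * burst);
        printf("requested %d frames, got %d (capacity %d)\n", 2 * burst, actual, capacity);
    }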
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 98e8d95..bedde43 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -20,7 +20,7 @@
         // The headers for these interfaces will be available to any modules that
         // include libaudioclient, at the path "aidl/package/path/BnFoo.h"
         "aidl/android/media/IAudioRecord.aidl",
-        "aidl/android/media/IPlayer.aidl",
+        ":libaudioclient_aidl",
 
         "AudioEffect.cpp",
         "AudioPolicy.cpp",
@@ -70,3 +70,11 @@
         ],
     },
 }
+
+// AIDL interface between libaudioclient and framework.jar
+filegroup {
+    name: "libaudioclient_aidl",
+    srcs: [
+        "aidl/android/media/IPlayer.aidl",
+    ],
+}
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 26a320c..741d084 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -69,8 +69,7 @@
     : mActive(false), mStatus(NO_INIT), mOpPackageName(opPackageName),
       mSessionId(AUDIO_SESSION_ALLOCATE),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE), mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE), mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE)
 {
 }
 
@@ -89,20 +88,19 @@
         audio_input_flags_t flags,
         uid_t uid,
         pid_t pid,
-        const audio_attributes_t* pAttributes)
+        const audio_attributes_t* pAttributes,
+        audio_port_handle_t selectedDeviceId)
     : mActive(false),
       mStatus(NO_INIT),
       mOpPackageName(opPackageName),
       mSessionId(AUDIO_SESSION_ALLOCATE),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
-      mProxy(NULL),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mProxy(NULL)
 {
-    mStatus = set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
+    (void)set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
             notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags,
-            uid, pid, pAttributes);
+            uid, pid, pAttributes, selectedDeviceId);
 }
 
 AudioRecord::~AudioRecord()
@@ -148,14 +146,22 @@
         audio_input_flags_t flags,
         uid_t uid,
         pid_t pid,
-        const audio_attributes_t* pAttributes)
+        const audio_attributes_t* pAttributes,
+        audio_port_handle_t selectedDeviceId)
 {
+    status_t status = NO_ERROR;
+    uint32_t channelCount;
+    pid_t callingPid;
+    pid_t myPid;
+
     ALOGV("set(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "notificationFrames %u, sessionId %d, transferType %d, flags %#x, opPackageName %s "
           "uid %d, pid %d",
           inputSource, sampleRate, format, channelMask, frameCount, notificationFrames,
           sessionId, transferType, flags, String8(mOpPackageName).string(), uid, pid);
 
+    mSelectedDeviceId = selectedDeviceId;
+
     switch (transferType) {
     case TRANSFER_DEFAULT:
         if (cbf == NULL || threadCanCallJava) {
@@ -167,7 +173,8 @@
     case TRANSFER_CALLBACK:
         if (cbf == NULL) {
             ALOGE("Transfer type TRANSFER_CALLBACK but cbf == NULL");
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         break;
     case TRANSFER_OBTAIN:
@@ -175,14 +182,16 @@
         break;
     default:
         ALOGE("Invalid transfer type %d", transferType);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mTransfer = transferType;
 
     // invariant that mAudioRecord != 0 is true only after set() returns successfully
     if (mAudioRecord != 0) {
         ALOGE("Track already in use");
-        return INVALID_OPERATION;
+        status = INVALID_OPERATION;
+        goto exit;
     }
 
     if (pAttributes == NULL) {
@@ -206,16 +215,18 @@
     // AudioFlinger capture only supports linear PCM
     if (!audio_is_valid_format(format) || !audio_is_linear_pcm(format)) {
         ALOGE("Format %#x is not linear pcm", format);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mFormat = format;
 
     if (!audio_is_input_channel(channelMask)) {
         ALOGE("Invalid channel mask %#x", channelMask);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mChannelMask = channelMask;
-    uint32_t channelCount = audio_channel_count_from_in_mask(channelMask);
+    channelCount = audio_channel_count_from_in_mask(channelMask);
     mChannelCount = channelCount;
 
     if (audio_is_linear_pcm(format)) {
@@ -224,28 +235,24 @@
         mFrameSize = sizeof(uint8_t);
     }
 
-    // mFrameCount is initialized in openRecord_l
+    // mFrameCount is initialized in createRecord_l
     mReqFrameCount = frameCount;
 
     mNotificationFramesReq = notificationFrames;
-    // mNotificationFramesAct is initialized in openRecord_l
+    // mNotificationFramesAct is initialized in createRecord_l
 
-    if (sessionId == AUDIO_SESSION_ALLOCATE) {
-        mSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
-    } else {
-        mSessionId = sessionId;
-    }
+    mSessionId = sessionId;
     ALOGV("set(): mSessionId %d", mSessionId);
 
-    int callingpid = IPCThreadState::self()->getCallingPid();
-    int mypid = getpid();
-    if (uid == AUDIO_UID_INVALID || (callingpid != mypid)) {
+    callingPid = IPCThreadState::self()->getCallingPid();
+    myPid = getpid();
+    if (uid == AUDIO_UID_INVALID || (callingPid != myPid)) {
         mClientUid = IPCThreadState::self()->getCallingUid();
     } else {
         mClientUid = uid;
     }
-    if (pid == -1 || (callingpid != mypid)) {
-        mClientPid = callingpid;
+    if (pid == -1 || (callingPid != myPid)) {
+        mClientPid = callingPid;
     } else {
         mClientPid = pid;
     }
@@ -260,7 +267,7 @@
     }
 
     // create the IAudioRecord
-    status_t status = openRecord_l(0 /*epoch*/, mOpPackageName);
+    status = createRecord_l(0 /*epoch*/, mOpPackageName);
 
     if (status != NO_ERROR) {
         if (mAudioRecordThread != 0) {
@@ -268,10 +275,9 @@
             mAudioRecordThread->requestExitAndWait();
             mAudioRecordThread.clear();
         }
-        return status;
+        goto exit;
     }
 
-    mStatus = NO_ERROR;
     mUserData = user;
     // TODO: add audio hardware input latency here
     mLatency = (1000LL * mFrameCount) / mSampleRate;
@@ -286,7 +292,9 @@
     mFramesRead = 0;
     mFramesReadServerOffset = 0;
 
-    return NO_ERROR;
+exit:
+    mStatus = status;
+    return status;
 }
 
 // -------------------------------------------------------------------------
@@ -489,6 +497,7 @@
                 mAudioRecord->stop();
             }
             android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+            mProxy->interrupt();
         }
     }
     return NO_ERROR;
@@ -536,70 +545,29 @@
 }
 
 // must be called with mLock held
-status_t AudioRecord::openRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName)
+status_t AudioRecord::createRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName)
 {
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
+    IAudioFlinger::CreateRecordInput input;
+    IAudioFlinger::CreateRecordOutput output;
+    audio_session_t originalSessionId;
+    sp<media::IAudioRecord> record;
+    void *iMemPointer;
+    audio_track_cblk_t* cblk;
+    status_t status;
+
     if (audioFlinger == 0) {
         ALOGE("Could not get audioflinger");
-        return NO_INIT;
+        status = NO_INIT;
+        goto exit;
     }
 
-    audio_io_handle_t input;
-
     // mFlags (not mOrigFlags) is modified depending on whether fast request is accepted.
     // After fast request is denied, we will request again if IAudioRecord is re-created.
 
-    status_t status;
-
-    // Not a conventional loop, but a retry loop for at most two iterations total.
-    // Try first maybe with FAST flag then try again without FAST flag if that fails.
-    // Exits loop normally via a return at the bottom, or with error via a break.
-    // The sp<> references will be dropped when re-entering scope.
-    // The lack of indentation is deliberate, to reduce code churn and ease merges.
-    for (;;) {
-    audio_config_base_t config  = {
-            .sample_rate = mSampleRate,
-            .channel_mask = mChannelMask,
-            .format = mFormat
-        };
-    mRoutedDeviceId = mSelectedDeviceId;
-    status = AudioSystem::getInputForAttr(&mAttributes, &input,
-                                        mSessionId,
-                                        // FIXME compare to AudioTrack
-                                        mClientPid,
-                                        mClientUid,
-                                        &config,
-                                        mFlags, &mRoutedDeviceId, &mPortId);
-
-    if (status != NO_ERROR || input == AUDIO_IO_HANDLE_NONE) {
-        ALOGE("Could not get audio input for session %d, record source %d, sample rate %u, "
-              "format %#x, channel mask %#x, flags %#x",
-              mSessionId, mAttributes.source, mSampleRate, mFormat, mChannelMask, mFlags);
-        return BAD_VALUE;
-    }
-
     // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
     // we must release it ourselves if anything goes wrong.
 
-#if 0
-    size_t afFrameCount;
-    status = AudioSystem::getFrameCount(input, &afFrameCount);
-    if (status != NO_ERROR) {
-        ALOGE("getFrameCount(input=%d) status %d", input, status);
-        break;
-    }
-#endif
-
-    uint32_t afSampleRate;
-    status = AudioSystem::getSamplingRate(input, &afSampleRate);
-    if (status != NO_ERROR) {
-        ALOGE("getSamplingRate(input=%d) status %d", input, status);
-        break;
-    }
-    if (mSampleRate == 0) {
-        mSampleRate = afSampleRate;
-    }
-
     // Client can only express a preference for FAST.  Server will perform additional tests.
     if (mFlags & AUDIO_INPUT_FLAG_FAST) {
         bool useCaseAllowed =
@@ -618,66 +586,41 @@
         if (!useCaseAllowed) {
             ALOGW("AUDIO_INPUT_FLAG_FAST denied, incompatible transfer = %s",
                   convertTransferToText(mTransfer));
-        }
-
-        // sample rates must also match
-        bool sampleRateAllowed = mSampleRate == afSampleRate;
-        if (!sampleRateAllowed) {
-            ALOGW("AUDIO_INPUT_FLAG_FAST denied, rates do not match %u Hz, require %u Hz",
-                  mSampleRate, afSampleRate);
-        }
-
-        bool fastAllowed = useCaseAllowed && sampleRateAllowed;
-        if (!fastAllowed) {
             mFlags = (audio_input_flags_t) (mFlags & ~(AUDIO_INPUT_FLAG_FAST |
                     AUDIO_INPUT_FLAG_RAW));
-            AudioSystem::releaseInput(input, mSessionId);
-            continue;   // retry
         }
     }
 
-    // The notification frame count is the period between callbacks, as suggested by the client
-    // but moderated by the server.  For record, the calculations are done entirely on server side.
-    size_t notificationFrames = mNotificationFramesReq;
-    size_t frameCount = mReqFrameCount;
-
-    audio_input_flags_t flags = mFlags;
-
-    pid_t tid = -1;
+    input.attr = mAttributes;
+    input.config.sample_rate = mSampleRate;
+    input.config.channel_mask = mChannelMask;
+    input.config.format = mFormat;
+    input.clientInfo.clientUid = mClientUid;
+    input.clientInfo.clientPid = mClientPid;
+    input.clientInfo.clientTid = -1;
     if (mFlags & AUDIO_INPUT_FLAG_FAST) {
         if (mAudioRecordThread != 0) {
-            tid = mAudioRecordThread->getTid();
+            input.clientInfo.clientTid = mAudioRecordThread->getTid();
         }
     }
+    input.opPackageName = opPackageName;
 
-    size_t temp = frameCount;   // temp may be replaced by a revised value of frameCount,
-                                // but we will still need the original value also
-    audio_session_t originalSessionId = mSessionId;
+    input.flags = mFlags;
+    // The notification frame count is the period between callbacks, as suggested by the client
+    // but moderated by the server.  For record, the calculations are done entirely on server side.
+    input.frameCount = mReqFrameCount;
+    input.notificationFrameCount = mNotificationFramesReq;
+    input.selectedDeviceId = mSelectedDeviceId;
+    input.sessionId = mSessionId;
+    originalSessionId = mSessionId;
 
-    sp<IMemory> iMem;           // for cblk
-    sp<IMemory> bufferMem;
-    sp<media::IAudioRecord> record = audioFlinger->openRecord(input,
-                                                              mSampleRate,
-                                                              mFormat,
-                                                              mChannelMask,
-                                                              opPackageName,
-                                                              &temp,
-                                                              &flags,
-                                                              mClientPid,
-                                                              tid,
-                                                              mClientUid,
-                                                              &mSessionId,
-                                                              &notificationFrames,
-                                                              iMem,
-                                                              bufferMem,
-                                                              &status,
-                                                              mPortId);
-    ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId,
-            "session ID changed from %d to %d", originalSessionId, mSessionId);
+    record = audioFlinger->createRecord(input,
+                                        output,
+                                        &status);
 
     if (status != NO_ERROR) {
         ALOGE("AudioFlinger could not create record track, status: %d", status);
-        break;
+        goto exit;
     }
     ALOG_ASSERT(record != 0);
 
@@ -685,41 +628,41 @@
     // so we are no longer responsible for releasing it.
 
     mAwaitBoost = false;
-    if (mFlags & AUDIO_INPUT_FLAG_FAST) {
-        if (flags & AUDIO_INPUT_FLAG_FAST) {
-            ALOGI("AUDIO_INPUT_FLAG_FAST successful; frameCount %zu -> %zu", frameCount, temp);
-            mAwaitBoost = true;
-        } else {
-            ALOGW("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %zu -> %zu", frameCount, temp);
-            mFlags = (audio_input_flags_t) (mFlags & ~(AUDIO_INPUT_FLAG_FAST |
-                    AUDIO_INPUT_FLAG_RAW));
-            continue;   // retry
-        }
+    if (output.flags & AUDIO_INPUT_FLAG_FAST) {
+        ALOGI("AUDIO_INPUT_FLAG_FAST successful; frameCount %zu -> %zu",
+              mReqFrameCount, output.frameCount);
+        mAwaitBoost = true;
     }
-    mFlags = flags;
+    mFlags = output.flags;
+    mRoutedDeviceId = output.selectedDeviceId;
+    mSessionId = output.sessionId;
+    mSampleRate = output.sampleRate;
 
-    if (iMem == 0) {
+    if (output.cblk == 0) {
         ALOGE("Could not get control block");
-        return NO_INIT;
+        status = NO_INIT;
+        goto exit;
     }
-    void *iMemPointer = iMem->pointer();
+    iMemPointer = output.cblk->pointer();
     if (iMemPointer == NULL) {
         ALOGE("Could not get control block pointer");
-        return NO_INIT;
+        status = NO_INIT;
+        goto exit;
     }
-    audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
+    cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
 
     // Starting address of buffers in shared memory.
     // The buffers are either immediately after the control block,
     // or in a separate area at discretion of server.
     void *buffers;
-    if (bufferMem == 0) {
+    if (output.buffers == 0) {
         buffers = cblk + 1;
     } else {
-        buffers = bufferMem->pointer();
+        buffers = output.buffers->pointer();
         if (buffers == NULL) {
             ALOGE("Could not get buffer pointer");
-            return NO_INIT;
+            status = NO_INIT;
+            goto exit;
         }
     }
 
@@ -729,43 +672,42 @@
         mDeathNotifier.clear();
     }
     mAudioRecord = record;
-    mCblkMemory = iMem;
-    mBufferMemory = bufferMem;
+    mCblkMemory = output.cblk;
+    mBufferMemory = output.buffers;
     IPCThreadState::self()->flushCommands();
 
     mCblk = cblk;
-    // note that temp is the (possibly revised) value of frameCount
-    if (temp < frameCount || (frameCount == 0 && temp == 0)) {
-        ALOGW("Requested frameCount %zu but received frameCount %zu", frameCount, temp);
+    // note that output.frameCount is the (possibly revised) value of mReqFrameCount
+    if (output.frameCount < mReqFrameCount || (mReqFrameCount == 0 && output.frameCount == 0)) {
+        ALOGW("Requested frameCount %zu but received frameCount %zu",
+              mReqFrameCount, output.frameCount);
     }
-    frameCount = temp;
 
     // Make sure that application is notified with sufficient margin before overrun.
     // The computation is done on server side.
-    if (mNotificationFramesReq > 0 && notificationFrames != mNotificationFramesReq) {
+    if (mNotificationFramesReq > 0 && output.notificationFrameCount != mNotificationFramesReq) {
         ALOGW("Server adjusted notificationFrames from %u to %zu for frameCount %zu",
-                mNotificationFramesReq, notificationFrames, frameCount);
+                mNotificationFramesReq, output.notificationFrameCount, output.frameCount);
     }
-    mNotificationFramesAct = (uint32_t) notificationFrames;
-
+    mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
 
     //mInput != input includes the case where mInput == AUDIO_IO_HANDLE_NONE for first creation
-    if (mDeviceCallback != 0 && mInput != input) {
+    if (mDeviceCallback != 0 && mInput != output.inputId) {
         if (mInput != AUDIO_IO_HANDLE_NONE) {
             AudioSystem::removeAudioDeviceCallback(this, mInput);
         }
-        AudioSystem::addAudioDeviceCallback(this, input);
+        AudioSystem::addAudioDeviceCallback(this, output.inputId);
     }
 
     // We retain a copy of the I/O handle, but don't own the reference
-    mInput = input;
+    mInput = output.inputId;
     mRefreshRemaining = true;
 
-    mFrameCount = frameCount;
+    mFrameCount = output.frameCount;
     // If IAudioRecord is re-created, don't let the requested frameCount
     // decrease.  This can confuse clients that cache frameCount().
-    if (frameCount > mReqFrameCount) {
-        mReqFrameCount = frameCount;
+    if (mFrameCount > mReqFrameCount) {
+        mReqFrameCount = mFrameCount;
     }
 
     // update proxy
@@ -776,17 +718,9 @@
     mDeathNotifier = new DeathNotifier(this);
     IInterface::asBinder(mAudioRecord)->linkToDeath(mDeathNotifier, this);
 
-    return NO_ERROR;
-
-    // End of retry loop.
-    // The lack of indentation is deliberate, to reduce code churn and ease merges.
-    }
-
-// Arrive here on error, via a break
-    AudioSystem::releaseInput(input, mSessionId);
-    if (status == NO_ERROR) {
-        status = NO_INIT;
-    }
+exit:
+    mStatus = status;
+    // sp<media::IAudioRecord> record destructor will cause releaseInput() to be called by AudioFlinger
     return status;
 }
 
@@ -1218,12 +1152,12 @@
 
     mFlags = mOrigFlags;
 
-    // if the new IAudioRecord is created, openRecord_l() will modify the
+    // if the new IAudioRecord is created, createRecord_l() will modify the
     // following member variables: mAudioRecord, mCblkMemory, mCblk, mBufferMemory.
     // It will also delete the strong references on previous IAudioRecord and IMemory
     Modulo<uint32_t> position(mProxy->getPosition());
     mNewPosition = position + mUpdatePeriod;
-    status_t result = openRecord_l(position, mOpPackageName);
+    status_t result = createRecord_l(position, mOpPackageName);
     if (result == NO_ERROR) {
         if (mActive) {
             // callback thread or sync event hasn't changed
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 58330ae..c284f73 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
+#include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
 #include <media/IAudioFlinger.h>
 #include <media/IAudioPolicyService.h>
@@ -253,6 +254,31 @@
     return volume ? 100 - int(dBConvertInverse * log(volume) + 0.5) : 0;
 }
 
+/* static */ size_t AudioSystem::calculateMinFrameCount(
+        uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate,
+        uint32_t sampleRate, float speed /*, uint32_t notificationsPerBufferReq*/)
+{
+    // Ensure that buffer depth covers at least audio hardware latency
+    uint32_t minBufCount = afLatencyMs / ((1000 * afFrameCount) / afSampleRate);
+    if (minBufCount < 2) {
+        minBufCount = 2;
+    }
+#if 0
+    // The notificationsPerBufferReq parameter is not yet used for non-fast tracks,
+    // but keeping the code here to make it easier to add later.
+    if (minBufCount < notificationsPerBufferReq) {
+        minBufCount = notificationsPerBufferReq;
+    }
+#endif
+    ALOGV("calculateMinFrameCount afLatency %u  afFrameCount %u  afSampleRate %u  "
+            "sampleRate %u  speed %f  minBufCount: %u" /*"  notificationsPerBufferReq %u"*/,
+            afLatencyMs, afFrameCount, afSampleRate, sampleRate, speed, minBufCount
+            /*, notificationsPerBufferReq*/);
+    return minBufCount * sourceFramesNeededWithTimestretch(
+            sampleRate, afFrameCount, afSampleRate, speed);
+}
+
+
 status_t AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream_type_t streamType)
 {
     audio_io_handle_t output;
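calculateMinFrameCount() now lives in AudioSystem so the playback and capture paths can share it. A hedged worked example with hypothetical HAL values (none of these numbers come from the patch): afLatencyMs = 50, afFrameCount = 960, afSampleRate = 48000 Hz, client sampleRate = 44100 Hz, speed = 1.0. The burst duration is 1000 * 960 / 48000 = 20 ms, so minBufCount = 50 / 20 = 2 (already at the floor of 2); one HAL burst needs roughly 960 * 44100 / 48000 = 882 source-rate frames (the timestretch helper may add a small resampler margin), giving a minimum of about 2 * 882 = 1764 frames.

    // Same numbers expressed as a call to the new shared helper:
    size_t minFrames = AudioSystem::calculateMinFrameCount(
            50 /*afLatencyMs*/, 960 /*afFrameCount*/, 48000 /*afSampleRate*/,
            44100 /*sampleRate*/, 1.0f /*speed*/);   // ~1764 frames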
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 356b321..c8fa618 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -99,32 +99,6 @@
     return kFixPitch ? AUDIO_TIMESTRETCH_PITCH_NORMAL : pitch;
 }
 
-// Must match similar computation in createTrack_l in Threads.cpp.
-// TODO: Move to a common library
-static size_t calculateMinFrameCount(
-        uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate,
-        uint32_t sampleRate, float speed /*, uint32_t notificationsPerBufferReq*/)
-{
-    // Ensure that buffer depth covers at least audio hardware latency
-    uint32_t minBufCount = afLatencyMs / ((1000 * afFrameCount) / afSampleRate);
-    if (minBufCount < 2) {
-        minBufCount = 2;
-    }
-#if 0
-    // The notificationsPerBufferReq parameter is not yet used for non-fast tracks,
-    // but keeping the code here to make it easier to add later.
-    if (minBufCount < notificationsPerBufferReq) {
-        minBufCount = notificationsPerBufferReq;
-    }
-#endif
-    ALOGV("calculateMinFrameCount afLatency %u  afFrameCount %u  afSampleRate %u  "
-            "sampleRate %u  speed %f  minBufCount: %u" /*"  notificationsPerBufferReq %u"*/,
-            afLatencyMs, afFrameCount, afSampleRate, sampleRate, speed, minBufCount
-            /*, notificationsPerBufferReq*/);
-    return minBufCount * sourceFramesNeededWithTimestretch(
-            sampleRate, afFrameCount, afSampleRate, speed);
-}
-
 // static
 status_t AudioTrack::getMinFrameCount(
         size_t* frameCount,
@@ -165,8 +139,8 @@
 
     // When called from createTrack, speed is 1.0f (normal speed).
     // This is rechecked again on setting playback rate (TODO: on setting sample rate, too).
-    *frameCount = calculateMinFrameCount(afLatency, afFrameCount, afSampleRate, sampleRate, 1.0f
-            /*, 0 notificationsPerBufferReq*/);
+    *frameCount = AudioSystem::calculateMinFrameCount(afLatency, afFrameCount, afSampleRate,
+                                              sampleRate, 1.0f /*, 0 notificationsPerBufferReq*/);
 
     // The formula above should always produce a non-zero value under normal circumstances:
     // AudioTrack.SAMPLE_RATE_HZ_MIN <= sampleRate <= AudioTrack.SAMPLE_RATE_HZ_MAX.
@@ -190,8 +164,7 @@
       mPreviousSchedulingGroup(SP_DEFAULT),
       mPausedPosition(0),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE)
 {
     mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
     mAttributes.usage = AUDIO_USAGE_UNKNOWN;
@@ -222,10 +195,9 @@
       mState(STATE_STOPPED),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mPausedPosition(0)
 {
-    mStatus = set(streamType, sampleRate, format, channelMask,
+    (void)set(streamType, sampleRate, format, channelMask,
             frameCount, flags, cbf, user, notificationFrames,
             0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType,
             offloadInfo, uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
@@ -254,10 +226,9 @@
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
       mPausedPosition(0),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mPortId(AUDIO_PORT_HANDLE_NONE)
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE)
 {
-    mStatus = set(streamType, sampleRate, format, channelMask,
+    (void)set(streamType, sampleRate, format, channelMask,
             0 /*frameCount*/, flags, cbf, user, notificationFrames,
             sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
             uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed);
@@ -313,6 +284,11 @@
         float maxRequiredSpeed,
         audio_port_handle_t selectedDeviceId)
 {
+    status_t status;
+    uint32_t channelCount;
+    pid_t callingPid;
+    pid_t myPid;
+
     ALOGV("set(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "flags #%x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
           streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
@@ -320,6 +296,7 @@
 
     mThreadCanCallJava = threadCanCallJava;
     mSelectedDeviceId = selectedDeviceId;
+    mSessionId = sessionId;
 
     switch (transferType) {
     case TRANSFER_DEFAULT:
@@ -334,25 +311,29 @@
     case TRANSFER_CALLBACK:
         if (cbf == NULL || sharedBuffer != 0) {
             ALOGE("Transfer type TRANSFER_CALLBACK but cbf == NULL || sharedBuffer != 0");
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         break;
     case TRANSFER_OBTAIN:
     case TRANSFER_SYNC:
         if (sharedBuffer != 0) {
             ALOGE("Transfer type TRANSFER_OBTAIN but sharedBuffer != 0");
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         break;
     case TRANSFER_SHARED:
         if (sharedBuffer == 0) {
             ALOGE("Transfer type TRANSFER_SHARED but sharedBuffer == 0");
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         break;
     default:
         ALOGE("Invalid transfer type %d", transferType);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mSharedBuffer = sharedBuffer;
     mTransfer = transferType;
@@ -366,7 +347,8 @@
     // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
         ALOGE("Track already in use");
-        return INVALID_OPERATION;
+        status = INVALID_OPERATION;
+        goto exit;
     }
 
     // handle default values first.
@@ -376,7 +358,8 @@
     if (pAttributes == NULL) {
         if (uint32_t(streamType) >= AUDIO_STREAM_PUBLIC_CNT) {
             ALOGE("Invalid stream type %d", streamType);
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         mStreamType = streamType;
 
@@ -408,16 +391,18 @@
     // validate parameters
     if (!audio_is_valid_format(format)) {
         ALOGE("Invalid format %#x", format);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mFormat = format;
 
     if (!audio_is_output_channel(channelMask)) {
         ALOGE("Invalid channel mask %#x", channelMask);
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mChannelMask = channelMask;
-    uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
+    channelCount = audio_channel_count_from_out_mask(channelMask);
     mChannelCount = channelCount;
 
     // force direct flag if format is not linear PCM
@@ -452,7 +437,8 @@
 
     // sampling rate must be specified for direct outputs
     if (sampleRate == 0 && (flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
-        return BAD_VALUE;
+        status = BAD_VALUE;
+        goto exit;
     }
     mSampleRate = sampleRate;
     mOriginalSampleRate = sampleRate;
@@ -483,12 +469,14 @@
         if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) {
             ALOGE("notificationFrames=%d not permitted for non-fast track",
                     notificationFrames);
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         if (frameCount > 0) {
             ALOGE("notificationFrames=%d not permitted with non-zero frameCount=%zu",
                     notificationFrames, frameCount);
-            return BAD_VALUE;
+            status = BAD_VALUE;
+            goto exit;
         }
         mNotificationFramesReq = 0;
         const uint32_t minNotificationsPerBuffer = 1;
@@ -500,20 +488,15 @@
                 notificationFrames, minNotificationsPerBuffer, maxNotificationsPerBuffer);
     }
     mNotificationFramesAct = 0;
-    if (sessionId == AUDIO_SESSION_ALLOCATE) {
-        mSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
-    } else {
-        mSessionId = sessionId;
-    }
-    int callingpid = IPCThreadState::self()->getCallingPid();
-    int mypid = getpid();
-    if (uid == AUDIO_UID_INVALID || (callingpid != mypid)) {
+    callingPid = IPCThreadState::self()->getCallingPid();
+    myPid = getpid();
+    if (uid == AUDIO_UID_INVALID || (callingPid != myPid)) {
         mClientUid = IPCThreadState::self()->getCallingUid();
     } else {
         mClientUid = uid;
     }
-    if (pid == -1 || (callingpid != mypid)) {
-        mClientPid = callingpid;
+    if (pid == -1 || (callingPid != myPid)) {
+        mClientPid = callingPid;
     } else {
         mClientPid = pid;
     }
@@ -528,7 +511,7 @@
     }
 
     // create the IAudioTrack
-    status_t status = createTrack_l();
+    status = createTrack_l();
 
     if (status != NO_ERROR) {
         if (mAudioTrackThread != 0) {
@@ -536,10 +519,9 @@
             mAudioTrackThread->requestExitAndWait();
             mAudioTrackThread.clear();
         }
-        return status;
+        goto exit;
     }
 
-    mStatus = NO_ERROR;
     mUserData = user;
     mLoopCount = 0;
     mLoopStart = 0;
@@ -567,7 +549,10 @@
     mFramesWrittenServerOffset = 0;
     mFramesWrittenAtRestore = -1; // -1 is a unique initializer.
     mVolumeHandler = new media::VolumeHandler();
-    return NO_ERROR;
+
+exit:
+    mStatus = status;
+    return status;
 }
 
 // -------------------------------------------------------------------------
@@ -1311,76 +1296,19 @@
 
 status_t AudioTrack::createTrack_l()
 {
+    status_t status;
+    bool callbackAdded = false;
+
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
     if (audioFlinger == 0) {
         ALOGE("Could not get audioflinger");
-        return NO_INIT;
+        status = NO_INIT;
+        goto exit;
     }
 
-    audio_io_handle_t output;
-    audio_stream_type_t streamType = mStreamType;
-    audio_attributes_t *attr = (mStreamType == AUDIO_STREAM_DEFAULT) ? &mAttributes : NULL;
-    bool callbackAdded = false;
-
+    {
     // mFlags (not mOrigFlags) is modified depending on whether fast request is accepted.
     // After fast request is denied, we will request again if IAudioTrack is re-created.
-
-    status_t status;
-    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-    config.sample_rate = mSampleRate;
-    config.channel_mask = mChannelMask;
-    config.format = mFormat;
-    config.offload_info = mOffloadInfoCopy;
-    mRoutedDeviceId = mSelectedDeviceId;
-    status = AudioSystem::getOutputForAttr(attr, &output,
-                                           mSessionId, &streamType, mClientUid,
-                                           &config,
-                                           mFlags, &mRoutedDeviceId, &mPortId);
-
-    if (status != NO_ERROR || output == AUDIO_IO_HANDLE_NONE) {
-        ALOGE("Could not get audio output for session %d, stream type %d, usage %d, sample rate %u,"
-              " format %#x, channel mask %#x, flags %#x",
-              mSessionId, streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask,
-              mFlags);
-        return BAD_VALUE;
-    }
-    {
-    // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
-    // we must release it ourselves if anything goes wrong.
-
-    // Not all of these values are needed under all conditions, but it is easier to get them all
-    status = AudioSystem::getLatency(output, &mAfLatency);
-    if (status != NO_ERROR) {
-        ALOGE("getLatency(%d) failed status %d", output, status);
-        goto release;
-    }
-    ALOGV("createTrack_l() output %d afLatency %u", output, mAfLatency);
-
-    status = AudioSystem::getFrameCount(output, &mAfFrameCount);
-    if (status != NO_ERROR) {
-        ALOGE("getFrameCount(output=%d) status %d", output, status);
-        goto release;
-    }
-
-    // TODO consider making this a member variable if there are other uses for it later
-    size_t afFrameCountHAL;
-    status = AudioSystem::getFrameCountHAL(output, &afFrameCountHAL);
-    if (status != NO_ERROR) {
-        ALOGE("getFrameCountHAL(output=%d) status %d", output, status);
-        goto release;
-    }
-    ALOG_ASSERT(afFrameCountHAL > 0);
-
-    status = AudioSystem::getSamplingRate(output, &mAfSampleRate);
-    if (status != NO_ERROR) {
-        ALOGE("getSamplingRate(output=%d) status %d", output, status);
-        goto release;
-    }
-    if (mSampleRate == 0) {
-        mSampleRate = mAfSampleRate;
-        mOriginalSampleRate = mAfSampleRate;
-    }
-
     // Client can only express a preference for FAST.  Server will perform additional tests.
     if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
         // either of these use cases:
@@ -1394,130 +1322,81 @@
             // use case 4: synchronous write
             ((mTransfer == TRANSFER_SYNC) && mThreadCanCallJava);
 
-        bool useCaseAllowed = sharedBuffer || transferAllowed;
-        if (!useCaseAllowed) {
+        bool fastAllowed = sharedBuffer || transferAllowed;
+        if (!fastAllowed) {
             ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client, not shared buffer and transfer = %s",
                   convertTransferToText(mTransfer));
-        }
-
-        // sample rates must also match
-        bool sampleRateAllowed = mSampleRate == mAfSampleRate;
-        if (!sampleRateAllowed) {
-            ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client, sample rate %u Hz but HAL needs %u Hz",
-                  mSampleRate, mAfSampleRate);
-        }
-
-        bool fastAllowed = useCaseAllowed && sampleRateAllowed;
-        if (!fastAllowed) {
             mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
         }
     }
 
-    mNotificationFramesAct = mNotificationFramesReq;
-
-    size_t frameCount = mReqFrameCount;
-    if (!audio_has_proportional_frames(mFormat)) {
-
-        if (mSharedBuffer != 0) {
-            // Same comment as below about ignoring frameCount parameter for set()
-            frameCount = mSharedBuffer->size();
-        } else if (frameCount == 0) {
-            frameCount = mAfFrameCount;
-        }
-        if (mNotificationFramesAct != frameCount) {
-            mNotificationFramesAct = frameCount;
-        }
-    } else if (mSharedBuffer != 0) {
-        // FIXME: Ensure client side memory buffers need
-        // not have additional alignment beyond sample
-        // (e.g. 16 bit stereo accessed as 32 bit frame).
-        size_t alignment = audio_bytes_per_sample(mFormat);
-        if (alignment & 1) {
-            // for AUDIO_FORMAT_PCM_24_BIT_PACKED (not exposed through Java).
-            alignment = 1;
-        }
-        if (mChannelCount > 1) {
-            // More than 2 channels does not require stronger alignment than stereo
-            alignment <<= 1;
-        }
-        if (((uintptr_t)mSharedBuffer->pointer() & (alignment - 1)) != 0) {
-            ALOGE("Invalid buffer alignment: address %p, channel count %u",
-                    mSharedBuffer->pointer(), mChannelCount);
-            status = BAD_VALUE;
-            goto release;
-        }
-
-        // When initializing a shared buffer AudioTrack via constructors,
-        // there's no frameCount parameter.
-        // But when initializing a shared buffer AudioTrack via set(),
-        // there _is_ a frameCount parameter.  We silently ignore it.
-        frameCount = mSharedBuffer->size() / mFrameSize;
+    IAudioFlinger::CreateTrackInput input;
+    if (mStreamType != AUDIO_STREAM_DEFAULT) {
+        stream_type_to_audio_attributes(mStreamType, &input.attr);
     } else {
-        size_t minFrameCount = 0;
-        // For fast tracks the frame count calculations and checks are mostly done by server,
-        // but we try to respect the application's request for notifications per buffer.
-        if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
-            if (mNotificationsPerBufferReq > 0) {
-                // Avoid possible arithmetic overflow during multiplication.
-                // mNotificationsPerBuffer is clamped to a small integer earlier, so it is unlikely.
-                if (mNotificationsPerBufferReq > SIZE_MAX / afFrameCountHAL) {
-                    ALOGE("Requested notificationPerBuffer=%u ignored for HAL frameCount=%zu",
-                            mNotificationsPerBufferReq, afFrameCountHAL);
-                } else {
-                    minFrameCount = afFrameCountHAL * mNotificationsPerBufferReq;
-                }
-            }
-        } else {
-            // for normal tracks precompute the frame count based on speed.
-            const float speed = !isPurePcmData_l() || isOffloadedOrDirect_l() ? 1.0f :
-                            max(mMaxRequiredSpeed, mPlaybackRate.mSpeed);
-            minFrameCount = calculateMinFrameCount(
-                    mAfLatency, mAfFrameCount, mAfSampleRate, mSampleRate,
-                    speed /*, 0 mNotificationsPerBufferReq*/);
-        }
-        if (frameCount < minFrameCount) {
-            frameCount = minFrameCount;
-        }
+        input.attr = mAttributes;
     }
-
-    audio_output_flags_t flags = mFlags;
-
-    pid_t tid = -1;
+    input.config = AUDIO_CONFIG_INITIALIZER;
+    input.config.sample_rate = mSampleRate;
+    input.config.channel_mask = mChannelMask;
+    input.config.format = mFormat;
+    input.config.offload_info = mOffloadInfoCopy;
+    input.clientInfo.clientUid = mClientUid;
+    input.clientInfo.clientPid = mClientPid;
+    input.clientInfo.clientTid = -1;
     if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
         // It is currently meaningless to request SCHED_FIFO for a Java thread.  Even if the
         // application-level code follows all non-blocking design rules, the language runtime
         // doesn't also follow those rules, so the thread will not benefit overall.
         if (mAudioTrackThread != 0 && !mThreadCanCallJava) {
-            tid = mAudioTrackThread->getTid();
+            input.clientInfo.clientTid = mAudioTrackThread->getTid();
         }
     }
+    input.sharedBuffer = mSharedBuffer;
+    input.notificationsPerBuffer = mNotificationsPerBufferReq;
+    input.speed = 1.0;
+    if (audio_has_proportional_frames(mFormat) && mSharedBuffer == 0 &&
+            (mFlags & AUDIO_OUTPUT_FLAG_FAST) == 0) {
+        input.speed = !isPurePcmData_l() || isOffloadedOrDirect_l() ? 1.0f :
+                        max(mMaxRequiredSpeed, mPlaybackRate.mSpeed);
+    }
+    input.flags = mFlags;
+    input.frameCount = mReqFrameCount;
+    input.notificationFrameCount = mNotificationFramesReq;
+    input.selectedDeviceId = mSelectedDeviceId;
+    input.sessionId = mSessionId;
 
-    size_t temp = frameCount;   // temp may be replaced by a revised value of frameCount,
-                                // but we will still need the original value also
-    audio_session_t originalSessionId = mSessionId;
-    sp<IAudioTrack> track = audioFlinger->createTrack(streamType,
-                                                      mSampleRate,
-                                                      mFormat,
-                                                      mChannelMask,
-                                                      &temp,
-                                                      &flags,
-                                                      mSharedBuffer,
+    IAudioFlinger::CreateTrackOutput output;
+
+    sp<IAudioTrack> track = audioFlinger->createTrack(input,
                                                       output,
-                                                      mClientPid,
-                                                      tid,
-                                                      &mSessionId,
-                                                      mClientUid,
-                                                      &status,
-                                                      mPortId);
-    ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId,
-            "session ID changed from %d to %d", originalSessionId, mSessionId);
+                                                      &status);
 
-    if (status != NO_ERROR) {
-        ALOGE("AudioFlinger could not create track, status: %d", status);
-        goto release;
+    if (status != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
+        ALOGE("AudioFlinger could not create track, status: %d output %d", status, output.outputId);
+        if (status == NO_ERROR) {
+            status = NO_INIT;
+        }
+        goto exit;
     }
     ALOG_ASSERT(track != 0);
 
+    mFrameCount = output.frameCount;
+    mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
+    mRoutedDeviceId = output.selectedDeviceId;
+    mSessionId = output.sessionId;
+
+    mSampleRate = output.sampleRate;
+    if (mOriginalSampleRate == 0) {
+        mOriginalSampleRate = mSampleRate;
+    }
+
+    mAfFrameCount = output.afFrameCount;
+    mAfSampleRate = output.afSampleRate;
+    mAfLatency = output.afLatencyMs;
+
+    mLatency = mAfLatency + (1000LL * mFrameCount) / mSampleRate;
+
     // AudioFlinger now owns the reference to the I/O handle,
     // so we are no longer responsible for releasing it.
 
@@ -1526,13 +1405,13 @@
     if (iMem == 0) {
         ALOGE("Could not get control block");
         status = NO_INIT;
-        goto release;
+        goto exit;
     }
     void *iMemPointer = iMem->pointer();
     if (iMemPointer == NULL) {
         ALOGE("Could not get control block pointer");
         status = NO_INIT;
-        goto release;
+        goto exit;
     }
     // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
@@ -1545,75 +1424,33 @@
 
     audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
     mCblk = cblk;
-    // note that temp is the (possibly revised) value of frameCount
-    if (temp < frameCount || (frameCount == 0 && temp == 0)) {
-        // In current design, AudioTrack client checks and ensures frame count validity before
-        // passing it to AudioFlinger so AudioFlinger should not return a different value except
-        // for fast track as it uses a special method of assigning frame count.
-        ALOGW("Requested frameCount %zu but received frameCount %zu", frameCount, temp);
-    }
-    frameCount = temp;
 
     mAwaitBoost = false;
     if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
-        if (flags & AUDIO_OUTPUT_FLAG_FAST) {
-            ALOGI("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %zu -> %zu", frameCount, temp);
+        if (output.flags & AUDIO_OUTPUT_FLAG_FAST) {
+            ALOGI("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %zu -> %zu",
+                  mReqFrameCount, mFrameCount);
             if (!mThreadCanCallJava) {
                 mAwaitBoost = true;
             }
         } else {
-            ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu -> %zu", frameCount,
-                    temp);
+            ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu -> %zu", mReqFrameCount,
+                  mFrameCount);
         }
     }
-    mFlags = flags;
-
-    // Make sure that application is notified with sufficient margin before underrun.
-    // The client can divide the AudioTrack buffer into sub-buffers,
-    // and expresses its desire to server as the notification frame count.
-    if (mSharedBuffer == 0 && audio_is_linear_pcm(mFormat)) {
-        size_t maxNotificationFrames;
-        if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
-            // notify every HAL buffer, regardless of the size of the track buffer
-            maxNotificationFrames = afFrameCountHAL;
-        } else {
-            // For normal tracks, use at least double-buffering if no sample rate conversion,
-            // or at least triple-buffering if there is sample rate conversion
-            const int nBuffering = mOriginalSampleRate == mAfSampleRate ? 2 : 3;
-            maxNotificationFrames = frameCount / nBuffering;
-            // If client requested a fast track but this was denied, then use the smaller maximum.
-            // FMS_20 is the minimum task wakeup period in ms for which CFS operates reliably.
-#define FMS_20 20   // FIXME share a common declaration with the same symbol in Threads.cpp
-            if (mOrigFlags & AUDIO_OUTPUT_FLAG_FAST) {
-                size_t maxNotificationFramesFastDenied = FMS_20 * mSampleRate / 1000;
-                if (maxNotificationFrames > maxNotificationFramesFastDenied) {
-                    maxNotificationFrames = maxNotificationFramesFastDenied;
-                }
-            }
-        }
-        if (mNotificationFramesAct == 0 || mNotificationFramesAct > maxNotificationFrames) {
-            if (mNotificationFramesAct == 0) {
-                ALOGD("Client defaulted notificationFrames to %zu for frameCount %zu",
-                    maxNotificationFrames, frameCount);
-            } else {
-                ALOGW("Client adjusted notificationFrames from %u to %zu for frameCount %zu",
-                    mNotificationFramesAct, maxNotificationFrames, frameCount);
-            }
-            mNotificationFramesAct = (uint32_t) maxNotificationFrames;
-        }
-    }
+    mFlags = output.flags;
 
     //mOutput != output includes the case where mOutput == AUDIO_IO_HANDLE_NONE for first creation
-    if (mDeviceCallback != 0 && mOutput != output) {
+    if (mDeviceCallback != 0 && mOutput != output.outputId) {
         if (mOutput != AUDIO_IO_HANDLE_NONE) {
             AudioSystem::removeAudioDeviceCallback(this, mOutput);
         }
-        AudioSystem::addAudioDeviceCallback(this, output);
+        AudioSystem::addAudioDeviceCallback(this, output.outputId);
         callbackAdded = true;
     }
 
     // We retain a copy of the I/O handle, but don't own the reference
-    mOutput = output;
+    mOutput = output.outputId;
     mRefreshRemaining = true;
 
     // Starting address of buffers in shared memory.  If there is a shared buffer, buffers
@@ -1628,18 +1465,16 @@
         if (buffers == NULL) {
             ALOGE("Could not get buffer pointer");
             status = NO_INIT;
-            goto release;
+            goto exit;
         }
     }
 
     mAudioTrack->attachAuxEffect(mAuxEffectId);
-    mFrameCount = frameCount;
-    updateLatency_l();  // this refetches mAfLatency and sets mLatency
 
     // If IAudioTrack is re-created, don't let the requested frameCount
     // decrease.  This can confuse clients that cache frameCount().
-    if (frameCount > mReqFrameCount) {
-        mReqFrameCount = frameCount;
+    if (mFrameCount > mReqFrameCount) {
+        mReqFrameCount = mFrameCount;
     }
 
     // reset server position to 0 as we have new cblk.
@@ -1648,9 +1483,9 @@
     // update proxy
     if (mSharedBuffer == 0) {
         mStaticProxy.clear();
-        mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize);
+        mProxy = new AudioTrackClientProxy(cblk, buffers, mFrameCount, mFrameSize);
     } else {
-        mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize);
+        mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, mFrameCount, mFrameSize);
         mProxy = mStaticProxy;
     }
 
@@ -1673,18 +1508,17 @@
     mDeathNotifier = new DeathNotifier(this);
     IInterface::asBinder(mAudioTrack)->linkToDeath(mDeathNotifier, this);
 
-    return NO_ERROR;
     }
 
-release:
-    AudioSystem::releaseOutput(output, streamType, mSessionId);
-    if (callbackAdded) {
+exit:
+    if (status != NO_ERROR && callbackAdded) {
        // note: mOutput is always valid if callbackAdded is true
         AudioSystem::removeAudioDeviceCallback(this, mOutput);
     }
-    if (status == NO_ERROR) {
-        status = NO_INIT;
-    }
+
+    mStatus = status;
+
+    // sp<IAudioTrack> track destructor will cause releaseOutput() to be called by AudioFlinger
     return status;
 }
 
@@ -2420,8 +2254,8 @@
         return true; // static tracks do not have issues with buffer sizing.
     }
     const size_t minFrameCount =
-            calculateMinFrameCount(mAfLatency, mAfFrameCount, mAfSampleRate, sampleRate, speed
-                /*, 0 mNotificationsPerBufferReq*/);
+            AudioSystem::calculateMinFrameCount(mAfLatency, mAfFrameCount, mAfSampleRate,
+                                            sampleRate, speed /*, 0 mNotificationsPerBufferReq*/);
     const bool allowed = mFrameCount >= minFrameCount;
     ALOGD_IF(!allowed,
             "isSampleRateSpeedAllowed_l denied "
@@ -2837,23 +2671,28 @@
 
 status_t AudioTrack::dump(int fd, const Vector<String16>& args __unused) const
 {
-
-    const size_t SIZE = 256;
-    char buffer[SIZE];
     String8 result;
 
     result.append(" AudioTrack::dump\n");
-    snprintf(buffer, 255, "  stream type(%d), left - right volume(%f, %f)\n", mStreamType,
-            mVolume[AUDIO_INTERLEAVE_LEFT], mVolume[AUDIO_INTERLEAVE_RIGHT]);
-    result.append(buffer);
-    snprintf(buffer, 255, "  format(%d), channel count(%d), frame count(%zu)\n", mFormat,
-            mChannelCount, mFrameCount);
-    result.append(buffer);
-    snprintf(buffer, 255, "  sample rate(%u), speed(%f), status(%d)\n",
-            mSampleRate, mPlaybackRate.mSpeed, mStatus);
-    result.append(buffer);
-    snprintf(buffer, 255, "  state(%d), latency (%d)\n", mState, mLatency);
-    result.append(buffer);
+    result.appendFormat("  status(%d), state(%d), session Id(%d), flags(%x)\n",
+                        mStatus, mState, mSessionId, mFlags);
+    result.appendFormat("  stream type(%d), left - right volume(%f, %f)\n",
+                        (mStreamType == AUDIO_STREAM_DEFAULT) ?
+                                audio_attributes_to_stream_type(&mAttributes) : mStreamType,
+                        mVolume[AUDIO_INTERLEAVE_LEFT], mVolume[AUDIO_INTERLEAVE_RIGHT]);
+    result.appendFormat("  format(%x), channel mask(%x), channel count(%u)\n",
+                  mFormat, mChannelMask, mChannelCount);
+    result.appendFormat("  sample rate(%u), original sample rate(%u), speed(%f)\n",
+                  mSampleRate, mOriginalSampleRate, mPlaybackRate.mSpeed);
+    result.appendFormat("  frame count(%zu), req. frame count(%zu)\n",
+                  mFrameCount, mReqFrameCount);
+    result.appendFormat("  notif. frame count(%u), req. notif. frame count(%u),"
+            " req. notif. per buff(%u)\n",
+             mNotificationFramesAct, mNotificationFramesReq, mNotificationsPerBufferReq);
+    result.appendFormat("  latency (%d), selected device Id(%d), routed device Id(%d)\n",
+                        mLatency, mSelectedDeviceId, mRoutedDeviceId);
+    result.appendFormat("  output(%d) AF latency (%u) AF frame count(%zu) AF SampleRate(%u)\n",
+                        mOutput, mAfLatency, mAfFrameCount, mAfSampleRate);
     ::write(fd, result.string(), result.size());
     return NO_ERROR;
 }
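
A note on the dump() rewrite above: String8::appendFormat() grows the result as needed,
so long lines are no longer silently truncated by the old fixed 256-byte snprintf()
buffer. A minimal sketch of the pattern (illustrative only; Example, mFrameCount and
mSampleRate are placeholder names, not part of this change):

    status_t Example::dump(int fd) const
    {
        String8 result;
        result.append(" Example::dump\n");
        result.appendFormat("  frame count(%zu), sample rate(%u)\n", mFrameCount, mSampleRate);
        ::write(fd, result.string(), result.size());
        return NO_ERROR;
    }
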
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index fc8c11a..5db60f3 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -30,7 +30,7 @@
 
 enum {
     CREATE_TRACK = IBinder::FIRST_CALL_TRANSACTION,
-    OPEN_RECORD,
+    CREATE_RECORD,
     SAMPLE_RATE,
     RESERVED,   // obsolete, was CHANNEL_COUNT
     FORMAT,
@@ -95,182 +95,74 @@
     {
     }
 
-    virtual sp<IAudioTrack> createTrack(
-                                audio_stream_type_t streamType,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                size_t *pFrameCount,
-                                audio_output_flags_t *flags,
-                                const sp<IMemory>& sharedBuffer,
-                                audio_io_handle_t output,
-                                pid_t pid,
-                                pid_t tid,
-                                audio_session_t *sessionId,
-                                int clientUid,
-                                status_t *status,
-                                audio_port_handle_t portId)
+    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
+                                        CreateTrackOutput& output,
+                                        status_t *status)
     {
         Parcel data, reply;
         sp<IAudioTrack> track;
         data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) streamType);
-        data.writeInt32(sampleRate);
-        data.writeInt32(format);
-        data.writeInt32(channelMask);
-        size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
-        data.writeInt64(frameCount);
-        audio_output_flags_t lFlags = flags != NULL ? *flags : AUDIO_OUTPUT_FLAG_NONE;
-        data.writeInt32(lFlags);
-        // haveSharedBuffer
-        if (sharedBuffer != 0) {
-            data.writeInt32(true);
-            data.writeStrongBinder(IInterface::asBinder(sharedBuffer));
-        } else {
-            data.writeInt32(false);
+
+        if (status == nullptr) {
+            return track;
         }
-        data.writeInt32((int32_t) output);
-        data.writeInt32((int32_t) pid);
-        data.writeInt32((int32_t) tid);
-        audio_session_t lSessionId = AUDIO_SESSION_ALLOCATE;
-        if (sessionId != NULL) {
-            lSessionId = *sessionId;
-        }
-        data.writeInt32(lSessionId);
-        data.writeInt32(clientUid);
-        data.writeInt32(portId);
+
+        input.writeToParcel(&data);
+
         status_t lStatus = remote()->transact(CREATE_TRACK, data, &reply);
         if (lStatus != NO_ERROR) {
-            ALOGE("createTrack error: %s", strerror(-lStatus));
-        } else {
-            frameCount = reply.readInt64();
-            if (pFrameCount != NULL) {
-                *pFrameCount = frameCount;
-            }
-            lFlags = (audio_output_flags_t)reply.readInt32();
-            if (flags != NULL) {
-                *flags = lFlags;
-            }
-            lSessionId = (audio_session_t) reply.readInt32();
-            if (sessionId != NULL) {
-                *sessionId = lSessionId;
-            }
-            lStatus = reply.readInt32();
-            track = interface_cast<IAudioTrack>(reply.readStrongBinder());
-            if (lStatus == NO_ERROR) {
-                if (track == 0) {
-                    ALOGE("createTrack should have returned an IAudioTrack");
-                    lStatus = UNKNOWN_ERROR;
-                }
-            } else {
-                if (track != 0) {
-                    ALOGE("createTrack returned an IAudioTrack but with status %d", lStatus);
-                    track.clear();
-                }
-            }
+            ALOGE("createTrack transaction error %d", lStatus);
+            *status = DEAD_OBJECT;
+            return track;
         }
-        if (status != NULL) {
-            *status = lStatus;
+        *status = reply.readInt32();
+        if (*status != NO_ERROR) {
+            ALOGE("createTrack returned error %d", *status);
+            return track;
         }
+        track = interface_cast<IAudioTrack>(reply.readStrongBinder());
+        if (track == 0) {
+            ALOGE("createTrack returned a NULL IAudioTrack with status OK");
+            *status = DEAD_OBJECT;
+            return track;
+        }
+        output.readFromParcel(&reply);
         return track;
     }
 
-    virtual sp<media::IAudioRecord> openRecord(
-                                audio_io_handle_t input,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                const String16& opPackageName,
-                                size_t *pFrameCount,
-                                audio_input_flags_t *flags,
-                                pid_t pid,
-                                pid_t tid,
-                                int clientUid,
-                                audio_session_t *sessionId,
-                                size_t *notificationFrames,
-                                sp<IMemory>& cblk,
-                                sp<IMemory>& buffers,
-                                status_t *status,
-                                audio_port_handle_t portId)
+    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
+                                                 CreateRecordOutput& output,
+                                                 status_t *status)
     {
         Parcel data, reply;
         sp<media::IAudioRecord> record;
         data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) input);
-        data.writeInt32(sampleRate);
-        data.writeInt32(format);
-        data.writeInt32(channelMask);
-        data.writeString16(opPackageName);
-        size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
-        data.writeInt64(frameCount);
-        audio_input_flags_t lFlags = flags != NULL ? *flags : AUDIO_INPUT_FLAG_NONE;
-        data.writeInt32(lFlags);
-        data.writeInt32((int32_t) pid);
-        data.writeInt32((int32_t) tid);
-        data.writeInt32((int32_t) clientUid);
-        audio_session_t lSessionId = AUDIO_SESSION_ALLOCATE;
-        if (sessionId != NULL) {
-            lSessionId = *sessionId;
+
+        if (status == nullptr) {
+            return record;
         }
-        data.writeInt32(lSessionId);
-        data.writeInt64(notificationFrames != NULL ? *notificationFrames : 0);
-        data.writeInt32(portId);
-        cblk.clear();
-        buffers.clear();
-        status_t lStatus = remote()->transact(OPEN_RECORD, data, &reply);
+
+        input.writeToParcel(&data);
+
+        status_t lStatus = remote()->transact(CREATE_RECORD, data, &reply);
         if (lStatus != NO_ERROR) {
-            ALOGE("openRecord error: %s", strerror(-lStatus));
-        } else {
-            frameCount = reply.readInt64();
-            if (pFrameCount != NULL) {
-                *pFrameCount = frameCount;
-            }
-            lFlags = (audio_input_flags_t)reply.readInt32();
-            if (flags != NULL) {
-                *flags = lFlags;
-            }
-            lSessionId = (audio_session_t) reply.readInt32();
-            if (sessionId != NULL) {
-                *sessionId = lSessionId;
-            }
-            size_t lNotificationFrames = (size_t) reply.readInt64();
-            if (notificationFrames != NULL) {
-                *notificationFrames = lNotificationFrames;
-            }
-            lStatus = reply.readInt32();
-            record = interface_cast<media::IAudioRecord>(reply.readStrongBinder());
-            cblk = interface_cast<IMemory>(reply.readStrongBinder());
-            if (cblk != 0 && cblk->pointer() == NULL) {
-                cblk.clear();
-            }
-            buffers = interface_cast<IMemory>(reply.readStrongBinder());
-            if (buffers != 0 && buffers->pointer() == NULL) {
-                buffers.clear();
-            }
-            if (lStatus == NO_ERROR) {
-                if (record == 0) {
-                    ALOGE("openRecord should have returned an IAudioRecord");
-                    lStatus = UNKNOWN_ERROR;
-                } else if (cblk == 0) {
-                    ALOGE("openRecord should have returned a cblk");
-                    lStatus = NO_MEMORY;
-                }
-                // buffers is permitted to be 0
-            } else {
-                if (record != 0 || cblk != 0 || buffers != 0) {
-                    ALOGE("openRecord returned an IAudioRecord, cblk, "
-                          "or buffers but with status %d", lStatus);
-                }
-            }
-            if (lStatus != NO_ERROR) {
-                record.clear();
-                cblk.clear();
-                buffers.clear();
-            }
+            ALOGE("createRecord transaction error %d", lStatus);
+            *status = DEAD_OBJECT;
+            return record;
         }
-        if (status != NULL) {
-            *status = lStatus;
+        *status = reply.readInt32();
+        if (*status != NO_ERROR) {
+            ALOGE("createRecord returned error %d", *status);
+            return record;
         }
+
+        record = interface_cast<media::IAudioRecord>(reply.readStrongBinder());
+        if (record == 0) {
+            ALOGE("createRecord returned a NULL IAudioRecord with status OK");
+            *status = DEAD_OBJECT;
+            return record;
+        }
+        output.readFromParcel(&reply);
         return record;
     }
 
@@ -950,7 +842,7 @@
     // TODO should select more wisely the items from the list
     switch (code) {
         case CREATE_TRACK:
-        case OPEN_RECORD:
+        case CREATE_RECORD:
         case SET_MASTER_VOLUME:
         case SET_MASTER_MUTE:
         case SET_STREAM_VOLUME:
@@ -970,74 +862,52 @@
     switch (code) {
         case CREATE_TRACK: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int streamType = data.readInt32();
-            uint32_t sampleRate = data.readInt32();
-            audio_format_t format = (audio_format_t) data.readInt32();
-            audio_channel_mask_t channelMask = data.readInt32();
-            size_t frameCount = data.readInt64();
-            audio_output_flags_t flags = (audio_output_flags_t) data.readInt32();
-            bool haveSharedBuffer = data.readInt32() != 0;
-            sp<IMemory> buffer;
-            if (haveSharedBuffer) {
-                buffer = interface_cast<IMemory>(data.readStrongBinder());
+
+            CreateTrackInput input;
+            if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
+                reply->writeInt32(DEAD_OBJECT);
+                return NO_ERROR;
             }
-            audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
-            pid_t pid = (pid_t) data.readInt32();
-            pid_t tid = (pid_t) data.readInt32();
-            audio_session_t sessionId = (audio_session_t) data.readInt32();
-            int clientUid = data.readInt32();
-            audio_port_handle_t portId = (audio_port_handle_t) data.readInt32();
-            status_t status = NO_ERROR;
-            sp<IAudioTrack> track;
-            if ((haveSharedBuffer && (buffer == 0)) ||
-                    ((buffer != 0) && (buffer->pointer() == NULL))) {
-                ALOGW("CREATE_TRACK: cannot retrieve shared memory");
-                status = DEAD_OBJECT;
-            } else {
-                track = createTrack(
-                        (audio_stream_type_t) streamType, sampleRate, format,
-                        channelMask, &frameCount, &flags, buffer, output, pid, tid,
-                        &sessionId, clientUid, &status, portId);
-                LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
-            }
-            reply->writeInt64(frameCount);
-            reply->writeInt32(flags);
-            reply->writeInt32(sessionId);
+
+            status_t status;
+            CreateTrackOutput output;
+
+            sp<IAudioTrack> track = createTrack(input,
+                                               output,
+                                               &status);
+
+            LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
             reply->writeInt32(status);
+            if (status != NO_ERROR) {
+                return NO_ERROR;
+            }
             reply->writeStrongBinder(IInterface::asBinder(track));
+            output.writeToParcel(reply);
             return NO_ERROR;
         } break;
-        case OPEN_RECORD: {
+        case CREATE_RECORD: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_io_handle_t input = (audio_io_handle_t) data.readInt32();
-            uint32_t sampleRate = data.readInt32();
-            audio_format_t format = (audio_format_t) data.readInt32();
-            audio_channel_mask_t channelMask = data.readInt32();
-            const String16& opPackageName = data.readString16();
-            size_t frameCount = data.readInt64();
-            audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
-            pid_t pid = (pid_t) data.readInt32();
-            pid_t tid = (pid_t) data.readInt32();
-            int clientUid = data.readInt32();
-            audio_session_t sessionId = (audio_session_t) data.readInt32();
-            size_t notificationFrames = data.readInt64();
-            audio_port_handle_t portId = (audio_port_handle_t) data.readInt32();
-            sp<IMemory> cblk;
-            sp<IMemory> buffers;
-            status_t status = NO_ERROR;
-            sp<media::IAudioRecord> record = openRecord(input,
-                    sampleRate, format, channelMask, opPackageName, &frameCount, &flags,
-                    pid, tid, clientUid, &sessionId, &notificationFrames, cblk, buffers,
-                    &status, portId);
+
+            CreateRecordInput input;
+            if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
+                reply->writeInt32(DEAD_OBJECT);
+                return NO_ERROR;
+            }
+
+            status_t status;
+            CreateRecordOutput output;
+
+            sp<media::IAudioRecord> record = createRecord(input,
+                                                          output,
+                                                          &status);
+
             LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
-            reply->writeInt64(frameCount);
-            reply->writeInt32(flags);
-            reply->writeInt32(sessionId);
-            reply->writeInt64(notificationFrames);
             reply->writeInt32(status);
+            if (status != NO_ERROR) {
+                return NO_ERROR;
+            }
             reply->writeStrongBinder(IInterface::asBinder(record));
-            reply->writeStrongBinder(IInterface::asBinder(cblk));
-            reply->writeStrongBinder(IInterface::asBinder(buffers));
+            output.writeToParcel(reply);
             return NO_ERROR;
         } break;
         case SAMPLE_RATE: {
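
For context, the Bp-side code above shows the new calling convention: all request
parameters travel in a single CreateTrackInput parcelable and all server-decided values
come back in CreateTrackOutput. A hedged caller-side sketch (not the actual
AudioTrack.cpp code; the local variables here are hypothetical):

    IAudioFlinger::CreateTrackInput input;
    input.attr = attributes;                 // audio_attributes_t chosen by the client
    input.config = config;                   // audio_config_t: sample rate, format, channel mask
    input.clientInfo.clientUid = uid;
    input.clientInfo.clientPid = pid;
    input.clientInfo.clientTid = tid;
    input.sharedBuffer = sharedBuffer;       // 0 for streaming mode
    input.notificationsPerBuffer = notificationsPerBufferReq;
    input.speed = playbackSpeed;
    input.flags = flags;                     // in/out: AudioFlinger may deny flags
    input.frameCount = requestedFrameCount;  // in/out: 0 lets the server choose
    input.notificationFrameCount = notificationFramesReq;
    input.selectedDeviceId = selectedDeviceId;
    input.sessionId = sessionId;

    IAudioFlinger::CreateTrackOutput output;
    status_t status;
    sp<IAudioTrack> track = audioFlinger->createTrack(input, output, &status);
    if (status == NO_ERROR) {
        // negotiated values returned by the server
        size_t frameCount = output.frameCount;
        uint32_t afLatencyMs = output.afLatencyMs;
        audio_io_handle_t outputId = output.outputId;
    }
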
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 0397eec..970ae90 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -928,6 +928,7 @@
             bool hasAttributes = data.readInt32() != 0;
             if (hasAttributes) {
                 data.read(&attr, sizeof(audio_attributes_t));
+                sanetizeAudioAttributes(&attr);
             }
             audio_session_t session = (audio_session_t)data.readInt32();
             audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
@@ -993,6 +994,7 @@
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             audio_attributes_t attr;
             data.read(&attr, sizeof(audio_attributes_t));
+            sanetizeAudioAttributes(&attr);
             audio_io_handle_t input = (audio_io_handle_t)data.readInt32();
             audio_session_t session = (audio_session_t)data.readInt32();
             pid_t pid = (pid_t)data.readInt32();
@@ -1368,6 +1370,7 @@
             data.read(&source, sizeof(struct audio_port_config));
             audio_attributes_t attributes;
             data.read(&attributes, sizeof(audio_attributes_t));
+            sanetizeAudioAttributes(&attributes);
             audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
             status_t status = startAudioSource(&source, &attributes, &handle);
             reply->writeInt32(status);
@@ -1418,6 +1421,15 @@
     }
 }
 
+void BnAudioPolicyService::sanetizeAudioAttributes(audio_attributes_t* attr)
+{
+    const size_t tagsMaxSize = AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
+    if (strnlen(attr->tags, tagsMaxSize) >= tagsMaxSize) {
+        android_errorWriteLog(0x534e4554, "68953950"); // SafetyNet logging
+    }
+    attr->tags[tagsMaxSize - 1] = '\0';
+}
+
 // ----------------------------------------------------------------------------
 
 } // namespace android
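
The sanetizeAudioAttributes() helper added above hardens the Bn side against
audio_attributes_t blobs whose tags field arrives without a terminating '\0':
strnlen() returning tagsMaxSize means no terminator was found within the buffer, the
event is reported via android_errorWriteLog(), and the last byte is forced to '\0' so
later string operations on attr->tags cannot read out of bounds. A tiny illustrative
sketch of the condition it detects (assumption: tags is a
char[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE] array, as declared in system/audio.h):

    audio_attributes_t attr;
    memset(attr.tags, 'A', sizeof(attr.tags));             // unterminated tags from a peer
    // strnlen() hits the size limit without finding '\0', so the check fires:
    assert(strnlen(attr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE) >= AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
    attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';   // sanitized before further use
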
diff --git a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
index 50ce78f..7572671 100644
--- a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
@@ -16,6 +16,7 @@
 
 package android.media;
 
+/* Native code must specify namespace media (media::IAudioRecord) when referring to this class */
 interface IAudioRecord {
 
   /* After it's created the track is not active. Call start() to
diff --git a/media/libaudioclient/include/media/AudioClient.h b/media/libaudioclient/include/media/AudioClient.h
index 9efd76d..247af9e 100644
--- a/media/libaudioclient/include/media/AudioClient.h
+++ b/media/libaudioclient/include/media/AudioClient.h
@@ -18,19 +18,38 @@
 #ifndef ANDROID_AUDIO_CLIENT_H
 #define ANDROID_AUDIO_CLIENT_H
 
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
 #include <system/audio.h>
 #include <utils/String16.h>
 
 namespace android {
 
-class AudioClient {
+class AudioClient : public Parcelable {
  public:
     AudioClient() :
-        clientUid(-1), clientPid(-1), packageName("") {}
+        clientUid(-1), clientPid(-1), clientTid(-1), packageName("") {}
 
     uid_t clientUid;
     pid_t clientPid;
+    pid_t clientTid;
     String16 packageName;
+
+    status_t readFromParcel(const Parcel *parcel) override {
+        clientUid = parcel->readInt32();
+        clientPid = parcel->readInt32();
+        clientTid = parcel->readInt32();
+        packageName = parcel->readString16();
+        return NO_ERROR;
+    }
+
+    status_t writeToParcel(Parcel *parcel) const override {
+        parcel->writeInt32(clientUid);
+        parcel->writeInt32(clientPid);
+        parcel->writeInt32(clientTid);
+        parcel->writeString16(packageName);
+        return NO_ERROR;
+    }
 };
 
 }; // namespace android
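
Making AudioClient a Parcelable lets it ride inside CreateTrackInput and
CreateRecordInput. A hypothetical round-trip sketch (the package name and the use of
getuid()/getpid()/gettid() are illustrative, not part of this change):

    AudioClient clientIn;
    clientIn.clientUid = getuid();
    clientIn.clientPid = getpid();
    clientIn.clientTid = gettid();
    clientIn.packageName = String16("com.example.app");

    Parcel parcel;
    clientIn.writeToParcel(&parcel);
    parcel.setDataPosition(0);              // rewind before reading back

    AudioClient clientOut;
    clientOut.readFromParcel(&parcel);      // fields now mirror clientIn
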
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index c6ad1b5..00c2a88 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -185,7 +185,8 @@
                                     audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
                                     uid_t uid = AUDIO_UID_INVALID,
                                     pid_t pid = -1,
-                                    const audio_attributes_t* pAttributes = NULL);
+                                    const audio_attributes_t* pAttributes = NULL,
+                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     /* Terminates the AudioRecord and unregisters it from AudioFlinger.
      * Also destroys all resources associated with the AudioRecord.
@@ -223,7 +224,8 @@
                             audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
                             uid_t uid = AUDIO_UID_INVALID,
                             pid_t pid = -1,
-                            const audio_attributes_t* pAttributes = NULL);
+                            const audio_attributes_t* pAttributes = NULL,
+                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     /* Result of constructing the AudioRecord. This must be checked for successful initialization
      * before using any AudioRecord API (except for set()), because using
@@ -568,7 +570,7 @@
 
             // caller must hold lock on mLock for all _l methods
 
-            status_t openRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName);
+            status_t createRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName);
 
             // FIXME enum is faster than strcmp() for parameter 'from'
             status_t restoreRecord_l(const char *from);
@@ -680,7 +682,6 @@
                                               // May not match the app selection depending on other
                                               // activity and connected devices
     wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
-    audio_port_handle_t    mPortId;  // unique ID allocated by audio policy
 
 };
 
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 327eba8..24a6e22 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -106,6 +106,9 @@
 
     static float linearToLog(int volume);
     static int logToLinear(float volume);
+    static size_t calculateMinFrameCount(
+            uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate,
+            uint32_t sampleRate, float speed /*, uint32_t notificationsPerBufferReq*/);
 
     // Returned samplingRate and frameCount output values are guaranteed
     // to be non-zero if status == NO_ERROR
@@ -209,8 +212,6 @@
     static status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
     static audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
 
-    // Client must successfully hand off the handle reference to AudioFlinger via createTrack(),
-    // or release it with releaseOutput().
     static status_t getOutputForAttr(const audio_attributes_t *attr,
                                      audio_io_handle_t *output,
                                      audio_session_t session,
@@ -230,7 +231,7 @@
                               audio_stream_type_t stream,
                               audio_session_t session);
 
-    // Client must successfully hand off the handle reference to AudioFlinger via openRecord(),
+    // Client must successfully hand off the handle reference to AudioFlinger via createRecord(),
     // or release it with releaseInput().
     static status_t getInputForAttr(const audio_attributes_t *attr,
                                     audio_io_handle_t *input,
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 8973133..9fbd04b 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1182,7 +1182,6 @@
     pid_t                   mClientPid;
 
     wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
-    audio_port_handle_t     mPortId;  // unique ID allocated by audio policy
 };
 
 }; // namespace android
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 133d6c9..57d9778 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -24,6 +24,9 @@
 #include <utils/RefBase.h>
 #include <utils/Errors.h>
 #include <binder/IInterface.h>
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+#include <media/AudioClient.h>
 #include <media/IAudioTrack.h>
 #include <media/IAudioFlingerClient.h>
 #include <system/audio.h>
@@ -44,6 +47,271 @@
 public:
     DECLARE_META_INTERFACE(AudioFlinger);
 
+    /* CreateTrackInput contains all input arguments sent by AudioTrack to AudioFlinger
+     * when calling createTrack() including arguments that will be updated by AudioFlinger
+     * and returned in CreateTrackOutput object
+     */
+    class CreateTrackInput : public Parcelable {
+    public:
+        status_t readFromParcel(const Parcel *parcel) override {
+            /* input arguments*/
+            memset(&attr, 0, sizeof(audio_attributes_t));
+            if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';
+            memset(&config, 0, sizeof(audio_config_t));
+            if (parcel->read(&config, sizeof(audio_config_t)) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            if (parcel->readInt32() != 0) {
+                sharedBuffer = interface_cast<IMemory>(parcel->readStrongBinder());
+                if (sharedBuffer == 0 || sharedBuffer->pointer() == NULL) {
+                    return BAD_VALUE;
+                }
+            }
+            notificationsPerBuffer = parcel->readInt32();
+            speed = parcel->readFloat();
+
+            /* input/output arguments*/
+            (void)parcel->read(&flags, sizeof(audio_output_flags_t));
+            frameCount = parcel->readInt64();
+            notificationFrameCount = parcel->readInt64();
+            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->read(&sessionId, sizeof(audio_session_t));
+            return NO_ERROR;
+        }
+
+        status_t writeToParcel(Parcel *parcel) const override {
+            /* input arguments*/
+            (void)parcel->write(&attr, sizeof(audio_attributes_t));
+            (void)parcel->write(&config, sizeof(audio_config_t));
+            (void)clientInfo.writeToParcel(parcel);
+            if (sharedBuffer != 0) {
+                (void)parcel->writeInt32(1);
+                (void)parcel->writeStrongBinder(IInterface::asBinder(sharedBuffer));
+            } else {
+                (void)parcel->writeInt32(0);
+            }
+            (void)parcel->writeInt32(notificationsPerBuffer);
+            (void)parcel->writeFloat(speed);
+
+            /* input/output arguments*/
+            (void)parcel->write(&flags, sizeof(audio_output_flags_t));
+            (void)parcel->writeInt64(frameCount);
+            (void)parcel->writeInt64(notificationFrameCount);
+            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->write(&sessionId, sizeof(audio_session_t));
+            return NO_ERROR;
+        }
+
+        /* input */
+        audio_attributes_t attr;
+        audio_config_t config;
+        AudioClient clientInfo;
+        sp<IMemory> sharedBuffer;
+        uint32_t notificationsPerBuffer;
+        float speed;
+
+        /* input/output */
+        audio_output_flags_t flags;
+        size_t frameCount;
+        size_t notificationFrameCount;
+        audio_port_handle_t selectedDeviceId;
+        audio_session_t sessionId;
+    };
+
+    /* CreateTrackOutput contains all output arguments returned by AudioFlinger to AudioTrack
+     * when calling createTrack() including arguments that were passed as I/O for update by
+     * CreateTrackInput.
+     */
+    class CreateTrackOutput : public Parcelable {
+    public:
+        status_t readFromParcel(const Parcel *parcel) override {
+            /* input/output arguments*/
+            (void)parcel->read(&flags, sizeof(audio_output_flags_t));
+            frameCount = parcel->readInt64();
+            notificationFrameCount = parcel->readInt64();
+            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->read(&sessionId, sizeof(audio_session_t));
+
+            /* output arguments*/
+            sampleRate = parcel->readUint32();
+            afFrameCount = parcel->readInt64();
+            afSampleRate = parcel->readInt64();
+            afLatencyMs = parcel->readInt32();
+            (void)parcel->read(&outputId, sizeof(audio_io_handle_t));
+            return NO_ERROR;
+        }
+
+        status_t writeToParcel(Parcel *parcel) const override {
+            /* input/output arguments*/
+            (void)parcel->write(&flags, sizeof(audio_output_flags_t));
+            (void)parcel->writeInt64(frameCount);
+            (void)parcel->writeInt64(notificationFrameCount);
+            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->write(&sessionId, sizeof(audio_session_t));
+
+            /* output arguments*/
+            (void)parcel->writeUint32(sampleRate);
+            (void)parcel->writeInt64(afFrameCount);
+            (void)parcel->writeInt64(afSampleRate);
+            (void)parcel->writeInt32(afLatencyMs);
+            (void)parcel->write(&outputId, sizeof(audio_io_handle_t));
+            return NO_ERROR;
+        }
+
+        /* input/output */
+        audio_output_flags_t flags;
+        size_t frameCount;
+        size_t notificationFrameCount;
+        audio_port_handle_t selectedDeviceId;
+        audio_session_t sessionId;
+
+        /* output */
+        uint32_t sampleRate;
+        size_t   afFrameCount;
+        uint32_t afSampleRate;
+        uint32_t afLatencyMs;
+        audio_io_handle_t outputId;
+    };
+
+    /* CreateRecordInput contains all input arguments sent by AudioRecord to AudioFlinger
+     * when calling createRecord() including arguments that will be updated by AudioFlinger
+     * and returned in CreateRecordOutput object
+     */
+    class CreateRecordInput : public Parcelable {
+    public:
+        status_t readFromParcel(const Parcel *parcel) override {
+            /* input arguments*/
+            memset(&attr, 0, sizeof(audio_attributes_t));
+            if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';
+            memset(&config, 0, sizeof(audio_config_base_t));
+            if (parcel->read(&config, sizeof(audio_config_base_t)) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
+                return DEAD_OBJECT;
+            }
+            opPackageName = parcel->readString16();
+
+            /* input/output arguments*/
+            (void)parcel->read(&flags, sizeof(audio_input_flags_t));
+            frameCount = parcel->readInt64();
+            notificationFrameCount = parcel->readInt64();
+            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->read(&sessionId, sizeof(audio_session_t));
+            return NO_ERROR;
+        }
+
+        status_t writeToParcel(Parcel *parcel) const override {
+            /* input arguments*/
+            (void)parcel->write(&attr, sizeof(audio_attributes_t));
+            (void)parcel->write(&config, sizeof(audio_config_base_t));
+            (void)clientInfo.writeToParcel(parcel);
+            (void)parcel->writeString16(opPackageName);
+
+            /* input/output arguments*/
+            (void)parcel->write(&flags, sizeof(audio_input_flags_t));
+            (void)parcel->writeInt64(frameCount);
+            (void)parcel->writeInt64(notificationFrameCount);
+            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->write(&sessionId, sizeof(audio_session_t));
+            return NO_ERROR;
+        }
+
+        /* input */
+        audio_attributes_t attr;
+        audio_config_base_t config;
+        AudioClient clientInfo;
+        String16 opPackageName;
+
+        /* input/output */
+        audio_input_flags_t flags;
+        size_t frameCount;
+        size_t notificationFrameCount;
+        audio_port_handle_t selectedDeviceId;
+        audio_session_t sessionId;
+    };
+
+    /* CreateRecordOutput contains all output arguments returned by AudioFlinger to AudioRecord
+     * when calling createRecord() including arguments that were passed as I/O for update by
+     * CreateRecordInput.
+     */
+    class CreateRecordOutput : public Parcelable {
+    public:
+        status_t readFromParcel(const Parcel *parcel) override {
+            /* input/output arguments*/
+            (void)parcel->read(&flags, sizeof(audio_input_flags_t));
+            frameCount = parcel->readInt64();
+            notificationFrameCount = parcel->readInt64();
+            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->read(&sessionId, sizeof(audio_session_t));
+
+            /* output arguments*/
+            sampleRate = parcel->readUint32();
+            (void)parcel->read(&inputId, sizeof(audio_io_handle_t));
+            if (parcel->readInt32() != 0) {
+                cblk = interface_cast<IMemory>(parcel->readStrongBinder());
+                if (cblk == 0 || cblk->pointer() == NULL) {
+                    return BAD_VALUE;
+                }
+            }
+            if (parcel->readInt32() != 0) {
+                buffers = interface_cast<IMemory>(parcel->readStrongBinder());
+                if (buffers == 0 || buffers->pointer() == NULL) {
+                    return BAD_VALUE;
+                }
+            }
+            return NO_ERROR;
+        }
+
+        status_t writeToParcel(Parcel *parcel) const override {
+            /* input/output arguments*/
+            (void)parcel->write(&flags, sizeof(audio_input_flags_t));
+            (void)parcel->writeInt64(frameCount);
+            (void)parcel->writeInt64(notificationFrameCount);
+            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
+            (void)parcel->write(&sessionId, sizeof(audio_session_t));
+
+            /* output arguments*/
+            (void)parcel->writeUint32(sampleRate);
+            (void)parcel->write(&inputId, sizeof(audio_io_handle_t));
+            if (cblk != 0) {
+                (void)parcel->writeInt32(1);
+                (void)parcel->writeStrongBinder(IInterface::asBinder(cblk));
+            } else {
+                (void)parcel->writeInt32(0);
+            }
+            if (buffers != 0) {
+                (void)parcel->writeInt32(1);
+                (void)parcel->writeStrongBinder(IInterface::asBinder(buffers));
+            } else {
+                (void)parcel->writeInt32(0);
+            }
+
+            return NO_ERROR;
+        }
+
+        /* input/output */
+        audio_input_flags_t flags;
+        size_t frameCount;
+        size_t notificationFrameCount;
+        audio_port_handle_t selectedDeviceId;
+        audio_session_t sessionId;
+
+        /* output */
+        uint32_t sampleRate;
+        audio_io_handle_t inputId;
+        sp<IMemory> cblk;
+        sp<IMemory> buffers;
+    };
 
     // invariant on exit for all APIs that return an sp<>:
     //   (return value != 0) == (*status == NO_ERROR)
@@ -51,45 +319,13 @@
     /* create an audio track and registers it with AudioFlinger.
      * return null if the track cannot be created.
      */
-    virtual sp<IAudioTrack> createTrack(
-                                audio_stream_type_t streamType,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                size_t *pFrameCount,
-                                audio_output_flags_t *flags,
-                                const sp<IMemory>& sharedBuffer,
-                                // On successful return, AudioFlinger takes over the handle
-                                // reference and will release it when the track is destroyed.
-                                // However on failure, the client is responsible for release.
-                                audio_io_handle_t output,
-                                pid_t pid,
-                                pid_t tid,  // -1 means unused, otherwise must be valid non-0
-                                audio_session_t *sessionId,
-                                int clientUid,
-                                status_t *status,
-                                audio_port_handle_t portId) = 0;
+    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
+                                        CreateTrackOutput& output,
+                                        status_t *status) = 0;
 
-    virtual sp<media::IAudioRecord> openRecord(
-                                // On successful return, AudioFlinger takes over the handle
-                                // reference and will release it when the track is destroyed.
-                                // However on failure, the client is responsible for release.
-                                audio_io_handle_t input,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                const String16& callingPackage,
-                                size_t *pFrameCount,
-                                audio_input_flags_t *flags,
-                                pid_t pid,
-                                pid_t tid,  // -1 means unused, otherwise must be valid non-0
-                                int clientUid,
-                                audio_session_t *sessionId,
-                                size_t *notificationFrames,
-                                sp<IMemory>& cblk,
-                                sp<IMemory>& buffers,   // return value 0 means it follows cblk
-                                status_t *status,
-                                audio_port_handle_t portId) = 0;
+    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
+                                        CreateRecordOutput& output,
+                                        status_t *status) = 0;
 
     // FIXME Surprisingly, format/latency don't work for input handles
 
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index 7c88e57..5558b77 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -178,6 +178,8 @@
                                     const Parcel& data,
                                     Parcel* reply,
                                     uint32_t flags = 0);
+private:
+    void sanetizeAudioAttributes(audio_attributes_t* attr);
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
new file mode 100644
index 0000000..e9c1606
--- /dev/null
+++ b/media/libaudioclient/tests/Android.bp
@@ -0,0 +1,20 @@
+cc_defaults {
+    name: "libaudioclient_tests_defaults",
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
+
+cc_test {
+    name: "test_create_audiotrack",
+    defaults: ["libaudioclient_tests_defaults"],
+    srcs: ["test_create_audiotrack.cpp"],
+    shared_libs: [
+        "libaudioclient",
+        "libcutils",
+        "libutils",
+        "libbinder",
+    ],
+    data: ["track_test_input_*.txt"],
+}
diff --git a/media/libaudioclient/tests/test_create_audiotrack.cpp b/media/libaudioclient/tests/test_create_audiotrack.cpp
new file mode 100644
index 0000000..b0351b2
--- /dev/null
+++ b/media/libaudioclient/tests/test_create_audiotrack.cpp
@@ -0,0 +1,260 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Test tool for AudioTrack creation: builds AudioTrack instances from parameter
+ * lines read from an input file (or with default parameters when no input file
+ * is given) and dumps the state of each resulting track to an output file or to
+ * stdout.
+ */
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryDealer.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/AudioTrack.h>
+
+#define MAX_INPUT_FILE_LINE_LENGTH 512
+#define MAX_OUTPUT_FILE_LINE_LENGTH 512
+
+#define NUM_ARGUMENTS 10
+#define VERSION_KEY "version"
+#define VERSION_VALUE "1.0"
+
+namespace android {
+
+int readLine(FILE *inputFile, char *line, int size) {
+    int ret = 0;
+    while (true) {
+        char *str = fgets(line, size, inputFile);
+        if (str == nullptr) {
+            ret = -1;
+            break;
+        }
+        if (feof(inputFile) != 0 || ferror(inputFile) != 0) {
+            ret = -1;
+            break;
+        }
+        if (strlen(str) != 0 && str[0] != '#') {
+            break;
+        }
+    }
+    return ret;
+}
+
+bool checkVersion(FILE *inputFile)
+{
+    char line[MAX_INPUT_FILE_LINE_LENGTH];
+    char versionKey[MAX_INPUT_FILE_LINE_LENGTH];
+    char versionValue[MAX_INPUT_FILE_LINE_LENGTH];
+
+    if (readLine(inputFile, line, MAX_INPUT_FILE_LINE_LENGTH) != 0) {
+        fprintf(stderr, "Missing version in input file\n");
+        return false;
+    }
+
+    if (sscanf(line, " %s %s", versionKey, versionValue) != 2) {
+        fprintf(stderr, "Malformed version in input file\n");
+        return false;
+    }
+    if (strcmp(versionKey, VERSION_KEY) != 0) {
+        fprintf(stderr, "Malformed version in input file\n");
+        return false;
+    }
+    if (strcmp(versionValue, VERSION_VALUE) != 0) {
+        fprintf(stderr, "Wrong input file version %s expecting %s\n", versionValue, VERSION_VALUE);
+        return false;
+    }
+    return true;
+}
+
+void callback(int event __unused, void* user __unused, void *info __unused)
+{
+}
+
+void testTrack(FILE *inputFile, int outputFileFd)
+{
+    char line[MAX_INPUT_FILE_LINE_LENGTH];
+    uint32_t testCount = 0;
+    Vector<String16> args;
+
+    if (inputFile == nullptr) {
+        sp<AudioTrack> track = new AudioTrack(AUDIO_STREAM_DEFAULT,
+                                              0 /* sampleRate */,
+                                              AUDIO_FORMAT_DEFAULT,
+                                              AUDIO_CHANNEL_OUT_STEREO);
+        if (track == 0 || track->initCheck() != NO_ERROR) {
+            write(outputFileFd, "Error creating AudioTrack\n",
+                  sizeof("Error creating AudioTrack\n") - 1);
+        } else {
+            track->dump(outputFileFd, args);
+        }
+        return;
+    }
+
+    // check version
+    if (!checkVersion(inputFile)) {
+        return;
+    }
+
+    while (readLine(inputFile, line, MAX_INPUT_FILE_LINE_LENGTH) == 0) {
+        uint32_t sampleRate;
+        audio_format_t format;
+        audio_channel_mask_t channelMask;
+        size_t frameCount;
+        int32_t notificationFrames;
+        uint32_t useSharedBuffer;
+        audio_output_flags_t flags;
+        audio_session_t sessionId;
+        audio_usage_t usage;
+        audio_content_type_t contentType;
+        audio_attributes_t attributes;
+        sp<IMemory> sharedBuffer;
+        sp<MemoryDealer> heap;
+        audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
+        status_t status;
+        char statusStr[MAX_OUTPUT_FILE_LINE_LENGTH];
+        bool offload = false;
+        bool fast = false;
+
+        if (sscanf(line, " %u %x %x %zu %d %u %x %u %u %u",
+                   &sampleRate, &format, &channelMask,
+                   &frameCount, &notificationFrames, &useSharedBuffer,
+                   &flags, &sessionId, &usage, &contentType) != NUM_ARGUMENTS) {
+            fprintf(stderr, "Malformed line for test #%u in input file\n", testCount+1);
+            continue;
+        }
+        testCount++;
+
+        if (useSharedBuffer != 0) {
+            size_t heapSize = audio_channel_count_from_out_mask(channelMask) *
+                    audio_bytes_per_sample(format) * frameCount;
+            heap = new MemoryDealer(heapSize, "AudioTrack Heap Base");
+            sharedBuffer = heap->allocate(heapSize);
+            frameCount = 0;
+            notificationFrames = 0;
+        }
+        if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+            offloadInfo.sample_rate = sampleRate;
+            offloadInfo.channel_mask = channelMask;
+            offloadInfo.format = format;
+            offload = true;
+        }
+        if ((flags & AUDIO_OUTPUT_FLAG_FAST) != 0) {
+            fast = true;
+        }
+
+        memset(&attributes, 0, sizeof(attributes));
+        attributes.content_type = contentType;
+        attributes.usage = usage;
+
+        sp<AudioTrack> track = new AudioTrack();
+
+        track->set(AUDIO_STREAM_DEFAULT,
+                   sampleRate,
+                   format,
+                   channelMask,
+                   frameCount,
+                   flags,
+                   (fast || offload) ? callback : nullptr,
+                   nullptr,
+                   notificationFrames,
+                   sharedBuffer,
+                   false,
+                   sessionId,
+                   ((fast && sharedBuffer == 0) || offload) ?
+                           AudioTrack::TRANSFER_CALLBACK : AudioTrack::TRANSFER_DEFAULT,
+                   offload ? &offloadInfo : nullptr,
+                   getuid(),
+                   getpid(),
+                   &attributes,
+                   false,
+                   1.0f,
+                   AUDIO_PORT_HANDLE_NONE);
+        status = track->initCheck();
+        snprintf(statusStr, sizeof(statusStr),
+                 "\n#### Test %u status %d\n", testCount, status);
+        write(outputFileFd, statusStr, strlen(statusStr));
+        if (status != NO_ERROR) {
+            continue;
+        }
+        track->dump(outputFileFd, args);
+    }
+}
+
+}; // namespace android
+
+
+int main(int argc, char **argv)
+{
+    FILE *inputFile = nullptr;
+    int outputFileFd = STDOUT_FILENO;
+    mode_t mode = S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH;
+    int ret = 0;
+
+    if (argc > 5) {
+        fprintf(stderr, "Usage: %s [-i input_params.txt] [-o output_params.txt]\n", argv[0]);
+        return 1;
+    }
+
+    argv++;
+    while (*argv) {
+        if (strcmp(*argv, "-i") == 0) {
+            argv++;
+            if (*argv) {
+                inputFile = fopen(*argv, "r");
+                if (inputFile == nullptr) {
+                    ret = 1;
+                }
+            } else {
+                ret = 1;
+            }
+        }
+        if (strcmp(*argv, "-o") == 0) {
+            argv++;
+            if (*argv) {
+                outputFileFd = open(*argv, O_WRONLY|O_CREAT, mode);
+                if (outputFileFd < 0) {
+                    ret = 1;
+                }
+            } else {
+                ret = 1;
+            }
+            argv++;
+        }
+        if (*argv) {
+            argv++;
+        }
+    }
+
+    if (ret != 0) {
+        return ret;
+    }
+
+    android::testTrack(inputFile, outputFileFd);
+
+    if (inputFile) {
+        fclose(inputFile);
+    }
+    if (outputFileFd >= 0 && outputFileFd != STDOUT_FILENO) {
+        close(outputFileFd);
+    }
+
+    return ret;
+}
+
diff --git a/media/libaudioclient/tests/track_test_input_v1.0_ref.txt b/media/libaudioclient/tests/track_test_input_v1.0_ref.txt
new file mode 100644
index 0000000..b923ff3
--- /dev/null
+++ b/media/libaudioclient/tests/track_test_input_v1.0_ref.txt
@@ -0,0 +1,40 @@
+version 1.0
+# Input file for test_create_audiotrack
+# Add one line for each tested AudioTrack constructor with the following arguments:
+# sampleRate format 	channelMask frameCount notificationFrames sharedBuffer flags sessionId usage contentType
+# sample rate tests
+  48000      0x1    	0x3         4800       2400               0            0x0   0         1     2
+  24000      0x1    	0x3         4800       2400               0            0x0   0         1     2
+  16000      0x1    	0x3         4800       2400               0            0x0   0         1     2
+   8000      0x1    	0x3         4800       2400               0            0x0   0         1     2
+  44100      0x1    	0x3         4410       2205               0            0x0   0         1     2
+  22050      0x1    	0x3         4410       2205               0            0x0   0         1     2
+  11025      0x1    	0x3         4410       2205               0            0x0   0         1     2
+# format tests
+  48000      0x2    	0x3         4800       2400               0            0x0   0         1     2
+  48000      0x3    	0x3         4800       2400               0            0x0   0         1     2
+  48000      0x5    	0x3         4800       2400               0            0x0   0         1     2
+# channel mask tests
+  48000      0x1    	0x1         4800       2400               0            0x0   0         1     2
+  48000      0x1    	0x3F        4800       2400               0            0x0   0         1     2
+  48000      0x1    	0x63F       4800       2400               0            0x0   0         1     2
+# framecount tests
+  48000      0x1    	0x3         0          0                  0            0x0   0         1     2
+  48000      0x1    	0x3         48000      0                  0            0x0   0         1     2
+  48000      0x1    	0x3         0          -2                 0            0x4   0         1     2
+# shared memory tests
+  48000      0x1    	0x3         4800       2400               1            0x0   0         1     2
+  48000      0x1    	0x3         4800       2400               1            0x4   0         1     2
+# flags test
+  48000      0x1    	0x3         4800       2400               0            0x4   0         1     2
+  48000      0x1    	0x3         4800       2400               0            0x8   0         1     2
+  44100      0x1000000  0x3         4800       2400               0            0x11  0         1     2
+# session tests
+  48000      0x1    	0x3         4800       2400               0            0x0   1001      1     2
+# attributes tests
+  48000      0x1    	0x3         4800       2400               0            0x0   0         0     0
+  48000      0x1    	0x3         4800       2400               0            0x0   0         2     1
+  48000      0x1    	0x3         4800       2400               0            0x0   0         4     2
+  48000      0x1    	0x3         4800       2400               0            0x0   0         5     2
+  48000      0x1    	0x3         4800       2400               0            0x0   0         11    1
+  48000      0x1    	0x3         4800       2400               0            0x0   0         12    1
diff --git a/media/libaudioclient/tests/track_test_output_v1.0_ref_walleye.txt b/media/libaudioclient/tests/track_test_output_v1.0_ref_walleye.txt
new file mode 100644
index 0000000..5fe433c
--- /dev/null
+++ b/media/libaudioclient/tests/track_test_output_v1.0_ref_walleye.txt
@@ -0,0 +1,308 @@
+
+#### Test 1 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(49), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 2 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(57), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(24000), original sample rate(24000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(1600), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (250), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 3 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(65), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(16000), original sample rate(16000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(1600), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (350), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 4 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(73), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(8000), original sample rate(8000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(1600), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (650), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 5 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(81), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(44100), original sample rate(44100), speed(1.000000)
+  frame count(4410), req. frame count(4410)
+  notif. frame count(1470), req. notif. frame count(2205), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 6 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(89), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(22050), original sample rate(22050), speed(1.000000)
+  frame count(4410), req. frame count(4410)
+  notif. frame count(1470), req. notif. frame count(2205), req. notif. per buff(0)
+  latency (250), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 7 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(97), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(11025), original sample rate(11025), speed(1.000000)
+  frame count(4410), req. frame count(4410)
+  notif. frame count(1470), req. notif. frame count(2205), req. notif. per buff(0)
+  latency (450), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 8 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(105), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(2), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 9 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(113), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(3), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (180), selected device Id(0), routed device Id(2)
+  output(29) AF latency (80) AF frame count(1920) AF SampleRate(48000)
+
+#### Test 10 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(121), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(5), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (180), selected device Id(0), routed device Id(2)
+  output(29) AF latency (80) AF frame count(1920) AF SampleRate(48000)
+
+#### Test 11 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(129), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(1), channel count(1)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 12 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(137), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3f), channel count(6)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 13 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(145), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(63f), channel count(8)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 14 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(153), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(1924), req. frame count(1924)
+  notif. frame count(962), req. notif. frame count(0), req. notif. per buff(0)
+  latency (90), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 15 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(161), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(48000), req. frame count(48000)
+  notif. frame count(24000), req. notif. frame count(0), req. notif. per buff(0)
+  latency (1050), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 16 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(169), flags(4)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(480), req. frame count(480)
+  notif. frame count(240), req. notif. frame count(0), req. notif. per buff(2)
+  latency (60), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 17 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(177), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(0), req. notif. frame count(0), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 18 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(185), flags(4)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(0), req. notif. frame count(0), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 19 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(193), flags(4)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(240), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 20 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(201), flags(8)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (180), selected device Id(0), routed device Id(2)
+  output(29) AF latency (80) AF frame count(1920) AF SampleRate(48000)
+
+#### Test 21 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(209), flags(11)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1000000), channel mask(3), channel count(2)
+  sample rate(44100), original sample rate(44100), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(4800), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (204), selected device Id(0), routed device Id(2)
+  output(53) AF latency (96) AF frame count(262144) AF SampleRate(44100)
+
+#### Test 22 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(1001), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 23 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(217), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 24 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(225), flags(0)
+  stream type(0), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (140), selected device Id(0), routed device Id(1)
+  output(45) AF latency (40) AF frame count(960) AF SampleRate(48000)
+
+#### Test 25 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(233), flags(0)
+  stream type(4), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(3)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 26 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(241), flags(0)
+  stream type(5), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(3)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 27 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(249), flags(0)
+  stream type(10), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
+
+#### Test 28 status 0
+ AudioTrack::dump
+  status(0), state(1), session Id(257), flags(0)
+  stream type(3), left - right volume(1.000000, 1.000000)
+  format(1), channel mask(3), channel count(2)
+  sample rate(48000), original sample rate(48000), speed(1.000000)
+  frame count(4800), req. frame count(4800)
+  notif. frame count(2400), req. notif. frame count(2400), req. notif. per buff(0)
+  latency (150), selected device Id(0), routed device Id(2)
+  output(13) AF latency (50) AF frame count(960) AF SampleRate(48000)
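
Note on the dump values above: the per-track latency reported in each test is consistent with the client buffer depth added on top of the AudioFlinger (AF) output latency. This is an observation about the numbers in these dumps, not a statement of the exact AudioTrack::latency() formula:

    // Approximate relationship seen in the dumps above (illustrative only):
    //   latency_ms ~= AF_latency_ms + 1000 * frameCount / sampleRate
    // e.g. Test 4:  50 + 1000 * 4800  / 8000  = 650 ms
    //      Test 15: 50 + 1000 * 48000 / 48000 = 1050 ms
    static uint32_t approxLatencyMs(uint32_t afLatencyMs, size_t frameCount, uint32_t sampleRate) {
        return afLatencyMs + (uint32_t)((frameCount * 1000ULL) / sampleRate);
    }
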
diff --git a/media/libaudiohal/EffectBufferHalHidl.h b/media/libaudiohal/EffectBufferHalHidl.h
index 66a81c2..d7a43ae 100644
--- a/media/libaudiohal/EffectBufferHalHidl.h
+++ b/media/libaudiohal/EffectBufferHalHidl.h
@@ -35,6 +35,8 @@
     virtual audio_buffer_t* audioBuffer();
     virtual void* externalData() const;
 
+    virtual size_t getSize() const override { return mBufferSize; }
+
     virtual void setExternalData(void* external);
     virtual void setFrameCount(size_t frameCount);
     virtual bool checkFrameCountChange();
diff --git a/media/libaudiohal/EffectHalHidl.cpp b/media/libaudiohal/EffectHalHidl.cpp
index 61fb6bab..f4d1958 100644
--- a/media/libaudiohal/EffectHalHidl.cpp
+++ b/media/libaudiohal/EffectHalHidl.cpp
@@ -121,16 +121,24 @@
 }
 
 status_t EffectHalHidl::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
-    if (mInBuffer == 0 || buffer->audioBuffer() != mInBuffer->audioBuffer()) {
-        mBuffersChanged = true;
+    if (!mBuffersChanged) {
+        if (buffer.get() == nullptr || mInBuffer.get() == nullptr) {
+            mBuffersChanged = buffer.get() != mInBuffer.get();
+        } else {
+            mBuffersChanged = buffer->audioBuffer() != mInBuffer->audioBuffer();
+        }
     }
     mInBuffer = buffer;
     return OK;
 }
 
 status_t EffectHalHidl::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
-    if (mOutBuffer == 0 || buffer->audioBuffer() != mOutBuffer->audioBuffer()) {
-        mBuffersChanged = true;
+    if (!mBuffersChanged) {
+        if (buffer.get() == nullptr || mOutBuffer.get() == nullptr) {
+            mBuffersChanged = buffer.get() != mOutBuffer.get();
+        } else {
+            mBuffersChanged = buffer->audioBuffer() != mOutBuffer->audioBuffer();
+        }
     }
     mOutBuffer = buffer;
     return OK;
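
The reworked checks in setInBuffer()/setOutBuffer() above are easier to read as a single predicate; the sketch below is an illustrative restatement of the same logic (the helper name is hypothetical, not part of this change):

    // True when the two handles refer to different underlying audio buffers,
    // treating a null handle as different from any non-null handle.
    static bool audioBufferDiffers(const sp<EffectBufferHalInterface>& a,
                                   const sp<EffectBufferHalInterface>& b) {
        if (a.get() == nullptr || b.get() == nullptr) {
            return a.get() != b.get();  // only "both null" counts as unchanged
        }
        return a->audioBuffer() != b->audioBuffer();
    }
    // setInBuffer() then reduces to:
    //   if (!mBuffersChanged) mBuffersChanged = audioBufferDiffers(buffer, mInBuffer);
    //   mInBuffer = buffer;
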
diff --git a/media/libaudiohal/include/media/audiohal/EffectBufferHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectBufferHalInterface.h
index e862f6e..1cae662 100644
--- a/media/libaudiohal/include/media/audiohal/EffectBufferHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectBufferHalInterface.h
@@ -37,6 +37,8 @@
         return externalData() != nullptr ? externalData() : audioBuffer()->raw;
     }
 
+    virtual size_t getSize() const = 0;
+
     virtual void setExternalData(void* external) = 0;
     virtual void setFrameCount(size_t frameCount) = 0;
     virtual bool checkFrameCountChange() = 0;  // returns whether frame count has been updated
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 3e72c89..43b97a5 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -1947,11 +1947,10 @@
     case AUDIO_FORMAT_PCM_16_BIT:
         switch (mixerOutFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
-            memcpy_to_float_from_q4_27((float*)out, (int32_t*)in, sampleCount);
+            memcpy_to_float_from_q4_27((float*)out, (const int32_t*)in, sampleCount);
             break;
         case AUDIO_FORMAT_PCM_16_BIT:
-            // two int16_t are produced per iteration
-            ditherAndClamp((int32_t*)out, (int32_t*)in, sampleCount >> 1);
+            memcpy_to_i16_from_q4_27((int16_t*)out, (const int32_t*)in, sampleCount);
             break;
         default:
             LOG_ALWAYS_FATAL("bad mixerOutFormat: %#x", mixerOutFormat);
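
For context, the mixer's intermediate Q4.27 samples carry 4 integer and 27 fractional bits, so reducing them to 16-bit PCM drops 12 fractional bits and saturates. A minimal sketch of that math (illustrative only; the real audio_utils memcpy_to_i16_from_q4_27() helper may differ in detail):

    // Q4.27 -> int16: shift out 12 fractional bits (27 - 15) and clamp to the int16 range.
    static inline int16_t q4_27_to_i16(int32_t q) {
        int32_t s = q >> 12;         // Q4.27 -> Q4.15
        if (s > 32767) s = 32767;    // saturate positive overflow
        else if (s < -32768) s = -32768;
        return (int16_t)s;
    }
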
diff --git a/media/libaudioprocessing/tests/test-mixer.cpp b/media/libaudioprocessing/tests/test-mixer.cpp
index 75dbf91..b67810d 100644
--- a/media/libaudioprocessing/tests/test-mixer.cpp
+++ b/media/libaudioprocessing/tests/test-mixer.cpp
@@ -316,8 +316,7 @@
             outputSampleRate, outputChannels, outputFrames, useMixerFloat);
     if (auxFilename) {
         // Aux buffer is always in q4_27 format for now.
-        // memcpy_to_i16_from_q4_27(), but with stereo frame count (not sample count)
-        ditherAndClamp((int32_t*)auxAddr, (int32_t*)auxAddr, outputFrames >> 1);
+        memcpy_to_i16_from_q4_27((int16_t*)auxAddr, (const int32_t*)auxAddr, outputFrames);
         writeFile(auxFilename, auxAddr, outputSampleRate, 1, outputFrames, false);
     }
 
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index 4398a91..3e88c7c 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -5,6 +5,11 @@
 
     srcs: ["src/EffectsConfig.cpp"],
 
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
     shared_libs: [
         "liblog",
         "libtinyxml2",
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
index cb15b60..ea16072 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
@@ -44,9 +44,6 @@
 
 #define LVM_MAXINT_8            127                 /* Maximum positive integer size */
 #define LVM_MAXINT_16           32767
-#ifdef BUILD_FLOAT
-#define LVM_MAXFLOAT            1.0f
-#endif
 #define LVM_MAXINT_32           2147483647
 #define LVM_MAXENUM             2147483647
 
@@ -99,8 +96,32 @@
 typedef     uint32_t            LVM_UINT32;         /* Unsigned 32-bit word */
 
 #ifdef BUILD_FLOAT
-typedef     float               LVM_FLOAT;          /* single precission floating point*/
-#endif
+
+#define LVM_MAXFLOAT            1.f
+
+typedef     float               LVM_FLOAT;          /* single precision floating point */
+
+// If NATIVE_FLOAT_BUFFER is defined, we expose effects as floating point format;
+// otherwise we expose as integer 16 bit and translate to float for the effect libraries.
+// Hence, NATIVE_FLOAT_BUFFER should only be enabled under BUILD_FLOAT compilation.
+
+#define NATIVE_FLOAT_BUFFER
+
+#endif // BUILD_FLOAT
+
+// Select whether we expose int16_t or float buffers.
+#ifdef NATIVE_FLOAT_BUFFER
+
+#define    EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
+typedef     float               effect_buffer_t;
+
+#else // NATIVE_FLOAT_BUFFER
+
+#define    EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_16_BIT
+typedef     int16_t             effect_buffer_t;
+
+#endif // NATIVE_FLOAT_BUFFER
+
 /****************************************************************************************/
 /*                                                                                      */
 /*  Standard Enumerated types                                                           */
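
With effect_buffer_t and EFFECT_BUFFER_FORMAT selected in one place, the wrappers can be written once against the neutral type and only branch where the arithmetic genuinely differs, e.g. saturating 16-bit accumulation. A hedged sketch of that pattern, mirroring the accumulate step EffectBundle.cpp uses further down (the function name is hypothetical; clamp16() comes from audio_utils/primitives.h):

    static void accumulateOutput(effect_buffer_t *out, const effect_buffer_t *add, size_t samples) {
        for (size_t i = 0; i < samples; ++i) {
    #ifdef NATIVE_FLOAT_BUFFER
            out[i] += add[i];                                     // float path: plain addition
    #else
            out[i] = clamp16((int32_t)out[i] + (int32_t)add[i]);  // 16-bit path: saturating addition
    #endif
        }
    }
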
diff --git a/media/libeffects/lvm/wrapper/Android.mk b/media/libeffects/lvm/wrapper/Android.mk
index 91e2246..341dbc2 100644
--- a/media/libeffects/lvm/wrapper/Android.mk
+++ b/media/libeffects/lvm/wrapper/Android.mk
@@ -1,5 +1,8 @@
 LOCAL_PATH:= $(call my-dir)
 
+# The wrapper -DBUILD_FLOAT needs to match
+# the lvm library -DBUILD_FLOAT.
+
 # music bundle wrapper
 LOCAL_PATH:= $(call my-dir)
 include $(CLEAR_VARS)
@@ -20,15 +23,17 @@
 LOCAL_STATIC_LIBRARIES += libmusicbundle
 
 LOCAL_SHARED_LIBRARIES := \
-     liblog \
+     libaudioutils \
      libcutils \
-     libdl
+     libdl \
+     liblog \
 
 LOCAL_C_INCLUDES += \
 	$(LOCAL_PATH)/Bundle \
 	$(LOCAL_PATH)/../lib/Common/lib/ \
 	$(LOCAL_PATH)/../lib/Bundle/lib/ \
-	$(call include-path-for, audio-effects)
+	$(call include-path-for, audio-effects) \
+	$(call include-path-for, audio-utils) \
 
 LOCAL_HEADER_LIBRARIES += libhardware_headers
 include $(BUILD_SHARED_LIBRARY)
@@ -53,15 +58,17 @@
 LOCAL_STATIC_LIBRARIES += libreverb
 
 LOCAL_SHARED_LIBRARIES := \
-     liblog \
+     libaudioutils \
      libcutils \
-     libdl
+     libdl \
+     liblog \
 
 LOCAL_C_INCLUDES += \
     $(LOCAL_PATH)/Reverb \
     $(LOCAL_PATH)/../lib/Common/lib/ \
     $(LOCAL_PATH)/../lib/Reverb/lib/ \
-    $(call include-path-for, audio-effects)
+    $(call include-path-for, audio-effects) \
+    $(call include-path-for, audio-utils) \
 
 LOCAL_HEADER_LIBRARIES += libhardware_headers
 
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index aae80b6..146e9e8 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -27,6 +27,7 @@
 #include <stdlib.h>
 #include <string.h>
 
+#include <audio_utils/primitives.h>
 #include <log/log.h>
 
 #include "EffectBundle.h"
@@ -63,16 +64,6 @@
         }\
     }
 
-
-static inline int16_t clamp16(int32_t sample)
-{
-    // check overflow for both positive and negative values:
-    // all bits above short range must me equal to sign bit
-    if ((sample>>15) ^ (sample>>31))
-        sample = 0x7FFF ^ (sample>>31);
-    return sample;
-}
-
 // Namespaces
 namespace android {
 namespace {
@@ -299,7 +290,7 @@
         pContext->pBundledContext->SamplesToExitCountVirt   = 0;
         pContext->pBundledContext->SamplesToExitCountBb     = 0;
         pContext->pBundledContext->SamplesToExitCountEq     = 0;
-#ifdef BUILD_FLOAT
+#if defined(BUILD_FLOAT) && !defined(NATIVE_FLOAT_BUFFER)
         pContext->pBundledContext->pInputBuffer             = NULL;
         pContext->pBundledContext->pOutputBuffer            = NULL;
 #endif
@@ -470,13 +461,9 @@
         if (pContext->pBundledContext->workBuffer != NULL) {
             free(pContext->pBundledContext->workBuffer);
         }
-#ifdef BUILD_FLOAT
-        if (pContext->pBundledContext->pInputBuffer != NULL) {
-            free(pContext->pBundledContext->pInputBuffer);
-        }
-        if (pContext->pBundledContext->pOutputBuffer != NULL) {
-            free(pContext->pBundledContext->pOutputBuffer);
-        }
+#if defined(BUILD_FLOAT) && !defined(NATIVE_FLOAT_BUFFER)
+        free(pContext->pBundledContext->pInputBuffer);
+        free(pContext->pBundledContext->pOutputBuffer);
 #endif
         delete pContext->pBundledContext;
         pContext->pBundledContext = LVM_NULL;
@@ -549,7 +536,7 @@
 
     pContext->config.inputCfg.accessMode                    = EFFECT_BUFFER_ACCESS_READ;
     pContext->config.inputCfg.channels                      = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.inputCfg.format                        = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->config.inputCfg.format                        = EFFECT_BUFFER_FORMAT;
     pContext->config.inputCfg.samplingRate                  = 44100;
     pContext->config.inputCfg.bufferProvider.getBuffer      = NULL;
     pContext->config.inputCfg.bufferProvider.releaseBuffer  = NULL;
@@ -557,7 +544,7 @@
     pContext->config.inputCfg.mask                          = EFFECT_CONFIG_ALL;
     pContext->config.outputCfg.accessMode                   = EFFECT_BUFFER_ACCESS_ACCUMULATE;
     pContext->config.outputCfg.channels                     = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.outputCfg.format                       = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->config.outputCfg.format                       = EFFECT_BUFFER_FORMAT;
     pContext->config.outputCfg.samplingRate                 = 44100;
     pContext->config.outputCfg.bufferProvider.getBuffer     = NULL;
     pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
@@ -734,47 +721,6 @@
     return 0;
 }   /* end LvmBundle_init */
 
-#ifdef BUILD_FLOAT
-/**********************************************************************************
-   FUNCTION INT16LTOFLOAT
-***********************************************************************************/
-// Todo: need to write function descriptor
-static void Int16ToFloat(const LVM_INT16 *src, LVM_FLOAT *dst, size_t n) {
-    size_t ii;
-    src += n-1;
-    dst += n-1;
-    for (ii = n; ii != 0; ii--) {
-        *dst = ((LVM_FLOAT)((LVM_INT16)*src)) / 32768.0f;
-        src--;
-        dst--;
-    }
-    return;
-}
-/**********************************************************************************
-   FUNCTION FLOATTOINT16_SAT
-***********************************************************************************/
-// Todo : Need to write function descriptor
-static void FloatToInt16_SAT(const LVM_FLOAT *src, LVM_INT16 *dst, size_t n) {
-    size_t ii;
-    LVM_INT32 temp;
-
-    src += n-1;
-    dst += n-1;
-    for (ii = n; ii != 0; ii--) {
-        temp = (LVM_INT32)((*src) * 32768.0f);
-        if (temp >= 32767) {
-            *dst = 32767;
-        } else if (temp <= -32768) {
-            *dst = -32768;
-        } else {
-            *dst = (LVM_INT16)temp;
-        }
-        src--;
-        dst--;
-    }
-    return;
-}
-#endif
 //----------------------------------------------------------------------------
 // LvmBundle_process()
 //----------------------------------------------------------------------------
@@ -782,8 +728,8 @@
 // Apply LVM Bundle effects
 //
 // Inputs:
-//  pIn:        pointer to stereo 16 bit input data
-//  pOut:       pointer to stereo 16 bit output data
+//  pIn:        pointer to stereo float or 16 bit input data
+//  pOut:       pointer to stereo float or 16 bit output data
 //  frameCount: Frames to process
 //  pContext:   effect engine context
 //  strength    strength to be applied
@@ -793,44 +739,37 @@
 //
 //----------------------------------------------------------------------------
 #ifdef BUILD_FLOAT
-int LvmBundle_process(LVM_INT16        *pIn,
-                      LVM_INT16        *pOut,
+int LvmBundle_process(effect_buffer_t  *pIn,
+                      effect_buffer_t  *pOut,
                       int              frameCount,
                       EffectContext    *pContext){
 
-
-    //LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
     LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
-    LVM_INT16               *pOutTmp;
-    LVM_FLOAT               *pInputBuff;
-    LVM_FLOAT               *pOutputBuff;
-
-    if (pContext->pBundledContext->pInputBuffer == NULL ||
+    effect_buffer_t         *pOutTmp;
+#ifndef NATIVE_FLOAT_BUFFER
+    if (pContext->pBundledContext->pInputBuffer == nullptr ||
             pContext->pBundledContext->frameCount < frameCount) {
-        if (pContext->pBundledContext->pInputBuffer != NULL) {
-            free(pContext->pBundledContext->pInputBuffer);
-        }
-        pContext->pBundledContext->pInputBuffer = (LVM_FLOAT *)malloc(frameCount * \
-                                                                      sizeof(LVM_FLOAT) * FCC_2);
+        free(pContext->pBundledContext->pInputBuffer);
+        pContext->pBundledContext->pInputBuffer =
+                (LVM_FLOAT *)calloc(frameCount, sizeof(LVM_FLOAT) * FCC_2);
     }
 
-    if (pContext->pBundledContext->pOutputBuffer == NULL ||
+    if (pContext->pBundledContext->pOutputBuffer == nullptr ||
             pContext->pBundledContext->frameCount < frameCount) {
-        if (pContext->pBundledContext->pOutputBuffer != NULL) {
-            free(pContext->pBundledContext->pOutputBuffer);
-        }
-        pContext->pBundledContext->pOutputBuffer = (LVM_FLOAT *)malloc(frameCount * \
-                                                                       sizeof(LVM_FLOAT) * FCC_2);
+        free(pContext->pBundledContext->pOutputBuffer);
+        pContext->pBundledContext->pOutputBuffer =
+                (LVM_FLOAT *)calloc(frameCount, sizeof(LVM_FLOAT) * FCC_2);
     }
 
-    if ((pContext->pBundledContext->pInputBuffer == NULL) ||
-                                    (pContext->pBundledContext->pOutputBuffer == NULL)) {
-        ALOGV("LVM_ERROR : LvmBundle_process memory allocation for float buffer's failed");
+    if (pContext->pBundledContext->pInputBuffer == nullptr ||
+            pContext->pBundledContext->pOutputBuffer == nullptr) {
+        ALOGE("LVM_ERROR : LvmBundle_process memory allocation for float buffer's failed");
         return -EINVAL;
     }
 
-    pInputBuff = pContext->pBundledContext->pInputBuffer;
-    pOutputBuff = pContext->pBundledContext->pOutputBuffer;
+    LVM_FLOAT * const pInputBuff = pContext->pBundledContext->pInputBuffer;
+    LVM_FLOAT * const pOutputBuff = pContext->pBundledContext->pOutputBuffer;
+#endif
 
     if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE){
         pOutTmp = pOut;
@@ -840,7 +779,7 @@
                 free(pContext->pBundledContext->workBuffer);
             }
             pContext->pBundledContext->workBuffer =
-                    (LVM_INT16 *)calloc(frameCount, sizeof(LVM_INT16) * FCC_2);
+                    (effect_buffer_t *)calloc(frameCount, sizeof(effect_buffer_t) * FCC_2);
             if (pContext->pBundledContext->workBuffer == NULL) {
                 return -ENOMEM;
             }
@@ -852,43 +791,61 @@
         return -EINVAL;
     }
 
-    #ifdef LVM_PCM
-    fwrite(pIn, frameCount*sizeof(LVM_INT16) * FCC_2, 1, pContext->pBundledContext->PcmInPtr);
+#ifdef LVM_PCM
+    fwrite(pIn,
+            frameCount*sizeof(effect_buffer_t) * FCC_2, 1, pContext->pBundledContext->PcmInPtr);
     fflush(pContext->pBundledContext->PcmInPtr);
-    #endif
+#endif
 
+#ifndef NATIVE_FLOAT_BUFFER
     /* Converting input data from fixed point to float point */
-    Int16ToFloat(pIn, pInputBuff, frameCount * 2);
+    memcpy_to_float_from_i16(pInputBuff, pIn, frameCount * FCC_2);
 
     /* Process the samples */
     LvmStatus = LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
                             pInputBuff,                           /* Input buffer */
                             pOutputBuff,                          /* Output buffer */
                             (LVM_UINT16)frameCount,               /* Number of samples to read */
-                            0);                                   /* Audo Time */
+                            0);                                   /* Audio Time */
 
+    /* Converting output data from float point to fixed point */
+    memcpy_to_i16_from_float(pOutTmp, pOutputBuff, frameCount * FCC_2);
+
+#else
+    /* Process the samples */
+    LvmStatus = LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
+                            pIn,                                  /* Input buffer */
+                            pOutTmp,                              /* Output buffer */
+                            (LVM_UINT16)frameCount,               /* Number of samples to read */
+                            0);                                   /* Audio Time */
+#endif
     LVM_ERROR_CHECK(LvmStatus, "LVM_Process", "LvmBundle_process")
     if(LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    /* Converting output data from float point to fixed point */
-    FloatToInt16_SAT(pOutputBuff, pOutTmp, (LVM_UINT16)frameCount * 2);
-    #ifdef LVM_PCM
-    fwrite(pOutTmp, frameCount*sizeof(LVM_INT16) * FCC_2, 1, pContext->pBundledContext->PcmOutPtr);
+#ifdef LVM_PCM
+    fwrite(pOutTmp,
+            frameCount*sizeof(effect_buffer_t) * FCC_2, 1, pContext->pBundledContext->PcmOutPtr);
     fflush(pContext->pBundledContext->PcmOutPtr);
-    #endif
+#endif
 
     if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
-        for (int i = 0; i < frameCount * 2; i++){
+        for (int i = 0; i < frameCount * FCC_2; i++) {
+#ifndef NATIVE_FLOAT_BUFFER
             pOut[i] = clamp16((LVM_INT32)pOut[i] + (LVM_INT32)pOutTmp[i]);
+#else
+            pOut[i] = pOut[i] + pOutTmp[i];
+#endif
         }
     }
     return 0;
 }    /* end LvmBundle_process */
-#else
+
+#else // BUILD_FLOAT
+
 int LvmBundle_process(LVM_INT16        *pIn,
                       LVM_INT16        *pOut,
                       int              frameCount,
-                      EffectContext    *pContext){
+                      EffectContext    *pContext) {
 
     LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
     LVM_INT16               *pOutTmp;
@@ -901,7 +858,7 @@
                 free(pContext->pBundledContext->workBuffer);
             }
             pContext->pBundledContext->workBuffer =
-                    (LVM_INT16 *)calloc(frameCount, sizeof(LVM_INT16) * 2);
+                    (effect_buffer_t *)calloc(frameCount, sizeof(effect_buffer_t) * FCC_2);
             if (pContext->pBundledContext->workBuffer == NULL) {
                 return -ENOMEM;
             }
@@ -913,10 +870,11 @@
         return -EINVAL;
     }
 
-    #ifdef LVM_PCM
-    fwrite(pIn, frameCount*sizeof(LVM_INT16)*2, 1, pContext->pBundledContext->PcmInPtr);
+#ifdef LVM_PCM
+    fwrite(pIn, frameCount * sizeof(*pIn) * FCC_2,
+            1 /* nmemb */, pContext->pBundledContext->PcmInPtr);
     fflush(pContext->pBundledContext->PcmInPtr);
-    #endif
+#endif
 
     //ALOGV("Calling LVM_Process");
 
@@ -925,15 +883,16 @@
                             pIn,                                  /* Input buffer */
                             pOutTmp,                              /* Output buffer */
                             (LVM_UINT16)frameCount,               /* Number of samples to read */
-                            0);                                   /* Audo Time */
+                            0);                                   /* Audio Time */
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_Process", "LvmBundle_process")
     if(LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    #ifdef LVM_PCM
-    fwrite(pOutTmp, frameCount*sizeof(LVM_INT16)*2, 1, pContext->pBundledContext->PcmOutPtr);
+#ifdef LVM_PCM
+    fwrite(pOutTmp, frameCount * sizeof(*pOutTmp) * FCC_2,
+            1 /* nmemb */, pContext->pBundledContext->PcmOutPtr);
     fflush(pContext->pBundledContext->PcmOutPtr);
-    #endif
+#endif
 
     if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
         for (int i=0; i<frameCount*2; i++){
@@ -942,7 +901,8 @@
     }
     return 0;
 }    /* end LvmBundle_process */
-#endif
+
+#endif // BUILD_FLOAT
 
 //----------------------------------------------------------------------------
 // EqualizerUpdateActiveParams()
@@ -1276,8 +1236,7 @@
     CHECK_ARG(pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
     CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
               || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
-    CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT);
-
+    CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
     pContext->config = *pConfig;
 
     switch (pConfig->inputCfg.samplingRate) {
@@ -3349,10 +3308,17 @@
         pContext->pBundledContext->NumberEffectsCalled = 0;
         /* Process all the available frames, block processing is
            handled internalLY by the LVM bundle */
-        processStatus = android::LvmBundle_process(    (LVM_INT16 *)inBuffer->raw,
-                                                (LVM_INT16 *)outBuffer->raw,
-                                                outBuffer->frameCount,
-                                                pContext);
+#ifdef NATIVE_FLOAT_BUFFER
+        processStatus = android::LvmBundle_process(inBuffer->f32,
+                                                   outBuffer->f32,
+                                                   outBuffer->frameCount,
+                                                   pContext);
+#else
+        processStatus = android::LvmBundle_process(inBuffer->s16,
+                                                   outBuffer->s16,
+                                                   outBuffer->frameCount,
+                                                   pContext);
+#endif
         if (processStatus != 0){
             ALOGV("\tLVM_ERROR : LvmBundle_process returned error %d", processStatus);
             if (status == 0) {
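
The hand-rolled Int16ToFloat()/FloatToInt16_SAT() loops removed above are replaced by the audio_utils primitives, whose argument order is (destination, source, sample count). A small usage sketch under those assumptions (buffer names are hypothetical):

    #include <audio_utils/primitives.h>

    // Convert an interleaved stereo int16 block to float, process in float, then convert
    // back with clamping; counts are in samples (frames * channels).
    void processAsFloat(int16_t *io, float *scratch, size_t frames) {
        const size_t samples = frames * 2;               // stereo
        memcpy_to_float_from_i16(scratch, io, samples);  // int16 -> float in [-1.0, 1.0)
        // ... float-domain processing on scratch ...
        memcpy_to_i16_from_float(io, scratch, samples);  // float -> int16 with saturation
    }
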
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index 291383a..6bf045d 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
@@ -95,7 +95,7 @@
     int                             SamplesToExitCountEq;
     int                             SamplesToExitCountBb;
     int                             SamplesToExitCountVirt;
-    LVM_INT16                       *workBuffer;
+    effect_buffer_t                 *workBuffer;
     int                             frameCount;
     int32_t                         bandGaindB[FIVEBAND_NUMBANDS];
     int                             volume;
@@ -103,10 +103,10 @@
     FILE                            *PcmInPtr;
     FILE                            *PcmOutPtr;
     #endif
-    #ifdef BUILD_FLOAT
+#if defined(BUILD_FLOAT) && !defined(NATIVE_FLOAT_BUFFER)
     LVM_FLOAT                       *pInputBuffer;
     LVM_FLOAT                       *pOutputBuffer;
-    #endif
+#endif
 };
 
 /* SessionContext : One session */
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 3d8e982..0630285 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -27,6 +27,7 @@
 #include <stdlib.h>
 #include <string.h>
 
+#include <audio_utils/primitives.h>
 #include <log/log.h>
 
 #include "EffectReverb.h"
@@ -135,6 +136,12 @@
         &gInsertPresetReverbDescriptor
 };
 
+#ifdef BUILD_FLOAT
+typedef     float               process_buffer_t; // process in float
+#else
+typedef     int32_t             process_buffer_t; // process in Q4_27
+#endif // BUILD_FLOAT
+
 struct ReverbContext{
     const struct effect_interface_s *itfe;
     effect_config_t                 config;
@@ -152,8 +159,8 @@
     FILE                            *PcmOutPtr;
     #endif
     LVM_Fs_en                       SampleRate;
-    LVM_INT32                       *InFrames32;
-    LVM_INT32                       *OutFrames32;
+    process_buffer_t                *InFrames;
+    process_buffer_t                *OutFrames;
     size_t                          bufferSizeIn;
     size_t                          bufferSizeOut;
     bool                            auxiliary;
@@ -262,7 +269,7 @@
 
     *pHandle = (effect_handle_t)pContext;
 
-    #ifdef LVM_PCM
+#ifdef LVM_PCM
     pContext->PcmInPtr = NULL;
     pContext->PcmOutPtr = NULL;
 
@@ -273,19 +280,15 @@
        (pContext->PcmOutPtr == NULL)){
        return -EINVAL;
     }
-    #endif
+#endif
 
+    int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
 
     // Allocate memory for reverb process (*2 is for STEREO)
-#ifdef BUILD_FLOAT
-    pContext->bufferSizeIn = LVREV_MAX_FRAME_SIZE * sizeof(float) * 2;
-    pContext->bufferSizeOut = pContext->bufferSizeIn;
-#else
-    pContext->bufferSizeIn = LVREV_MAX_FRAME_SIZE * sizeof(LVM_INT32) * 2;
-    pContext->bufferSizeOut = pContext->bufferSizeIn;
-#endif
-    pContext->InFrames32  = (LVM_INT32 *)malloc(pContext->bufferSizeIn);
-    pContext->OutFrames32 = (LVM_INT32 *)malloc(pContext->bufferSizeOut);
+    pContext->bufferSizeIn = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * channels;
+    pContext->bufferSizeOut = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * FCC_2;
+    pContext->InFrames  = (process_buffer_t *)calloc(pContext->bufferSizeIn, 1 /* size */);
+    pContext->OutFrames = (process_buffer_t *)calloc(pContext->bufferSizeOut, 1 /* size */);
 
     ALOGV("\tEffectCreate %p, size %zu", pContext, sizeof(ReverbContext));
     ALOGV("\tEffectCreate end\n");
@@ -305,8 +308,8 @@
     fclose(pContext->PcmInPtr);
     fclose(pContext->PcmOutPtr);
     #endif
-    free(pContext->InFrames32);
-    free(pContext->OutFrames32);
+    free(pContext->InFrames);
+    free(pContext->OutFrames);
     pContext->bufferSizeIn = 0;
     pContext->bufferSizeOut = 0;
     Reverb_free(pContext);
@@ -344,114 +347,6 @@
     }                                         \
 }
 
-#if 0
-//----------------------------------------------------------------------------
-// MonoTo2I_32()
-//----------------------------------------------------------------------------
-// Purpose:
-//  Convert MONO to STEREO
-//
-//----------------------------------------------------------------------------
-
-void MonoTo2I_32( const LVM_INT32  *src,
-                        LVM_INT32  *dst,
-                        LVM_INT16 n)
-{
-   LVM_INT16 ii;
-   src += (n-1);
-   dst += ((n*2)-1);
-
-   for (ii = n; ii != 0; ii--)
-   {
-       *dst = *src;
-       dst--;
-
-       *dst = *src;
-       dst--;
-       src--;
-   }
-
-   return;
-}
-
-//----------------------------------------------------------------------------
-// From2iToMono_32()
-//----------------------------------------------------------------------------
-// Purpose:
-//  Convert STEREO to MONO
-//
-//----------------------------------------------------------------------------
-
-void From2iToMono_32( const LVM_INT32 *src,
-                            LVM_INT32 *dst,
-                            LVM_INT16 n)
-{
-   LVM_INT16 ii;
-   LVM_INT32 Temp;
-
-   for (ii = n; ii != 0; ii--)
-   {
-       Temp = (*src>>1);
-       src++;
-
-       Temp +=(*src>>1);
-       src++;
-
-       *dst = Temp;
-       dst++;
-   }
-
-   return;
-}
-#endif
-
-#ifdef BUILD_FLOAT
-/**********************************************************************************
-   FUNCTION INT16LTOFLOAT
-***********************************************************************************/
-// Todo: need to write function descriptor
-static void Int16ToFloat(const LVM_INT16 *src, LVM_FLOAT *dst, size_t n) {
-    size_t ii;
-    src += n-1;
-    dst += n-1;
-    for (ii = n; ii != 0; ii--) {
-        *dst = ((LVM_FLOAT)((LVM_INT16)*src)) / 32768.0f;
-        src--;
-        dst--;
-    }
-    return;
-}
-/**********************************************************************************
-   FUNCTION FLOATTOINT16_SAT
-***********************************************************************************/
-// Todo : Need to write function descriptor
-static void FloatToInt16_SAT(const LVM_FLOAT *src, LVM_INT16 *dst, size_t n) {
-    size_t ii;
-    LVM_INT32 temp;
-
-    for (ii = 0; ii < n; ii++) {
-        temp = (LVM_INT32)((*src) * 32768.0f);
-        if (temp >= 32767) {
-            *dst = 32767;
-        } else if (temp <= -32768) {
-            *dst = -32768;
-        } else {
-            *dst = (LVM_INT16)temp;
-        }
-        src++;
-        dst++;
-    }
-    return;
-}
-#endif
-
-static inline int16_t clamp16(int32_t sample)
-{
-    if ((sample>>15) ^ (sample>>31))
-        sample = 0x7FFF ^ (sample>>31);
-    return sample;
-}
-
 //----------------------------------------------------------------------------
 // process()
 //----------------------------------------------------------------------------
@@ -459,8 +354,8 @@
 // Apply the Reverb
 //
 // Inputs:
-//  pIn:        pointer to stereo/mono 16 bit input data
-//  pOut:       pointer to stereo 16 bit output data
+//  pIn:        pointer to stereo/mono float or 16 bit input data
+//  pOut:       pointer to stereo float or 16 bit output data
 //  frameCount: Frames to process
 //  pContext:   effect engine context
 //  strength    strength to be applied
@@ -469,116 +364,107 @@
 //  pOut:       pointer to updated stereo 16 bit output data
 //
 //----------------------------------------------------------------------------
-
-int process( LVM_INT16     *pIn,
-             LVM_INT16     *pOut,
+int process( effect_buffer_t   *pIn,
+             effect_buffer_t   *pOut,
              int           frameCount,
              ReverbContext *pContext){
 
-    LVM_INT16               samplesPerFrame = 1;
+    int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
     LVREV_ReturnStatus_en   LvmStatus = LVREV_SUCCESS;              /* Function call status */
-    LVM_INT16 *OutFrames16;
-#ifdef BUILD_FLOAT
-    LVM_FLOAT               *pInputBuff;
-    LVM_FLOAT               *pOutputBuff;
-#endif
 
-#ifdef BUILD_FLOAT
-    if (pContext->InFrames32 == NULL ||
-            pContext->bufferSizeIn < frameCount * sizeof(float) * 2) {
-        if (pContext->InFrames32 != NULL) {
-            free(pContext->InFrames32);
-        }
-        pContext->bufferSizeIn = frameCount * sizeof(float) * 2;
-        pContext->InFrames32 = (LVM_INT32 *)malloc(pContext->bufferSizeIn);
-    }
-    if (pContext->OutFrames32 == NULL ||
-            pContext->bufferSizeOut < frameCount * sizeof(float) * 2) {
-        if (pContext->OutFrames32 != NULL) {
-            free(pContext->OutFrames32);
-        }
-        pContext->bufferSizeOut = frameCount * sizeof(float) * 2;
-        pContext->OutFrames32 = (LVM_INT32 *)malloc(pContext->bufferSizeOut);
-    }
-    pInputBuff = (float *)pContext->InFrames32;
-    pOutputBuff = (float *)pContext->OutFrames32;
-#endif
     // Check that the input is either mono or stereo
-    if (pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO) {
-        samplesPerFrame = 2;
-    } else if (pContext->config.inputCfg.channels != AUDIO_CHANNEL_OUT_MONO) {
-        ALOGV("\tLVREV_ERROR : process invalid PCM format");
+    if (!(channels == 1 || channels == FCC_2)) {
+        ALOGE("\tLVREV_ERROR : process invalid PCM format");
         return -EINVAL;
     }
 
-    OutFrames16 = (LVM_INT16 *)pContext->OutFrames32;
+#ifdef BUILD_FLOAT
+    size_t inSize = frameCount * sizeof(process_buffer_t) * channels;
+    size_t outSize = frameCount * sizeof(process_buffer_t) * FCC_2;
+    if (pContext->InFrames == NULL ||
+            pContext->bufferSizeIn < inSize) {
+        free(pContext->InFrames);
+        pContext->bufferSizeIn = inSize;
+        pContext->InFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeIn);
+    }
+    if (pContext->OutFrames == NULL ||
+            pContext->bufferSizeOut < outSize) {
+        free(pContext->OutFrames);
+        pContext->bufferSizeOut = outSize;
+        pContext->OutFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeOut);
+    }
+
+#ifndef NATIVE_FLOAT_BUFFER
+    effect_buffer_t * const OutFrames16 = (effect_buffer_t *)pContext->OutFrames;
+#endif
+#endif
 
     // Check for NULL pointers
-    if((pContext->InFrames32 == NULL)||(pContext->OutFrames32 == NULL)){
-        ALOGV("\tLVREV_ERROR : process failed to allocate memory for temporary buffers ");
+    if ((pContext->InFrames == NULL) || (pContext->OutFrames == NULL)) {
+        ALOGE("\tLVREV_ERROR : process failed to allocate memory for temporary buffers ");
         return -EINVAL;
     }
 
-    #ifdef LVM_PCM
-    fwrite(pIn, frameCount*sizeof(LVM_INT16)*samplesPerFrame, 1, pContext->PcmInPtr);
+#ifdef LVM_PCM
+    fwrite(pIn, frameCount * sizeof(*pIn) * channels, 1 /* nmemb */, pContext->PcmInPtr);
     fflush(pContext->PcmInPtr);
-    #endif
+#endif
 
     if (pContext->preset && pContext->nextPreset != pContext->curPreset) {
         Reverb_LoadPreset(pContext);
     }
 
-    // Convert to Input 32 bits
     if (pContext->auxiliary) {
 #ifdef BUILD_FLOAT
-        Int16ToFloat(pIn, pInputBuff, frameCount * samplesPerFrame);
+#ifdef NATIVE_FLOAT_BUFFER
+        static_assert(std::is_same<decltype(*pIn), decltype(*pContext->InFrames)>::value,
+                "pIn and InFrames must be same type");
+        memcpy(pContext->InFrames, pIn, frameCount * channels * sizeof(*pIn));
 #else
-        for(int i=0; i<frameCount*samplesPerFrame; i++){
-            pContext->InFrames32[i] = (LVM_INT32)pIn[i]<<8;
+        memcpy_to_float_from_i16(
+                pContext->InFrames, pIn, frameCount * channels);
+#endif
+#else //no BUILD_FLOAT
+        for (int i = 0; i < frameCount * channels; i++) {
+            pContext->InFrames[i] = (process_buffer_t)pIn[i]<<8;
         }
 #endif
         } else {
         // insert reverb input is always stereo
         for (int i = 0; i < frameCount; i++) {
-#ifndef BUILD_FLOAT
-            pContext->InFrames32[2*i] = (pIn[2*i] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12
-            pContext->InFrames32[2*i+1] = (pIn[2*i+1] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12
+#ifdef BUILD_FLOAT
+#ifdef NATIVE_FLOAT_BUFFER
+            pContext->InFrames[2 * i] = (process_buffer_t)pIn[2 * i] * REVERB_SEND_LEVEL;
+            pContext->InFrames[2 * i + 1] = (process_buffer_t)pIn[2 * i + 1] * REVERB_SEND_LEVEL;
 #else
-            pInputBuff[2 * i] = (LVM_FLOAT)pIn[2 * i] * REVERB_SEND_LEVEL / 32768.0f;
-            pInputBuff[2 * i + 1] = (LVM_FLOAT)pIn[2 * i + 1] * REVERB_SEND_LEVEL / 32768.0f;
+            pContext->InFrames[2 * i] =
+                    (process_buffer_t)pIn[2 * i] * REVERB_SEND_LEVEL / 32768.0f;
+            pContext->InFrames[2 * i + 1] =
+                    (process_buffer_t)pIn[2 * i + 1] * REVERB_SEND_LEVEL / 32768.0f;
+#endif
+#else
+            pContext->InFrames[2*i] = (pIn[2*i] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12
+            pContext->InFrames[2*i+1] = (pIn[2*i+1] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12
 #endif
         }
     }
 
     if (pContext->preset && pContext->curPreset == REVERB_PRESET_NONE) {
-#ifdef BUILD_FLOAT
-        memset(pOutputBuff, 0, frameCount * sizeof(LVM_FLOAT) * 2); //always stereo here
-#else
-        memset(pContext->OutFrames32, 0, frameCount * sizeof(LVM_INT32) * 2); //always stereo here
-#endif
+        memset(pContext->OutFrames, 0,
+                frameCount * sizeof(*pContext->OutFrames) * FCC_2); //always stereo here
     } else {
         if(pContext->bEnabled == LVM_FALSE && pContext->SamplesToExitCount > 0) {
-#ifdef BUILD_FLOAT
-            memset(pInputBuff, 0, frameCount * sizeof(LVM_FLOAT) * samplesPerFrame);
-#else
-            memset(pContext->InFrames32,0,frameCount * sizeof(LVM_INT32) * samplesPerFrame);
-#endif
-            ALOGV("\tZeroing %d samples per frame at the end of call", samplesPerFrame);
+            memset(pContext->InFrames, 0,
+                    frameCount * sizeof(*pContext->InFrames) * channels);
+            ALOGV("\tZeroing %d samples per frame at the end of call", channels);
         }
 
         /* Process the samples, producing a stereo output */
-#ifdef BUILD_FLOAT
         LvmStatus = LVREV_Process(pContext->hInstance,      /* Instance handle */
-                                  pInputBuff,     /* Input buffer */
-                                  pOutputBuff,    /* Output buffer */
+                                  pContext->InFrames,     /* Input buffer */
+                                  pContext->OutFrames,    /* Output buffer */
                                   frameCount);              /* Number of samples to read */
-#else
-        LvmStatus = LVREV_Process(pContext->hInstance,      /* Instance handle */
-                                  pContext->InFrames32,     /* Input buffer */
-                                  pContext->OutFrames32,    /* Output buffer */
-                                  frameCount);              /* Number of samples to read */
-#endif
-        }
+    }
 
     LVM_ERROR_CHECK(LvmStatus, "LVREV_Process", "process")
     if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
@@ -586,55 +472,87 @@
     // Convert to 16 bits
     if (pContext->auxiliary) {
 #ifdef BUILD_FLOAT
-        FloatToInt16_SAT(pOutputBuff, OutFrames16, (size_t)frameCount * 2);
-#else
-        for (int i=0; i < frameCount*2; i++) { //always stereo here
-            OutFrames16[i] = clamp16(pContext->OutFrames32[i]>>8);
-        }
+        // nothing to do here
+#ifndef NATIVE_FLOAT_BUFFER
+        // pContext->OutFrames and OutFrames16 point to the same buffer
+        // make sure the float to int conversion happens in the right order.
+        memcpy_to_i16_from_float(OutFrames16, pContext->OutFrames,
+                (size_t)frameCount * FCC_2);
 #endif
-        } else {
-#ifdef BUILD_FLOAT
-            for (int i = 0; i < frameCount * 2; i++) {//always stereo here
-                //pOutputBuff and OutFrames16 point to the same buffer, so better to
-                //accumulate in pInputBuff, which is available
-                pInputBuff[i] = pOutputBuff[i] + (LVM_FLOAT)pIn[i] / 32768.0f;
-            }
-
-            FloatToInt16_SAT(pInputBuff, OutFrames16, (size_t)frameCount * 2);
 #else
-            for (int i=0; i < frameCount*2; i++) { //always stereo here
-                OutFrames16[i] = clamp16((pContext->OutFrames32[i]>>8) + (LVM_INT32)pIn[i]);
-            }
+        memcpy_to_i16_from_q4_27(OutFrames16, pContext->OutFrames, (size_t)frameCount * FCC_2);
+#endif
+    } else {
+#ifdef BUILD_FLOAT
+#ifdef NATIVE_FLOAT_BUFFER
+        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
+            // Mix with dry input
+            pContext->OutFrames[i] += pIn[i];
+        }
+#else
+        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
+            // pContext->OutFrames and OutFrames16 point to the same buffer;
+            // make sure the float to int conversion happens after this accumulation.
+            pContext->OutFrames[i] += (process_buffer_t)pIn[i] / 32768.0f;
+        }
+        memcpy_to_i16_from_float(OutFrames16, pContext->OutFrames,
+                (size_t)frameCount * FCC_2);
+#endif
+#else
+        for (int i=0; i < frameCount * FCC_2; i++) { // always stereo here
+            OutFrames16[i] = clamp16((pContext->OutFrames[i]>>8) + (process_buffer_t)pIn[i]);
+        }
 #endif
         // apply volume with ramp if needed
         if ((pContext->leftVolume != pContext->prevLeftVolume ||
                 pContext->rightVolume != pContext->prevRightVolume) &&
                 pContext->volumeMode == REVERB_VOLUME_RAMP) {
+#if defined (BUILD_FLOAT) && defined (NATIVE_FLOAT_BUFFER)
+            // FIXME: still using int16 volumes.
+            // For reference: REVERB_UNIT_VOLUME  (0x1000) // 1.0 in 4.12 format
+            float vl = (float)pContext->prevLeftVolume / 4096;
+            float incl = (((float)pContext->leftVolume / 4096) - vl) / frameCount;
+            float vr = (float)pContext->prevRightVolume / 4096;
+            float incr = (((float)pContext->rightVolume / 4096) - vr) / frameCount;
+
+            for (int i = 0; i < frameCount; i++) {
+                pContext->OutFrames[FCC_2 * i] *= vl;
+                pContext->OutFrames[FCC_2 * i + 1] *= vr;
+
+                vl += incl;
+                vr += incr;
+            }
+#else
             LVM_INT32 vl = (LVM_INT32)pContext->prevLeftVolume << 16;
             LVM_INT32 incl = (((LVM_INT32)pContext->leftVolume << 16) - vl) / frameCount;
             LVM_INT32 vr = (LVM_INT32)pContext->prevRightVolume << 16;
             LVM_INT32 incr = (((LVM_INT32)pContext->rightVolume << 16) - vr) / frameCount;
 
             for (int i = 0; i < frameCount; i++) {
-                OutFrames16[2*i] =
+                OutFrames16[FCC_2 * i] =
                         clamp16((LVM_INT32)((vl >> 16) * OutFrames16[2*i]) >> 12);
-                OutFrames16[2*i+1] =
+                OutFrames16[FCC_2 * i + 1] =
                         clamp16((LVM_INT32)((vr >> 16) * OutFrames16[2*i+1]) >> 12);
 
                 vl += incl;
                 vr += incr;
             }
-
+#endif
             pContext->prevLeftVolume = pContext->leftVolume;
             pContext->prevRightVolume = pContext->rightVolume;
         } else if (pContext->volumeMode != REVERB_VOLUME_OFF) {
             if (pContext->leftVolume != REVERB_UNIT_VOLUME ||
                 pContext->rightVolume != REVERB_UNIT_VOLUME) {
                 for (int i = 0; i < frameCount; i++) {
-                    OutFrames16[2*i] =
+#if defined(BUILD_FLOAT) && defined(NATIVE_FLOAT_BUFFER)
+                    pContext->OutFrames[FCC_2 * i] *= ((float)pContext->leftVolume / 4096);
+                    pContext->OutFrames[FCC_2 * i + 1] *= ((float)pContext->rightVolume / 4096);
+#else
+                    OutFrames16[FCC_2 * i] =
                             clamp16((LVM_INT32)(pContext->leftVolume * OutFrames16[2*i]) >> 12);
-                    OutFrames16[2*i+1] =
+                    OutFrames16[FCC_2 * i + 1] =
                             clamp16((LVM_INT32)(pContext->rightVolume * OutFrames16[2*i+1]) >> 12);
+#endif
                 }
             }
             pContext->prevLeftVolume = pContext->leftVolume;
@@ -643,20 +561,25 @@
         }
     }
 
-    #ifdef LVM_PCM
-    fwrite(OutFrames16, frameCount*sizeof(LVM_INT16)*2, 1, pContext->PcmOutPtr);
+#ifdef LVM_PCM
+    fwrite(pContext->OutFrames, frameCount * sizeof(*pContext->OutFrames) * FCC_2,
+            1 /* nmemb */, pContext->PcmOutPtr);
     fflush(pContext->PcmOutPtr);
-    #endif
+#endif
 
     // Accumulate if required
     if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
         //ALOGV("\tBuffer access is ACCUMULATE");
-        for (int i=0; i<frameCount*2; i++){ //always stereo here
+        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
+#ifndef NATIVE_FLOAT_BUFFER
             pOut[i] = clamp16((int32_t)pOut[i] + (int32_t)OutFrames16[i]);
+#else
+            pOut[i] += pContext->OutFrames[i];
+#endif
         }
     }else{
         //ALOGV("\tBuffer access is WRITE");
-        memcpy(pOut, OutFrames16, frameCount*sizeof(LVM_INT16)*2);
+        memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
     }
 
     return 0;
@@ -733,8 +656,7 @@
     CHECK_ARG(pConfig->outputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
     CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
               || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
-    CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT);
-
+    CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
     //ALOGV("\tReverb_setConfig calling memcpy");
     pContext->config = *pConfig;
 
@@ -847,8 +769,7 @@
     } else {
         pContext->config.inputCfg.channels                  = AUDIO_CHANNEL_OUT_STEREO;
     }
-
-    pContext->config.inputCfg.format                        = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->config.inputCfg.format                        = EFFECT_BUFFER_FORMAT;
     pContext->config.inputCfg.samplingRate                  = 44100;
     pContext->config.inputCfg.bufferProvider.getBuffer      = NULL;
     pContext->config.inputCfg.bufferProvider.releaseBuffer  = NULL;
@@ -856,7 +777,7 @@
     pContext->config.inputCfg.mask                          = EFFECT_CONFIG_ALL;
     pContext->config.outputCfg.accessMode                   = EFFECT_BUFFER_ACCESS_ACCUMULATE;
     pContext->config.outputCfg.channels                     = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.outputCfg.format                       = AUDIO_FORMAT_PCM_16_BIT;
+    pContext->config.outputCfg.format                       = EFFECT_BUFFER_FORMAT;
     pContext->config.outputCfg.samplingRate                 = 44100;
     pContext->config.outputCfg.bufferProvider.getBuffer     = NULL;
     pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
@@ -2031,10 +1952,17 @@
     }
     //ALOGV("\tReverb_process() Calling process with %d frames", outBuffer->frameCount);
     /* Process all the available frames, block processing is handled internally by the LVM bundle */
-    status = process(    (LVM_INT16 *)inBuffer->raw,
-                         (LVM_INT16 *)outBuffer->raw,
-                                      outBuffer->frameCount,
-                                      pContext);
+#if defined (BUILD_FLOAT) && defined (NATIVE_FLOAT_BUFFER)
+    status = process(    inBuffer->f32,
+                         outBuffer->f32,
+                         outBuffer->frameCount,
+                         pContext);
+#else
+    status = process(    inBuffer->s16,
+                         outBuffer->s16,
+                         outBuffer->frameCount,
+                         pContext);
+#endif
 
     if (pContext->bEnabled == LVM_FALSE) {
         if (pContext->SamplesToExitCount > 0) {
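
Both volume paths above apply the same Q4.12 gain, where a gain value of 4096 is unity: the 16-bit path multiplies and shifts right by 12, while the float path multiplies by gain/4096. A minimal, standalone sketch of that equivalence; clamp16Sketch(), applyGainQ12() and applyGainFloat() are hypothetical names, not part of the patch:

    // Sketch only (not part of the patch): both volume paths scale a sample by a
    // Q4.12 gain, with 4096 representing unity. clamp16Sketch() is a hypothetical
    // stand-in for the library's clamp16().
    #include <algorithm>
    #include <cstdint>

    static inline int16_t clamp16Sketch(int32_t v) {
        return static_cast<int16_t>(std::min<int32_t>(std::max<int32_t>(v, -32768), 32767));
    }

    // 16-bit path: multiply, then shift right by 12 to remove the Q4.12 scaling.
    static inline int16_t applyGainQ12(int16_t sample, int32_t gainQ12) {
        return clamp16Sketch((gainQ12 * sample) >> 12);
    }

    // Float path (BUILD_FLOAT && NATIVE_FLOAT_BUFFER): the same gain as a multiplier.
    static inline float applyGainFloat(float sample, int32_t gainQ12) {
        return sample * (static_cast<float>(gainQ12) / 4096.f);
    }
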
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 5282352..72f5f58 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -61,6 +61,9 @@
     PAUSE,
     RESUME,
     GET_METRICS,
+    SET_INPUT_DEVICE,
+    GET_ROUTED_DEVICE_ID,
+    ENABLE_AUDIO_DEVICE_CALLBACK,
 
 };
 
@@ -337,6 +340,57 @@
         remote()->transact(RELEASE, data, &reply);
         return reply.readInt32();
     }
+
+    status_t setInputDevice(audio_port_handle_t deviceId)
+    {
+        ALOGV("setInputDevice");
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+        data.writeInt32(deviceId);
+
+        status_t status = remote()->transact(SET_INPUT_DEVICE, data, &reply);
+        if (status != OK) {
+            ALOGE("setInputDevice binder call failed: %d", status);
+            return status;
+        }
+        return reply.readInt32();
+    }
+
+    audio_port_handle_t getRoutedDeviceId(audio_port_handle_t *deviceId)
+    {
+        ALOGV("getRoutedDeviceId");
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+
+        status_t status = remote()->transact(GET_ROUTED_DEVICE_ID, data, &reply);
+        if (status != OK) {
+            ALOGE("getRoutedDeviceid binder call failed: %d", status);
+            *deviceId = AUDIO_PORT_HANDLE_NONE;
+            return status;
+        }
+
+        status = reply.readInt32();
+        if (status != NO_ERROR) {
+            *deviceId = AUDIO_PORT_HANDLE_NONE;
+        } else {
+            *deviceId = reply.readInt32();
+        }
+        return status;
+    }
+
+    status_t enableAudioDeviceCallback(bool enabled)
+    {
+        ALOGV("enableAudioDeviceCallback");
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+        data.writeBool(enabled);
+        status_t status = remote()->transact(ENABLE_AUDIO_DEVICE_CALLBACK, data, &reply);
+        if (status != OK) {
+            ALOGE("enableAudioDeviceCallback binder call failed: %d, %d", enabled, status);
+            return status;
+        }
+        return reply.readInt32();
+    }
 };
 
 IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder");
@@ -543,6 +597,41 @@
             }
             return NO_ERROR;
         } break;
+        case SET_INPUT_DEVICE: {
+            ALOGV("SET_INPUT_DEVICE");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            audio_port_handle_t deviceId;
+            status_t status = data.readInt32(&deviceId);
+            if (status == NO_ERROR) {
+                reply->writeInt32(setInputDevice(deviceId));
+            } else {
+                reply->writeInt32(BAD_VALUE);
+            }
+            return NO_ERROR;
+        } break;
+        case GET_ROUTED_DEVICE_ID: {
+            ALOGV("GET_ROUTED_DEVICE_ID");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            audio_port_handle_t deviceId;
+            status_t status = getRoutedDeviceId(&deviceId);
+            reply->writeInt32(status);
+            if (status == NO_ERROR) {
+                reply->writeInt32(deviceId);
+            }
+            return NO_ERROR;
+        } break;
+        case ENABLE_AUDIO_DEVICE_CALLBACK: {
+            ALOGV("ENABLE_AUDIO_DEVICE_CALLBACK");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            bool enabled;
+            status_t status = data.readBool(&enabled);
+            if (status == NO_ERROR) {
+                reply->writeInt32(enableAudioDeviceCallback(enabled));
+            } else {
+                reply->writeInt32(BAD_VALUE);
+            }
+            return NO_ERROR;
+        }
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/media/libmedia/include/media/IMediaRecorder.h b/media/libmedia/include/media/IMediaRecorder.h
index 9d0341a..3cef329 100644
--- a/media/libmedia/include/media/IMediaRecorder.h
+++ b/media/libmedia/include/media/IMediaRecorder.h
@@ -19,6 +19,7 @@
 #define ANDROID_IMEDIARECORDER_H
 
 #include <binder/IInterface.h>
+#include <system/audio.h>
 
 namespace android {
 
@@ -64,6 +65,10 @@
     virtual status_t release() = 0;
     virtual status_t setInputSurface(const sp<PersistentSurface>& surface) = 0;
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() = 0;
+
+    virtual status_t setInputDevice(audio_port_handle_t deviceId) = 0;
+    virtual status_t getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
+    virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libmedia/include/media/MediaRecorderBase.h b/media/libmedia/include/media/MediaRecorderBase.h
index 40dd9f9..748153c 100644
--- a/media/libmedia/include/media/MediaRecorderBase.h
+++ b/media/libmedia/include/media/MediaRecorderBase.h
@@ -18,6 +18,7 @@
 
 #define MEDIA_RECORDER_BASE_H_
 
+#include <media/AudioSystem.h>
 #include <media/mediarecorder.h>
 
 #include <system/audio.h>
@@ -62,6 +63,10 @@
     virtual status_t dump(int fd, const Vector<String16>& args) const = 0;
     virtual status_t setInputSurface(const sp<PersistentSurface>& surface) = 0;
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const = 0;
+    virtual status_t setInputDevice(audio_port_handle_t deviceId) = 0;
+    virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId) = 0;
+    virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback) = 0;
+    virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
 
 
 protected:
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index 071e7a1..5f2a6fe 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -77,6 +77,9 @@
     /* VP8/VORBIS data in a WEBM container */
     OUTPUT_FORMAT_WEBM = 9,
 
+    /* HEIC data in a HEIF container */
+    OUTPUT_FORMAT_HEIF = 10,
+
     OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
 };
 
@@ -138,6 +141,8 @@
     MEDIA_RECORDER_TRACK_EVENT_ERROR              = 100,
     MEDIA_RECORDER_TRACK_EVENT_INFO               = 101,
     MEDIA_RECORDER_TRACK_EVENT_LIST_END           = 1000,
+
+    MEDIA_RECORDER_AUDIO_ROUTING_CHANGED          = 10000,
 };
 
 /*
@@ -250,6 +255,9 @@
     status_t    setInputSurface(const sp<PersistentSurface>& surface);
     sp<IGraphicBufferProducer>     querySurfaceMediaSourceFromMediaServer();
     status_t    getMetrics(Parcel *reply);
+    status_t    setInputDevice(audio_port_handle_t deviceId);
+    status_t    getRoutedDeviceId(audio_port_handle_t *deviceId);
+    status_t    enableAudioDeviceCallback(bool enabled);
 
 private:
     void                    doCleanUp();
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 4405930..aab845b 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -792,4 +792,41 @@
     notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_ERROR_SERVER_DIED, 0);
 }
 
+status_t MediaRecorder::setInputDevice(audio_port_handle_t deviceId)
+{
+    ALOGV("setInputDevice");
+
+    if (mMediaRecorder == NULL) {
+        ALOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    return mMediaRecorder->setInputDevice(deviceId);
+}
+
+status_t MediaRecorder::getRoutedDeviceId(audio_port_handle_t* deviceId)
+{
+    ALOGV("getRoutedDeviceId");
+
+    if (mMediaRecorder == NULL) {
+        ALOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    status_t status = mMediaRecorder->getRoutedDeviceId(deviceId);
+    if (status != NO_ERROR) {
+        *deviceId = AUDIO_PORT_HANDLE_NONE;
+    }
+    return status;
+}
+
+status_t MediaRecorder::enableAudioDeviceCallback(bool enabled)
+{
+    ALOGV("enableAudioDeviceCallback");
+
+    if (mMediaRecorder == NULL) {
+        ALOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    return mMediaRecorder->enableAudioDeviceCallback(enabled);
+}
+
 } // namespace android
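
Taken together, the new client-side calls let an app pick an input device, ask for routing-change callbacks, and query the device actually in use. A hedged usage sketch, assuming an already prepared sp<MediaRecorder> named recorder and a valid audio_port_handle_t for the preferred microphone; error handling is trimmed and useInputRouting() is a hypothetical helper:

    // Usage sketch only; assumes an already prepared sp<MediaRecorder> and a
    // valid audio_port_handle_t for the preferred microphone.
    status_t useInputRouting(const sp<MediaRecorder>& recorder,
                             audio_port_handle_t preferredMic) {
        // Ask for routing-change notifications; they arrive at the client's
        // listener as MEDIA_RECORDER_AUDIO_ROUTING_CHANGED events.
        status_t err = recorder->enableAudioDeviceCallback(true);
        if (err != NO_ERROR) return err;

        // Request a specific input device for the recording session.
        err = recorder->setInputDevice(preferredMic);
        if (err != NO_ERROR) return err;

        // Once recording has started, query the device actually in use.
        audio_port_handle_t routedId = AUDIO_PORT_HANDLE_NONE;
        return recorder->getRoutedDeviceId(&routedId);
    }
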
diff --git a/media/libmediametrics/MediaAnalyticsItem.cpp b/media/libmediametrics/MediaAnalyticsItem.cpp
index f7df2b4..6b063e8 100644
--- a/media/libmediametrics/MediaAnalyticsItem.cpp
+++ b/media/libmediametrics/MediaAnalyticsItem.cpp
@@ -214,12 +214,12 @@
     return mPkgName;
 }
 
-MediaAnalyticsItem &MediaAnalyticsItem::setPkgVersionCode(int32_t pkgVersionCode) {
+MediaAnalyticsItem &MediaAnalyticsItem::setPkgVersionCode(int64_t pkgVersionCode) {
     mPkgVersionCode = pkgVersionCode;
     return *this;
 }
 
-int32_t MediaAnalyticsItem::getPkgVersionCode() const {
+int64_t MediaAnalyticsItem::getPkgVersionCode() const {
     return mPkgVersionCode;
 }
 
@@ -640,7 +640,7 @@
     mPid = data.readInt32();
     mUid = data.readInt32();
     mPkgName = data.readCString();
-    mPkgVersionCode = data.readInt32();
+    mPkgVersionCode = data.readInt64();
     mSessionID = data.readInt64();
     mFinalized = data.readInt32();
     mTimestamp = data.readInt64();
@@ -687,7 +687,7 @@
     data->writeInt32(mPid);
     data->writeInt32(mUid);
     data->writeCString(mPkgName.c_str());
-    data->writeInt32(mPkgVersionCode);
+    data->writeInt64(mPkgVersionCode);
     data->writeInt64(mSessionID);
     data->writeInt32(mFinalized);
     data->writeInt64(mTimestamp);
@@ -766,7 +766,7 @@
 
     if (version >= PROTO_V1) {
         result.append(mPkgName);
-        snprintf(buffer, sizeof(buffer), ":%d:", mPkgVersionCode);
+        snprintf(buffer, sizeof(buffer), ":%"  PRId64 ":", mPkgVersionCode);
         result.append(buffer);
     }
 
diff --git a/media/libmediametrics/include/MediaAnalyticsItem.h b/media/libmediametrics/include/MediaAnalyticsItem.h
index 5f9b916..ec9b660 100644
--- a/media/libmediametrics/include/MediaAnalyticsItem.h
+++ b/media/libmediametrics/include/MediaAnalyticsItem.h
@@ -173,8 +173,8 @@
         MediaAnalyticsItem &setPkgName(AString);
         AString getPkgName() const;
 
-        MediaAnalyticsItem &setPkgVersionCode(int32_t);
-        int32_t getPkgVersionCode() const;
+        MediaAnalyticsItem &setPkgVersionCode(int64_t);
+        int64_t getPkgVersionCode() const;
 
         // our serialization code for binder calls
         int32_t writeToParcel(Parcel *);
@@ -205,7 +205,7 @@
         pid_t     mPid;
         uid_t     mUid;
         AString   mPkgName;
-        int32_t   mPkgVersionCode;
+        int64_t   mPkgVersionCode;
 
         // let's reuse a binder connection
         static sp<IMediaAnalyticsService> sAnalyticsService;
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index a423fee..dcd393b 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -411,6 +411,25 @@
     }
 }
 
+MediaRecorderClient::AudioDeviceUpdatedNotifier::AudioDeviceUpdatedNotifier(
+        const sp<IMediaRecorderClient>& listener) {
+    mListener = listener;
+}
+
+MediaRecorderClient::AudioDeviceUpdatedNotifier::~AudioDeviceUpdatedNotifier() {
+}
+
+void MediaRecorderClient::AudioDeviceUpdatedNotifier::onAudioDeviceUpdate(
+        audio_io_handle_t audioIo,
+        audio_port_handle_t deviceId) {
+    sp<IMediaRecorderClient> listener = mListener.promote();
+    if (listener != NULL) {
+        listener->notify(MEDIA_RECORDER_AUDIO_ROUTING_CHANGED, audioIo, deviceId);
+    } else {
+        ALOGW("listener for process %d death is gone", MEDIA_RECORDER_AUDIO_ROUTING_CHANGED);
+    }
+}
+
 void MediaRecorderClient::clearDeathNotifiers_l() {
     if (mCameraDeathListener != nullptr) {
         mCameraDeathListener->unlinkToDeath();
@@ -459,6 +478,9 @@
             MediaPlayerService::MEDIACODEC_PROCESS_DEATH);
     omx->linkToDeath(mCodecDeathListener, 0);
 
+    mAudioDeviceUpdatedNotifier = new AudioDeviceUpdatedNotifier(listener);
+    mRecorder->setAudioDeviceCallback(mAudioDeviceUpdatedNotifier);
+
     return OK;
 }
 
@@ -479,4 +501,30 @@
     return OK;
 }
 
+status_t MediaRecorderClient::setInputDevice(audio_port_handle_t deviceId) {
+    ALOGV("setInputDevice(%d)", deviceId);
+    Mutex::Autolock lock(mLock);
+    if (mRecorder != NULL) {
+        return mRecorder->setInputDevice(deviceId);
+    }
+    return NO_INIT;
+}
+
+status_t MediaRecorderClient::getRoutedDeviceId(audio_port_handle_t* deviceId) {
+    ALOGV("getRoutedDeviceId");
+    Mutex::Autolock lock(mLock);
+    if (mRecorder != NULL) {
+        return mRecorder->getRoutedDeviceId(deviceId);
+    }
+    return NO_INIT;
+}
+
+status_t MediaRecorderClient::enableAudioDeviceCallback(bool enabled) {
+    ALOGV("enableDeviceCallback: %d", enabled);
+    Mutex::Autolock lock(mLock);
+    if (mRecorder != NULL) {
+        return mRecorder->enableAudioDeviceCallback(enabled);
+    }
+    return NO_INIT;
+}
 }; // namespace android
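
AudioDeviceUpdatedNotifier holds the client listener only as a wp<>, so it never keeps a dead client alive; each callback promotes the weak reference and silently drops the event if the client is gone. A self-contained sketch of that pattern, with hypothetical Listener/Notifier names (the real code uses IMediaRecorderClient):

    // Self-contained sketch of the weak-listener callback pattern (hypothetical
    // Listener/Notifier types).
    #include <utils/RefBase.h>

    using android::sp;
    using android::wp;

    struct Listener : public android::RefBase {
        void notify(int msg, int ext1, int ext2) { (void)msg; (void)ext1; (void)ext2; }
    };

    struct Notifier {
        wp<Listener> mListener;  // weak: the notifier never keeps the client alive

        void onEvent(int msg, int ext1, int ext2) {
            sp<Listener> listener = mListener.promote();  // nullptr once the client is gone
            if (listener != nullptr) {
                listener->notify(msg, ext1, ext2);
            }
            // else: drop the event; death handling is done elsewhere
        }
    };
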
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 711db2c..538b461 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -18,6 +18,7 @@
 #ifndef ANDROID_MEDIARECORDERCLIENT_H
 #define ANDROID_MEDIARECORDERCLIENT_H
 
+#include <media/AudioSystem.h>
 #include <media/IMediaRecorder.h>
 
 #include <android/hardware/media/omx/1.0/IOmx.h>
@@ -58,6 +59,18 @@
         wp<IMediaRecorderClient> mListener;
     };
 
+    class AudioDeviceUpdatedNotifier: public AudioSystem::AudioDeviceCallback
+    {
+    public:
+        AudioDeviceUpdatedNotifier(const sp<IMediaRecorderClient>& listener);
+        virtual ~AudioDeviceUpdatedNotifier();
+        virtual void onAudioDeviceUpdate(
+                audio_io_handle_t audioIo,
+                audio_port_handle_t deviceId);
+    private:
+        wp<IMediaRecorderClient> mListener;
+    };
+
     void clearDeathNotifiers_l();
 
 public:
@@ -91,6 +104,9 @@
     virtual     status_t   dump(int fd, const Vector<String16>& args);
     virtual     status_t   setInputSurface(const sp<PersistentSurface>& surface);
     virtual     sp<IGraphicBufferProducer> querySurfaceMediaSource();
+    virtual     status_t   setInputDevice(audio_port_handle_t deviceId);
+    virtual     status_t   getRoutedDeviceId(audio_port_handle_t* deviceId);
+    virtual     status_t   enableAudioDeviceCallback(bool enabled);
 
 private:
     friend class           MediaPlayerService;  // for accessing private constructor
@@ -103,6 +119,7 @@
 
     sp<ServiceDeathNotifier> mCameraDeathListener;
     sp<ServiceDeathNotifier> mCodecDeathListener;
+    sp<AudioDeviceUpdatedNotifier> mAudioDeviceUpdatedNotifier;
 
     pid_t                  mPid;
     Mutex                  mLock;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 89354d6..77eaefe 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -103,7 +103,9 @@
       mOutputFd(-1),
       mAudioSource(AUDIO_SOURCE_CNT),
       mVideoSource(VIDEO_SOURCE_LIST_END),
-      mStarted(false) {
+      mStarted(false),
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mDeviceCallbackEnabled(false) {
 
     ALOGV("Constructor");
 
@@ -204,7 +206,7 @@
     return OK;
 }
 
-// The client side of mediaserver asks it to creat a SurfaceMediaSource
+// The client side of mediaserver asks it to create a SurfaceMediaSource
 // and return an interface reference. The client side will use that
 // while encoding GL Frames
 sp<IGraphicBufferProducer> StagefrightRecorder::querySurfaceMediaSource() const {
@@ -1069,7 +1071,8 @@
                 mAudioChannels,
                 mSampleRate,
                 mClientUid,
-                mClientPid);
+                mClientPid,
+                mSelectedDeviceId);
 
     status_t err = audioSource->initCheck();
 
@@ -1120,6 +1123,10 @@
 
     sp<MediaCodecSource> audioEncoder =
             MediaCodecSource::Create(mLooper, format, audioSource);
+    sp<AudioSystem::AudioDeviceCallback> callback = mAudioDeviceCallback.promote();
+    if (mDeviceCallbackEnabled && callback != 0) {
+        audioSource->addAudioDeviceCallback(callback);
+    }
     mAudioSourceNode = audioSource;
 
     if (audioEncoder == NULL) {
@@ -2116,6 +2123,46 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setInputDevice(audio_port_handle_t deviceId) {
+    ALOGV("setInputDevice");
+
+    if (mSelectedDeviceId != deviceId) {
+        mSelectedDeviceId = deviceId;
+        if (mAudioSourceNode != 0) {
+            return mAudioSourceNode->setInputDevice(deviceId);
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t StagefrightRecorder::getRoutedDeviceId(audio_port_handle_t* deviceId) {
+    ALOGV("getRoutedDeviceId");
+
+    if (mAudioSourceNode != 0) {
+        status_t status = mAudioSourceNode->getRoutedDeviceId(deviceId);
+        return status;
+    }
+    return NO_INIT;
+}
+
+void StagefrightRecorder::setAudioDeviceCallback(
+        const sp<AudioSystem::AudioDeviceCallback>& callback) {
+    mAudioDeviceCallback = callback;
+}
+
+status_t StagefrightRecorder::enableAudioDeviceCallback(bool enabled) {
+    mDeviceCallbackEnabled = enabled;
+    sp<AudioSystem::AudioDeviceCallback> callback = mAudioDeviceCallback.promote();
+    if (mAudioSourceNode != 0 && callback != 0) {
+        if (enabled) {
+            return mAudioSourceNode->addAudioDeviceCallback(callback);
+        } else {
+            return mAudioSourceNode->removeAudioDeviceCallback(callback);
+        }
+    }
+    return NO_ERROR;
+}
+
 status_t StagefrightRecorder::dump(
         int fd, const Vector<String16>& args) const {
     ALOGV("dump");
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 5111c8e..ec7e8ed 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -72,6 +72,10 @@
     virtual status_t dump(int fd, const Vector<String16> &args) const;
     // Querying a SurfaceMediaSource
     virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const;
+    virtual status_t setInputDevice(audio_port_handle_t deviceId);
+    virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+    virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
+    virtual status_t enableAudioDeviceCallback(bool enabled);
 
 private:
     mutable Mutex mLock;
@@ -144,6 +148,10 @@
     sp<IGraphicBufferProducer> mGraphicBufferProducer;
     sp<ALooper> mLooper;
 
+    audio_port_handle_t mSelectedDeviceId;
+    bool mDeviceCallbackEnabled;
+    wp<AudioSystem::AudioDeviceCallback> mAudioDeviceCallback;
+
     static const int kMaxHighSpeedFps = 1000;
 
     status_t prepareInternal();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 6c4b823..d1e5d45 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1311,6 +1311,7 @@
 
             mResetting = true;
             stopPlaybackTimer("kWhatReset");
+            stopRebufferingTimer(true);
 
             mDeferredActions.push_back(
                     new FlushDecoderAction(
@@ -1604,6 +1605,37 @@
     }
 }
 
+void NuPlayer::startRebufferingTimer() {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+    if (mLastStartedRebufferingTimeNs == 0) {
+        mLastStartedRebufferingTimeNs = systemTime();
+        ALOGV("startRebufferingTimer() time %20" PRId64 "",  mLastStartedRebufferingTimeNs);
+    }
+}
+
+void NuPlayer::stopRebufferingTimer(bool exitingPlayback) {
+    Mutex::Autolock autoLock(mPlayingTimeLock);
+
+    ALOGV("stopRebufferTimer()  time %20" PRId64 " (exiting %d)", mLastStartedRebufferingTimeNs, exitingPlayback);
+
+    if (mLastStartedRebufferingTimeNs != 0) {
+        sp<NuPlayerDriver> driver = mDriver.promote();
+        if (driver != NULL) {
+            int64_t now = systemTime();
+            int64_t rebuffered = now - mLastStartedRebufferingTimeNs;
+            ALOGV("stopRebufferingTimer()  log  %20" PRId64 "", rebuffered);
+
+            if (rebuffered > 0) {
+                driver->notifyMoreRebufferingTimeUs((rebuffered+500)/1000);
+                if (exitingPlayback) {
+                    driver->notifyRebufferingWhenExit(true);
+                }
+            }
+        }
+        mLastStartedRebufferingTimeNs = 0;
+    }
+}
+
 void NuPlayer::onPause() {
 
     stopPlaybackTimer("onPause");
@@ -2250,6 +2282,7 @@
     CHECK(mVideoDecoder == NULL);
 
     stopPlaybackTimer("performReset");
+    stopRebufferingTimer(true);
 
     cancelPollDuration();
 
@@ -2503,6 +2536,7 @@
             if (mStarted) {
                 ALOGI("buffer low, pausing...");
 
+                startRebufferingTimer();
                 mPausedForBuffering = true;
                 onPause();
             }
@@ -2516,6 +2550,7 @@
             if (mStarted) {
                 ALOGI("buffer ready, resuming...");
 
+                stopRebufferingTimer(false);
                 mPausedForBuffering = false;
 
                 // do not resume yet if client didn't unpause
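
The rebuffering accounting above follows a start/stop-accumulate pattern: startRebufferingTimer() records a timestamp only if one is not already pending, and stopRebufferingTimer() adds the elapsed time (rounded from nanoseconds to microseconds) before clearing the mark, optionally flagging that playback exited while rebuffering. A minimal sketch of that bookkeeping, with the mutex and NuPlayerDriver plumbing omitted and RebufferClock as a hypothetical name:

    // Sketch only: accumulate rebuffering time across pause/resume cycles.
    // systemTime() in the patch returns nanoseconds; the metric is kept in microseconds.
    #include <cstdint>

    struct RebufferClock {
        int64_t lastStartNs = 0;  // 0 means "not currently rebuffering"
        int64_t totalUs = 0;
        int32_t events = 0;

        void start(int64_t nowNs) {
            if (lastStartNs == 0) lastStartNs = nowNs;  // ignore nested starts
        }

        void stop(int64_t nowNs) {
            if (lastStartNs == 0) return;               // nothing pending
            int64_t elapsedNs = nowNs - lastStartNs;
            if (elapsedNs > 0) {
                totalUs += (elapsedNs + 500) / 1000;    // round ns to microseconds
                events++;
            }
            lastStartNs = 0;
        }
    };
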
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 492b8d4..fda69e8 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -183,6 +183,10 @@
     void stopPlaybackTimer(const char *where);
     void startPlaybackTimer(const char *where);
 
+    int64_t mLastStartedRebufferingTimeNs;
+    void startRebufferingTimer();
+    void stopRebufferingTimer(bool exitingPlayback);
+
     int64_t mPreviousSeekTimeUs;
 
     List<sp<Action> > mDeferredActions;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index bd54df0..ebf57c6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -56,6 +56,10 @@
 static const char *kPlayerError = "android.media.mediaplayer.err";
 static const char *kPlayerErrorCode = "android.media.mediaplayer.errcode";
 static const char *kPlayerDataSourceType = "android.media.mediaplayer.dataSource";
+// rebuffering metrics
+static const char *kPlayerRebuffering = "android.media.mediaplayer.rebufferingMs";
+static const char *kPlayerRebufferingCount = "android.media.mediaplayer.rebuffers";
+static const char *kPlayerRebufferingAtExit = "android.media.mediaplayer.rebufferExit";
 
 
 NuPlayerDriver::NuPlayerDriver(pid_t pid)
@@ -67,6 +71,9 @@
       mPositionUs(-1),
       mSeekInProgress(false),
       mPlayingTimeUs(0),
+      mRebufferingTimeUs(0),
+      mRebufferingEvents(0),
+      mRebufferingAtExit(false),
       mLooper(new ALooper),
       mMediaClock(new MediaClock),
       mPlayer(new NuPlayer(pid, mMediaClock)),
@@ -582,6 +589,12 @@
 
     mAnalyticsItem->setInt64(kPlayerPlaying, (mPlayingTimeUs+500)/1000 );
 
+    if (mRebufferingEvents != 0) {
+        mAnalyticsItem->setInt64(kPlayerRebuffering, (mRebufferingTimeUs+500)/1000 );
+        mAnalyticsItem->setInt32(kPlayerRebufferingCount, mRebufferingEvents);
+        mAnalyticsItem->setInt32(kPlayerRebufferingAtExit, mRebufferingAtExit);
+    }
+
     mAnalyticsItem->setCString(kPlayerDataSourceType, mPlayer->getDataSourceType());
 }
 
@@ -661,6 +674,9 @@
     mPositionUs = -1;
     mLooping = false;
     mPlayingTimeUs = 0;
+    mRebufferingTimeUs = 0;
+    mRebufferingEvents = 0;
+    mRebufferingAtExit = false;
 
     return OK;
 }
@@ -811,6 +827,17 @@
     mPlayingTimeUs += playingUs;
 }
 
+void NuPlayerDriver::notifyMoreRebufferingTimeUs(int64_t rebufferingUs) {
+    Mutex::Autolock autoLock(mLock);
+    mRebufferingTimeUs += rebufferingUs;
+    mRebufferingEvents++;
+}
+
+void NuPlayerDriver::notifyRebufferingWhenExit(bool status) {
+    Mutex::Autolock autoLock(mLock);
+    mRebufferingAtExit = status;
+}
+
 void NuPlayerDriver::notifySeekComplete() {
     ALOGV("notifySeekComplete(%p)", this);
     Mutex::Autolock autoLock(mLock);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 6ca6344..13b46c3 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -84,6 +84,8 @@
     void notifySetSurfaceComplete();
     void notifyDuration(int64_t durationUs);
     void notifyMorePlayingTimeUs(int64_t timeUs);
+    void notifyMoreRebufferingTimeUs(int64_t timeUs);
+    void notifyRebufferingWhenExit(bool status);
     void notifySeekComplete();
     void notifySeekComplete_l();
     void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
@@ -126,6 +128,9 @@
     int64_t mPositionUs;
     bool mSeekInProgress;
     int64_t mPlayingTimeUs;
+    int64_t mRebufferingTimeUs;
+    int32_t mRebufferingEvents;
+    bool mRebufferingAtExit;
     // <<<
 
     sp<ALooper> mLooper;
diff --git a/media/libnblog/PerformanceAnalysis.cpp b/media/libnblog/PerformanceAnalysis.cpp
index 478c460..f09e93d 100644
--- a/media/libnblog/PerformanceAnalysis.cpp
+++ b/media/libnblog/PerformanceAnalysis.cpp
@@ -230,6 +230,7 @@
 }
 
 // rounds value to precision based on log-distance from mean
+__attribute__((no_sanitize("signed-integer-overflow")))
 inline double logRound(double x, double mean) {
     // Larger values decrease range of high resolution and prevent overflow
     // of a histogram on the console.
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 0c71487..a618676 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -37,6 +37,8 @@
         "AudioPlayer.cpp",
         "AudioSource.cpp",
         "BufferImpl.cpp",
+        "CCodec.cpp",
+        "CCodecBufferChannel.cpp",
         "CodecBase.cpp",
         "CallbackDataSource.cpp",
         "CallbackMediaSource.cpp",
@@ -89,6 +91,7 @@
         "libdl",
         "libdrmframework",
         "libgui",
+        "libion",
         "liblog",
         "libmedia",
         "libmedia_omx",
@@ -100,6 +103,7 @@
         "libui",
         "libutils",
         "libmedia_helper",
+        "libstagefright_codec2",
         "libstagefright_foundation",
         "libstagefright_omx",
         "libstagefright_omx_utils",
@@ -111,6 +115,11 @@
         "android.hidl.allocator@1.0",
         "android.hardware.cas.native@1.0",
         "android.hardware.media.omx@1.0",
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.mapper@2.0",
+
+        // XXX: hack
+        "libstagefright_soft_c2avcdec",
     ],
 
     static_libs: [
@@ -125,6 +134,9 @@
         "libstagefright_esds",
         "libstagefright_id3",
         "libFLAC",
+
+        // XXX: hack
+        "libstagefright_codec2_vndk",
     ],
 
     export_shared_lib_headers: [
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index f2b1f10..b8da980 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -52,7 +52,7 @@
 AudioSource::AudioSource(
         audio_source_t inputSource, const String16 &opPackageName,
         uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
-        uid_t uid, pid_t pid)
+        uid_t uid, pid_t pid, audio_port_handle_t selectedDeviceId)
     : mStarted(false),
       mSampleRate(sampleRate),
       mOutSampleRate(outSampleRate > 0 ? outSampleRate : sampleRate),
@@ -101,7 +101,9 @@
                     AudioRecord::TRANSFER_DEFAULT,
                     AUDIO_INPUT_FLAG_NONE,
                     uid,
-                    pid);
+                    pid,
+                    NULL /*pAttributes*/,
+                    selectedDeviceId);
         mInitCheck = mRecord->initCheck();
         if (mInitCheck != OK) {
             mRecord.clear();
@@ -465,4 +467,35 @@
     return value;
 }
 
+status_t AudioSource::setInputDevice(audio_port_handle_t deviceId) {
+    if (mRecord != 0) {
+        return mRecord->setInputDevice(deviceId);
+    }
+    return NO_INIT;
+}
+
+status_t AudioSource::getRoutedDeviceId(audio_port_handle_t* deviceId) {
+    if (mRecord != 0) {
+        *deviceId = mRecord->getRoutedDeviceId();
+        return NO_ERROR;
+    }
+    return NO_INIT;
+}
+
+status_t AudioSource::addAudioDeviceCallback(
+        const sp<AudioSystem::AudioDeviceCallback>& callback) {
+    if (mRecord != 0) {
+        return mRecord->addAudioDeviceCallback(callback);
+    }
+    return NO_INIT;
+}
+
+status_t AudioSource::removeAudioDeviceCallback(
+        const sp<AudioSystem::AudioDeviceCallback>& callback) {
+    if (mRecord != 0) {
+        return mRecord->removeAudioDeviceCallback(callback);
+    }
+    return NO_INIT;
+}
+
 }  // namespace android
diff --git a/media/libstagefright/BufferImpl.cpp b/media/libstagefright/BufferImpl.cpp
index fee3739..9fb6d34 100644
--- a/media/libstagefright/BufferImpl.cpp
+++ b/media/libstagefright/BufferImpl.cpp
@@ -24,11 +24,14 @@
 #include <media/ICrypto.h>
 #include <utils/NativeHandle.h>
 
+#include "include/Codec2Buffer.h"
 #include "include/SecureBuffer.h"
 #include "include/SharedMemoryBuffer.h"
 
 namespace android {
 
+// SharedMemoryBuffer
+
 SharedMemoryBuffer::SharedMemoryBuffer(const sp<AMessage> &format, const sp<IMemory> &mem)
     : MediaCodecBuffer(format, new ABuffer(mem->pointer(), mem->size())),
       mMemory(mem) {
@@ -39,6 +42,8 @@
       mTMemory(mem) {
 }
 
+// SecureBuffer
+
 SecureBuffer::SecureBuffer(const sp<AMessage> &format, const void *ptr, size_t size)
     : MediaCodecBuffer(format, new ABuffer(nullptr, size)),
       mPointer(ptr) {
@@ -59,4 +64,28 @@
     return ICrypto::kDestinationTypeNativeHandle;
 }
 
+// Codec2Buffer
+
+// static
+sp<Codec2Buffer> Codec2Buffer::allocate(
+        const sp<AMessage> &format, const std::shared_ptr<C2LinearBlock> &block) {
+    C2WriteView writeView(block->map().get());
+    if (writeView.error() != C2_OK) {
+        return nullptr;
+    }
+    return new Codec2Buffer(format, new ABuffer(writeView.base(), writeView.capacity()), block);
+}
+
+C2ConstLinearBlock Codec2Buffer::share() {
+    return mBlock->share(offset(), size(), C2Fence());
+}
+
+Codec2Buffer::Codec2Buffer(
+        const sp<AMessage> &format,
+        const sp<ABuffer> &buffer,
+        const std::shared_ptr<C2LinearBlock> &block)
+    : MediaCodecBuffer(format, buffer),
+      mBlock(block) {
+}
+
 }  // namespace android
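
Codec2Buffer wraps a writable C2LinearBlock as a MediaCodecBuffer: allocate() maps the block for writing (returning nullptr if the mapping fails), and share() later exports the buffer's offset()/size() range as an immutable C2ConstLinearBlock. A hedged usage sketch; fillAndQueue() is a hypothetical helper, and obtaining the Codec2Buffer and queueing the shared block are out of scope:

    // Usage sketch only; fillAndQueue() is hypothetical. It assumes a Codec2Buffer
    // obtained from Codec2Buffer::allocate() and an input payload that fits.
    #include <cstring>

    void fillAndQueue(const sp<Codec2Buffer>& buffer, const void* src, size_t len) {
        memcpy(buffer->data(), src, len);  // write into the mapped C2LinearBlock
        buffer->setRange(0, len);          // mark the valid region of the buffer
        // share() exports [offset(), offset() + size()) as an immutable
        // C2ConstLinearBlock; the caller would wrap it in a C2Buffer and queue it.
        C2ConstLinearBlock shared = buffer->share();
        (void)shared;  // queueing to the component is outside this sketch
    }
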
diff --git a/media/libstagefright/CCodec.cpp b/media/libstagefright/CCodec.cpp
new file mode 100644
index 0000000..080d00f
--- /dev/null
+++ b/media/libstagefright/CCodec.cpp
@@ -0,0 +1,582 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CCodec"
+#include <utils/Log.h>
+
+// XXX: HACK
+#include "codecs/avcdec/C2SoftAvcDec.h"
+
+#include <thread>
+
+#include <gui/Surface.h>
+#include <media/stagefright/CCodec.h>
+
+#include "include/CCodecBufferChannel.h"
+
+using namespace std::chrono_literals;
+
+namespace android {
+
+namespace {
+
+class CCodecWatchdog : public AHandler {
+private:
+    enum {
+        kWhatRegister,
+        kWhatWatch,
+    };
+    constexpr static int64_t kWatchIntervalUs = 3000000;  // 3 secs
+
+public:
+    static sp<CCodecWatchdog> getInstance() {
+        Mutexed<sp<CCodecWatchdog>>::Locked instance(sInstance);
+        if (*instance == nullptr) {
+            *instance = new CCodecWatchdog;
+            (*instance)->init();
+        }
+        return *instance;
+    }
+
+    ~CCodecWatchdog() = default;
+
+    void registerCodec(CCodec *codec) {
+        sp<AMessage> msg = new AMessage(kWhatRegister, this);
+        msg->setPointer("codec", codec);
+        msg->post();
+    }
+
+protected:
+    void onMessageReceived(const sp<AMessage> &msg) {
+        switch (msg->what()) {
+            case kWhatRegister: {
+                void *ptr = nullptr;
+                CHECK(msg->findPointer("codec", &ptr));
+                Mutexed<std::list<wp<CCodec>>>::Locked codecs(mCodecs);
+                codecs->emplace_back((CCodec *)ptr);
+                break;
+            }
+
+            case kWhatWatch: {
+                Mutexed<std::list<wp<CCodec>>>::Locked codecs(mCodecs);
+                for (auto it = codecs->begin(); it != codecs->end(); ) {
+                    sp<CCodec> codec = it->promote();
+                    if (codec == nullptr) {
+                        it = codecs->erase(it);
+                        continue;
+                    }
+                    codec->initiateReleaseIfStuck();
+                    ++it;
+                }
+                msg->post(kWatchIntervalUs);
+                break;
+            }
+
+            default: {
+                TRESPASS("CCodecWatchdog: unrecognized message");
+            }
+        }
+    }
+
+private:
+    CCodecWatchdog() : mLooper(new ALooper) {}
+
+    void init() {
+        mLooper->setName("CCodecWatchdog");
+        mLooper->registerHandler(this);
+        mLooper->start();
+        (new AMessage(kWhatWatch, this))->post(kWatchIntervalUs);
+    }
+
+    static Mutexed<sp<CCodecWatchdog>> sInstance;
+
+    sp<ALooper> mLooper;
+    Mutexed<std::list<wp<CCodec>>> mCodecs;
+};
+
+Mutexed<sp<CCodecWatchdog>> CCodecWatchdog::sInstance;
+
+class CCodecListener : public C2Component::Listener {
+public:
+    CCodecListener(const std::shared_ptr<CCodecBufferChannel> &channel)
+        : mChannel(channel) {
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> component,
+            std::vector<std::unique_ptr<C2Work>> workItems) override {
+        (void) component;
+        mChannel->onWorkDone(std::move(workItems));
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> component,
+            std::vector<std::shared_ptr<C2SettingResult>> settingResult) override {
+        // TODO
+        (void) component;
+        (void) settingResult;
+    }
+
+    virtual void onError_nb(std::weak_ptr<C2Component> component, uint32_t errorCode) override {
+        // TODO
+        (void) component;
+        (void) errorCode;
+    }
+
+private:
+    std::shared_ptr<CCodecBufferChannel> mChannel;
+};
+
+}  // namespace
+
+CCodec::CCodec()
+    : mChannel(new CCodecBufferChannel([this] (status_t err, enum ActionCode actionCode) {
+          mCallback->onError(err, actionCode);
+      })) {
+    CCodecWatchdog::getInstance()->registerCodec(this);
+}
+
+CCodec::~CCodec() {
+}
+
+std::shared_ptr<BufferChannelBase> CCodec::getBufferChannel() {
+    return mChannel;
+}
+
+void CCodec::initiateAllocateComponent(const sp<AMessage> &msg) {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != RELEASED) {
+            mCallback->onError(INVALID_OPERATION, ACTION_CODE_FATAL);
+            return;
+        }
+        state->mState = ALLOCATING;
+    }
+
+    AString componentName;
+    if (!msg->findString("componentName", &componentName)) {
+        // TODO: find componentName appropriate with the media type
+    }
+
+    sp<AMessage> allocMsg(new AMessage(kWhatAllocate, this));
+    allocMsg->setString("componentName", componentName);
+    allocMsg->post();
+}
+
+void CCodec::allocate(const AString &componentName) {
+    // TODO: use C2ComponentStore to create component
+    mListener.reset(new CCodecListener(mChannel));
+
+    std::shared_ptr<C2Component> comp(new C2SoftAvcDec(componentName.c_str(), 0));
+    comp->setListener_sm(mListener);
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != ALLOCATING) {
+            state->mState = RELEASED;
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = ALLOCATED;
+        state->mComp = comp;
+    }
+    mChannel->setComponent(comp);
+    mCallback->onComponentAllocated(comp->intf()->getName().c_str());
+}
+
+void CCodec::initiateConfigureComponent(const sp<AMessage> &format) {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != ALLOCATED) {
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            return;
+        }
+    }
+
+    sp<AMessage> msg(new AMessage(kWhatConfigure, this));
+    msg->setMessage("format", format);
+    msg->post();
+}
+
+void CCodec::configure(const sp<AMessage> &msg) {
+    sp<AMessage> inputFormat(new AMessage);
+    sp<AMessage> outputFormat(new AMessage);
+    status_t err = [=] {
+        AString mime;
+        if (!msg->findString("mime", &mime)) {
+            return BAD_VALUE;
+        }
+
+        int32_t encoder;
+        if (!msg->findInt32("encoder", &encoder)) {
+            encoder = false;
+        }
+
+        sp<RefBase> obj;
+        if (msg->findObject("native-window", &obj)) {
+            sp<Surface> surface = static_cast<Surface *>(obj.get());
+            setSurface(surface);
+        }
+
+        // TODO
+
+        return OK;
+    }();
+    if (err != OK) {
+        mCallback->onError(err, ACTION_CODE_FATAL);
+        return;
+    }
+
+    {
+        Mutexed<Formats>::Locked formats(mFormats);
+        formats->mInputFormat = inputFormat;
+        formats->mOutputFormat = outputFormat;
+    }
+    mCallback->onComponentConfigured(inputFormat, outputFormat);
+}
+
+
+void CCodec::initiateCreateInputSurface() {
+    // TODO
+}
+
+void CCodec::initiateSetInputSurface(const sp<PersistentSurface> &surface) {
+    // TODO
+    (void) surface;
+}
+
+void CCodec::initiateStart() {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != ALLOCATED) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = STARTING;
+    }
+
+    (new AMessage(kWhatStart, this))->post();
+}
+
+void CCodec::start() {
+    std::shared_ptr<C2Component> comp;
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != STARTING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        comp = state->mComp;
+    }
+    c2_status_t err = comp->start();
+    if (err != C2_OK) {
+        // TODO: convert err into status_t
+        mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+        return;
+    }
+    sp<AMessage> inputFormat;
+    sp<AMessage> outputFormat;
+    {
+        Mutexed<Formats>::Locked formats(mFormats);
+        inputFormat = formats->mInputFormat;
+        outputFormat = formats->mOutputFormat;
+    }
+    mChannel->start(inputFormat, outputFormat);
+
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != STARTING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = RUNNING;
+    }
+    mCallback->onStartCompleted();
+}
+
+void CCodec::initiateShutdown(bool keepComponentAllocated) {
+    if (keepComponentAllocated) {
+        initiateStop();
+    } else {
+        initiateRelease();
+    }
+}
+
+void CCodec::initiateStop() {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == ALLOCATED
+                || state->mState  == RELEASED
+                || state->mState == STOPPING
+                || state->mState == RELEASING) {
+            // We're already stopped, released, or doing it right now.
+            state.unlock();
+            mCallback->onStopCompleted();
+            state.lock();
+            return;
+        }
+        state->mState = STOPPING;
+    }
+
+    (new AMessage(kWhatStop, this))->post();
+}
+
+void CCodec::stop() {
+    std::shared_ptr<C2Component> comp;
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == RELEASING) {
+            state.unlock();
+            // We're already stopped or release is in progress.
+            mCallback->onStopCompleted();
+            state.lock();
+            return;
+        } else if (state->mState != STOPPING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        comp = state->mComp;
+    }
+    mChannel->stop();
+    c2_status_t err = comp->stop();
+    if (err != C2_OK) {
+        // TODO: convert err into status_t
+        mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+    }
+
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == STOPPING) {
+            state->mState = ALLOCATED;
+        }
+    }
+    mCallback->onStopCompleted();
+}
+
+void CCodec::initiateRelease(bool sendCallback /* = true */) {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == RELEASED || state->mState == RELEASING) {
+            // We're already released or doing it right now.
+            if (sendCallback) {
+                state.unlock();
+                mCallback->onReleaseCompleted();
+                state.lock();
+            }
+            return;
+        }
+        if (state->mState == ALLOCATING) {
+            state->mState = RELEASING;
+            // With the altered state allocate() would fail and clean up.
+            if (sendCallback) {
+                state.unlock();
+                mCallback->onReleaseCompleted();
+                state.lock();
+            }
+            return;
+        }
+        state->mState = RELEASING;
+    }
+
+    std::thread([this, sendCallback] { release(sendCallback); }).detach();
+}
+
+void CCodec::release(bool sendCallback) {
+    std::shared_ptr<C2Component> comp;
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState == RELEASED) {
+            if (sendCallback) {
+                state.unlock();
+                mCallback->onReleaseCompleted();
+                state.lock();
+            }
+            return;
+        }
+        comp = state->mComp;
+    }
+    mChannel->stop();
+    comp->release();
+
+    {
+        Mutexed<State>::Locked state(mState);
+        state->mState = RELEASED;
+        state->mComp.reset();
+    }
+    if (sendCallback) {
+        mCallback->onReleaseCompleted();
+    }
+}
+
+status_t CCodec::setSurface(const sp<Surface> &surface) {
+    return mChannel->setSurface(surface);
+}
+
+void CCodec::signalFlush() {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != RUNNING) {
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            return;
+        }
+        state->mState = FLUSHING;
+    }
+
+    (new AMessage(kWhatFlush, this))->post();
+}
+
+void CCodec::flush() {
+    std::shared_ptr<C2Component> comp;
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != FLUSHING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        comp = state->mComp;
+    }
+
+    mChannel->stop();
+
+    std::list<std::unique_ptr<C2Work>> flushedWork;
+    c2_status_t err = comp->flush_sm(C2Component::FLUSH_COMPONENT, &flushedWork);
+    if (err != C2_OK) {
+        // TODO: convert err into status_t
+        mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+    }
+
+    mChannel->flush(flushedWork);
+
+    {
+        Mutexed<State>::Locked state(mState);
+        state->mState = FLUSHED;
+    }
+    mCallback->onFlushCompleted();
+}
+
+void CCodec::signalResume() {
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != FLUSHED) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = RESUMING;
+    }
+
+    mChannel->start(nullptr, nullptr);
+
+    {
+        Mutexed<State>::Locked state(mState);
+        if (state->mState != RESUMING) {
+            state.unlock();
+            mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            state.lock();
+            return;
+        }
+        state->mState = RUNNING;
+    }
+}
+
+void CCodec::signalSetParameters(const sp<AMessage> &msg) {
+    // TODO
+    (void) msg;
+}
+
+void CCodec::signalEndOfInputStream() {
+}
+
+void CCodec::signalRequestIDRFrame() {
+    // TODO
+}
+
+void CCodec::onMessageReceived(const sp<AMessage> &msg) {
+    TimePoint now = std::chrono::steady_clock::now();
+    switch (msg->what()) {
+        case kWhatAllocate: {
+            // C2ComponentStore::createComponent() should return within 100ms.
+            setDeadline(now + 150ms);
+            AString componentName;
+            CHECK(msg->findString("componentName", &componentName));
+            allocate(componentName);
+            break;
+        }
+        case kWhatConfigure: {
+            // C2Component::commit_sm() should return within 5ms.
+            setDeadline(now + 50ms);
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+            configure(format);
+            break;
+        }
+        case kWhatStart: {
+            // C2Component::start() should return within 500ms.
+            setDeadline(now + 550ms);
+            start();
+            break;
+        }
+        case kWhatStop: {
+            // C2Component::stop() should return within 500ms.
+            setDeadline(now + 550ms);
+            stop();
+            break;
+        }
+        case kWhatFlush: {
+            // C2Component::flush_sm() should return within 5ms.
+            setDeadline(now + 50ms);
+            flush();
+            break;
+        }
+        default: {
+            ALOGE("unrecognized message");
+            break;
+        }
+    }
+    setDeadline(TimePoint::max());
+}
+
+void CCodec::setDeadline(const TimePoint &newDeadline) {
+    Mutexed<TimePoint>::Locked deadline(mDeadline);
+    *deadline = newDeadline;
+}
+
+void CCodec::initiateReleaseIfStuck() {
+    {
+        Mutexed<TimePoint>::Locked deadline(mDeadline);
+        if (*deadline >= std::chrono::steady_clock::now()) {
+            // We're not stuck.
+            return;
+        }
+    }
+
+    mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+    initiateRelease();
+}
+
+}  // namespace android
diff --git a/media/libstagefright/CCodecBufferChannel.cpp b/media/libstagefright/CCodecBufferChannel.cpp
new file mode 100644
index 0000000..9868cd4
--- /dev/null
+++ b/media/libstagefright/CCodecBufferChannel.cpp
@@ -0,0 +1,589 @@
+/*
+ * Copyright 2016, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "CCodecBufferChannel"
+#include <utils/Log.h>
+
+#include <numeric>
+#include <thread>
+
+#include <C2PlatformSupport.h>
+
+#include <android/hardware/cas/native/1.0/IDescrambler.h>
+#include <binder/MemoryDealer.h>
+#include <gui/Surface.h>
+#include <media/openmax/OMX_Core.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/MediaCodecBuffer.h>
+#include <system/window.h>
+
+#include "include/CCodecBufferChannel.h"
+#include "include/Codec2Buffer.h"
+#include "include/SecureBuffer.h"
+#include "include/SharedMemoryBuffer.h"
+
+namespace android {
+
+using hardware::hidl_handle;
+using hardware::hidl_string;
+using hardware::hidl_vec;
+using namespace hardware::cas::V1_0;
+using namespace hardware::cas::native::V1_0;
+
+// TODO: get this info from component
+const static size_t kMinBufferArraySize = 16;
+
+void CCodecBufferChannel::OutputBuffers::flush(
+        const std::list<std::unique_ptr<C2Work>> &flushedWork) {
+    (void) flushedWork;
+    // This is a no-op by default unless we're in array mode, where we need to
+    // keep track of the flushed work.
+}
+
+namespace {
+
+template <class T>
+ssize_t findBufferSlot(
+        std::vector<T> *buffers,
+        size_t maxSize,
+        std::function<bool(const T&)> pred) {
+    auto it = std::find_if(buffers->begin(), buffers->end(), pred);
+    if (it == buffers->end()) {
+        if (buffers->size() < maxSize) {
+            buffers->emplace_back();
+            return buffers->size() - 1;
+        } else {
+            return -1;
+        }
+    }
+    return std::distance(buffers->begin(), it);
+}
+
+class LinearBuffer : public C2Buffer {
+public:
+    explicit LinearBuffer(C2ConstLinearBlock block) : C2Buffer({ block }) {}
+};
+
+class LinearInputBuffers : public CCodecBufferChannel::InputBuffers {
+public:
+    using CCodecBufferChannel::InputBuffers::InputBuffers;
+
+    virtual bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
+        *buffer = nullptr;
+        ssize_t ret = findBufferSlot<wp<Codec2Buffer>>(
+                &mBuffers, kMinBufferArraySize,
+                [] (const auto &elem) { return elem.promote() == nullptr; });
+        if (ret < 0) {
+            return false;
+        }
+        std::shared_ptr<C2LinearBlock> block;
+
+        status_t err = mAlloc->fetchLinearBlock(
+                // TODO: proper max input size
+                65536,
+                { 0, C2MemoryUsage::kSoftwareWrite },
+                &block);
+        if (err != OK) {
+            return false;
+        }
+
+        sp<Codec2Buffer> newBuffer = Codec2Buffer::allocate(mFormat, block);
+        mBuffers[ret] = newBuffer;
+        *index = ret;
+        *buffer = newBuffer;
+        return true;
+    }
+
+    virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
+        auto it = std::find(mBuffers.begin(), mBuffers.end(), buffer);
+        if (it == mBuffers.end()) {
+            return nullptr;
+        }
+        sp<Codec2Buffer> codecBuffer = it->promote();
+        // We got an sp<> reference from the caller, so this should never happen.
+        CHECK(codecBuffer != nullptr);
+        return std::make_shared<LinearBuffer>(codecBuffer->share());
+    }
+
+    virtual void flush() override {
+    }
+
+private:
+    // Buffers we passed to the client. The index of a buffer matches what
+    // was passed in BufferCallback::onInputBufferAvailable().
+    std::vector<wp<Codec2Buffer>> mBuffers;
+
+    // Buffer array we passed to the client. This only gets initialized at
+    // getInput/OutputBufferArray() and when this is set we can't add more
+    // buffers.
+    std::vector<sp<Codec2Buffer>> mBufferArray;
+};
+
+class GraphicOutputBuffers : public CCodecBufferChannel::OutputBuffers {
+public:
+    using CCodecBufferChannel::OutputBuffers::OutputBuffers;
+
+    virtual bool registerBuffer(
+            const std::shared_ptr<C2Buffer> &buffer,
+            size_t *index,
+            sp<MediaCodecBuffer> *codecBuffer) override {
+        *codecBuffer = nullptr;
+        ssize_t ret = findBufferSlot<BufferInfo>(
+                &mBuffers,
+                kMinBufferArraySize,
+                [] (const auto &elem) { return elem.mClientBuffer.promote() == nullptr; });
+        if (ret < 0) {
+            return false;
+        }
+        sp<MediaCodecBuffer> newBuffer = new MediaCodecBuffer(
+                mFormat,
+                buffer == nullptr ? kEmptyBuffer : kDummyBuffer);
+        mBuffers[ret] = { newBuffer, buffer };
+        *index = ret;
+        *codecBuffer = newBuffer;
+        return true;
+    }
+
+    virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
+        auto it = std::find_if(
+                mBuffers.begin(), mBuffers.end(),
+                [buffer] (const auto &elem) {
+                    return elem.mClientBuffer.promote() == buffer;
+                });
+        if (it == mBuffers.end()) {
+            return nullptr;
+        }
+        return it->mBufferRef;
+    }
+
+private:
+    static const sp<ABuffer> kEmptyBuffer;
+    static const sp<ABuffer> kDummyBuffer;
+
+    struct BufferInfo {
+        // wp<> of MediaCodecBuffer for MediaCodec.
+        wp<MediaCodecBuffer> mClientBuffer;
+        // Buffer reference to hold until mClientBuffer is valid.
+        std::shared_ptr<C2Buffer> mBufferRef;
+    };
+    // Buffers we passed to the client. The index of a buffer matches what
+    // was passed in BufferCallback::onOutputBufferAvailable().
+    std::vector<BufferInfo> mBuffers;
+};
+
+const sp<ABuffer> GraphicOutputBuffers::kEmptyBuffer = new ABuffer(nullptr, 0);
+const sp<ABuffer> GraphicOutputBuffers::kDummyBuffer = new ABuffer(nullptr, 1);
+
+}  // namespace
+
+CCodecBufferChannel::QueueGuard::QueueGuard(
+        CCodecBufferChannel::QueueSync &sync) : mSync(sync) {
+    std::unique_lock<std::mutex> l(mSync.mMutex);
+    if (mSync.mCount == -1) {
+        mRunning = false;
+    } else {
+        ++mSync.mCount;
+        mRunning = true;
+    }
+}
+
+CCodecBufferChannel::QueueGuard::~QueueGuard() {
+    if (mRunning) {
+        --mSync.mCount;
+    }
+}
+
+void CCodecBufferChannel::QueueSync::start() {
+    std::unique_lock<std::mutex> l(mMutex);
+    // If stopped, it goes to running state; otherwise no-op.
+    int32_t expected = -1;
+    mCount.compare_exchange_strong(expected, 0);
+}
+
+void CCodecBufferChannel::QueueSync::stop() {
+    std::unique_lock<std::mutex> l(mMutex);
+    if (mCount == -1) {
+        // no-op
+        return;
+    }
+    int32_t expected = 0;
+    while (!mCount.compare_exchange_weak(expected, -1)) {
+        // compare_exchange_weak() overwrites |expected| on failure; reset it
+        // so we only flip to stopped (-1) once no QueueGuard holds the count.
+        expected = 0;
+        std::this_thread::yield();
+    }
+}
+
+CCodecBufferChannel::CCodecBufferChannel(
+        const std::function<void(status_t, enum ActionCode)> &onError)
+    : mOnError(onError),
+      mInputBuffers(new LinearInputBuffers),
+      mOutputBuffers(new GraphicOutputBuffers),
+      mFrameIndex(0u),
+      mFirstValidFrameIndex(0u) {
+}
+
+CCodecBufferChannel::~CCodecBufferChannel() {
+    if (mCrypto != nullptr && mDealer != nullptr && mHeapSeqNum >= 0) {
+        mCrypto->unsetHeap(mHeapSeqNum);
+    }
+}
+
+void CCodecBufferChannel::setComponent(const std::shared_ptr<C2Component> &component) {
+    mComponent = component;
+    // TODO: get pool ID from params
+    std::shared_ptr<C2BlockPool> pool;
+    c2_status_t err = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, component, &pool);
+    if (err == C2_OK) {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        (*buffers)->setAlloc(pool);
+    }
+}
+
+status_t CCodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
+    QueueGuard guard(mSync);
+    if (!guard.isRunning()) {
+        ALOGW("No more buffers should be queued at current state.");
+        return -ENOSYS;
+    }
+
+    int64_t timeUs;
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+    int32_t flags = 0;
+    int32_t tmp = 0;
+    if (buffer->meta()->findInt32("eos", &tmp) && tmp) {
+        flags |= C2BufferPack::FLAG_END_OF_STREAM;
+        ALOGV("input EOS");
+    }
+    if (buffer->meta()->findInt32("csd", &tmp) && tmp) {
+        flags |= C2BufferPack::FLAG_CODEC_CONFIG;
+    }
+    std::unique_ptr<C2Work> work(new C2Work);
+    work->input.flags = (C2BufferPack::flags_t)flags;
+    work->input.ordinal.timestamp = timeUs;
+    work->input.ordinal.frame_index = mFrameIndex++;
+    work->input.buffers.clear();
+    {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        work->input.buffers.push_back((*buffers)->releaseBuffer(buffer));
+    }
+    // TODO: fill info's
+
+    work->worklets.clear();
+    work->worklets.emplace_back(new C2Worklet);
+
+    std::list<std::unique_ptr<C2Work>> items;
+    items.push_back(std::move(work));
+    return mComponent->queue_nb(&items);
+}
+
+status_t CCodecBufferChannel::queueSecureInputBuffer(
+        const sp<MediaCodecBuffer> &buffer, bool secure, const uint8_t *key,
+        const uint8_t *iv, CryptoPlugin::Mode mode, CryptoPlugin::Pattern pattern,
+        const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,
+        AString *errorDetailMsg) {
+    // TODO
+    (void) buffer;
+    (void) secure;
+    (void) key;
+    (void) iv;
+    (void) mode;
+    (void) pattern;
+    (void) subSamples;
+    (void) numSubSamples;
+    (void) errorDetailMsg;
+    return -ENOSYS;
+}
+
+status_t CCodecBufferChannel::renderOutputBuffer(
+        const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
+    ALOGV("renderOutputBuffer");
+    sp<MediaCodecBuffer> inBuffer;
+    size_t index;
+    {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
+            inBuffer = nullptr;
+        }
+    }
+    if (inBuffer != nullptr) {
+        mCallback->onInputBufferAvailable(index, inBuffer);
+    }
+
+    std::shared_ptr<C2Buffer> c2Buffer;
+    {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        c2Buffer = (*buffers)->releaseBuffer(buffer);
+    }
+
+    Mutexed<sp<Surface>>::Locked surface(mSurface);
+    if (*surface == nullptr) {
+        ALOGE("no surface");
+        return OK;
+    }
+
+    std::list<C2ConstGraphicBlock> blocks = c2Buffer->data().graphicBlocks();
+    if (blocks.size() != 1u) {
+        ALOGE("# of graphic blocks expected to be 1, but %zu", blocks.size());
+        return UNKNOWN_ERROR;
+    }
+
+    sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(
+            blocks.front().handle(),
+            GraphicBuffer::CLONE_HANDLE,
+            blocks.front().width(),
+            blocks.front().height(),
+            HAL_PIXEL_FORMAT_YV12,
+            // TODO
+            1,
+            (uint64_t)GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+            // TODO
+            blocks.front().width()));
+
+    status_t result = (*surface)->attachBuffer(graphicBuffer.get());
+    if (result != OK) {
+        ALOGE("attachBuffer failed: %d", result);
+        return result;
+    }
+
+    // TODO: read and set crop
+
+    result = native_window_set_buffers_timestamp((*surface).get(), timestampNs);
+    ALOGW_IF(result != OK, "failed to set buffer timestamp: %d", result);
+
+    // TODO: fix after C2Fence implementation
+#if 0
+    const C2Fence &fence = blocks.front().fence();
+    result = ((ANativeWindow *)(*surface).get())->queueBuffer(
+            (*surface).get(), graphicBuffer.get(), fence.valid() ? fence.fd() : -1);
+#else
+    result = ((ANativeWindow *)(*surface).get())->queueBuffer(
+            (*surface).get(), graphicBuffer.get(), -1);
+#endif
+    if (result != OK) {
+        ALOGE("queueBuffer failed: %d", result);
+        return result;
+    }
+
+    return OK;
+}
+
+status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
+    ALOGV("discardBuffer");
+    {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        (void) (*buffers)->releaseBuffer(buffer);
+    }
+    {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        (void) (*buffers)->releaseBuffer(buffer);
+    }
+    return OK;
+}
+
+#if 0
+void fillBufferArray_l(Mutexed<Buffers>::Locked &buffers) {
+    for (size_t i = 0; i < buffers->mClientBuffer.size(); ++i) {
+        sp<Codec2Buffer> buffer(buffers->mClientBuffer.get(i).promote());
+        if (buffer == nullptr) {
+            buffer = allocateBuffer_l(buffers->mAlloc);
+        }
+        buffers->mBufferArray.push_back(buffer);
+    }
+    while (buffers->mBufferArray.size() < kMinBufferArraySize) {
+        sp<Codec2Buffer> buffer = allocateBuffer_l(buffers->mAlloc);
+        // allocate buffer
+        buffers->mBufferArray.push_back(buffer);
+    }
+}
+#endif
+
+void CCodecBufferChannel::getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
+    (void) array;
+    // TODO
+#if 0
+    array->clear();
+    Mutexed<Buffers>::Locked buffers(mInputBuffers);
+
+    if (!buffers->isArrayMode()) {
+        // mBufferArray is empty.
+        fillBufferArray_l(buffers);
+    }
+
+    for (const auto &buffer : buffers->mBufferArray) {
+        array->push_back(buffer);
+    }
+#endif
+}
+
+void CCodecBufferChannel::getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
+    (void) array;
+    // TODO
+#if 0
+    array->clear();
+    Mutexed<Buffers>::Locked buffers(mOutputBuffers);
+
+    if (!buffers->isArrayMode()) {
+        if (linear) {
+            // mBufferArray is empty.
+            fillBufferArray_l(buffers);
+
+            // We need to replace the allocator so that the component only returns
+            // buffer from the array.
+            ArrayModeAllocator::Builder builder(buffers->mBufferArray);
+            for (size_t i = 0; i < buffers->mClientBuffer.size(); ++i) {
+                if (buffers->mClientBuffer.get(i).promote() != nullptr) {
+                    builder.markUsing(i);
+                }
+            }
+            buffers->mAlloc.reset(builder.build());
+        } else {
+            for (int i = 0; i < X; ++i) {
+                buffers->mBufferArray.push_back(dummy buffer);
+            }
+        }
+    }
+
+    for (const auto &buffer : buffers->mBufferArray) {
+        array->push_back(buffer);
+    }
+#endif
+}
+
+void CCodecBufferChannel::start(const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
+    if (inputFormat != nullptr) {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        (*buffers)->setFormat(inputFormat);
+    }
+    if (outputFormat != nullptr) {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        (*buffers)->setFormat(outputFormat);
+    }
+
+    mSync.start();
+    // TODO: use proper buffer depth instead of this random value
+    for (size_t i = 0; i < kMinBufferArraySize; ++i) {
+        size_t index;
+        sp<MediaCodecBuffer> buffer;
+        {
+            Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+            if (!(*buffers)->requestNewBuffer(&index, &buffer)) {
+                buffers.unlock();
+                ALOGE("start: cannot allocate memory");
+                mOnError(NO_MEMORY, ACTION_CODE_FATAL);
+                buffers.lock();
+                return;
+            }
+        }
+        mCallback->onInputBufferAvailable(index, buffer);
+    }
+}
+
+void CCodecBufferChannel::stop() {
+    mSync.stop();
+    mFirstValidFrameIndex = mFrameIndex.load();
+}
+
+void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
+    {
+        Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+        (*buffers)->flush();
+    }
+    {
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        (*buffers)->flush(flushedWork);
+    }
+}
+
+void CCodecBufferChannel::onWorkDone(std::vector<std::unique_ptr<C2Work>> workItems) {
+    for (const auto &work : workItems) {
+        if (work->result != OK) {
+            ALOGE("work failed to complete: %d", work->result);
+            mOnError(work->result, ACTION_CODE_FATAL);
+            return;
+        }
+
+        // NOTE: MediaCodec usage supposedly has only one worklet
+        if (work->worklets.size() != 1u) {
+            ALOGE("incorrect number of worklets: %zu", work->worklets.size());
+            mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            continue;
+        }
+
+        const std::unique_ptr<C2Worklet> &worklet = work->worklets.front();
+        if (worklet->output.ordinal.frame_index < mFirstValidFrameIndex) {
+            // Discard frames from previous generation.
+            continue;
+        }
+        // NOTE: MediaCodec usage supposedly has only one output stream.
+        if (worklet->output.buffers.size() != 1u) {
+            ALOGE("incorrect number of output buffers: %zu", worklet->output.buffers.size());
+            mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            continue;
+        }
+
+        const std::shared_ptr<C2Buffer> &buffer = worklet->output.buffers[0];
+        // TODO: transfer infos() into buffer metadata
+
+        int32_t flags = 0;
+        if (worklet->output.flags & C2BufferPack::FLAG_END_OF_STREAM) {
+            flags |= MediaCodec::BUFFER_FLAG_EOS;
+            ALOGV("output EOS");
+        }
+
+        size_t index;
+        sp<MediaCodecBuffer> outBuffer;
+        Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+        if (!(*buffers)->registerBuffer(buffer, &index, &outBuffer)) {
+            ALOGE("unable to register output buffer");
+            mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+            continue;
+        }
+
+        outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp);
+        outBuffer->meta()->setInt32("flags", flags);
+        ALOGV("index = %zu", index);
+        mCallback->onOutputBufferAvailable(index, outBuffer);
+    }
+}
+
+status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface) {
+    if (newSurface != nullptr) {
+        newSurface->setScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+    }
+
+    Mutexed<sp<Surface>>::Locked surface(mSurface);
+//    if (newSurface == nullptr) {
+//        if (*surface != nullptr) {
+//            ALOGW("cannot unset a surface");
+//            return INVALID_OPERATION;
+//        }
+//        return OK;
+//    }
+//
+//    if (*surface == nullptr) {
+//        ALOGW("component was not configured with a surface");
+//        return INVALID_OPERATION;
+//    }
+
+    *surface = newSurface;
+    return OK;
+}
+
+}  // namespace android
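
For illustration, a minimal, self-contained sketch of the gating pattern that QueueSync/QueueGuard implement earlier in this file (simplified names and an assumed usage; not the shipped classes): start() flips an atomic counter from -1 (stopped) to 0 (running), every queue call holds a guard that bumps the counter, and stop() spins until the counter can be swapped back to -1, i.e. until no queue call is still in flight.

    // Illustrative sketch only; names are assumptions, not the AOSP classes.
    #include <atomic>
    #include <cstdint>
    #include <mutex>
    #include <thread>

    struct Gate {
        std::mutex mutex;
        std::atomic<int32_t> count{-1};  // -1: stopped, >= 0: active guards

        void start() {
            std::lock_guard<std::mutex> l(mutex);
            int32_t expected = -1;
            count.compare_exchange_strong(expected, 0);  // stopped -> running
        }
        void stop() {
            std::lock_guard<std::mutex> l(mutex);
            if (count == -1) return;                     // already stopped
            int32_t expected = 0;
            while (!count.compare_exchange_weak(expected, -1)) {
                expected = 0;                            // wait for guards to drain
                std::this_thread::yield();
            }
        }
    };

    struct Guard {
        explicit Guard(Gate &g) : gate(g) {
            std::lock_guard<std::mutex> l(gate.mutex);
            running = (gate.count != -1);
            if (running) ++gate.count;
        }
        ~Guard() { if (running) --gate.count; }
        bool isRunning() const { return running; }
        Gate &gate;
        bool running;
    };

    bool queueOne(Gate &gate) {
        Guard guard(gate);
        if (!guard.isRunning()) return false;  // channel already stopped
        // ... hand the buffer to the component; stop() blocks until we return ...
        return true;
    }
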
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index fa5f37ec..b529940 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -419,8 +419,11 @@
 
     *done = (++mNumFramesDecoded >= mNumFrames);
 
+    if (outputFormat == NULL) {
+        return ERROR_MALFORMED;
+    }
+
     int32_t width, height;
-    CHECK(outputFormat != NULL);
     CHECK(outputFormat->findInt32("width", &width));
     CHECK(outputFormat->findInt32("height", &height));
 
@@ -540,8 +543,11 @@
 status_t ImageDecoder::onOutputReceived(
         const sp<MediaCodecBuffer> &videoFrameBuffer,
         const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
+    if (outputFormat == NULL) {
+        return ERROR_MALFORMED;
+    }
+
     int32_t width, height;
-    CHECK(outputFormat != NULL);
     CHECK(outputFormat->findInt32("width", &width));
     CHECK(outputFormat->findInt32("height", &height));
 
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a132873..1fe5f60 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -112,14 +112,18 @@
 
     int64_t getDurationUs() const;
     int64_t getEstimatedTrackSizeBytes() const;
+    int32_t getMetaSizeIncrease() const;
     void writeTrackHeader(bool use32BitOffset = true);
     int64_t getMinCttsOffsetTimeUs();
     void bufferChunk(int64_t timestampUs);
     bool isAvc() const { return mIsAvc; }
     bool isHevc() const { return mIsHevc; }
+    bool isHeic() const { return mIsHeic; }
     bool isAudio() const { return mIsAudio; }
     bool isMPEG4() const { return mIsMPEG4; }
+    bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic; }
     void addChunkOffset(off64_t offset);
+    void addItemOffsetAndSize(off64_t offset, size_t size);
     int32_t getTrackId() const { return mTrackId; }
     status_t dump(int fd, const Vector<String16>& args) const;
     static const char *getFourCCForMime(const char *mime);
@@ -281,6 +285,7 @@
     bool mIsHevc;
     bool mIsAudio;
     bool mIsVideo;
+    bool mIsHeic;
     bool mIsMPEG4;
     bool mGotStartKeyFrame;
     bool mIsMalformed;
@@ -347,6 +352,16 @@
     int64_t mPreviousTrackTimeUs;
     int64_t mTrackEveryTimeDurationUs;
 
+    int32_t mRotation;
+
+    Vector<uint16_t> mProperties;
+    Vector<uint16_t> mDimgRefs;
+    int32_t mIsPrimary;
+    int32_t mWidth, mHeight;
+    int32_t mGridWidth, mGridHeight;
+    int32_t mGridRows, mGridCols;
+    size_t mNumTiles, mTileIndex;
+
     // Update the audio track's drift information.
     void updateDriftTime(const sp<MetaData>& meta);
 
@@ -386,7 +401,6 @@
 
     // Simple validation on the codec specific data
     status_t checkCodecSpecificData() const;
-    int32_t mRotation;
 
     void updateTrackSizeEstimate();
     void addOneStscTableEntry(size_t chunkId, size_t sampleId);
@@ -474,13 +488,18 @@
     mUse32BitOffset = true;
     mOffset = 0;
     mMdatOffset = 0;
-    mMoovBoxBuffer = NULL;
-    mMoovBoxBufferOffset = 0;
-    mWriteMoovBoxToMemory = false;
+    mInMemoryCache = NULL;
+    mInMemoryCacheOffset = 0;
+    mInMemoryCacheSize = 0;
+    mWriteBoxToMemory = false;
     mFreeBoxOffset = 0;
     mStreamableFile = false;
-    mEstimatedMoovBoxSize = 0;
     mTimeScale = -1;
+    mHasFileLevelMeta = false;
+    mHasMoovBox = false;
+    mPrimaryItemId = 0;
+    mAssociationEntryCount = 0;
+    mNumGrids = 0;
 
     // Following variables only need to be set for the first recording session.
     // And they will stay the same for all the recording sessions.
@@ -567,6 +586,8 @@
         }
     } else if (!strncasecmp(mime, "application/", 12)) {
         return "mett";
+    } else if (!strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
+        return "heic";
     } else {
         ALOGE("Track (%s) other than video/audio/metadata is not supported", mime);
     }
@@ -595,6 +616,9 @@
     Track *track = new Track(this, source, 1 + mTracks.size());
     mTracks.push_back(track);
 
+    mHasMoovBox |= !track->isHeic();
+    mHasFileLevelMeta |= track->isHeic();
+
     return OK;
 }
 
@@ -656,6 +680,32 @@
 #endif
 }
 
+int64_t MPEG4Writer::estimateFileLevelMetaSize() {
+    // base meta size
+    int64_t metaSize =     12  // meta fullbox header
+                         + 33  // hdlr box
+                         + 14  // pitm box
+                         + 16  // iloc box (fixed size portion)
+                         + 14  // iinf box (fixed size portion)
+                         + 32  // iprp box (fixed size portion)
+                         + 8   // idat box (when empty)
+                         + 12  // iref box (when empty)
+                         ;
+
+    for (List<Track *>::iterator it = mTracks.begin();
+         it != mTracks.end(); ++it) {
+        if ((*it)->isHeic()) {
+            metaSize += (*it)->getMetaSizeIncrease();
+        }
+    }
+
+    ALOGV("estimated meta size: %lld", (long long) metaSize);
+
+    // Need at least 8 bytes of padding at the end; otherwise the left-over
+    // free box may become malformed.
+    return metaSize + 8;
+}
+
 int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) {
     // This implementation is highly experimental/heurisitic.
     //
@@ -715,7 +765,11 @@
     ALOGI("limits: %" PRId64 "/%" PRId64 " bytes/us, bit rate: %d bps and the"
          " estimated moov size %" PRId64 " bytes",
          mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size);
-    return factor * size;
+
+    int64_t estimatedSize = factor * size;
+    CHECK_GE(estimatedSize, 8);
+
+    return estimatedSize;
 }
 
 status_t MPEG4Writer::start(MetaData *param) {
@@ -797,63 +851,70 @@
          mMaxFileSizeLimitBytes >= kMinStreamableFileSizeInBytes);
 
     /*
-     * mWriteMoovBoxToMemory is true if the amount of data in moov box is
-     * smaller than the reserved free space at the beginning of a file, AND
-     * when the content of moov box is constructed. Note that video/audio
-     * frame data is always written to the file but not in the memory.
+     * mWriteBoxToMemory is true if the amount of data in a file-level meta or
+     * moov box is smaller than the reserved free space at the beginning of a
+     * file, AND when the content of the box is constructed. Note that video/
+     * audio frame data is always written to the file but not in the memory.
      *
-     * Before stop()/reset() is called, mWriteMoovBoxToMemory is always
+     * Before stop()/reset() is called, mWriteBoxToMemory is always
      * false. When reset() is called at the end of a recording session,
-     * Moov box needs to be constructed.
+     * file-level meta and/or moov box needs to be constructed.
      *
-     * 1) Right before a moov box is constructed, mWriteMoovBoxToMemory
-     * to set to mStreamableFile so that if
-     * the file is intended to be streamable, it is set to true;
-     * otherwise, it is set to false. When the value is set to false,
-     * all the content of the moov box is written immediately to
+     * 1) Right before the box is constructed, mWriteBoxToMemory is set to
+     * mStreamableFile so that if the file is intended to be streamable, it
+     * is set to true; otherwise, it is set to false. When the value is set
+     * to false, all the content of that box is written immediately to
      * the end of the file. When the value is set to true, all the
-     * content of the moov box is written to an in-memory cache,
-     * mMoovBoxBuffer, util the following condition happens. Note
+     * content of that box is written to an in-memory cache,
+     * mInMemoryCache, until the following condition happens. Note
      * that the size of the in-memory cache is the same as the
      * reserved free space at the beginning of the file.
      *
-     * 2) While the data of the moov box is written to an in-memory
+     * 2) While the data of the box is written to an in-memory
      * cache, the data size is checked against the reserved space.
-     * If the data size surpasses the reserved space, subsequent moov
-     * data could no longer be hold in the in-memory cache. This also
+     * If the data size surpasses the reserved space, subsequent box data
+     * can no longer be held in the in-memory cache. This also
      * indicates that the reserved space was too small. At this point,
-     * _all_ moov data must be written to the end of the file.
-     * mWriteMoovBoxToMemory must be set to false to direct the write
+     * _all_ subsequent box data must be written to the end of the file.
+     * mWriteBoxToMemory must be set to false to direct the write
      * to the file.
      *
-     * 3) If the data size in moov box is smaller than the reserved
-     * space after moov box is completely constructed, the in-memory
-     * cache copy of the moov box is written to the reserved free
-     * space. Thus, immediately after the moov is completedly
-     * constructed, mWriteMoovBoxToMemory is always set to false.
+     * 3) If the data size in the box is smaller than the reserved
+     * space after the box is completely constructed, the in-memory
+     * cache copy of the box is written to the reserved free space.
+     * mWriteBoxToMemory is always set to false after all boxes that
+     * use the in-memory cache have been constructed.
      */
-    mWriteMoovBoxToMemory = false;
-    mMoovBoxBuffer = NULL;
-    mMoovBoxBufferOffset = 0;
+    mWriteBoxToMemory = false;
+    mInMemoryCache = NULL;
+    mInMemoryCacheOffset = 0;
+
+
+    ALOGV("muxer starting: mHasMoovBox %d, mHasFileLevelMeta %d",
+            mHasMoovBox, mHasFileLevelMeta);
 
     writeFtypBox(param);
 
     mFreeBoxOffset = mOffset;
 
-    if (mEstimatedMoovBoxSize == 0) {
+    if (mInMemoryCacheSize == 0) {
         int32_t bitRate = -1;
-        if (param) {
-            param->findInt32(kKeyBitRate, &bitRate);
+        if (mHasFileLevelMeta) {
+            mInMemoryCacheSize += estimateFileLevelMetaSize();
         }
-        mEstimatedMoovBoxSize = estimateMoovBoxSize(bitRate);
+        if (mHasMoovBox) {
+            if (param) {
+                param->findInt32(kKeyBitRate, &bitRate);
+            }
+            mInMemoryCacheSize += estimateMoovBoxSize(bitRate);
+        }
     }
-    CHECK_GE(mEstimatedMoovBoxSize, 8);
     if (mStreamableFile) {
         // Reserve a 'free' box only for streamable file
         lseek64(mFd, mFreeBoxOffset, SEEK_SET);
-        writeInt32(mEstimatedMoovBoxSize);
+        writeInt32(mInMemoryCacheSize);
         write("free", 4);
-        mMdatOffset = mFreeBoxOffset + mEstimatedMoovBoxSize;
+        mMdatOffset = mFreeBoxOffset + mInMemoryCacheSize;
     } else {
         mMdatOffset = mOffset;
     }
@@ -965,8 +1026,8 @@
     mFd = -1;
     mInitCheck = NO_INIT;
     mStarted = false;
-    free(mMoovBoxBuffer);
-    mMoovBoxBuffer = NULL;
+    free(mInMemoryCache);
+    mInMemoryCache = NULL;
 }
 
 void MPEG4Writer::finishCurrentSession() {
@@ -1009,13 +1070,18 @@
     status_t err = OK;
     int64_t maxDurationUs = 0;
     int64_t minDurationUs = 0x7fffffffffffffffLL;
+    int32_t nonImageTrackCount = 0;
     for (List<Track *>::iterator it = mTracks.begin();
-         it != mTracks.end(); ++it) {
+        it != mTracks.end(); ++it) {
         status_t status = (*it)->stop(stopSource);
         if (err == OK && status != OK) {
             err = status;
         }
 
+        // skip image tracks
+        if ((*it)->isHeic()) continue;
+        nonImageTrackCount++;
+
         int64_t durationUs = (*it)->getDurationUs();
         if (durationUs > maxDurationUs) {
             maxDurationUs = durationUs;
@@ -1025,7 +1091,7 @@
         }
     }
 
-    if (mTracks.size() > 1) {
+    if (nonImageTrackCount > 1) {
         ALOGD("Duration from tracks range is [%" PRId64 ", %" PRId64 "] us",
             minDurationUs, maxDurationUs);
     }
@@ -1051,45 +1117,43 @@
     }
     lseek64(mFd, mOffset, SEEK_SET);
 
-    // Construct moov box now
-    mMoovBoxBufferOffset = 0;
-    mWriteMoovBoxToMemory = mStreamableFile;
-    if (mWriteMoovBoxToMemory) {
+    // Construct file-level meta and moov box now
+    mInMemoryCacheOffset = 0;
+    mWriteBoxToMemory = mStreamableFile;
+    if (mWriteBoxToMemory) {
         // There is no need to allocate in-memory cache
-        // for moov box if the file is not streamable.
+        // if the file is not streamable.
 
-        mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
-        CHECK(mMoovBoxBuffer != NULL);
-    }
-    writeMoovBox(maxDurationUs);
-
-    // mWriteMoovBoxToMemory could be set to false in
-    // MPEG4Writer::write() method
-    if (mWriteMoovBoxToMemory) {
-        mWriteMoovBoxToMemory = false;
-        // Content of the moov box is saved in the cache, and the in-memory
-        // moov box needs to be written to the file in a single shot.
-
-        CHECK_LE(mMoovBoxBufferOffset + 8, mEstimatedMoovBoxSize);
-
-        // Moov box
-        lseek64(mFd, mFreeBoxOffset, SEEK_SET);
-        mOffset = mFreeBoxOffset;
-        write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset);
-
-        // Free box
-        lseek64(mFd, mOffset, SEEK_SET);
-        writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset);
-        write("free", 4);
-    } else {
-        ALOGI("The mp4 file will not be streamable.");
+        mInMemoryCache = (uint8_t *) malloc(mInMemoryCacheSize);
+        CHECK(mInMemoryCache != NULL);
     }
 
-    // Free in-memory cache for moov box
-    if (mMoovBoxBuffer != NULL) {
-        free(mMoovBoxBuffer);
-        mMoovBoxBuffer = NULL;
-        mMoovBoxBufferOffset = 0;
+    if (mHasFileLevelMeta) {
+        writeFileLevelMetaBox();
+        if (mWriteBoxToMemory) {
+            writeCachedBoxToFile("meta");
+        } else {
+            ALOGI("The file meta box is written at the end.");
+        }
+    }
+
+    if (mHasMoovBox) {
+        writeMoovBox(maxDurationUs);
+        // mWriteBoxToMemory could be set to false in
+        // MPEG4Writer::write() method
+        if (mWriteBoxToMemory) {
+            writeCachedBoxToFile("moov");
+        } else {
+            ALOGI("The mp4 file will not be streamable.");
+        }
+    }
+    mWriteBoxToMemory = false;
+
+    // Free in-memory cache for box writing
+    if (mInMemoryCache != NULL) {
+        free(mInMemoryCache);
+        mInMemoryCache = NULL;
+        mInMemoryCacheOffset = 0;
     }
 
     CHECK(mBoxes.empty());
@@ -1098,6 +1162,42 @@
     return err;
 }
 
+/*
+ * Writes currently cached box into file.
+ *
+ * Must be called while mWriteBoxToMemory is true, and will not modify
+ * mWriteBoxToMemory. After the call, the remaining cache size is
+ * reduced and the buffer offset is reset to the beginning of the cache.
+ */
+void MPEG4Writer::writeCachedBoxToFile(const char *type) {
+    CHECK(mWriteBoxToMemory);
+
+    mWriteBoxToMemory = false;
+    // Content of the box is saved in the cache, and the in-memory
+    // box needs to be written to the file in a single shot.
+
+    CHECK_LE(mInMemoryCacheOffset + 8, mInMemoryCacheSize);
+
+    // Cached box
+    lseek64(mFd, mFreeBoxOffset, SEEK_SET);
+    mOffset = mFreeBoxOffset;
+    write(mInMemoryCache, 1, mInMemoryCacheOffset);
+
+    // Free box
+    lseek64(mFd, mOffset, SEEK_SET);
+    mFreeBoxOffset = mOffset;
+    writeInt32(mInMemoryCacheSize - mInMemoryCacheOffset);
+    write("free", 4);
+
+    // Rewind buffering to the beginning, and restore mWriteBoxToMemory flag
+    mInMemoryCacheSize -= mInMemoryCacheOffset;
+    mInMemoryCacheOffset = 0;
+    mWriteBoxToMemory = true;
+
+    ALOGV("dumped out %s box, estimated size remaining %lld",
+            type, (long long)mInMemoryCacheSize);
+}
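
For illustration only, a simplified sketch (assumed names, not the writer itself) of the reserved-free-space bookkeeping that writeCachedBoxToFile() relies on: boxes are serialized into an in-memory cache the size of the reserved 'free' box, each finished box is copied into the reserved region and the remaining reservation shrinks, and if a box outgrows the cache everything falls back to appending at the end of the file.

    // Simplified sketch under assumed names; not MPEG4Writer's actual code.
    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct CachedBoxWriter {
        std::vector<uint8_t> file;    // stands in for the output fd
        std::vector<uint8_t> cache;   // ~ mInMemoryCache
        size_t cacheOffset = 0;       // ~ mInMemoryCacheOffset
        size_t freeOffset = 0;        // start of the reserved 'free' region
        bool toMemory = false;        // ~ mWriteBoxToMemory

        void reserve(size_t size) {   // lay down the placeholder 'free' box
            freeOffset = file.size();
            file.resize(file.size() + size);
            cache.resize(size);
            toMemory = true;
        }
        void write(const uint8_t *p, size_t n) {
            if (toMemory && cacheOffset + n <= cache.size()) {
                memcpy(cache.data() + cacheOffset, p, n);
                cacheOffset += n;
            } else {
                if (toMemory) {       // reservation too small: dump cache, then append
                    file.insert(file.end(), cache.begin(), cache.begin() + cacheOffset);
                    toMemory = false;
                }
                file.insert(file.end(), p, p + n);
            }
        }
        void flushCachedBox() {       // ~ writeCachedBoxToFile(); only while toMemory
            memcpy(file.data() + freeOffset, cache.data(), cacheOffset);
            freeOffset += cacheOffset;                 // next box reuses what is left
            cache.resize(cache.size() - cacheOffset);
            cacheOffset = 0;
        }
    };
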
+
 uint32_t MPEG4Writer::getMpeg4Time() {
     time_t now = time(NULL);
     // MP4 file uses time counting seconds since midnight, Jan. 1, 1904
@@ -1142,14 +1242,16 @@
     if (mAreGeoTagsAvailable) {
         writeUdtaBox();
     }
-    writeMetaBox();
+    writeMoovLevelMetaBox();
     // Loop through all the tracks to get the global time offset if there is
     // any ctts table appears in a video track.
     int64_t minCttsOffsetTimeUs = kMaxCttsOffsetTimeUs;
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        minCttsOffsetTimeUs =
-            std::min(minCttsOffsetTimeUs, (*it)->getMinCttsOffsetTimeUs());
+        if (!(*it)->isHeic()) {
+            minCttsOffsetTimeUs =
+                std::min(minCttsOffsetTimeUs, (*it)->getMinCttsOffsetTimeUs());
+        }
     }
     ALOGI("Ajust the moov start time from %lld us -> %lld us",
             (long long)mStartTimestampUs,
@@ -1159,7 +1261,9 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        (*it)->writeTrackHeader(mUse32BitOffset);
+        if (!(*it)->isHeic()) {
+            (*it)->writeTrackHeader(mUse32BitOffset);
+        }
     }
     endBox();  // moov
 }
@@ -1168,17 +1272,31 @@
     beginBox("ftyp");
 
     int32_t fileType;
-    if (param && param->findInt32(kKeyFileType, &fileType) &&
-        fileType != OUTPUT_FORMAT_MPEG_4) {
+    if (!param || !param->findInt32(kKeyFileType, &fileType)) {
+        fileType = OUTPUT_FORMAT_MPEG_4;
+    }
+    if (fileType != OUTPUT_FORMAT_MPEG_4 && fileType != OUTPUT_FORMAT_HEIF) {
         writeFourcc("3gp4");
         writeInt32(0);
         writeFourcc("isom");
         writeFourcc("3gp4");
     } else {
-        writeFourcc("mp42");
+        // Only write "heic" as major brand if the client specified HEIF
+        // AND we indeed receive some image heic tracks.
+        if (fileType == OUTPUT_FORMAT_HEIF && mHasFileLevelMeta) {
+            writeFourcc("heic");
+        } else {
+            writeFourcc("mp42");
+        }
         writeInt32(0);
-        writeFourcc("isom");
-        writeFourcc("mp42");
+        if (mHasFileLevelMeta) {
+            writeFourcc("mif1");
+            writeFourcc("heic");
+        }
+        if (mHasMoovBox) {
+            writeFourcc("isom");
+            writeFourcc("mp42");
+        }
     }
 
     endBox();
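As a concrete example of the brand logic above: a HEIF recording that actually produced image tracks writes major brand 'heic' followed by compatible brands 'mif1' and 'heic', plus 'isom' and 'mp42' when it also carries audio/video tracks under a moov box; a plain MP4 recording keeps major brand 'mp42' with compatible brands 'isom' and 'mp42', as before.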
@@ -1225,15 +1343,21 @@
     mLock.unlock();
 }
 
-off64_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
+off64_t MPEG4Writer::addSample_l(
+        MediaBuffer *buffer, bool usePrefix, size_t *bytesWritten) {
     off64_t old_offset = mOffset;
 
-    ::write(mFd,
-          (const uint8_t *)buffer->data() + buffer->range_offset(),
-          buffer->range_length());
+    if (usePrefix) {
+        addMultipleLengthPrefixedSamples_l(buffer);
+    } else {
+        ::write(mFd,
+              (const uint8_t *)buffer->data() + buffer->range_offset(),
+              buffer->range_length());
 
-    mOffset += buffer->range_length();
+        mOffset += buffer->range_length();
+    }
 
+    *bytesWritten = mOffset - old_offset;
     return old_offset;
 }
 
@@ -1251,9 +1375,7 @@
     }
 }
 
-off64_t MPEG4Writer::addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer) {
-    off64_t old_offset = mOffset;
-
+void MPEG4Writer::addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer) {
     const size_t kExtensionNALSearchRange = 64; // bytes to look for non-VCL NALUs
 
     const uint8_t *dataStart = (const uint8_t *)buffer->data() + buffer->range_offset();
@@ -1278,13 +1400,9 @@
     buffer->set_range(buffer->range_offset() + currentNalOffset,
             buffer->range_length() - currentNalOffset);
     addLengthPrefixedSample_l(buffer);
-
-    return old_offset;
 }
 
-off64_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
-    off64_t old_offset = mOffset;
-
+void MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
     size_t length = buffer->range_length();
 
     if (mUse4ByteNalLength) {
@@ -1312,40 +1430,35 @@
         ::write(mFd, (const uint8_t *)buffer->data() + buffer->range_offset(), length);
         mOffset += length + 2;
     }
-
-    return old_offset;
 }
 
 size_t MPEG4Writer::write(
         const void *ptr, size_t size, size_t nmemb) {
 
     const size_t bytes = size * nmemb;
-    if (mWriteMoovBoxToMemory) {
+    if (mWriteBoxToMemory) {
 
-        off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
-        if (moovBoxSize > mEstimatedMoovBoxSize) {
-            // The reserved moov box at the beginning of the file
-            // is not big enough. Moov box should be written to
-            // the end of the file from now on, but not to the
-            // in-memory cache.
+        off64_t boxSize = 8 + mInMemoryCacheOffset + bytes;
+        if (boxSize > mInMemoryCacheSize) {
+            // The reserved free space at the beginning of the file is not big
+            // enough. Boxes should be written to the end of the file from now
+            // on, but not to the in-memory cache.
 
-            // We write partial moov box that is in the memory to
-            // the file first.
+            // We write partial box that is in the memory to the file first.
             for (List<off64_t>::iterator it = mBoxes.begin();
                  it != mBoxes.end(); ++it) {
                 (*it) += mOffset;
             }
             lseek64(mFd, mOffset, SEEK_SET);
-            ::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset);
+            ::write(mFd, mInMemoryCache, mInMemoryCacheOffset);
             ::write(mFd, ptr, bytes);
-            mOffset += (bytes + mMoovBoxBufferOffset);
+            mOffset += (bytes + mInMemoryCacheOffset);
 
-            // All subsequent moov box content will be written
-            // to the end of the file.
-            mWriteMoovBoxToMemory = false;
+            // All subsequent boxes will be written to the end of the file.
+            mWriteBoxToMemory = false;
         } else {
-            memcpy(mMoovBoxBuffer + mMoovBoxBufferOffset, ptr, bytes);
-            mMoovBoxBufferOffset += bytes;
+            memcpy(mInMemoryCache + mInMemoryCacheOffset, ptr, bytes);
+            mInMemoryCacheOffset += bytes;
         }
     } else {
         ::write(mFd, ptr, size * nmemb);
@@ -1355,8 +1468,8 @@
 }
 
 void MPEG4Writer::beginBox(uint32_t id) {
-    mBoxes.push_back(mWriteMoovBoxToMemory?
-            mMoovBoxBufferOffset: mOffset);
+    mBoxes.push_back(mWriteBoxToMemory?
+            mInMemoryCacheOffset: mOffset);
 
     writeInt32(0);
     writeInt32(id);
@@ -1365,8 +1478,8 @@
 void MPEG4Writer::beginBox(const char *fourcc) {
     CHECK_EQ(strlen(fourcc), 4u);
 
-    mBoxes.push_back(mWriteMoovBoxToMemory?
-            mMoovBoxBufferOffset: mOffset);
+    mBoxes.push_back(mWriteBoxToMemory?
+            mInMemoryCacheOffset: mOffset);
 
     writeInt32(0);
     writeFourcc(fourcc);
@@ -1378,9 +1491,9 @@
     off64_t offset = *--mBoxes.end();
     mBoxes.erase(--mBoxes.end());
 
-    if (mWriteMoovBoxToMemory) {
-       int32_t x = htonl(mMoovBoxBufferOffset - offset);
-       memcpy(mMoovBoxBuffer + offset, &x, 4);
+    if (mWriteBoxToMemory) {
+        int32_t x = htonl(mInMemoryCacheOffset - offset);
+        memcpy(mInMemoryCache + offset, &x, 4);
     } else {
         lseek64(mFd, offset, SEEK_SET);
         writeInt32(mOffset - offset);
@@ -1539,7 +1652,7 @@
     if (mMaxFileSizeLimitBytes == 0) {
         return false;
     }
-    int64_t nTotalBytesEstimate = static_cast<int64_t>(mEstimatedMoovBoxSize);
+    int64_t nTotalBytesEstimate = static_cast<int64_t>(mInMemoryCacheSize);
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
         nTotalBytesEstimate += (*it)->getEstimatedTrackSizeBytes();
@@ -1562,7 +1675,7 @@
         return false;
     }
 
-    int64_t nTotalBytesEstimate = static_cast<int64_t>(mEstimatedMoovBoxSize);
+    int64_t nTotalBytesEstimate = static_cast<int64_t>(mInMemoryCacheSize);
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
         nTotalBytesEstimate += (*it)->getEstimatedTrackSizeBytes();
@@ -1584,7 +1697,7 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
-        if ((*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
+        if (!(*it)->isHeic() && (*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
             return true;
         }
     }
@@ -1656,7 +1769,16 @@
       mGotAllCodecSpecificData(false),
       mReachedEOS(false),
       mStartTimestampUs(-1),
-      mRotation(0) {
+      mRotation(0),
+      mIsPrimary(0),
+      mWidth(0),
+      mHeight(0),
+      mGridWidth(0),
+      mGridHeight(0),
+      mGridRows(0),
+      mGridCols(0),
+      mNumTiles(1),
+      mTileIndex(0) {
     getCodecSpecificDataFromInputFormatIfPossible();
 
     const char *mime;
@@ -1665,6 +1787,7 @@
     mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
     mIsAudio = !strncasecmp(mime, "audio/", 6);
     mIsVideo = !strncasecmp(mime, "video/", 6);
+    mIsHeic = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
     mIsMPEG4 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC);
 
@@ -1676,7 +1799,27 @@
         }
     }
 
-    setTimeScale();
+    if (!mIsHeic) {
+        setTimeScale();
+    } else {
+        CHECK(mMeta->findInt32(kKeyWidth, &mWidth) && (mWidth > 0));
+        CHECK(mMeta->findInt32(kKeyHeight, &mHeight) && (mHeight > 0));
+
+        int32_t gridWidth, gridHeight, gridRows, gridCols;
+        if (mMeta->findInt32(kKeyGridWidth, &gridWidth) && (gridWidth > 0) &&
+            mMeta->findInt32(kKeyGridHeight, &gridHeight) && (gridHeight > 0) &&
+            mMeta->findInt32(kKeyGridRows, &gridRows) && (gridRows > 0) &&
+            mMeta->findInt32(kKeyGridCols, &gridCols) && (gridCols > 0)) {
+            mGridWidth = gridWidth;
+            mGridHeight = gridHeight;
+            mGridRows = gridRows;
+            mGridCols = gridCols;
+            mNumTiles = gridRows * gridCols;
+        }
+        if (!mMeta->findInt32(kKeyTrackIsDefault, &mIsPrimary)) {
+            mIsPrimary = false;
+        }
+    }
 }
 
 // Clear all the internal states except the CSD data.
@@ -1724,15 +1867,15 @@
 }
 
 void MPEG4Writer::Track::updateTrackSizeEstimate() {
-
-    uint32_t stcoBoxCount = (mOwner->use32BitFileOffset()
-                            ? mStcoTableEntries->count()
-                            : mCo64TableEntries->count());
-    int64_t stcoBoxSizeBytes = stcoBoxCount * 4;
-    int64_t stszBoxSizeBytes = mSamplesHaveSameSize? 4: (mStszTableEntries->count() * 4);
-
     mEstimatedTrackSizeBytes = mMdatSizeBytes;  // media data size
-    if (!mOwner->isFileStreamable()) {
+
+    if (!isHeic() && !mOwner->isFileStreamable()) {
+        uint32_t stcoBoxCount = (mOwner->use32BitFileOffset()
+                                ? mStcoTableEntries->count()
+                                : mCo64TableEntries->count());
+        int64_t stcoBoxSizeBytes = stcoBoxCount * 4;
+        int64_t stszBoxSizeBytes = mSamplesHaveSameSize? 4: (mStszTableEntries->count() * 4);
+
         // Reserved free space is not large enough to hold
         // all meta data and thus wasted.
         mEstimatedTrackSizeBytes += mStscTableEntries->count() * 12 +  // stsc box size
@@ -1746,10 +1889,9 @@
 
 void MPEG4Writer::Track::addOneStscTableEntry(
         size_t chunkId, size_t sampleId) {
-
-        mStscTableEntries->add(htonl(chunkId));
-        mStscTableEntries->add(htonl(sampleId));
-        mStscTableEntries->add(htonl(1));
+    mStscTableEntries->add(htonl(chunkId));
+    mStscTableEntries->add(htonl(sampleId));
+    mStscTableEntries->add(htonl(1));
 }
 
 void MPEG4Writer::Track::addOneStssTableEntry(size_t sampleId) {
@@ -1795,6 +1937,7 @@
 }
 
 void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
+    CHECK(!mIsHeic);
     if (mOwner->use32BitFileOffset()) {
         uint32_t value = offset;
         mStcoTableEntries->add(htonl(value));
@@ -1803,6 +1946,70 @@
     }
 }
 
+void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size) {
+    CHECK(mIsHeic);
+
+    if (offset > UINT32_MAX || size > UINT32_MAX) {
+        ALOGE("offset or size is out of range: %lld, %lld",
+                (long long) offset, (long long) size);
+        mIsMalformed = true;
+    }
+    if (mIsMalformed) {
+        return;
+    }
+    if (mTileIndex >= mNumTiles) {
+        ALOGW("Ignoring excess tiles!");
+        return;
+    }
+
+    if (mProperties.empty()) {
+        mProperties.push_back(mOwner->addProperty_l({
+            .type = FOURCC('h', 'v', 'c', 'C'),
+            .hvcc = ABuffer::CreateAsCopy(mCodecSpecificData, mCodecSpecificDataSize)
+        }));
+
+        mProperties.push_back(mOwner->addProperty_l({
+            .type = FOURCC('i', 's', 'p', 'e'),
+            .width = (mNumTiles > 1) ? mGridWidth : mWidth,
+            .height = (mNumTiles > 1) ? mGridHeight : mHeight,
+        }));
+    }
+
+    uint16_t itemId = mOwner->addItem_l({
+        .itemType = "hvc1",
+        .isPrimary = (mNumTiles > 1) ? false : (mIsPrimary != 0),
+        .isHidden = (mNumTiles > 1),
+        .offset = (uint32_t)offset,
+        .size = (uint32_t)size,
+        .properties = mProperties,
+    });
+
+    mTileIndex++;
+    if (mNumTiles > 1) {
+        mDimgRefs.push_back(itemId);
+
+        if (mTileIndex == mNumTiles) {
+            mProperties.clear();
+            mProperties.push_back(mOwner->addProperty_l({
+                .type = FOURCC('i', 's', 'p', 'e'),
+                .width = mWidth,
+                .height = mHeight,
+            }));
+            mOwner->addItem_l({
+                .itemType = "grid",
+                .isPrimary = (mIsPrimary != 0),
+                .isHidden = false,
+                .rows = (uint32_t)mGridRows,
+                .cols = (uint32_t)mGridCols,
+                .width = (uint32_t)mWidth,
+                .height = (uint32_t)mHeight,
+                .properties = mProperties,
+                .dimgRefs = mDimgRefs,
+            });
+        }
+    }
+}
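As a concrete example of the flow above: for a 2x2 tiled image, addItemOffsetAndSize() runs four times and registers four hidden 'hvc1' items sharing the tile-sized 'ispe' and the 'hvcC' properties; on the last tile it adds a single 'grid' item that carries the full-image 'ispe', the row/column counts, and 'dimg' references to the four tile items, and it is that grid item that gets marked primary when the track is the default one.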
+
 void MPEG4Writer::Track::setTimeScale() {
     ALOGV("setTimeScale");
     // Default time scale
@@ -1855,7 +2062,8 @@
     size_t size = 0;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         mMeta->findData(kKeyAVCC, &type, &data, &size);
-    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
+               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         mMeta->findData(kKeyHVCC, &type, &data, &size);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
             || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
@@ -1945,14 +2153,16 @@
         chunk->mTimeStampUs, chunk->mTrack->getTrackType());
 
     int32_t isFirstSample = true;
+    bool usePrefix = chunk->mTrack->usePrefix();
     while (!chunk->mSamples.empty()) {
         List<MediaBuffer *>::iterator it = chunk->mSamples.begin();
 
-        off64_t offset = (chunk->mTrack->isAvc() || chunk->mTrack->isHevc())
-                                ? addMultipleLengthPrefixedSamples_l(*it)
-                                : addSample_l(*it);
+        size_t bytesWritten;
+        off64_t offset = addSample_l(*it, usePrefix, &bytesWritten);
 
-        if (isFirstSample) {
+        if (chunk->mTrack->isHeic()) {
+            chunk->mTrack->addItemOffsetAndSize(offset, bytesWritten);
+        } else if (isFirstSample) {
             chunk->mTrack->addChunkOffset(offset);
             isFirstSample = false;
         }
@@ -2637,7 +2847,7 @@
                             (const uint8_t *)buffer->data()
                                 + buffer->range_offset(),
                             buffer->range_length());
-                } else if (mIsHevc) {
+                } else if (mIsHevc || mIsHeic) {
                     err = makeHEVCCodecSpecificData(
                             (const uint8_t *)buffer->data()
                                 + buffer->range_offset(),
@@ -2662,7 +2872,8 @@
         }
 
         // Per-frame metadata sample's size must be smaller than max allowed.
-        if (!mIsVideo && !mIsAudio && buffer->range_length() >= kMaxMetadataSize) {
+        if (!mIsVideo && !mIsAudio && !mIsHeic &&
+                buffer->range_length() >= kMaxMetadataSize) {
             ALOGW("Buffer size is %zu. Maximum metadata buffer size is %lld for %s track",
                     buffer->range_length(), (long long)kMaxMetadataSize, trackName);
             buffer->release();
@@ -2683,10 +2894,10 @@
         buffer->release();
         buffer = NULL;
 
-        if (mIsAvc || mIsHevc) StripStartcode(copy);
+        if (usePrefix()) StripStartcode(copy);
 
         size_t sampleSize = copy->range_length();
-        if (mIsAvc || mIsHevc) {
+        if (usePrefix()) {
             if (mOwner->useNalLengthFour()) {
                 sampleSize += 4;
             } else {
@@ -2948,15 +3159,19 @@
             trackProgressStatus(timestampUs);
         }
         if (!hasMultipleTracks) {
-            off64_t offset = (mIsAvc || mIsHevc) ? mOwner->addMultipleLengthPrefixedSamples_l(copy)
-                                 : mOwner->addSample_l(copy);
+            size_t bytesWritten;
+            off64_t offset = mOwner->addSample_l(copy, usePrefix(), &bytesWritten);
 
-            uint32_t count = (mOwner->use32BitFileOffset()
-                        ? mStcoTableEntries->count()
-                        : mCo64TableEntries->count());
+            if (mIsHeic) {
+                addItemOffsetAndSize(offset, bytesWritten);
+            } else {
+                uint32_t count = (mOwner->use32BitFileOffset()
+                            ? mStcoTableEntries->count()
+                            : mCo64TableEntries->count());
 
-            if (count == 0) {
-                addChunkOffset(offset);
+                if (count == 0) {
+                    addChunkOffset(offset);
+                }
             }
             copy->release();
             copy = NULL;
@@ -2964,7 +3179,10 @@
         }
 
         mChunkSamples.push_back(copy);
-        if (interleaveDurationUs == 0) {
+        if (mIsHeic) {
+            bufferChunk(0 /*timestampUs*/);
+            ++nChunks;
+        } else if (interleaveDurationUs == 0) {
             addOneStscTableEntry(++nChunks, 1);
             bufferChunk(timestampUs);
         } else {
@@ -2997,42 +3215,49 @@
 
     mOwner->trackProgressStatus(mTrackId, -1, err);
 
-    // Last chunk
-    if (!hasMultipleTracks) {
-        addOneStscTableEntry(1, mStszTableEntries->count());
-    } else if (!mChunkSamples.empty()) {
-        addOneStscTableEntry(++nChunks, mChunkSamples.size());
-        bufferChunk(timestampUs);
-    }
-
-    // We don't really know how long the last frame lasts, since
-    // there is no frame time after it, just repeat the previous
-    // frame's duration.
-    if (mStszTableEntries->count() == 1) {
-        lastDurationUs = 0;  // A single sample's duration
-        lastDurationTicks = 0;
-    } else {
-        ++sampleCount;  // Count for the last sample
-    }
-
-    if (mStszTableEntries->count() <= 2) {
-        addOneSttsTableEntry(1, lastDurationTicks);
-        if (sampleCount - 1 > 0) {
-            addOneSttsTableEntry(sampleCount - 1, lastDurationTicks);
+    if (mIsHeic) {
+        if (!mChunkSamples.empty()) {
+            bufferChunk(0);
+            ++nChunks;
         }
     } else {
-        addOneSttsTableEntry(sampleCount, lastDurationTicks);
-    }
-
-    // The last ctts box may not have been written yet, and this
-    // is to make sure that we write out the last ctts box.
-    if (currCttsOffsetTimeTicks == lastCttsOffsetTimeTicks) {
-        if (cttsSampleCount > 0) {
-            addOneCttsTableEntry(cttsSampleCount, lastCttsOffsetTimeTicks);
+        // Last chunk
+        if (!hasMultipleTracks) {
+            addOneStscTableEntry(1, mStszTableEntries->count());
+        } else if (!mChunkSamples.empty()) {
+            addOneStscTableEntry(++nChunks, mChunkSamples.size());
+            bufferChunk(timestampUs);
         }
-    }
 
-    mTrackDurationUs += lastDurationUs;
+        // We don't really know how long the last frame lasts, since
+        // there is no frame time after it, just repeat the previous
+        // frame's duration.
+        if (mStszTableEntries->count() == 1) {
+            lastDurationUs = 0;  // A single sample's duration
+            lastDurationTicks = 0;
+        } else {
+            ++sampleCount;  // Count for the last sample
+        }
+
+        if (mStszTableEntries->count() <= 2) {
+            addOneSttsTableEntry(1, lastDurationTicks);
+            if (sampleCount - 1 > 0) {
+                addOneSttsTableEntry(sampleCount - 1, lastDurationTicks);
+            }
+        } else {
+            addOneSttsTableEntry(sampleCount, lastDurationTicks);
+        }
+
+        // The last ctts box may not have been written yet, and this
+        // is to make sure that we write out the last ctts box.
+        if (currCttsOffsetTimeTicks == lastCttsOffsetTimeTicks) {
+            if (cttsSampleCount > 0) {
+                addOneCttsTableEntry(cttsSampleCount, lastCttsOffsetTimeTicks);
+            }
+        }
+
+        mTrackDurationUs += lastDurationUs;
+    }
     mReachedEOS = true;
 
     sendTrackSummary(hasMultipleTracks);
@@ -3054,7 +3279,7 @@
         return true;
     }
 
-    if (mStszTableEntries->count() == 0) {                      // no samples written
+    if (!mIsHeic && mStszTableEntries->count() == 0) {  // no samples written
         ALOGE("The number of recorded samples is 0");
         return true;
     }
@@ -3200,13 +3425,28 @@
     return mEstimatedTrackSizeBytes;
 }
 
+int32_t MPEG4Writer::Track::getMetaSizeIncrease() const {
+    CHECK(mIsHeic);
+    return    20                           // 1. 'ispe' property
+            + (8 + mCodecSpecificDataSize) // 2. 'hvcC' property
+            + (20                          // 3. extra 'ispe'
+            + (8 + 2 + 2 + mNumTiles * 2)  // 4. 'dimg' ref
+            + 12)                          // 5. ImageGrid in 'idat' (worst case)
+            * (mNumTiles > 1)              // -  (3~5: applicable only if grid)
+            + (16                          // 6. increase to 'iloc'
+            + 21                           // 7. increase to 'iinf'
+            + (3 + 2 * 2))                 // 8. increase to 'ipma' (worst case)
+            * (mNumTiles + 1);             // -  (6~8: are per-item)
+}
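As a worked example of the estimate above (purely illustrative, assuming a 100-byte hvcC blob): a 4x3 grid gives mNumTiles = 12, so the increase is 20 + (8 + 100) + (20 + (8 + 2 + 2 + 12*2) + 12) + (16 + 21 + 7) * 13 = 20 + 108 + 68 + 572 = 768 bytes of file-level meta for that track. For a single-tile image the grid terms (3-5) drop out and the per-item terms are counted twice (mNumTiles + 1 = 2), giving 20 + 108 + 88 = 216 bytes.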
+
 status_t MPEG4Writer::Track::checkCodecSpecificData() const {
     const char *mime;
     CHECK(mMeta->findCString(kKeyMIMEType, &mime));
     if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
-        !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
+        !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
+        !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
         if (!mCodecSpecificData ||
             mCodecSpecificDataSize <= 0) {
             ALOGE("Missing codec specific data");
@@ -3223,7 +3463,10 @@
 }
 
 const char *MPEG4Writer::Track::getTrackType() const {
-    return mIsAudio ? "Audio" : (mIsVideo ? "Video" : "Metadata");
+    return mIsAudio ? "Audio" :
+           mIsVideo ? "Video" :
+           mIsHeic  ? "Image" :
+                      "Metadata";
 }
 
 void MPEG4Writer::Track::writeTrackHeader(bool use32BitOffset) {
@@ -3793,11 +4036,11 @@
     endBox();
 }
 
-void MPEG4Writer::writeHdlr() {
+void MPEG4Writer::writeHdlr(const char *handlerType) {
     beginBox("hdlr");
     writeInt32(0); // Version, Flags
     writeInt32(0); // Predefined
-    writeFourcc("mdta");
+    writeFourcc(handlerType);
     writeInt32(0); // Reserved[0]
     writeInt32(0); // Reserved[1]
     writeInt32(0); // Reserved[2]
@@ -3877,19 +4120,283 @@
     endBox(); // ilst
 }
 
-void MPEG4Writer::writeMetaBox() {
+void MPEG4Writer::writeMoovLevelMetaBox() {
     size_t count = mMetaKeys->countEntries();
     if (count == 0) {
         return;
     }
 
     beginBox("meta");
-    writeHdlr();
+    writeHdlr("mdta");
     writeKeys();
     writeIlst();
     endBox();
 }
 
+void MPEG4Writer::writeIlocBox() {
+    beginBox("iloc");
+    // Use version 1 to allow construction method 1 that refers to
+    // data in idat box inside meta box.
+    writeInt32(0x01000000); // Version = 1, Flags = 0
+    writeInt16(0x4400);     // offset_size = length_size = 4
+                            // base_offset_size = index_size = 0
+
+    // 16-bit item_count
+    size_t itemCount = mItems.size();
+    if (itemCount > 65535) {
+        ALOGW("Dropping excess items: itemCount %zu", itemCount);
+        itemCount = 65535;
+    }
+    writeInt16((uint16_t)itemCount);
+
+    for (size_t i = 0; i < itemCount; i++) {
+        writeInt16(mItems[i].itemId);
+        bool isGrid = mItems[i].isGrid();
+
+        writeInt16(isGrid ? 1 : 0); // construction_method
+        writeInt16(0); // data_reference_index = 0
+        writeInt16(1); // extent_count = 1
+
+        if (isGrid) {
+            // offset into the 'idat' box
+            writeInt32(mNumGrids++ * 8);
+            writeInt32(8);
+        } else {
+            writeInt32(mItems[i].offset);
+            writeInt32(mItems[i].size);
+        }
+    }
+    endBox();
+}
+
+void MPEG4Writer::writeInfeBox(
+        uint16_t itemId, const char *itemType, uint32_t flags) {
+    beginBox("infe");
+    writeInt32(0x02000000 | flags); // Version = 2, Flags = flags (bit 0: hidden item)
+    writeInt16(itemId);
+    writeInt16(0);          //item_protection_index = 0
+    writeFourcc(itemType);
+    writeCString("");       // item_name
+    endBox();
+}
+
+void MPEG4Writer::writeIinfBox() {
+    beginBox("iinf");
+    writeInt32(0);          // Version = 0, Flags = 0
+
+    // 16-bit item_count
+    size_t itemCount = mItems.size();
+    if (itemCount > 65535) {
+        ALOGW("Dropping excess items: itemCount %zu", itemCount);
+        itemCount = 65535;
+    }
+
+    writeInt16((uint16_t)itemCount);
+    for (size_t i = 0; i < itemCount; i++) {
+        writeInfeBox(mItems[i].itemId, mItems[i].itemType,
+                mItems[i].isHidden ? 1 : 0);
+    }
+
+    endBox();
+}
+
+void MPEG4Writer::writeIdatBox() {
+    beginBox("idat");
+
+    for (size_t i = 0; i < mItems.size(); i++) {
+        if (mItems[i].isGrid()) {
+            writeInt8(0); // version
+            // flags == 1 means 32-bit width,height
+            int8_t flags = (mItems[i].width > 65535 || mItems[i].height > 65535);
+            writeInt8(flags);
+            writeInt8(mItems[i].rows - 1);
+            writeInt8(mItems[i].cols - 1);
+            if (flags) {
+                writeInt32(mItems[i].width);
+                writeInt32(mItems[i].height);
+            } else {
+                writeInt16((uint16_t)mItems[i].width);
+                writeInt16((uint16_t)mItems[i].height);
+            }
+        }
+    }
+
+    endBox();
+}
+
+void MPEG4Writer::writeIrefBox() {
+    beginBox("iref");
+    writeInt32(0);          // Version = 0, Flags = 0
+    {
+        for (size_t i = 0; i < mItems.size(); i++) {
+            if (!mItems[i].isGrid()) {
+                continue;
+            }
+            beginBox("dimg");
+            writeInt16(mItems[i].itemId);
+            size_t refCount = mItems[i].dimgRefs.size();
+            if (refCount > 65535) {
+                ALOGW("too many entries in dimg");
+                refCount = 65535;
+            }
+            writeInt16((uint16_t)refCount);
+            for (size_t refIndex = 0; refIndex < refCount; refIndex++) {
+                writeInt16(mItems[i].dimgRefs[refIndex]);
+            }
+            endBox();
+        }
+    }
+    endBox();
+}
+
+void MPEG4Writer::writePitmBox() {
+    beginBox("pitm");
+    writeInt32(0);          // Version = 0, Flags = 0
+    writeInt16(mPrimaryItemId);
+    endBox();
+}
+
+void MPEG4Writer::writeIpcoBox() {
+    beginBox("ipco");
+    size_t numProperties = mProperties.size();
+    if (numProperties > 32767) {
+        ALOGW("Dropping excess properties: numProperties %zu", numProperties);
+        numProperties = 32767;
+    }
+    for (size_t propIndex = 0; propIndex < numProperties; propIndex++) {
+        if (mProperties[propIndex].type == FOURCC('h', 'v', 'c', 'C')) {
+            beginBox("hvcC");
+            sp<ABuffer> hvcc = mProperties[propIndex].hvcc;
+            // Patch hvcC's lengthSizeMinusOne field to match the number
+            // of bytes we use to indicate the size of a nal unit.
+            uint8_t *ptr = (uint8_t *)hvcc->data();
+            ptr[21] = (ptr[21] & 0xfc) | (useNalLengthFour() ? 3 : 1);
+            write(hvcc->data(), hvcc->size());
+            endBox();
+        } else if (mProperties[propIndex].type == FOURCC('i', 's', 'p', 'e')) {
+            beginBox("ispe");
+            writeInt32(0); // Version = 0, Flags = 0
+            writeInt32(mProperties[propIndex].width);
+            writeInt32(mProperties[propIndex].height);
+            endBox();
+        } else {
+            ALOGW("Skipping unrecognized property: type 0x%08x",
+                    mProperties[propIndex].type);
+        }
+    }
+    endBox();
+}
+
+void MPEG4Writer::writeIpmaBox() {
+    beginBox("ipma");
+    uint32_t flags = (mProperties.size() > 127) ? 1 : 0;
+    writeInt32(flags); // Version = 0
+
+    writeInt32(mAssociationEntryCount);
+    for (size_t itemIndex = 0; itemIndex < mItems.size(); itemIndex++) {
+        const Vector<uint16_t> &properties = mItems[itemIndex].properties;
+        if (properties.empty()) {
+            continue;
+        }
+        writeInt16(mItems[itemIndex].itemId);
+
+        size_t entryCount = properties.size();
+        if (entryCount > 255) {
+            ALOGW("Dropping excess associations: entryCount %zu", entryCount);
+            entryCount = 255;
+        }
+        writeInt8((uint8_t)entryCount);
+        for (size_t propIndex = 0; propIndex < entryCount; propIndex++) {
+            if (flags & 1) {
+                writeInt16((1 << 15) | properties[propIndex]);
+            } else {
+                writeInt8((1 << 7) | properties[propIndex]);
+            }
+        }
+    }
+    endBox();
+}
+
+void MPEG4Writer::writeIprpBox() {
+    beginBox("iprp");
+    writeIpcoBox();
+    writeIpmaBox();
+    endBox();
+}
+
+void MPEG4Writer::writeFileLevelMetaBox() {
+    if (mItems.empty()) {
+        ALOGE("no valid item was found");
+        return;
+    }
+
+    // patch up the mPrimaryItemId and count items with prop associations
+    for (size_t index = 0; index < mItems.size(); index++) {
+        if (mItems[index].isPrimary) {
+            mPrimaryItemId = mItems[index].itemId;
+        }
+
+        if (!mItems[index].properties.empty()) {
+            mAssociationEntryCount++;
+        }
+    }
+
+    if (mPrimaryItemId == 0) {
+        ALOGW("didn't find primary, using first item");
+        mPrimaryItemId = mItems[0].itemId;
+    }
+
+    beginBox("meta");
+    writeInt32(0); // Version = 0, Flags = 0
+    writeHdlr("pict");
+    writeIlocBox();
+    writeIinfBox();
+    writePitmBox();
+    writeIprpBox();
+    if (mNumGrids > 0) {
+        writeIdatBox();
+        writeIrefBox();
+    }
+    endBox();
+}
+
+uint16_t MPEG4Writer::addProperty_l(const ItemProperty &prop) {
+    char typeStr[5];
+    MakeFourCCString(prop.type, typeStr);
+    ALOGV("addProperty_l: %s", typeStr);
+
+    mProperties.push_back(prop);
+
+    // returning 1-based property index
+    return mProperties.size();
+}
+
+uint16_t MPEG4Writer::addItem_l(const ItemInfo &info) {
+    ALOGV("addItem_l: type %s, offset %u, size %u",
+            info.itemType, info.offset, info.size);
+
+    size_t index = mItems.size();
+    mItems.push_back(info);
+
+    // make the item id start at 10000
+    mItems.editItemAt(index).itemId = index + 10000;
+
+#if (LOG_NDEBUG==0)
+    if (!info.properties.empty()) {
+        AString str;
+        for (size_t i = 0; i < info.properties.size(); i++) {
+            if (i > 0) {
+                str.append(", ");
+            }
+            str.append(info.properties[i]);
+        }
+        ALOGV("addItem_l: id %d, properties: %s", mItems[index].itemId, str.c_str());
+    }
+#endif // (LOG_NDEBUG==0)
+
+    return mItems[index].itemId;
+}
+
 /*
  * Geodata is stored according to ISO-6709 standard.
  */
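The new 'ipma' box above packs each item-to-property association as an essential bit plus a 1-based index into 'ipco', using one byte per entry when there are at most 127 properties and two bytes otherwise (selected once per box via the version-0 flags). A small self-contained sketch of that packing, with illustrative names that are not part of the patch:

    #include <cstdint>
    #include <vector>

    // Packs one association the way writeIpmaBox() does: the top bit marks the
    // property as essential, the remaining bits carry the 1-based 'ipco' index.
    static std::vector<uint8_t> packAssociation(
            uint16_t propertyIndex, bool essential, bool use16BitIndex) {
        std::vector<uint8_t> out;
        if (use16BitIndex) {
            uint16_t v = (essential ? (1u << 15) : 0) | (propertyIndex & 0x7fff);
            out.push_back(uint8_t(v >> 8));
            out.push_back(uint8_t(v & 0xff));
        } else {
            out.push_back(uint8_t((essential ? (1u << 7) : 0) | (propertyIndex & 0x7f)));
        }
        return out;
    }

In the patch itself every association is written with the essential bit set, matching the 1-based indices returned by addProperty_l().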
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 4fedab6..677d25a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -28,6 +28,7 @@
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
+#include <cutils/properties.h>
 #include <gui/BufferQueue.h>
 #include <gui/Surface.h>
 #include <media/ICrypto.h>
@@ -44,6 +45,7 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/ACodec.h>
 #include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/CCodec.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaDefs.h>
@@ -549,8 +551,11 @@
 
 //static
 sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, bool nameIsType) {
-    // at this time only ACodec specifies a mime type.
-    if (nameIsType || name.startsWithIgnoreCase("omx.")) {
+    static bool ccodecEnabled = property_get_bool("debug.stagefright.ccodec", false);
+    if (ccodecEnabled && !nameIsType && name.startsWithIgnoreCase("codec2.")) {
+        return new CCodec;
+    } else if (nameIsType || name.startsWithIgnoreCase("omx.")) {
+        // at this time only ACodec specifies a mime type.
         return new ACodec;
     } else if (name.startsWithIgnoreCase("android.filter.")) {
         return new MediaFilter;
@@ -1849,7 +1854,6 @@
                                 }
                             }
                         }
-
                         if (mFlags & kFlagIsAsync) {
                             onOutputFormatChanged();
                         } else {
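The CCodec branch above is guarded by a system property that is read once per process through a function-local static. A minimal sketch of that read-once pattern (the helper name is illustrative):

    #include <cutils/properties.h>

    // property_get_bool() accepts "1"/"true"-style values and falls back to the
    // default otherwise; the static ensures the property is read only once.
    static bool isCCodecEnabled() {
        static bool enabled = property_get_bool("debug.stagefright.ccodec", false);
        return enabled;
    }

Because the value is latched on first use, toggling the property (for example with setprop on a debuggable build) only takes effect for processes started afterwards.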
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 5852dd4..cf800b2 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -643,7 +643,9 @@
 
     if (mStopping && reachedEOS) {
         ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
-        mPuller->stopSource();
+        if (mPuller != NULL) {
+            mPuller->stopSource();
+        }
         ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
         // posting reply to everyone that's waiting
         List<sp<AReplyToken>>::iterator it;
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index fb048fe..62daac8 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -23,6 +23,7 @@
 
 #include <media/stagefright/MediaMuxer.h>
 
+#include <media/mediarecorder.h>
 #include <media/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -38,10 +39,16 @@
 
 namespace android {
 
+static bool isMp4Format(MediaMuxer::OutputFormat format) {
+    return format == MediaMuxer::OUTPUT_FORMAT_MPEG_4 ||
+           format == MediaMuxer::OUTPUT_FORMAT_THREE_GPP ||
+           format == MediaMuxer::OUTPUT_FORMAT_HEIF;
+}
+
 MediaMuxer::MediaMuxer(int fd, OutputFormat format)
     : mFormat(format),
       mState(UNINITIALIZED) {
-    if (format == OUTPUT_FORMAT_MPEG_4 || format == OUTPUT_FORMAT_THREE_GPP) {
+    if (isMp4Format(format)) {
         mWriter = new MPEG4Writer(fd);
     } else if (format == OUTPUT_FORMAT_WEBM) {
         mWriter = new WebmWriter(fd);
@@ -49,6 +56,10 @@
 
     if (mWriter != NULL) {
         mFileMeta = new MetaData;
+        if (format == OUTPUT_FORMAT_HEIF) {
+            // Note that the key uses recorder file types.
+            mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_HEIF);
+        }
         mState = INITIALIZED;
     }
 }
@@ -108,8 +119,8 @@
         ALOGE("setLocation() must be called before start().");
         return INVALID_OPERATION;
     }
-    if (mFormat != OUTPUT_FORMAT_MPEG_4 && mFormat != OUTPUT_FORMAT_THREE_GPP) {
-        ALOGE("setLocation() is only supported for .mp4 pr .3gp output.");
+    if (!isMp4Format(mFormat)) {
+        ALOGE("setLocation() is only supported for .mp4, .3gp or .heic output.");
         return INVALID_OPERATION;
     }
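With OUTPUT_FORMAT_HEIF now routed to MPEG4Writer and tagged through kKeyFileType, a native client can open a HEIF muxer much like an MP4 one. A hedged usage sketch; the track-format contents and cleanup are deliberately minimal and partly assumed:

    #include <fcntl.h>
    #include <media/stagefright/MediaMuxer.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Sketch only: create a HEIF muxer and add one image track. A real caller
    // also provides codec config (csd-0) and then writes the tile samples.
    static sp<MediaMuxer> makeHeifMuxer(const char *path, const sp<AMessage> &imageTrackFormat) {
        int fd = open(path, O_CREAT | O_RDWR, 0644);
        if (fd < 0) {
            return nullptr;
        }
        sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_HEIF);
        if (muxer->addTrack(imageTrackFormat) < 0 || muxer->start() != OK) {
            return nullptr;
        }
        return muxer;
    }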
 
diff --git a/media/libstagefright/SimpleDecodingSource.cpp b/media/libstagefright/SimpleDecodingSource.cpp
index 67e6748..9b2fb4f 100644
--- a/media/libstagefright/SimpleDecodingSource.cpp
+++ b/media/libstagefright/SimpleDecodingSource.cpp
@@ -14,6 +14,10 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimpleDecodingSource"
+#include <utils/Log.h>
+
 #include <gui/Surface.h>
 
 #include <media/ICrypto.h>
@@ -43,7 +47,7 @@
 //static
 sp<SimpleDecodingSource> SimpleDecodingSource::Create(
         const sp<MediaSource> &source, uint32_t flags, const sp<ANativeWindow> &nativeWindow,
-        const char *desiredCodec) {
+        const char *desiredCodec, bool skipMediaCodecList) {
     sp<Surface> surface = static_cast<Surface*>(nativeWindow.get());
     const char *mime = NULL;
     sp<MetaData> meta = source->getFormat();
@@ -63,6 +67,33 @@
     looper->start();
 
     sp<MediaCodec> codec;
+    auto configure = [=](const sp<MediaCodec> &codec, const AString &componentName)
+            -> sp<SimpleDecodingSource> {
+        if (codec != NULL) {
+            ALOGI("Successfully allocated codec '%s'", componentName.c_str());
+
+            status_t err = codec->configure(format, surface, NULL /* crypto */, 0 /* flags */);
+            sp<AMessage> outFormat;
+            if (err == OK) {
+                err = codec->getOutputFormat(&outFormat);
+            }
+            if (err == OK) {
+                return new SimpleDecodingSource(codec, source, looper,
+                        surface != NULL,
+                        strcmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS) == 0,
+                        outFormat);
+            }
+
+            ALOGD("Failed to configure codec '%s'", componentName.c_str());
+            codec->release();
+        }
+        return NULL;
+    };
+
+    if (skipMediaCodecList) {
+        codec = MediaCodec::CreateByComponentName(looper, desiredCodec);
+        return configure(codec, desiredCodec);
+    }
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
         const AString &componentName = matchingCodecs[i];
@@ -73,22 +104,10 @@
         ALOGV("Attempting to allocate codec '%s'", componentName.c_str());
 
         codec = MediaCodec::CreateByComponentName(looper, componentName);
-        if (codec != NULL) {
-            ALOGI("Successfully allocated codec '%s'", componentName.c_str());
-
-            status_t err = codec->configure(format, surface, NULL /* crypto */, 0 /* flags */);
-            if (err == OK) {
-                err = codec->getOutputFormat(&format);
-            }
-            if (err == OK) {
-                return new SimpleDecodingSource(codec, source, looper,
-                        surface != NULL,
-                        strcmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS) == 0,
-                        format);
-            }
-
-            ALOGD("Failed to configure codec '%s'", componentName.c_str());
-            codec->release();
+        sp<SimpleDecodingSource> res = configure(codec, componentName);
+        if (res != NULL) {
+            return res;
+        } else {
             codec = NULL;
         }
     }
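The new skipMediaCodecList parameter lets a caller bypass the MediaCodecList lookup and instantiate a named component directly, which is useful for components (such as the codec2.* wrappers) that are not yet listed. A hedged call-site sketch; the component name is illustrative:

    #include <media/MediaSource.h>
    #include <media/stagefright/SimpleDecodingSource.h>

    using namespace android;

    // Sketch: decode through an explicitly named component, skipping the list lookup.
    static sp<SimpleDecodingSource> createDirectDecoder(const sp<MediaSource> &source) {
        return SimpleDecodingSource::Create(
                source,
                0 /* flags */,
                nullptr /* nativeWindow */,
                "codec2.avc.decoder" /* desiredCodec, illustrative name */,
                true /* skipMediaCodecList */);
    }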
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index dfaa8b6..e2db0f5 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -154,7 +154,8 @@
 
         if (!strncasecmp(mime, "image/", 6)) {
             int32_t isPrimary;
-            if ((index < 0 && meta->findInt32(kKeyIsPrimaryImage, &isPrimary) && isPrimary)
+            if ((index < 0 && meta->findInt32(
+                    kKeyTrackIsDefault, &isPrimary) && isPrimary)
                     || (index == imageCount++)) {
                 break;
             }
@@ -490,7 +491,8 @@
                 }
             } else if (!strncasecmp("image/", mime, 6)) {
                 int32_t isPrimary;
-                if (trackMeta->findInt32(kKeyIsPrimaryImage, &isPrimary) && isPrimary) {
+                if (trackMeta->findInt32(
+                        kKeyTrackIsDefault, &isPrimary) && isPrimary) {
                     imagePrimary = imageCount;
                     CHECK(trackMeta->findInt32(kKeyWidth, &imageWidth));
                     CHECK(trackMeta->findInt32(kKeyHeight, &imageHeight));
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 68bbd18..53699ef 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -675,6 +675,10 @@
                 msg->setInt32("grid-rows", gridRows);
                 msg->setInt32("grid-cols", gridCols);
             }
+            int32_t isPrimary;
+            if (meta->findInt32(kKeyTrackIsDefault, &isPrimary) && isPrimary) {
+                msg->setInt32("is-default", 1);
+            }
         }
 
         int32_t colorFormat;
@@ -1308,7 +1312,7 @@
         meta->setCString(kKeyMediaLanguage, lang.c_str());
     }
 
-    if (mime.startsWith("video/")) {
+    if (mime.startsWith("video/") || mime.startsWith("image/")) {
         int32_t width;
         int32_t height;
         if (msg->findInt32("width", &width) && msg->findInt32("height", &height)) {
@@ -1332,6 +1336,26 @@
             meta->setInt32(kKeyDisplayHeight, displayHeight);
         }
 
+        if (mime.startsWith("image/")) {
+            int32_t isPrimary;
+            if (msg->findInt32("is-default", &isPrimary) && isPrimary) {
+                meta->setInt32(kKeyTrackIsDefault, 1);
+            }
+            int32_t gridWidth, gridHeight, gridRows, gridCols;
+            if (msg->findInt32("grid-width", &gridWidth)) {
+                meta->setInt32(kKeyGridWidth, gridWidth);
+            }
+            if (msg->findInt32("grid-height", &gridHeight)) {
+                meta->setInt32(kKeyGridHeight, gridHeight);
+            }
+            if (msg->findInt32("grid-rows", &gridRows)) {
+                meta->setInt32(kKeyGridRows, gridRows);
+            }
+            if (msg->findInt32("grid-cols", &gridCols)) {
+                meta->setInt32(kKeyGridCols, gridCols);
+            }
+        }
+
         int32_t colorFormat;
         if (msg->findInt32("color-format", &colorFormat)) {
             meta->setInt32(kKeyColorFormat, colorFormat);
@@ -1448,7 +1472,8 @@
             // for transporting the CSD to muxers.
             reassembleESDS(csd0, esds.data());
             meta->setData(kKeyESDS, kKeyESDS, esds.data(), esds.size());
-        } else if (mime == MEDIA_MIMETYPE_VIDEO_HEVC) {
+        } else if (mime == MEDIA_MIMETYPE_VIDEO_HEVC ||
+                   mime == MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) {
             std::vector<uint8_t> hvcc(csd0size + 1024);
             size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
             meta->setData(kKeyHVCC, kKeyHVCC, hvcc.data(), outsize);
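The Utils.cpp changes make the message/metadata converters round-trip the image-specific keys ("is-default" and the grid geometry) for image/* tracks. A hedged sketch of the AMessage side, using only keys named in this patch (the dimensions are arbitrary examples):

    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Sketch: a HEIC image track format as convertMessageToMetaData() would see it.
    static sp<AMessage> makeHeicTrackFormat() {
        sp<AMessage> format = new AMessage;
        format->setString("mime", "image/vnd.android.heic");
        format->setInt32("width", 4096);
        format->setInt32("height", 3072);
        format->setInt32("is-default", 1);    // mapped to kKeyTrackIsDefault
        format->setInt32("grid-width", 512);  // tile geometry, mapped to kKeyGrid*
        format->setInt32("grid-height", 512);
        format->setInt32("grid-rows", 6);
        format->setInt32("grid-cols", 8);
        return format;
    }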
diff --git a/media/libstagefright/codec2/Android.bp b/media/libstagefright/codec2/Android.bp
index f79e058..696a062 100644
--- a/media/libstagefright/codec2/Android.bp
+++ b/media/libstagefright/codec2/Android.bp
@@ -7,19 +7,28 @@
 
     srcs: ["C2.cpp"],
 
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
     include_dirs: [
         "frameworks/av/media/libstagefright/codec2/include",
         "frameworks/native/include/media/hardware",
     ],
 
+    export_include_dirs: [
+        "include",
+    ],
+
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
             "signed-integer-overflow",
         ],
-        cfi: true,
+        cfi: false, // true,
         diag: {
-            cfi: true,
+            cfi: false, // true,
         },
     },
 
diff --git a/media/libstagefright/codec2/include/C2.h b/media/libstagefright/codec2/include/C2.h
index bd563ff..01d1341 100644
--- a/media/libstagefright/codec2/include/C2.h
+++ b/media/libstagefright/codec2/include/C2.h
@@ -66,7 +66,7 @@
  * mitigate binary breaks by adhering to the following conventions:
  *
  * - at most one vtable with placeholder virtual methods
- * - all optional/placeholder virtual methods returning a C2Status, with C2_OMITTED not requiring
+ * - all optional/placeholder virtual methods returning a c2_status_t, with C2_OMITTED not requiring
  *   any update to input/output arguments.
  * - limiting symbol export of inline methods
  * - use of pimpl (or shared-pimpl)
@@ -106,9 +106,9 @@
 typedef const char *C2StringLiteral;
 
 /**
- * C2Status: status codes used.
+ * c2_status_t: status codes used.
  */
-enum C2Status : int32_t {
+enum c2_status_t : int32_t {
 
 /*
  * Use android status constants if available. Otherwise, define the android status constants as
diff --git a/media/libstagefright/codec2/include/C2Buffer.h b/media/libstagefright/codec2/include/C2Buffer.h
index 22e4360..d978e42 100644
--- a/media/libstagefright/codec2/include/C2Buffer.h
+++ b/media/libstagefright/codec2/include/C2Buffer.h
@@ -91,7 +91,7 @@
      * \retval C2_REFUSED       no permission to wait for the fence (unexpected - system)
      * \retval C2_CORRUPTED     some unknown error prevented waiting for the fence (unexpected)
      */
-    C2Status wait(nsecs_t timeoutNs);
+    c2_status_t wait(nsecs_t timeoutNs);
 
     /**
      * Used to check if this fence is valid (if there is a chance for it to be signaled.)
@@ -158,7 +158,7 @@
      * \retval C2_REFUSED       no permission to signal the fence (unexpected - system)
      * \retval C2_CORRUPTED     some unknown error prevented signaling the fence(s) (unexpected)
      */
-    C2Status fire();
+    c2_status_t fire();
 
     /**
      * Trigger this event from the merging of the supplied fences. This means that it will be
@@ -172,7 +172,7 @@
      * \retval C2_REFUSED       no permission to merge the fence (unexpected - system)
      * \retval C2_CORRUPTED     some unknown error prevented merging the fence(s) (unexpected)
      */
-    C2Status merge(std::vector<C2Fence> fences);
+    c2_status_t merge(std::vector<C2Fence> fences);
 
     /**
      * Abandons the event and any associated fence(s).
@@ -186,7 +186,7 @@
      * \retval C2_REFUSED       no permission to abandon the fence (unexpected - system)
      * \retval C2_CORRUPTED     some unknown error prevented signaling the fence(s) (unexpected)
      */
-    C2Status abandon();
+    c2_status_t abandon();
 
 private:
     class Impl;
@@ -200,12 +200,12 @@
  * Interface for objects that encapsulate an updatable status value.
  */
 struct _C2InnateStatus {
-    inline C2Status status() const { return mStatus; }
+    inline c2_status_t status() const { return mStatus; }
 
 protected:
-    _C2InnateStatus(C2Status status) : mStatus(status) { }
+    _C2InnateStatus(c2_status_t status) : mStatus(status) { }
 
-    C2Status mStatus; // this status is updatable by the object
+    c2_status_t mStatus; // this status is updatable by the object
 };
 
 /// @}
@@ -230,10 +230,10 @@
     }
 
 protected:
-    C2Acquirable(C2Status error, C2Fence fence, T t) : C2Fence(fence), mInitialError(error), mT(t) { }
+    C2Acquirable(c2_status_t error, C2Fence fence, T t) : C2Fence(fence), mInitialError(error), mT(t) { }
 
 private:
-    C2Status mInitialError;
+    c2_status_t mInitialError;
     T mT; // TODO: move instead of copy
 };
 
@@ -449,11 +449,11 @@
     /**
      * \return error during the creation/mapping of this view.
      */
-    C2Status error() const;
+    c2_status_t error() const;
 
 protected:
     C2ReadView(const _C2LinearCapacityAspect *parent, const uint8_t *data);
-    explicit C2ReadView(C2Status error);
+    explicit C2ReadView(c2_status_t error);
 
 private:
     class Impl;
@@ -482,11 +482,11 @@
     /**
      * \return error during the creation/mapping of this view.
      */
-    C2Status error() const;
+    c2_status_t error() const;
 
 protected:
     C2WriteView(const _C2LinearRangeAspect *parent, uint8_t *base);
-    explicit C2WriteView(C2Status error);
+    explicit C2WriteView(c2_status_t error);
 
 private:
     class Impl;
@@ -631,7 +631,7 @@
      * \retval C2_TIMED_OUT     the reservation timed out \todo when?
      * \retval C2_CORRUPTED     some unknown error prevented reserving space. (unexpected)
      */
-    C2Status reserve(size_t size, C2Fence *fence /* nullable */);
+    c2_status_t reserve(size_t size, C2Fence *fence /* nullable */);
 
     /**
      * Abandons a portion of this segment. This will move to the beginning of this segment.
@@ -644,7 +644,7 @@
      * \retval C2_TIMED_OUT     the operation timed out (unexpected)
      * \retval C2_CORRUPTED     some unknown error prevented abandoning the data (unexpected)
      */
-    C2Status abandon(size_t size);
+    c2_status_t abandon(size_t size);
 
     /**
      * Share a portion as block(s) with consumers (these are moved to the used section).
@@ -661,7 +661,7 @@
      * \retval C2_TIMED_OUT     the operation timed out (unexpected)
      * \retval C2_CORRUPTED     some unknown error prevented sharing the data (unexpected)
      */
-    C2Status share(size_t size, C2Fence fence, std::list<C2ConstLinearBlock> &blocks);
+    c2_status_t share(size_t size, C2Fence fence, std::list<C2ConstLinearBlock> &blocks);
 
     /**
      * Returns the beginning offset of this segment from the start of this circular block.
@@ -695,7 +695,7 @@
     /**
      * \return error during the creation/mapping of this view.
      */
-    C2Status error() const;
+    c2_status_t error() const;
 };
 
 /**
@@ -716,7 +716,7 @@
      * \param size    number of bytes to commit to the next segment
      * \param fence   fence used for the commit (the fence must signal before the data is committed)
      */
-    C2Status commit(size_t size, C2Fence fence);
+    c2_status_t commit(size_t size, C2Fence fence);
 
     /**
      * Maps this block into memory and returns a write view for it.
@@ -1016,14 +1016,14 @@
     /**
      * \return error during the creation/mapping of this view.
      */
-    C2Status error() const;
+    c2_status_t error() const;
 
 protected:
     C2GraphicView(
             const _C2PlanarCapacityAspect *parent,
             uint8_t *const *data,
             const C2PlaneLayout& layout);
-    explicit C2GraphicView(C2Status error);
+    explicit C2GraphicView(c2_status_t error);
 
 private:
     class Impl;
@@ -1224,7 +1224,7 @@
      * \retval C2_NO_MEMORY not enough memory to register for this callback
      * \retval C2_CORRUPTED an unknown error prevented the registration (unexpected)
      */
-    C2Status registerOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg = nullptr);
+    c2_status_t registerOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg = nullptr);
 
     /**
      * Unregisters a previously registered pre-destroy notification.
@@ -1236,7 +1236,7 @@
      * \retval C2_NOT_FOUND the notification was not found
      * \retval C2_CORRUPTED an unknown error prevented the registration (unexpected)
      */
-    C2Status unregisterOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg = nullptr);
+    c2_status_t unregisterOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg = nullptr);
 
     ///@}
 
@@ -1262,7 +1262,7 @@
      * \retval C2_NO_MEMORY not enough memory to attach the metadata (this return value is not
      *                      used if the same kind of metadata is already attached to the buffer).
      */
-    C2Status setInfo(const std::shared_ptr<C2Info> &info);
+    c2_status_t setInfo(const std::shared_ptr<C2Info> &info);
 
     /**
      * Checks if there is a certain type of metadata attached to this buffer.
@@ -1385,7 +1385,7 @@
      *                      the usage flags are invalid (caller error)
      * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
      */
-    virtual C2Status map(
+    virtual c2_status_t map(
             size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd /* nullable */,
             void **addr /* nonnull */) = 0;
 
@@ -1409,7 +1409,7 @@
      * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
      * \retval C2_REFUSED   no permission to unmap the portion (unexpected - system)
      */
-    virtual C2Status unmap(void *addr, size_t size, int *fenceFd /* nullable */) = 0;
+    virtual c2_status_t unmap(void *addr, size_t size, int *fenceFd /* nullable */) = 0;
 
     /**
      * Returns true if this is a valid allocation.
@@ -1472,7 +1472,7 @@
      * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
 
      */
-    virtual C2Status map(
+    virtual c2_status_t map(
             C2Rect rect, C2MemoryUsage usage, int *fenceFd,
             // TODO: return <addr, size> buffers with plane sizes
             C2PlaneLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) = 0;
@@ -1492,7 +1492,7 @@
      * \retval C2_CORRUPTED some unknown error prevented the operation from completing (unexpected)
      * \retval C2_REFUSED   no permission to unmap the section (unexpected - system)
      */
-    virtual C2Status unmap(C2Fence *fenceFd /* nullable */) = 0;
+    virtual c2_status_t unmap(C2Fence *fenceFd /* nullable */) = 0;
 
     /**
      * Returns true if this is a valid allocation.
@@ -1544,8 +1544,10 @@
 
     /**
      * Information about an allocator.
+     *
+     * Allocators don't have a query API, so all queryable information is stored here.
      */
-    struct Info {
+    struct Traits {
         C2String name;              ///< allocator name
         id_t id;                    ///< allocator ID
         type_t supportedTypes;      ///< supported allocation types
@@ -1574,13 +1576,15 @@
     virtual id_t getId() const = 0;
 
     /**
-     * Returns the allocator information.
+     * Returns the allocator traits.
      *
      * This method MUST be "non-blocking" and return within 1ms.
      *
+     * Allocators don't have a full-fledged query API, only this method.
+     *
      * \return allocator information
      */
-    virtual std::shared_ptr<const Info> getInfo() const = 0;
+    virtual std::shared_ptr<const Traits> getTraits() const = 0;
 
     /**
      * Allocates a 1D allocation of given |capacity| and |usage|. If successful, the allocation is
@@ -1604,7 +1608,7 @@
      * \retval C2_OMITTED   this allocator does not support 1D allocations
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
      */
-    virtual C2Status newLinearAllocation(
+    virtual c2_status_t newLinearAllocation(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
             std::shared_ptr<C2LinearAllocation> *allocation /* nonnull */) {
         *allocation = nullptr;
@@ -1627,7 +1631,7 @@
      * \retval C2_OMITTED   this allocator does not support 1D allocations
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
      */
-    virtual C2Status priorLinearAllocation(
+    virtual c2_status_t priorLinearAllocation(
             const C2Handle *handle __unused,
             std::shared_ptr<C2LinearAllocation> *allocation /* nonnull */) {
         *allocation = nullptr;
@@ -1660,7 +1664,7 @@
      * \retval C2_OMITTED   this allocator does not support 2D allocations
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
      */
-    virtual C2Status newGraphicAllocation(
+    virtual c2_status_t newGraphicAllocation(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
             C2MemoryUsage usage __unused,
             std::shared_ptr<C2GraphicAllocation> *allocation /* nonnull */) {
@@ -1684,7 +1688,7 @@
      * \retval C2_OMITTED   this allocator does not support 2D allocations
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during recreation (unexpected)
      */
-    virtual C2Status priorGraphicAllocation(
+    virtual c2_status_t priorGraphicAllocation(
             const C2Handle *handle __unused,
             std::shared_ptr<C2GraphicAllocation> *allocation /* nonnull */) {
         *allocation = nullptr;
@@ -1764,7 +1768,7 @@
      * \retval C2_OMITTED   this pool does not support linear blocks
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
      */
-    virtual C2Status fetchLinearBlock(
+    virtual c2_status_t fetchLinearBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
             std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
         *block = nullptr;
@@ -1792,7 +1796,7 @@
      * \retval C2_OMITTED   this pool does not support circular blocks
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
      */
-    virtual C2Status fetchCircularBlock(
+    virtual c2_status_t fetchCircularBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
             std::shared_ptr<C2CircularBlock> *block /* nonnull */) {
         *block = nullptr;
@@ -1823,7 +1827,7 @@
      * \retval C2_OMITTED   this pool does not support 2D blocks
      * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
      */
-    virtual C2Status fetchGraphicBlock(
+    virtual c2_status_t fetchGraphicBlock(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
             C2MemoryUsage usage __unused,
             std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
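C2Buffer.h now exposes allocator metadata as C2Allocator::Traits (via getTraits()) and returns c2_status_t from the map/allocate/fetch entry points. A hedged sketch against only the signatures visible in this patch; the pool type name and the include path are assumptions:

    #include <memory>

    #include <C2Buffer.h>

    // Sketch: inspect an allocator's traits and fetch a 1D block from a pool.
    c2_status_t fetchOneLinearBlock(
            const std::shared_ptr<C2Allocator> &allocator,
            const std::shared_ptr<C2BlockPool> &pool,   // pool type name assumed
            C2MemoryUsage usage,
            std::shared_ptr<C2LinearBlock> *block) {
        std::shared_ptr<const C2Allocator::Traits> traits = allocator->getTraits();
        (void)traits;  // traits->name, traits->id and traits->supportedTypes describe it
        return pool->fetchLinearBlock(64 * 1024 /* capacity */, usage, block);
    }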
diff --git a/media/libstagefright/codec2/include/C2Component.h b/media/libstagefright/codec2/include/C2Component.h
index c88de62..a555b35 100644
--- a/media/libstagefright/codec2/include/C2Component.h
+++ b/media/libstagefright/codec2/include/C2Component.h
@@ -36,25 +36,6 @@
 
 class C2Component;
 
-class C2ComponentListener {
-public:
-    virtual void onWorkDone(std::weak_ptr<C2Component> component,
-                            std::vector<std::unique_ptr<C2Work>> workItems) = 0;
-
-    virtual void onTripped(std::weak_ptr<C2Component> component,
-                           std::vector<std::shared_ptr<C2SettingResult>> settingResult) = 0;
-
-    virtual void onError(std::weak_ptr<C2Component> component,
-                         uint32_t errorCode) = 0;
-
-    // virtual void onTunnelReleased(<from>, <to>) = 0;
-
-    // virtual void onComponentReleased(<id>) = 0;
-
-protected:
-    virtual ~C2ComponentListener() = default;
-};
-
 struct C2FieldSupportedValuesQuery {
     enum Type : uint32_t {
         POSSIBLE, ///< query all possible values regardless of other settings
@@ -63,7 +44,7 @@
 
     const C2ParamField field;
     const Type type;
-    C2Status status;
+    c2_status_t status;
     C2FieldSupportedValues values;
 
     C2FieldSupportedValuesQuery(const C2ParamField &field_, Type type_)
@@ -118,7 +99,7 @@
      *
      * \return a unique node ID for this component or component interface instance.
      */
-    virtual node_id getId() const = 0;
+    virtual c2_node_id_t getId() const = 0;
 
     /**
      * Queries a set of parameters from the component or interface object.
@@ -154,7 +135,7 @@
      * \retval C2_CORRUPTED some unknown error prevented the querying of the parameters
      *                      (unexpected)
      */
-    virtual C2Status query_nb(
+    virtual c2_status_t query_nb(
         const std::vector<C2Param* const> &stackParams,
         const std::vector<C2Param::Index> &heapParamIndices,
         std::vector<std::unique_ptr<C2Param>>* const heapParams) const = 0;
@@ -191,7 +172,7 @@
      * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
      *                      (unexpected)
      */
-    virtual C2Status config_nb(
+    virtual c2_status_t config_nb(
             const std::vector<C2Param* const> &params,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
 
@@ -230,7 +211,7 @@
      * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
      *                      (unexpected)
      */
-    virtual C2Status commit_sm(
+    virtual c2_status_t commit_sm(
             const std::vector<C2Param* const> &params,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
 
@@ -256,7 +237,7 @@
      * \retval C2_TIMED_OUT could not create the tunnel within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented the creation of the tunnel (unexpected)
      */
-    virtual C2Status createTunnel_sm(node_id targetComponent) = 0;
+    virtual c2_status_t createTunnel_sm(c2_node_id_t targetComponent) = 0;
 
     /**
      * Releases a tunnel from this component to the target component.
@@ -277,38 +258,41 @@
      * \retval C2_TIMED_OUT could not mark the tunnel for release within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented the release of the tunnel (unexpected)
      */
-    virtual C2Status releaseTunnel_sm(node_id targetComponent) = 0;
-
+    virtual c2_status_t releaseTunnel_sm(c2_node_id_t targetComponent) = 0;
 
     // REFLECTION MECHANISM (USED FOR EXTENSION)
     // =============================================================================================
 
     /**
-     * Returns the parameter reflector.
-     *
-     * This is used to describe parameter fields.
-     *
-     * \return a shared parameter reflector object.
-     */
-    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const = 0;
-
-    /**
      * Returns the set of supported parameters.
      *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
      * \param[out] params a vector of supported parameters will be appended to this vector.
      *
      * \retval C2_OK        the operation completed successfully.
      * \retval C2_NO_MEMORY not enough memory to complete this method.
      */
-    virtual C2Status getSupportedParams(
+    virtual c2_status_t querySupportedParams_nb(
             std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const = 0;
 
     /**
+     * Retrieves the supported values for the queried fields.
      *
-     * \todo should this take a list considering that setting some fields may further limit other
-     * fields in the same list?
+     * Client SHALL set the parameter-field specifier and the type of supported values query (e.g.
+     * currently supported values, or potential supported values) in fields.
+     * Upon return the component SHALL fill in the supported values for the fields listed as well
+     * as a status for each field. Component shall process all fields queried even if some queries
+     * fail.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \param[in,out] fields a vector of field descriptor structures.
+     *
+     * \retval C2_OK        the operation completed successfully.
+     * \retval C2_BAD_INDEX at least one field was not recognized as a component field
      */
-    virtual C2Status getSupportedValues(
+    virtual c2_status_t querySupportedValues_nb(
             std::vector<C2FieldSupportedValuesQuery> &fields) const = 0;
 
     virtual ~C2ComponentInterface() = default;
@@ -316,6 +300,76 @@
 
 class C2Component {
 public:
+    class Listener {
+    public:
+        virtual void onWorkDone_nb(std::weak_ptr<C2Component> component,
+                                std::vector<std::unique_ptr<C2Work>> workItems) = 0;
+
+        virtual void onTripped_nb(std::weak_ptr<C2Component> component,
+                               std::vector<std::shared_ptr<C2SettingResult>> settingResult) = 0;
+
+        virtual void onError_nb(std::weak_ptr<C2Component> component,
+                             uint32_t errorCode) = 0;
+
+        // virtual void onTunnelReleased(<from>, <to>) = 0;
+
+        // virtual void onComponentReleased(<id>) = 0;
+
+    protected:
+        virtual ~Listener() = default;
+    };
+
+    /**
+     * Sets the listener for this component
+     *
+     * This method MUST be supported in all states. The listener can only be set to a non-null
+     * value in non-running state (that does not include tripped or error). It can be set to
+     * nullptr in any state. Components only use the listener in running state.
+     *
+     * If listener is nullptr, the component SHALL guarantee that no more listener callbacks are
+     * done to the original listener once this method returns. (Any pending listener callbacks will
+     * need to be completed during this call - hence this call may be temporarily blocking.)
+     *
+     * This method may be momentarily blocking, but must return within 5ms.
+     *
+     * Component SHALL handle listener notifications from the same thread (the thread used is
+     * at the component's discretion.)
+     *
+     * \note This could also be accomplished by passing a weak_ptr to a component-specific listener
+     * here and requiring the client to always promote the weak_ptr before any callback. This would
+     * put the burden on the client to clear the listener - wait for its deletion - at which point
+     * it is guaranteed that no more listener callbacks will occur.
+     *
+     * \todo TBD is this needed? or move it to createComponent()
+     *
+     * \param listener the component listener object
+     *
+     * \retval C2_BAD_STATE attempting to change the listener in the running state (user error)
+     * \retval C2_OK        listener was updated successfully.
+     */
+    virtual c2_status_t setListener_sm(const std::shared_ptr<Listener> &listener) = 0;
+
+    /**
+     * Information about a component.
+     */
+    struct Traits {
+    // public:
+    // TBD
+    #if 0
+        C2String name;             ///< name of the component
+        C2DomainKind domain;       ///< component domain (e.g. audio or video)
+        C2ComponentKind type;      ///< component type (e.g. encoder, decoder or filter)
+        C2StringLiteral mediaType; ///< media type supported by the component
+        C2ComponentPriority priority; ///< priority used to determine component ordering
+
+        /**
+         * name alias(es) for backward compatibility.
+         * \note Multiple components can have the same alias as long as their media-type differs.
+         */
+        std::vector<C2StringLiteral> aliases; ///< name aliases for backward compatibility
+    #endif
+    };
+
     // METHODS AVAILABLE WHEN RUNNING
     // =============================================================================================
 
@@ -336,7 +390,7 @@
      * \retval C2_NO_MEMORY not enough memory to queue the work
      * \retval C2_CORRUPTED some unknown error prevented queuing the work (unexpected)
      */
-    virtual C2Status queue_nb(std::list<std::unique_ptr<C2Work>>* const items) = 0;
+    virtual c2_status_t queue_nb(std::list<std::unique_ptr<C2Work>>* const items) = 0;
 
     /**
      * Announces a work to be queued later for the component. This reserves a slot for the queue
@@ -354,8 +408,17 @@
      * \retval C2_CORRUPTED some unknown error prevented recording the announcement (unexpected)
      *
      * \todo Can this be rolled into queue_nb?
+     * \todo Expose next work item for each component to detect stalls
      */
-    virtual C2Status announce_nb(const std::vector<C2WorkOutline> &items) = 0;
+    virtual c2_status_t announce_nb(const std::vector<C2WorkOutline> &items) = 0;
+
+    enum flush_mode_t : uint32_t {
+        /// flush work from this component only
+        FLUSH_COMPONENT,
+        /// flush work from this component and all components connected downstream from it via
+        /// tunneling
+        FLUSH_CHAIN,
+    };
 
     /**
      * Discards and abandons any pending work for the component, and optionally any component
@@ -379,22 +442,33 @@
      *
      * Work that could not be abandoned or discarded immediately SHALL be marked to be
      * discarded at the earliest opportunity, and SHALL be returned via the onWorkDone() callback.
+     * This shall be completed within 500ms.
      *
-     * \param flushThrough    flush work from this component and all components connected downstream
-     *                      from it via tunneling.
+     * \param mode flush mode
      *
-     * \retval C2_OK        the work announcement has been successfully recorded
+     * \retval C2_OK        the component has been successfully flushed
      * \retval C2_TIMED_OUT the flush could not be completed within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented flushing from completion (unexpected)
      */
-    virtual C2Status flush_sm(bool flushThrough, std::list<std::unique_ptr<C2Work>>* const flushedWork) = 0;
+    virtual c2_status_t flush_sm(flush_mode_t mode, std::list<std::unique_ptr<C2Work>>* const flushedWork) = 0;
+
+    enum drain_mode_t : uint32_t {
+        /// drain component only
+        DRAIN_COMPONENT,
+        /// marks the last work item with a persistent "end-of-stream" marker that will drain
+        /// downstream components
+        /// \todo this may confuse work-ordering downstream
+        DRAIN_CHAIN,
+        /**
+         * \todo define this; we could place EOS to all upstream components, just this component, or
+         *       all upstream and downstream components.
+         * \todo should EOS carry over to downstream components?
+         */
+    };
 
     /**
-     * Drains the component, and optionally downstream components
-     *
-     * \todo define this; we could place EOS to all upstream components, just this component, or
-     *       all upstream and downstream component.
-     * \todo should EOS carry over to downstream components?
+     * Drains the component, and optionally downstream components. This is a signalling method;
+     * as such it does not wait for any work completion.
      *
      * Marks last work item as "end-of-stream", so component is notified not to wait for further
      * work before it processes work already queued. This method is called to set the end-of-stream
@@ -407,16 +481,13 @@
      *
      * Work that is completed SHALL be returned via the onWorkDone() callback.
      *
-     * \param drainThrough    marks the last work item with a persistent "end-of-stream" marker that
-     *                      will drain downstream components.
+     * \param mode drain mode
      *
-     * \todo this may confuse work-ordering downstream; could be an mode enum
-     *
-     * \retval C2_OK        the work announcement has been successfully recorded
+     * \retval C2_OK        the drain request has been successfully recorded
      * \retval C2_TIMED_OUT the flush could not be completed within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented flushing from completion (unexpected)
      */
-    virtual C2Status drain_nb(bool drainThrough) = 0;
+    virtual c2_status_t drain_nb(drain_mode_t mode) = 0;
 
     // STATE CHANGE METHODS
     // =============================================================================================
@@ -430,14 +501,12 @@
      * there are no immediate guarantees. Though there are guarantees for responsiveness immediately
      * after start returns.
      *
-     * \todo Could we just start a ComponentInterface to get a Component?
-     *
-     * \retval C2_OK        the work announcement has been successfully recorded
+     * \retval C2_OK        the component has started successfully
      * \retval C2_NO_MEMORY not enough memory to start the component
      * \retval C2_TIMED_OUT the component could not be started within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented starting the component (unexpected)
      */
-    virtual C2Status start() = 0;
+    virtual c2_status_t start() = 0;
 
     /**
      * Stops the component.
@@ -454,14 +523,14 @@
      * This does not alter any settings and tunings that may have resulted in a tripped state.
      * (Is this material given the definition? Perhaps in case we want to start again.)
      */
-    virtual C2Status stop() = 0;
+    virtual c2_status_t stop() = 0;
 
     /**
      * Resets the component.
      *
-     * This method MUST be supported in running (including tripped) state.
+     * This method MUST be supported in all (including tripped) states.
      *
-     * This method MUST be supported during any other call (\todo or just blocking ones?)
+     * This method MUST be supported during any other blocking call.
      *
      * This method MUST return withing 500ms.
      *
@@ -473,22 +542,24 @@
      * This brings settings back to their default - "guaranteeing" no tripped space.
      *
      * \todo reclaim support - it seems that since ownership is passed, this will allow reclaiming stuff.
+     *
+     * \retval C2_OK        the component has been reset
+     * \retval C2_TIMED_OUT the component could not be reset within the time limit (unexpected)
+     * \retval C2_CORRUPTED some unknown error prevented resetting the component (unexpected)
      */
     virtual void reset() = 0;
 
     /**
      * Releases the component.
      *
-     * This method MUST be supported in any state. (\todo Or shall we force reset() first to bring
-     * to a known state?)
+     * This method MUST be supported in stopped state.
      *
-     * This method MUST return withing 500ms.
+     * This method MUST return within 500ms. Upon return all references shall be abandoned.
      *
-     * \todo should this return completed work, since client will just free it? Also, if it unblocks
-     * a stop, where should completed work be returned?
-     *
-     * TODO: does it matter if this call has a short time limit? Yes, as upon return all references
-     * shall be abandoned.
+     * \retval C2_OK        the component has been released
+     * \retval C2_BAD_STATE the component is running
+     * \retval C2_TIMED_OUT the component could not be released within the time limit (unexpected)
+     * \retval C2_CORRUPTED some unknown error prevented releasing the component (unexpected)
      */
     virtual void release() = 0;
 
@@ -499,7 +570,6 @@
      */
     virtual std::shared_ptr<C2ComponentInterface> intf() = 0;
 
-protected:
     virtual ~C2Component() = default;
 };
 
@@ -515,6 +585,8 @@
     /**
      * \return a vector of supported parameter indices parsed by this info parser.
      *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
      * \todo sticky vs. non-sticky params? this may be communicated by param-reflector.
      */
     virtual const std::vector<C2Param::Index> getParsedParams() const = 0;
@@ -528,18 +600,13 @@
      * \retval C2_TIMED_OUT could not reset the parser within the time limit (unexpected)
      * \retval C2_CORRUPTED some unknown error prevented the resetting of the parser (unexpected)
      */
-    virtual C2Status reset() { return C2_OK; }
+    virtual c2_status_t reset() { return C2_OK; }
 
-    virtual C2Status parseFrame(C2BufferPack &frame);
+    virtual c2_status_t parseFrame(C2BufferPack &frame);
 
     virtual ~C2FrameInfoParser() = default;
 };
 
-struct C2ComponentInfo {
-    // TBD
-
-};
-
 class C2AllocatorStore {
 public:
     typedef C2Allocator::id_t id_t;
@@ -564,16 +631,18 @@
     /**
      * Returns the set of allocators supported by this allocator store.
      *
-     * This method SHALL return within 1ms.
+     * This method MUST be "non-blocking" and return within 1ms.
      *
      * \retval vector of allocator information (as shared pointers)
      * \retval an empty vector if there was not enough memory to allocate the whole vector.
      */
-    virtual std::vector<std::shared_ptr<const C2Allocator::Info>> listAllocators() const = 0;
+    virtual std::vector<std::shared_ptr<const C2Allocator::Traits>> listAllocators_nb() const = 0;
 
     /**
      * Retrieves/creates a shared allocator object.
      *
+     * This method MUST return within 5ms.
+     *
      * The allocator is created on first use, and the same allocator is returned on subsequent
      * concurrent uses in the same process. The allocator is freed when it is no longer referenced.
      *
@@ -589,7 +658,7 @@
      * \retval C2_NOT_FOUND no such allocator
      * \retval C2_NO_MEMORY not enough memory to create the allocator
      */
-    virtual C2Status getAllocator(id_t id, std::shared_ptr<C2Allocator>* const allocator) = 0;
+    virtual c2_status_t fetchAllocator(id_t id, std::shared_ptr<C2Allocator>* const allocator) = 0;
 
     virtual ~C2AllocatorStore() = default;
 };
@@ -597,6 +666,21 @@
 class C2ComponentStore {
 public:
     /**
+     * Returns the name of this component store object.
+     * This is a unique name for this component store 'class'; however, multiple instances of
+     * this store SHALL have the same name.
+     *
+     * This method MUST be supported in any state. This call does not change the state nor the
+     * internal states of the component.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \return the name of this component store object.
+     * \retval an empty string if there was not enough memory to allocate the actual name.
+     */
+    virtual C2String getName() const = 0;
+
+    /**
      * Creates a component.
      *
      * This method SHALL return within 100ms.
@@ -612,7 +696,8 @@
      * \retval C2_NOT_FOUND no such component
      * \retval C2_NO_MEMORY not enough memory to create the component
      */
-    virtual C2Status createComponent(C2String name, std::shared_ptr<C2Component>* const component);
+    virtual c2_status_t createComponent(
+            C2String name, std::shared_ptr<C2Component>* const component) = 0;
 
     /**
      * Creates a component interface.
@@ -633,26 +718,27 @@
      *
      * \todo Do we need an interface, or could this just be a component that is never started?
      */
-    virtual C2Status createInterface(C2String name, std::shared_ptr<C2ComponentInterface>* const interface);
+    virtual c2_status_t createInterface(
+            C2String name, std::shared_ptr<C2ComponentInterface>* const interface) = 0;
 
     /**
      * Returns the list of components supported by this component store.
      *
-     * This method SHALL return within 1ms.
+     * This method MUST return within 500ms.
      *
      * \retval vector of component information.
      */
-    virtual std::vector<std::unique_ptr<const C2ComponentInfo>> getComponents();
+    virtual std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() = 0;
 
     // -------------------------------------- UTILITY METHODS --------------------------------------
 
     // on-demand buffer layout conversion (swizzling)
-    virtual C2Status copyBuffer(std::shared_ptr<C2GraphicBuffer> src, std::shared_ptr<C2GraphicBuffer> dst);
+    //
+    virtual c2_status_t copyBuffer(
+            std::shared_ptr<C2GraphicBuffer> src, std::shared_ptr<C2GraphicBuffer> dst) = 0;
 
-    // C2Status selectPreferredColor(formats<A>, formats<B>);
-
-    // GLOBAL SETTINGS
-    // system-wide stride & slice-height (???)
+    // -------------------------------------- CONFIGURATION API -----------------------------------
+    // e.g. for global settings (system-wide stride, etc.)
 
     /**
      * Queries a set of system-wide parameters.
@@ -683,7 +769,7 @@
      * \retval C2_CORRUPTED some unknown error prevented the querying of the parameters
      *                      (unexpected)
      */
-    virtual C2Status query_sm(
+    virtual c2_status_t query_sm(
         const std::vector<C2Param* const> &stackParams,
         const std::vector<C2Param::Index> &heapParamIndices,
         std::vector<std::unique_ptr<C2Param>>* const heapParams) const = 0;
@@ -701,15 +787,15 @@
      * \note Parameter tuning DOES depend on the order of the tuning parameters. E.g. some parameter
      * update may allow some subsequent parameter update.
      *
-     * This method MUST be "non-blocking" and return within 1ms.
+     * This method may be momentarily blocking, but MUST return within 5ms.
      *
-     * \param params          a list of parameter updates. These will be updated to the actual
+     * \param params        a list of parameter updates. These will be updated to the actual
      *                      parameter values after the updates (this is because tuning is performed
      *                      at best effort).
      *                      \todo params that could not be updated are not marked here, so are
      *                      confusing - are they "existing" values or intended to be configured
      *                      values?
-     * \param failures        a list of parameter failures
+     * \param failures      a list of parameter failures
      *
      * \retval C2_OK        all parameters could be updated successfully
      * \retval C2_BAD_INDEX all supported parameters could be updated successfully, but some
@@ -722,9 +808,95 @@
      * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
      *                      (unexpected)
      */
-    virtual C2Status config_nb(
+    virtual c2_status_t config_sm(
             const std::vector<C2Param* const> &params,
-            std::list<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+
+    /**
+     * Atomically sets a set of system-wide parameters.
+     *
+     * \note There are no settable system-wide parameters defined thus far, but may be added in the
+     * future.
+     *
+     * The component store SHALL update all supported configuration at best effort (TBD)
+     * (unless configured otherwise) and skip unsupported ones. If any errors are encountered
+     * (other than unsupported parameters), the configuration SHALL be aborted as if it did not
+     * happen.
+     *
+     * \note Parameter tuning DOES depend on the order of the tuning parameters. E.g. some parameter
+     * update may allow some subsequent parameter update.
+     *
+     * This method may be momentarily blocking, but MUST return within 5ms.
+     *
+     * \param params[in,out] a list of parameter updates. These will be updated to the actual
+     *                       parameter values after the updates (this is because tuning is performed
+     *                       at best effort).
+     *                       \todo params that could not be updated are not marked here, so are
+     *                       confusing - are they "existing" values or intended to be configured
+     *                       values?
+     * \param failures[out]  a list of parameter failures
+     *
+     * \retval C2_OK        all parameters could be updated successfully
+     * \retval C2_BAD_INDEX all supported parameters could be updated successfully, but some
+     *                      parameters were not supported
+     * \retval C2_BAD_VALUE some supported parameters could not be updated successfully because
+     *                      they contained unsupported values. These are returned in |failures|.
+     * \retval C2_NO_MEMORY some supported parameters could not be updated successfully because
+     *                      they contained unsupported values, but could not allocate a failure
+     *                      object for them.
+     * \retval C2_CORRUPTED some unknown error prevented the update of the parameters
+     *                      (unexpected)
+     */
+    virtual c2_status_t commit_sm(
+            const std::vector<C2Param* const> &params,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+
+    // REFLECTION MECHANISM (USED FOR EXTENSION)
+    // =============================================================================================
+
+    /**
+     * Returns the parameter reflector.
+     *
+     * This is used to describe parameter fields. This is shared for all components created by
+     * this component store.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \return a shared parameter reflector object.
+     */
+    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const = 0;
+
+    /**
+     * Returns the set of supported parameters.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \param[out] params a vector of supported parameters will be appended to this vector.
+     *
+     * \retval C2_OK        the operation completed successfully.
+     * \retval C2_NO_MEMORY not enough memory to complete this method.
+     */
+    virtual c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const = 0;
+
+    /**
+     * Retrieves the supported values for the queried fields.
+     *
+     * Client SHALL set the parameter-field specifier and the type of supported values query (e.g.
+     * currently supported values, or potential supported values) in fields.
+     * Upon return the store SHALL fill in the supported values for the fields listed as well
+     * as a status for each field. The store SHALL process all queried fields even if some
+     * queries fail.
+     *
+     * This method MUST be "non-blocking" and return within 1ms.
+     *
+     * \param[in,out] fields a vector of field descriptor structures.
+     *
+     * \retval C2_OK        the operation completed successfully.
+     * \retval C2_BAD_INDEX at least one field was not recognized as a component store field
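+     *
+     * Illustrative query (editor's sketch, mirroring the param tests; |store| and |domainInfo|
+     * are placeholders):
+     * \code
+     *   std::vector<C2FieldSupportedValuesQuery> fields = {
+     *       C2FieldSupportedValuesQuery::Current(
+     *               C2ParamField(&domainInfo, &C2ComponentDomainInfo::mValue)),
+     *   };
+     *   c2_status_t res = store->querySupportedValues_nb(fields);
+     * \endcode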
+     */
+    virtual c2_status_t querySupportedValues_nb(
+            std::vector<C2FieldSupportedValuesQuery> &fields) const = 0;
 
     virtual ~C2ComponentStore() = default;
 };
diff --git a/media/libstagefright/codec2/include/C2Config.h b/media/libstagefright/codec2/include/C2Config.h
index d4294c4..0568432 100644
--- a/media/libstagefright/codec2/include/C2Config.h
+++ b/media/libstagefright/codec2/include/C2Config.h
@@ -61,13 +61,15 @@
     kParamIndexMime,
     kParamIndexStreamCount,
     kParamIndexFormat,
+    kParamIndexBlockPools,
+
+    kParamIndexMaxVideoSizeHint,
+    kParamIndexVideoSizeTuning,
 
     // video info
 
     kParamIndexStructStart = 0x1,
     kParamIndexVideoSize,
-    kParamIndexMaxVideoSizeHint,
-    kParamIndexVideoSizeTuning,
 
     kParamIndexParamStart = 0x800,
 };
@@ -125,6 +127,8 @@
 
 typedef C2StreamParam<C2Tuning, C2Uint32Value, kParamIndexFormat> C2StreamFormatConfig;
 
+typedef C2PortParam<C2Tuning, C2Uint64Array, kParamIndexBlockPools> C2PortBlockPoolsTuning;
+
 /*
    Component description fields:
 
diff --git a/media/libstagefright/codec2/include/C2Work.h b/media/libstagefright/codec2/include/C2Work.h
index 52c00d5..105cf81 100644
--- a/media/libstagefright/codec2/include/C2Work.h
+++ b/media/libstagefright/codec2/include/C2Work.h
@@ -75,14 +75,9 @@
 //  WORK
 // ================================================================================================
 
-// node_id-s
-typedef uint32_t node_id;
-
-enum flags_t : uint32_t {
-    BUFFERFLAG_CODEC_CONFIG  = (1 << 0),
-    BUFFERFLAG_DROP_FRAME    = (1 << 1),
-    BUFFERFLAG_END_OF_STREAM = (1 << 2),
-};
+// node ids
+typedef uint32_t c2_node_id_t;
 
 enum {
     kParamIndexWorkOrdinal,
@@ -101,6 +96,12 @@
 
 struct C2BufferPack {
 //public:
+    enum flags_t : uint32_t {
+        FLAG_CODEC_CONFIG  = (1 << 0),
+        FLAG_DROP_FRAME    = (1 << 1),
+        FLAG_END_OF_STREAM = (1 << 2),
+    };
+
     flags_t  flags;
     C2WorkOrdinalStruct ordinal;
     std::vector<std::shared_ptr<C2Buffer>> buffers;
@@ -113,7 +114,7 @@
 struct C2Worklet {
 //public:
     // IN
-    node_id component;
+    c2_node_id_t component;
 
     std::list<std::unique_ptr<C2Param>> tunings; //< tunings to be applied before processing this
                                                  // worklet
@@ -165,13 +166,13 @@
     std::list<std::unique_ptr<C2Worklet>> worklets;
 
     uint32_t worklets_processed;
-    C2Status result;
+    c2_status_t result;
 };
 
 struct C2WorkOutline {
 //public:
     C2WorkOrdinalStruct ordinal;
-    std::list<node_id> chain;
+    std::list<c2_node_id_t> chain;
 };
 
 /// @}
diff --git a/media/libstagefright/codec2/tests/C2ComponentInterface_test.cpp b/media/libstagefright/codec2/tests/C2ComponentInterface_test.cpp
index b725d76..0613b5a 100644
--- a/media/libstagefright/codec2/tests/C2ComponentInterface_test.cpp
+++ b/media/libstagefright/codec2/tests/C2ComponentInterface_test.cpp
@@ -72,7 +72,7 @@
 
     // If a parameter is writable this is called.
     // Test one filed |writableField| for given writable parameter |param|.
-    // |validValues| contains all values obtained from getSupportedValues() for |writableField|.
+    // |validValues| contains all values obtained from querySupportedValues() for |writableField|.
     // The test checks validity for config() with each value, and make sure values are config-ed
     // by query() them out. |invalidValues| contains some values which are not in |validValues|.
     // The test expects C2_BAD_VALUE while config() with these values,
@@ -112,11 +112,11 @@
     // check if a component has a parameter whose type is |T|.
     // If a component has, the value should be copied into an argument, that is
     // |p| in queryOnStack() and |heapParams| in queryOnHeap().
-    // The return value is C2Status (e.g. C2_OK).
-    template <typename T> C2Status queryOnStack(T *const p);
+    // The return value is c2_status_t (e.g. C2_OK).
+    template <typename T> c2_status_t queryOnStack(T *const p);
 
     template <typename T>
-    C2Status queryOnHeap(const T &p,
+    c2_status_t queryOnHeap(const T &p,
                          std::vector<std::unique_ptr<C2Param>> *const heapParams);
 
     // Get a value whose type is |T| in a component. The value is copied to |param|.
@@ -139,7 +139,7 @@
     // Execute an interface's config_nb(). |T| is a single parameter type, not std::vector.
     // config() creates std::vector<C2Param *const> {p} and passes it to config_nb().
     template <typename T>
-    C2Status
+    c2_status_t
     config(T *const p,
            std::vector<std::unique_ptr<C2SettingResult>> *const failures);
 
@@ -150,7 +150,7 @@
     // Test if config works correctly for writable parameters.
     // This changes the parameter's value to |newParam|.
     // |stConfig| is a return value of config().
-    template <typename T> void configWritableParamValidValue(const T &newParam, C2Status *stConfig);
+    template <typename T> void configWritableParamValidValue(const T &newParam, c2_status_t *stConfig);
 
     // Test if config works correctly in the case an invalid value |newParam| is tried to write
     // to an writable parameter.
@@ -194,13 +194,13 @@
         }                                               \
     } while (false)
 
-template <typename T> C2Status C2CompIntfTest::queryOnStack(T *const p) {
+template <typename T> c2_status_t C2CompIntfTest::queryOnStack(T *const p) {
     std::vector<C2Param *const> stackParams{p};
     return mIntf->query_nb(stackParams, {}, nullptr);
 }
 
 template <typename T>
-C2Status C2CompIntfTest::queryOnHeap(
+c2_status_t C2CompIntfTest::queryOnHeap(
         const T &p, std::vector<std::unique_ptr<C2Param>> *const heapParams) {
     uint32_t index = p.type();
     if (p.forStream()) {
@@ -258,7 +258,7 @@
 }
 
 template <typename T>
-C2Status C2CompIntfTest::config(
+c2_status_t C2CompIntfTest::config(
         T *const p, std::vector<std::unique_ptr<C2SettingResult>> *const failures) {
     std::vector<C2Param *const> params{p};
     return mIntf->config_nb(params, failures);
@@ -286,7 +286,7 @@
 }
 
 template <typename T>
-void C2CompIntfTest::configWritableParamValidValue(const T &newParam, C2Status *configResult) {
+void C2CompIntfTest::configWritableParamValidValue(const T &newParam, c2_status_t *configResult) {
     std::unique_ptr<T> p = makeParamFrom(newParam);
 
     std::vector<C2Param *const> params{p.get()};
@@ -297,7 +297,7 @@
     // because there may be dependent limitations between fields or between parameters.
     // TODO(hiroh): I have to fill the return value. Comments in C2Component.h doesn't mention
     // about the return value when conflict happens. I set C2_BAD_VALUE to it temporarily now.
-    C2Status stConfig = mIntf->config_nb(params, &failures);
+    c2_status_t stConfig = mIntf->config_nb(params, &failures);
     if (stConfig == C2_OK) {
         EXPECT_EQ(0u, failures.size());
     } else {
@@ -481,7 +481,7 @@
         TParam *const param, TRealField *const writableField,
         const std::vector<TField> &validValues,
         const std::vector<TField> &invalidValues) {
-    C2Status stConfig;
+    c2_status_t stConfig;
 
     // Get the parameter's value in the beginning in order to reset the value at the end.
     TRACED_FAILURE(getValue(param));
@@ -555,7 +555,7 @@
     std::vector<std::unique_ptr<C2SettingResult>> failures;
     // Config does not change the parameter, because param is the present param.
     // This config is executed to find out if a parameter is read-only or writable.
-    C2Status stStack = config(param.get(), &failures);
+    c2_status_t stStack = config(param.get(), &failures);
     if (stStack == C2_BAD_VALUE) {
         // Read-only
         std::unique_ptr<T> newParam = makeParam<T>();
@@ -594,7 +594,7 @@
                     C2ParamField(param.get(), &field_type_name_::field_name_)) \
         };                                                              \
         ASSERT_EQ(C2_OK,                                                \
-                  mIntf->getSupportedValues(validValueInfos));          \
+                  mIntf->querySupportedValues_nb(validValueInfos));     \
         ASSERT_EQ(1u, validValueInfos.size());                          \
         std::vector<decltype(param->field_name_)> validValues;          \
         std::vector<decltype(param->field_name_)> invalidValues;        \
@@ -640,7 +640,7 @@
     setComponent(intf);
 
     std::vector<std::shared_ptr<C2ParamDescriptor>> supportedParams;
-    ASSERT_EQ(C2_OK, mIntf->getSupportedParams(&supportedParams));
+    ASSERT_EQ(C2_OK, mIntf->querySupportedParams_nb(&supportedParams));
 
     EACH_TEST_SELF(C2ComponentLatencyInfo, TEST_U32_WRITABLE_FIELD);
     EACH_TEST_SELF(C2ComponentTemporalInfo, TEST_U32_WRITABLE_FIELD);
diff --git a/media/libstagefright/codec2/tests/C2Param_test.cpp b/media/libstagefright/codec2/tests/C2Param_test.cpp
index 97c5f91..a688d2c 100644
--- a/media/libstagefright/codec2/tests/C2Param_test.cpp
+++ b/media/libstagefright/codec2/tests/C2Param_test.cpp
@@ -2393,41 +2393,41 @@
 
 class MyComponentInstance : public C2ComponentInterface {
 public:
-    virtual C2String getName() const {
+    virtual C2String getName() const override {
         /// \todo this seems too specific
         return "sample.interface";
     };
 
-    virtual node_id getId() const {
+    virtual c2_node_id_t getId() const override {
         /// \todo how are these shared?
         return 0;
     }
 
-    virtual C2Status commit_sm(
+    virtual c2_status_t commit_sm(
             const std::vector<C2Param* const> &params,
-            std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override {
         (void)params;
         (void)failures;
         return C2_OMITTED;
     }
 
-    virtual C2Status config_nb(
+    virtual c2_status_t config_nb(
             const std::vector<C2Param* const> &params,
-            std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override {
         (void)params;
         (void)failures;
         return C2_OMITTED;
     }
 
-    virtual C2Status createTunnel_sm(node_id targetComponent) {
+    virtual c2_status_t createTunnel_sm(c2_node_id_t targetComponent) override {
         (void)targetComponent;
         return C2_OMITTED;
     }
 
-    virtual C2Status query_nb(
+    virtual c2_status_t query_nb(
             const std::vector<C2Param* const> &stackParams,
             const std::vector<C2Param::Index> &heapParamIndices,
-            std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override {
         for (C2Param* const param : stackParams) {
             if (!*param) { // param is already invalid - remember it
                 continue;
@@ -2466,7 +2466,7 @@
         mMyParams.insert({mDomainInfo.type(), mDomainInfo});
     }
 
-    virtual C2Status releaseTunnel_sm(node_id targetComponent) {
+    virtual c2_status_t releaseTunnel_sm(c2_node_id_t targetComponent) override {
         (void)targetComponent;
         return C2_OMITTED;
     }
@@ -2477,7 +2477,7 @@
     public:
         MyParamReflector(const MyComponentInstance *i) : instance(i) { }
 
-        virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::BaseIndex paramIndex) {
+        virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::BaseIndex paramIndex) override {
             switch (paramIndex.coreIndex()) {
             case decltype(instance->mDomainInfo)::coreIndex:
             default:
@@ -2490,8 +2490,8 @@
         }
     };
 
-    virtual C2Status getSupportedValues(
-            std::vector<C2FieldSupportedValuesQuery> &fields) const {
+    virtual c2_status_t querySupportedValues_nb(
+            std::vector<C2FieldSupportedValuesQuery> &fields) const override {
         for (C2FieldSupportedValuesQuery &query : fields) {
             if (query.field == C2ParamField(&mDomainInfo, &C2ComponentDomainInfo::mValue)) {
                 query.values = C2FieldSupportedValues(
@@ -2508,22 +2508,20 @@
         return C2_OK;
     }
 
-    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const {
+    std::shared_ptr<C2ParamReflector> getParamReflector() const {
         return std::shared_ptr<C2ParamReflector>(new MyParamReflector(this));
     }
 
-    virtual C2Status getSupportedParams(std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const {
+    virtual c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const override {
         params->push_back(std::make_shared<C2ParamDescriptor>(
                 true /* required */, "_domain", &mDomainInfo));
-        return C2_OK;
-    }
-
-    C2Status getSupportedParams2(std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) {
         params->push_back(std::shared_ptr<C2ParamDescriptor>(
                 new C2ParamDescriptor(true /* required */, "_domain", &mDomainInfo)));
         return C2_OK;
     }
 
+    virtual ~MyComponentInstance() override = default;
 };
 
 template<typename E, bool S=std::is_enum<E>::value>
@@ -2700,10 +2698,11 @@
 
 TEST_F(C2ParamTest, ReflectorTest) {
     C2ComponentDomainInfo domainInfo;
-    std::shared_ptr<C2ComponentInterface> comp(new MyComponentInstance);
+    std::shared_ptr<MyComponentInstance> myComp(new MyComponentInstance);
+    std::shared_ptr<C2ComponentInterface> comp = myComp;
 
     std::unique_ptr<C2StructDescriptor> desc{
-        comp->getParamReflector()->describe(C2ComponentDomainInfo::indexFlags)};
+        myComp->getParamReflector()->describe(C2ComponentDomainInfo::indexFlags)};
     dumpStruct(*desc);
 
     std::vector<C2FieldSupportedValuesQuery> query = {
@@ -2714,7 +2713,7 @@
         C2FieldSupportedValuesQuery::Current(C2ParamField(&domainInfo, &C2ComponentDomainInfo::mValue)),
     };
 
-    EXPECT_EQ(C2_OK, comp->getSupportedValues(query));
+    EXPECT_EQ(C2_OK, comp->querySupportedValues_nb(query));
 
     for (const C2FieldSupportedValuesQuery &q : query) {
         dumpFSV(q.values, &domainInfo.mValue);
diff --git a/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp b/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp
index 3d23c23..1bcf070 100644
--- a/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp
+++ b/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp
@@ -38,7 +38,7 @@
     ~C2BufferTest() = default;
 
     void allocateLinear(size_t capacity) {
-        C2Status err = mLinearAllocator->newLinearAllocation(
+        c2_status_t err = mLinearAllocator->newLinearAllocation(
                 capacity,
                 { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
                 &mLinearAllocation);
@@ -50,7 +50,7 @@
 
     void mapLinear(size_t offset, size_t size, uint8_t **addr) {
         ASSERT_TRUE(mLinearAllocation);
-        C2Status err = mLinearAllocation->map(
+        c2_status_t err = mLinearAllocation->map(
                 offset,
                 size,
                 { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
@@ -82,7 +82,7 @@
     }
 
     void allocateGraphic(uint32_t width, uint32_t height) {
-        C2Status err = mGraphicAllocator->newGraphicAllocation(
+        c2_status_t err = mGraphicAllocator->newGraphicAllocation(
                 width,
                 height,
                 HAL_PIXEL_FORMAT_YCBCR_420_888,
@@ -96,7 +96,7 @@
 
     void mapGraphic(C2Rect rect, C2PlaneLayout *layout, uint8_t **addr) {
         ASSERT_TRUE(mGraphicAllocation);
-        C2Status err = mGraphicAllocation->map(
+        c2_status_t err = mGraphicAllocation->map(
                 rect,
                 { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
                 // TODO: fence
diff --git a/media/libstagefright/codec2/vndk/Android.bp b/media/libstagefright/codec2/vndk/Android.bp
index 64ce5e6..d2cfebb 100644
--- a/media/libstagefright/codec2/vndk/Android.bp
+++ b/media/libstagefright/codec2/vndk/Android.bp
@@ -9,6 +9,10 @@
         "C2Store.cpp",
     ],
 
+    export_include_dirs: [
+        "include",
+    ],
+
     include_dirs: [
         "frameworks/av/media/libstagefright/codec2/include",
         "frameworks/av/media/libstagefright/codec2/vndk/include",
diff --git a/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
index 3e2242b..da8372c 100644
--- a/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
@@ -38,7 +38,7 @@
 using ::android::hardware::hidl_vec;
 
 /* ===================================== GRALLOC ALLOCATION ==================================== */
-static C2Status maperr2error(Error maperr) {
+static c2_status_t maperr2error(Error maperr) {
     switch (maperr) {
         case Error::NONE:           return C2_OK;
         case Error::BAD_DESCRIPTOR: return C2_BAD_VALUE;
@@ -52,12 +52,12 @@
 
 class C2AllocationGralloc : public C2GraphicAllocation {
 public:
-    virtual ~C2AllocationGralloc();
+    virtual ~C2AllocationGralloc() override;
 
-    virtual C2Status map(
+    virtual c2_status_t map(
             C2Rect rect, C2MemoryUsage usage, int *fenceFd,
             C2PlaneLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) override;
-    virtual C2Status unmap(C2Fence *fenceFd /* nullable */) override;
+    virtual c2_status_t unmap(C2Fence *fenceFd /* nullable */) override;
     virtual bool isValid() const override { return true; }
     virtual const C2Handle *handle() const override { return mHandle; }
     virtual bool equals(const std::shared_ptr<const C2GraphicAllocation> &other) const override;
@@ -69,7 +69,7 @@
               const sp<IMapper> &mapper,
               hidl_handle &handle);
     int dup() const;
-    C2Status status() const;
+    c2_status_t status() const;
 
 private:
     const IMapper::BufferDescriptorInfo mInfo;
@@ -100,7 +100,7 @@
     mMapper->freeBuffer(const_cast<native_handle_t *>(mBuffer));
 }
 
-C2Status C2AllocationGralloc::map(
+c2_status_t C2AllocationGralloc::map(
         C2Rect rect, C2MemoryUsage usage, int *fenceFd,
         C2PlaneLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) {
     // TODO
@@ -114,7 +114,7 @@
         return C2_BAD_VALUE;
     }
 
-    C2Status err = C2_OK;
+    c2_status_t err = C2_OK;
     if (!mBuffer) {
         mMapper->importBuffer(
                 mHandle, [&err, this](const auto &maperr, const auto &buffer) {
@@ -202,9 +202,9 @@
     return C2_OK;
 }
 
-C2Status C2AllocationGralloc::unmap(C2Fence *fenceFd /* nullable */) {
+c2_status_t C2AllocationGralloc::unmap(C2Fence *fenceFd /* nullable */) {
     // TODO: fence
-    C2Status err = C2_OK;
+    c2_status_t err = C2_OK;
     mMapper->unlock(
             const_cast<native_handle_t *>(mBuffer),
             [&err, &fenceFd](const auto &maperr, const auto &releaseFence) {
@@ -235,18 +235,18 @@
 
     C2String getName() const;
 
-    C2Status newGraphicAllocation(
+    c2_status_t newGraphicAllocation(
             uint32_t width, uint32_t height, uint32_t format, const C2MemoryUsage &usage,
             std::shared_ptr<C2GraphicAllocation> *allocation);
 
-    C2Status priorGraphicAllocation(
+    c2_status_t priorGraphicAllocation(
             const C2Handle *handle,
             std::shared_ptr<C2GraphicAllocation> *allocation);
 
-    C2Status status() const { return mInit; }
+    c2_status_t status() const { return mInit; }
 
 private:
-    C2Status mInit;
+    c2_status_t mInit;
     sp<IAllocator> mAllocator;
     sp<IMapper> mMapper;
 };
@@ -268,7 +268,7 @@
     return "android.allocator.gralloc";
 }
 
-C2Status C2AllocatorGralloc::Impl::newGraphicAllocation(
+c2_status_t C2AllocatorGralloc::Impl::newGraphicAllocation(
         uint32_t width, uint32_t height, uint32_t format, const C2MemoryUsage &usage,
         std::shared_ptr<C2GraphicAllocation> *allocation) {
     // TODO: buffer usage should be determined according to |usage|
@@ -281,7 +281,7 @@
         (PixelFormat)format,
         BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
     };
-    C2Status err = C2_OK;
+    c2_status_t err = C2_OK;
     BufferDescriptor desc;
     mMapper->createDescriptor(
             info, [&err, &desc](const auto &maperr, const auto &descriptor) {
@@ -319,7 +319,7 @@
     return C2_OK;
 }
 
-C2Status C2AllocatorGralloc::Impl::priorGraphicAllocation(
+c2_status_t C2AllocatorGralloc::Impl::priorGraphicAllocation(
         const C2Handle *handle,
         std::shared_ptr<C2GraphicAllocation> *allocation) {
     (void) handle;
@@ -341,19 +341,19 @@
     return mImpl->getName();
 }
 
-C2Status C2AllocatorGralloc::newGraphicAllocation(
+c2_status_t C2AllocatorGralloc::newGraphicAllocation(
         uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
         std::shared_ptr<C2GraphicAllocation> *allocation) {
     return mImpl->newGraphicAllocation(width, height, format, usage, allocation);
 }
 
-C2Status C2AllocatorGralloc::priorGraphicAllocation(
+c2_status_t C2AllocatorGralloc::priorGraphicAllocation(
         const C2Handle *handle,
         std::shared_ptr<C2GraphicAllocation> *allocation) {
     return mImpl->priorGraphicAllocation(handle, allocation);
 }
 
-C2Status C2AllocatorGralloc::status() const {
+c2_status_t C2AllocatorGralloc::status() const {
     return mImpl->status();
 }
 
diff --git a/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp b/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp
index 09df502..acd69af 100644
--- a/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp
@@ -82,20 +82,20 @@
 /* ======================================= ION ALLOCATION ====================================== */
 class C2AllocationIon : public C2LinearAllocation {
 public:
-    virtual C2Status map(
+    virtual c2_status_t map(
         size_t offset, size_t size, C2MemoryUsage usage, int *fence,
-        void **addr /* nonnull */);
-    virtual C2Status unmap(void *addr, size_t size, int *fenceFd);
-    virtual bool isValid() const;
-    virtual ~C2AllocationIon();
-    virtual const C2Handle *handle() const;
-    virtual bool equals(const std::shared_ptr<C2LinearAllocation> &other) const;
+        void **addr /* nonnull */) override;
+    virtual c2_status_t unmap(void *addr, size_t size, int *fenceFd) override;
+    virtual bool isValid() const override;
+    virtual ~C2AllocationIon() override;
+    virtual const C2Handle *handle() const override;
+    virtual bool equals(const std::shared_ptr<C2LinearAllocation> &other) const override;
 
     // internal methods
     C2AllocationIon(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags);
     C2AllocationIon(int ionFd, size_t size, int shareFd);
     int dup() const;
-    C2Status status() const;
+    c2_status_t status() const;
 
 protected:
     class Impl;
@@ -142,7 +142,7 @@
         (void)mCapacity; // TODO
     }
 
-    C2Status map(size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd, void **addr) {
+    c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd, void **addr) {
         (void)fenceFd; // TODO: wait for fence
         *addr = nullptr;
         int prot = PROT_NONE;
@@ -159,7 +159,7 @@
         size_t mapOffset = offset - alignmentBytes;
         size_t mapSize = size + alignmentBytes;
 
-        C2Status err = C2_OK;
+        c2_status_t err = C2_OK;
         if (mMapFd == -1) {
             int ret = ion_map(mHandle.ionFd(), mHandle.buffer(), mapSize, prot,
                               flags, mapOffset, (unsigned char**)&mMapAddr, &mMapFd);
@@ -186,7 +186,7 @@
         return err;
     }
 
-    C2Status unmap(void *addr, size_t size, int *fenceFd) {
+    c2_status_t unmap(void *addr, size_t size, int *fenceFd) {
         if (addr != (uint8_t *)mMapAddr + mMapAlignmentBytes ||
                 size + mMapAlignmentBytes != mMapSize) {
             return C2_BAD_VALUE;
@@ -210,7 +210,7 @@
         (void)ion_free(mHandle.ionFd(), mHandle.buffer());
     }
 
-    C2Status status() const {
+    c2_status_t status() const {
         return mInit;
     }
 
@@ -227,7 +227,7 @@
     }
 
 private:
-    C2Status mInit;
+    c2_status_t mInit;
     C2HandleIon mHandle;
     int mMapFd; // only one for now
     void *mMapAddr;
@@ -236,12 +236,12 @@
     size_t mCapacity;
 };
 
-C2Status C2AllocationIon::map(
+c2_status_t C2AllocationIon::map(
     size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd, void **addr) {
     return mImpl->map(offset, size, usage, fenceFd, addr);
 }
 
-C2Status C2AllocationIon::unmap(void *addr, size_t size, int *fenceFd) {
+c2_status_t C2AllocationIon::unmap(void *addr, size_t size, int *fenceFd) {
     return mImpl->unmap(addr, size, fenceFd);
 }
 
@@ -249,7 +249,7 @@
     return mImpl->status() == C2_OK;
 }
 
-C2Status C2AllocationIon::status() const {
+c2_status_t C2AllocationIon::status() const {
     return mImpl->status();
 }
 
@@ -302,7 +302,7 @@
     return "android.allocator.ion";
 }
 
-C2Status C2AllocatorIon::newLinearAllocation(
+c2_status_t C2AllocatorIon::newLinearAllocation(
         uint32_t capacity, C2MemoryUsage usage, std::shared_ptr<C2LinearAllocation> *allocation) {
     if (allocation == nullptr) {
         return C2_BAD_VALUE;
@@ -329,14 +329,14 @@
 
     std::shared_ptr<C2AllocationIon> alloc
         = std::make_shared<C2AllocationIon>(mIonFd, capacity, align, heapMask, flags);
-    C2Status ret = alloc->status();
+    c2_status_t ret = alloc->status();
     if (ret == C2_OK) {
         *allocation = alloc;
     }
     return ret;
 }
 
-C2Status C2AllocatorIon::priorLinearAllocation(
+c2_status_t C2AllocatorIon::priorLinearAllocation(
         const C2Handle *handle, std::shared_ptr<C2LinearAllocation> *allocation) {
     *allocation = nullptr;
     if (mInit != C2_OK) {
@@ -351,7 +351,7 @@
     const C2HandleIon *h = static_cast<const C2HandleIon*>(handle);
     std::shared_ptr<C2AllocationIon> alloc
         = std::make_shared<C2AllocationIon>(mIonFd, 0 /* capacity */, h->buffer());
-    C2Status ret = alloc->status();
+    c2_status_t ret = alloc->status();
     if (ret == C2_OK) {
         *allocation = alloc;
     }
diff --git a/media/libstagefright/codec2/vndk/C2Buffer.cpp b/media/libstagefright/codec2/vndk/C2Buffer.cpp
index 02e2dd9..d9bde7a 100644
--- a/media/libstagefright/codec2/vndk/C2Buffer.cpp
+++ b/media/libstagefright/codec2/vndk/C2Buffer.cpp
@@ -129,26 +129,26 @@
     explicit Impl(const uint8_t *data)
         : mData(data), mError(C2_OK) {}
 
-    explicit Impl(C2Status error)
+    explicit Impl(c2_status_t error)
         : mData(nullptr), mError(error) {}
 
     const uint8_t *data() const {
         return mData;
     }
 
-    C2Status error() const {
+    c2_status_t error() const {
         return mError;
     }
 
 private:
     const uint8_t *mData;
-    C2Status mError;
+    c2_status_t mError;
 };
 
 C2ReadView::C2ReadView(const _C2LinearCapacityAspect *parent, const uint8_t *data)
     : _C2LinearCapacityAspect(parent), mImpl(std::make_shared<Impl>(data)) {}
 
-C2ReadView::C2ReadView(C2Status error)
+C2ReadView::C2ReadView(c2_status_t error)
     : _C2LinearCapacityAspect(0u), mImpl(std::make_shared<Impl>(error)) {}
 
 const uint8_t *C2ReadView::data() const {
@@ -167,7 +167,7 @@
     return C2ReadView(&newCapacity, data() + offset);
 }
 
-C2Status C2ReadView::error() const {
+c2_status_t C2ReadView::error() const {
     return mImpl->error();
 }
 
@@ -176,33 +176,33 @@
     explicit Impl(uint8_t *base)
         : mBase(base), mError(C2_OK) {}
 
-    explicit Impl(C2Status error)
+    explicit Impl(c2_status_t error)
         : mBase(nullptr), mError(error) {}
 
     uint8_t *base() const {
         return mBase;
     }
 
-    C2Status error() const {
+    c2_status_t error() const {
         return mError;
     }
 
 private:
     uint8_t *mBase;
-    C2Status mError;
+    c2_status_t mError;
 };
 
 C2WriteView::C2WriteView(const _C2LinearRangeAspect *parent, uint8_t *base)
     : _C2EditableLinearRange(parent), mImpl(std::make_shared<Impl>(base)) {}
 
-C2WriteView::C2WriteView(C2Status error)
+C2WriteView::C2WriteView(c2_status_t error)
     : _C2EditableLinearRange(nullptr), mImpl(std::make_shared<Impl>(error)) {}
 
 uint8_t *C2WriteView::base() { return mImpl->base(); }
 
 uint8_t *C2WriteView::data() { return mImpl->base() + offset(); }
 
-C2Status C2WriteView::error() const { return mImpl->error(); }
+c2_status_t C2WriteView::error() const { return mImpl->error(); }
 
 class C2ConstLinearBlock::Impl {
 public:
@@ -212,7 +212,7 @@
     ~Impl() {
         if (mBase != nullptr) {
             // TODO: fence
-            C2Status err = mAllocation->unmap(mBase, mSize, nullptr);
+            c2_status_t err = mAllocation->unmap(mBase, mSize, nullptr);
             if (err != C2_OK) {
                 // TODO: Log?
             }
@@ -238,13 +238,13 @@
 
     const uint8_t *base() const { return mBase; }
 
-    C2Status error() const { return mError; }
+    c2_status_t error() const { return mError; }
 
 private:
     std::shared_ptr<C2LinearAllocation> mAllocation;
     uint8_t *mBase;
     size_t mSize;
-    C2Status mError;
+    c2_status_t mError;
 };
 
 C2ConstLinearBlock::C2ConstLinearBlock(std::shared_ptr<C2LinearAllocation> alloc)
@@ -277,7 +277,7 @@
     ~Impl() {
         if (mBase != nullptr) {
             // TODO: fence
-            C2Status err = mAllocation->unmap(mBase, mSize, nullptr);
+            c2_status_t err = mAllocation->unmap(mBase, mSize, nullptr);
             if (err != C2_OK) {
                 // TODO: Log?
             }
@@ -309,7 +309,7 @@
 
     uint8_t *base() const { return mBase; }
 
-    C2Status error() const { return mError; }
+    c2_status_t error() const { return mError; }
 
     C2Fence fence() const { return mFence; }
 
@@ -317,7 +317,7 @@
     std::shared_ptr<C2LinearAllocation> mAllocation;
     uint8_t *mBase;
     size_t mSize;
-    C2Status mError;
+    c2_status_t mError;
     C2Fence mFence;
 };
 
@@ -349,14 +349,14 @@
         const std::shared_ptr<C2Allocator> &allocator)
   : mAllocator(allocator) {}
 
-C2Status C2BasicLinearBlockPool::fetchLinearBlock(
+c2_status_t C2BasicLinearBlockPool::fetchLinearBlock(
         uint32_t capacity,
         C2MemoryUsage usage,
         std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
     block->reset();
 
     std::shared_ptr<C2LinearAllocation> alloc;
-    C2Status err = mAllocator->newLinearAllocation(capacity, usage, &alloc);
+    c2_status_t err = mAllocator->newLinearAllocation(capacity, usage, &alloc);
     if (err != C2_OK) {
         return err;
     }
@@ -392,16 +392,16 @@
 public:
     Impl(uint8_t *const *data, const C2PlaneLayout &layout)
         : mData(data), mLayout(layout), mError(C2_OK) {}
-    explicit Impl(C2Status error) : mData(nullptr), mError(error) {}
+    explicit Impl(c2_status_t error) : mData(nullptr), mError(error) {}
 
     uint8_t *const *data() const { return mData; }
     const C2PlaneLayout &layout() const { return mLayout; }
-    C2Status error() const { return mError; }
+    c2_status_t error() const { return mError; }
 
 private:
     uint8_t *const *mData;
     C2PlaneLayout mLayout;
-    C2Status mError;
+    c2_status_t mError;
 };
 
 C2GraphicView::C2GraphicView(
@@ -410,7 +410,7 @@
         const C2PlaneLayout& layout)
     : _C2PlanarSection(parent), mImpl(new Impl(data, layout)) {}
 
-C2GraphicView::C2GraphicView(C2Status error)
+C2GraphicView::C2GraphicView(c2_status_t error)
     : _C2PlanarSection(nullptr), mImpl(new Impl(error)) {}
 
 const uint8_t *const *C2GraphicView::data() const {
@@ -437,7 +437,7 @@
     return view;
 }
 
-C2Status C2GraphicView::error() const {
+c2_status_t C2GraphicView::error() const {
     return mImpl->error();
 }
 
@@ -453,12 +453,12 @@
         }
     }
 
-    C2Status map(C2Rect rect) {
+    c2_status_t map(C2Rect rect) {
         if (mData[0] != nullptr) {
             // Already mapped.
             return C2_OK;
         }
-        C2Status err = mAllocation->map(
+        c2_status_t err = mAllocation->map(
                 rect,
                 { C2MemoryUsage::kSoftwareRead, 0 },
                 nullptr,
@@ -493,7 +493,7 @@
     : C2Block2D(alloc), mImpl(new Impl(alloc)), mFence(fence) {}
 
 C2Acquirable<const C2GraphicView> C2ConstGraphicBlock::map() const {
-    C2Status err = mImpl->map(crop());
+    c2_status_t err = mImpl->map(crop());
     if (err != C2_OK) {
         C2DefaultGraphicView view(err);
         return C2AcquirableConstGraphicView(err, mFence, view);
@@ -518,13 +518,13 @@
         }
     }
 
-    C2Status map(C2Rect rect) {
+    c2_status_t map(C2Rect rect) {
         if (mData[0] != nullptr) {
             // Already mapped.
             return C2_OK;
         }
         uint8_t *data[C2PlaneLayout::MAX_NUM_PLANES];
-        C2Status err = mAllocation->map(
+        c2_status_t err = mAllocation->map(
                 rect,
                 { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
                 nullptr,
@@ -560,7 +560,7 @@
     : C2Block2D(alloc), mImpl(new Impl(alloc)) {}
 
 C2Acquirable<C2GraphicView> C2GraphicBlock::map() {
-    C2Status err = mImpl->map(crop());
+    c2_status_t err = mImpl->map(crop());
     if (err != C2_OK) {
         C2DefaultGraphicView view(err);
         // TODO: fence
@@ -579,7 +579,7 @@
         const std::shared_ptr<C2Allocator> &allocator)
   : mAllocator(allocator) {}
 
-C2Status C2BasicGraphicBlockPool::fetchGraphicBlock(
+c2_status_t C2BasicGraphicBlockPool::fetchGraphicBlock(
         uint32_t width,
         uint32_t height,
         uint32_t format,
@@ -588,7 +588,7 @@
     block->reset();
 
     std::shared_ptr<C2GraphicAllocation> alloc;
-    C2Status err = mAllocator->newGraphicAllocation(width, height, format, usage, &alloc);
+    c2_status_t err = mAllocator->newGraphicAllocation(width, height, format, usage, &alloc);
     if (err != C2_OK) {
         return err;
     }
@@ -650,7 +650,7 @@
 
     const C2BufferData &data() const { return mData; }
 
-    C2Status registerOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
+    c2_status_t registerOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
         auto it = std::find_if(
                 mNotify.begin(), mNotify.end(),
                 [onDestroyNotify, arg] (const auto &pair) {
@@ -663,7 +663,7 @@
         return C2_OK;
     }
 
-    C2Status unregisterOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
+    c2_status_t unregisterOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
         auto it = std::find_if(
                 mNotify.begin(), mNotify.end(),
                 [onDestroyNotify, arg] (const auto &pair) {
@@ -684,7 +684,7 @@
         return result;
     }
 
-    C2Status setInfo(const std::shared_ptr<C2Info> &info) {
+    c2_status_t setInfo(const std::shared_ptr<C2Info> &info) {
         // To "update" you need to erase the existing one if any, and then insert.
         (void) mInfos.erase(info->type());
         (void) mInfos.insert({ info->type(), info });
@@ -720,11 +720,11 @@
 
 const C2BufferData C2Buffer::data() const { return mImpl->data(); }
 
-C2Status C2Buffer::registerOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
+c2_status_t C2Buffer::registerOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
     return mImpl->registerOnDestroyNotify(onDestroyNotify, arg);
 }
 
-C2Status C2Buffer::unregisterOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
+c2_status_t C2Buffer::unregisterOnDestroyNotify(OnDestroyNotify onDestroyNotify, void *arg) {
     return mImpl->unregisterOnDestroyNotify(onDestroyNotify, arg);
 }
 
@@ -732,7 +732,7 @@
     return mImpl->infos();
 }
 
-C2Status C2Buffer::setInfo(const std::shared_ptr<C2Info> &info) {
+c2_status_t C2Buffer::setInfo(const std::shared_ptr<C2Info> &info) {
     return mImpl->setInfo(info);
 }
 
diff --git a/media/libstagefright/codec2/vndk/C2Store.cpp b/media/libstagefright/codec2/vndk/C2Store.cpp
index 8413484..460cc60 100644
--- a/media/libstagefright/codec2/vndk/C2Store.cpp
+++ b/media/libstagefright/codec2/vndk/C2Store.cpp
@@ -16,15 +16,30 @@
 
 #include <C2AllocatorGralloc.h>
 #include <C2AllocatorIon.h>
+#include <C2BufferPriv.h>
 #include <C2Component.h>
 #include <C2PlatformSupport.h>
 
+#define LOG_TAG "C2Store"
+#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <dlfcn.h>
+
 #include <map>
 #include <memory>
 #include <mutex>
 
 namespace android {
 
+/**
+ * The platform allocator store provides basic allocator types for the framework based on ion and
+ * gralloc. Allocators are not meant to be updatable.
+ *
+ * \todo Provide allocator based on ashmem
+ * \todo Move ion allocation into its HIDL or provide some mapping from memory usage to ion flags
+ * \todo Make this allocator store extendable
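+ *
+ * Illustrative use (editor's sketch): fetching the default linear (ion-backed) allocator:
+ * \code
+ *   std::shared_ptr<C2AllocatorStore> store = GetCodec2PlatformAllocatorStore();
+ *   std::shared_ptr<C2Allocator> allocator;
+ *   c2_status_t res = store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+ * \endcode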
+ */
 class C2PlatformAllocatorStore : public C2AllocatorStore {
 public:
     enum : id_t {
@@ -36,40 +51,42 @@
         /* ionmapper */
     );
 
-    virtual C2Status getAllocator(id_t id, std::shared_ptr<C2Allocator> *const allocator);
+    virtual c2_status_t fetchAllocator(
+            id_t id, std::shared_ptr<C2Allocator> *const allocator) override;
 
-    virtual std::vector<std::shared_ptr<const C2Allocator::Info>> listAllocators() const {
-        return std::vector<std::shared_ptr<const C2Allocator::Info>>(); /// \todo
+    virtual std::vector<std::shared_ptr<const C2Allocator::Traits>> listAllocators_nb()
+            const override {
+        return std::vector<std::shared_ptr<const C2Allocator::Traits>>(); /// \todo
     }
 
-    virtual C2String getName() const {
+    virtual C2String getName() const override {
         return "android.allocator-store";
     }
 
 private:
-    // returns a shared-singleton ion allocator
-    std::shared_ptr<C2Allocator> getIonAllocator();
+    /// returns a shared-singleton ion allocator
+    std::shared_ptr<C2Allocator> fetchIonAllocator();
 
-    // returns a shared-singleton gralloc allocator
-    std::shared_ptr<C2Allocator> getGrallocAllocator();
+    /// returns a shared-singleton gralloc allocator
+    std::shared_ptr<C2Allocator> fetchGrallocAllocator();
 };
 
 C2PlatformAllocatorStore::C2PlatformAllocatorStore() {
 }
 
-C2Status C2PlatformAllocatorStore::getAllocator(
+c2_status_t C2PlatformAllocatorStore::fetchAllocator(
         id_t id, std::shared_ptr<C2Allocator> *const allocator) {
     allocator->reset();
     switch (id) {
     // TODO: should we implement a generic registry for all, and use that?
     case C2PlatformAllocatorStore::ION:
     case C2AllocatorStore::DEFAULT_LINEAR:
-        *allocator = getIonAllocator();
+        *allocator = fetchIonAllocator();
         break;
 
     case C2PlatformAllocatorStore::GRALLOC:
     case C2AllocatorStore::DEFAULT_GRAPHIC:
-        *allocator = getGrallocAllocator();
+        *allocator = fetchGrallocAllocator();
         break;
 
     default:
@@ -81,7 +98,7 @@
     return C2_OK;
 }
 
-std::shared_ptr<C2Allocator> C2PlatformAllocatorStore::getIonAllocator() {
+std::shared_ptr<C2Allocator> C2PlatformAllocatorStore::fetchIonAllocator() {
     static std::mutex mutex;
     static std::weak_ptr<C2Allocator> ionAllocator;
     std::lock_guard<std::mutex> lock(mutex);
@@ -93,7 +110,7 @@
     return allocator;
 }
 
-std::shared_ptr<C2Allocator> C2PlatformAllocatorStore::getGrallocAllocator() {
+std::shared_ptr<C2Allocator> C2PlatformAllocatorStore::fetchGrallocAllocator() {
     static std::mutex mutex;
     static std::weak_ptr<C2Allocator> grallocAllocator;
     std::lock_guard<std::mutex> lock(mutex);
@@ -109,4 +126,416 @@
     return std::make_shared<C2PlatformAllocatorStore>();
 }
 
-} // namespace android
\ No newline at end of file
+c2_status_t GetCodec2BlockPool(
+        C2BlockPool::local_id_t id, std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool) {
+    pool->reset();
+    if (!component) {
+        return C2_BAD_VALUE;
+    }
+    // TODO support pre-registered block pools
+    std::shared_ptr<C2AllocatorStore> allocatorStore = GetCodec2PlatformAllocatorStore();
+    std::shared_ptr<C2Allocator> allocator;
+    c2_status_t res = C2_NOT_FOUND;
+
+    switch (id) {
+    case C2BlockPool::BASIC_LINEAR:
+        res = allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+        if (res == C2_OK) {
+            *pool = std::make_shared<C2BasicLinearBlockPool>(allocator);
+        }
+        break;
+    case C2BlockPool::BASIC_GRAPHIC:
+        res = allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_GRAPHIC, &allocator);
+        if (res == C2_OK) {
+            *pool = std::make_shared<C2BasicGraphicBlockPool>(allocator);
+        }
+        break;
+    default:
+        break;
+    }
+    return res;
+}
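+
+// Illustrative use (editor's sketch, not part of this change): a component implementation
+// holding a std::shared_ptr<const C2Component> |comp| might obtain and use a block pool
+// roughly as follows. |kCapacity| is a placeholder, and fetchLinearBlock() is assumed to be
+// exposed on the returned pool as it is on C2BasicLinearBlockPool:
+//
+//   std::shared_ptr<C2BlockPool> pool;
+//   c2_status_t res = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, comp, &pool);
+//   if (res == C2_OK) {
+//       std::shared_ptr<C2LinearBlock> block;
+//       res = pool->fetchLinearBlock(
+//               kCapacity, { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
+//               &block);
+//   }
+//
+// BASIC_GRAPHIC works analogously through fetchGraphicBlock().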
+
+class C2PlatformComponentStore : public C2ComponentStore {
+public:
+    virtual std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() override;
+    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const override;
+    virtual C2String getName() const override;
+    virtual c2_status_t querySupportedValues_nb(
+            std::vector<C2FieldSupportedValuesQuery> &fields) const override;
+    virtual c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> *const params) const override;
+    virtual c2_status_t query_sm(
+            const std::vector<C2Param *const> &stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            std::vector<std::unique_ptr<C2Param>> *const heapParams) const override;
+    virtual c2_status_t createInterface(
+            C2String name, std::shared_ptr<C2ComponentInterface> *const interface) override;
+    virtual c2_status_t createComponent(
+            C2String name, std::shared_ptr<C2Component> *const component) override;
+    virtual c2_status_t copyBuffer(
+            std::shared_ptr<C2GraphicBuffer> src, std::shared_ptr<C2GraphicBuffer> dst) override;
+    virtual c2_status_t config_sm(
+            const std::vector<C2Param *const> &params,
+            std::vector<std::unique_ptr<C2SettingResult>> *const failures) override;
+    virtual c2_status_t commit_sm(
+            const std::vector<C2Param *const> &params,
+            std::vector<std::unique_ptr<C2SettingResult>> *const failures) override;
+
+    C2PlatformComponentStore();
+
+    virtual ~C2PlatformComponentStore() override = default;
+
+private:
+
+    /**
+     * An object encapsulating a loaded component module.
+     *
+     * \todo provide a way to add traits to known components here to avoid loading the .so-s
+     * for listComponents
+     */
+    struct ComponentModule : public C2ComponentFactory,
+            public std::enable_shared_from_this<ComponentModule> {
+        virtual c2_status_t createComponent(
+                c2_node_id_t id, std::shared_ptr<C2Component> *component,
+                ComponentDeleter deleter = std::default_delete<C2Component>()) override;
+        virtual c2_status_t createInterface(
+                c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *interface,
+                InterfaceDeleter deleter = std::default_delete<C2ComponentInterface>()) override;
+
+        /**
+         * \returns the traits of the component in this module.
+         */
+        std::shared_ptr<const C2Component::Traits> getTraits();
+
+        /**
+         * Creates an uninitialized component module.
+         *
+         * \note Only used by ComponentLoader.
+         */
+        ComponentModule()
+            : mInit(C2_NO_INIT), mLibHandle(nullptr), createFactory(nullptr),
+              destroyFactory(nullptr), mComponentFactory(nullptr) {}
+
+        /**
+         * Initializes a component module with a given library path. Must be called exactly once.
+         *
+         * \note Only used by ComponentLoader.
+         *
+         * \param libPath[in] library path (or name)
+         *
+         * \retval C2_OK        the component module has been successfully loaded
+         * \retval C2_NO_MEMORY not enough memory to load the component module
+         * \retval C2_NOT_FOUND could not locate the component module
+         * \retval C2_CORRUPTED the component module could not be loaded (unexpected)
+         * \retval C2_REFUSED   permission denied to load the component module (unexpected)
+         * \retval C2_TIMED_OUT could not load the module within the time limit (unexpected)
+         */
+        c2_status_t init(std::string libPath);
+
+        virtual ~ComponentModule() override;
+
+    protected:
+        std::recursive_mutex mLock; ///< lock protecting mTraits
+        std::shared_ptr<C2Component::Traits> mTraits; ///< cached component traits
+
+        c2_status_t mInit; ///< initialization result
+
+        void *mLibHandle; ///< loaded library handle
+        C2ComponentFactory::CreateCodec2FactoryFunc createFactory; ///< loaded create function
+        C2ComponentFactory::DestroyCodec2FactoryFunc destroyFactory; ///< loaded destroy function
+        C2ComponentFactory *mComponentFactory; ///< loaded/created component factory
+    };
+
+    /**
+     * An object encapsulating a loadable component module.
+     *
+     * \todo make this also work for enumerations
+     */
+    struct ComponentLoader {
+        /**
+         * Load the component module.
+         *
+         * This method simply returns the component module if it is already loaded, or
+         * attempts to load it if it is not.
+         *
+         * \param module[out] pointer to the shared pointer where the loaded module shall be stored.
+         *                    This will be nullptr on error.
+         *
+         * \retval C2_OK        the component module has been successfully loaded
+         * \retval C2_NO_MEMORY not enough memory to load the component module
+         * \retval C2_NOT_FOUND could not locate the component module
+         * \retval C2_CORRUPTED the component module could not be loaded
+         * \retval C2_REFUSED   permission denied to load the component module
+         */
+        c2_status_t fetchModule(std::shared_ptr<ComponentModule> *module) {
+            c2_status_t res = C2_OK;
+            std::lock_guard<std::mutex> lock(mMutex);
+            std::shared_ptr<ComponentModule> localModule = mModule.lock();
+            if (localModule == nullptr) {
+                localModule = std::make_shared<ComponentModule>();
+                res = localModule->init(mLibPath);
+                if (res == C2_OK) {
+                    mModule = localModule;
+                }
+            }
+            *module = localModule;
+            return res;
+        }
+
+        /**
+         * Creates a component loader for a specific library path (or name).
+         */
+        ComponentLoader(std::string libPath)
+            : mLibPath(libPath) {}
+
+    private:
+        std::mutex mMutex; ///< mutex guarding the module
+        std::weak_ptr<ComponentModule> mModule; ///< weak reference to the loaded module
+        std::string mLibPath; ///< library path (or name)
+    };
+
+    /**
+     * Retrieves the component loader for a component.
+     *
+     * \return a non-ref-holding pointer to the component loader.
+     *
+     * \retval C2_OK        the component loader has been successfully retrieved
+     * \retval C2_NO_MEMORY not enough memory to locate the component loader
+     * \retval C2_NOT_FOUND could not locate the component to be loaded
+     * \retval C2_CORRUPTED the component loader could not be identified due to some modules being
+     *                      corrupted (this can happen if the name does not refer to an already
+     *                      identified component but some components could not be loaded due to
+     *                      bad library)
+     * \retval C2_REFUSED   permission denied to find the component loader for the named component
+     *                      (this can happen if the name does not refer to an already identified
+     *                      component but some components could not be loaded due to lack of
+     *                      permissions)
+     */
+    c2_status_t findComponent(C2String name, ComponentLoader **loader);
+
+    std::map<C2String, ComponentLoader> mComponents; ///< list of components
+};
+
+c2_status_t C2PlatformComponentStore::ComponentModule::init(std::string libPath) {
+    ALOGV("in %s", __func__);
+    ALOGV("loading dll");
+    mLibHandle = dlopen(libPath.c_str(), RTLD_NOW|RTLD_NODELETE);
+    if (mLibHandle == nullptr) {
+        // could be access/symbol or simply not being there
+        ALOGD("could not dlopen %s: %s", libPath.c_str(), dlerror());
+        mInit = C2_CORRUPTED;
+    } else {
+        createFactory =
+            (C2ComponentFactory::CreateCodec2FactoryFunc)dlsym(mLibHandle, "CreateCodec2Factory");
+        destroyFactory =
+            (C2ComponentFactory::DestroyCodec2FactoryFunc)dlsym(mLibHandle, "DestroyCodec2Factory");
+
+        if (createFactory == nullptr || destroyFactory == nullptr) {
+            // missing entry points: fail the load instead of calling through a null pointer
+            ALOGD("could not find factory entry points in %s", libPath.c_str());
+            mInit = C2_CORRUPTED;
+        } else {
+            mComponentFactory = createFactory();
+            if (mComponentFactory == nullptr) {
+                ALOGD("could not create factory in %s", libPath.c_str());
+                mInit = C2_NO_MEMORY;
+            } else {
+                mInit = C2_OK;
+            }
+        }
+    }
+    return mInit;
+}
+
+C2PlatformComponentStore::ComponentModule::~ComponentModule() {
+    ALOGV("in %s", __func__);
+    if (destroyFactory && mComponentFactory) {
+        destroyFactory(mComponentFactory);
+    }
+    if (mLibHandle) {
+        ALOGV("unloading dll");
+        dlclose(mLibHandle);
+    }
+}
+
+c2_status_t C2PlatformComponentStore::ComponentModule::createInterface(
+        c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *interface,
+        std::function<void(::android::C2ComponentInterface*)> deleter) {
+    interface->reset();
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+    std::shared_ptr<ComponentModule> module = shared_from_this();
+    c2_status_t res = mComponentFactory->createInterface(
+            id, interface, [module, deleter](C2ComponentInterface *p) mutable {
+                // capture module so that we ensure we still have it while deleting interface
+                deleter(p); // delete interface first
+                module.reset(); // remove module ref (not technically needed)
+    });
+    return res;
+}
+
+c2_status_t C2PlatformComponentStore::ComponentModule::createComponent(
+        c2_node_id_t id, std::shared_ptr<C2Component> *component,
+        std::function<void(::android::C2Component*)> deleter) {
+    component->reset();
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+    std::shared_ptr<ComponentModule> module = shared_from_this();
+    c2_status_t res = mComponentFactory->createComponent(
+            id, component, [module, deleter](C2Component *p) mutable {
+                // capture module so that we ensure we still have it while deleting component
+                deleter(p); // delete component first
+                module.reset(); // remove module ref (not technically needed)
+    });
+    return res;
+}
+
+std::shared_ptr<const C2Component::Traits> C2PlatformComponentStore::ComponentModule::getTraits() {
+    std::unique_lock<std::recursive_mutex> lock(mLock);
+    if (!mTraits) {
+        std::shared_ptr<C2ComponentInterface> intf;
+        c2_status_t res = createInterface(0, &intf);
+        if (res != C2_OK) {
+            return nullptr;
+        }
+
+        std::shared_ptr<C2Component::Traits> traits(new (std::nothrow) C2Component::Traits);
+        if (traits) {
+            // traits->name = intf->getName();
+        }
+
+        mTraits = traits;
+    }
+    return mTraits;
+}
+
+C2PlatformComponentStore::C2PlatformComponentStore() {
+    // TODO: move this also into a .so so it can be updated
+    mComponents.emplace("c2.google.avc.decoder", "libstagefright_soft_c2avcdec.so");
+}
+
+c2_status_t C2PlatformComponentStore::copyBuffer(
+        std::shared_ptr<C2GraphicBuffer> src, std::shared_ptr<C2GraphicBuffer> dst) {
+    (void)src;
+    (void)dst;
+    return C2_OMITTED;
+}
+
+c2_status_t C2PlatformComponentStore::query_sm(
+        const std::vector<C2Param *const> &stackParams,
+        const std::vector<C2Param::Index> &heapParamIndices,
+        std::vector<std::unique_ptr<C2Param>> *const heapParams) const {
+    // there are no supported configs
+    (void)heapParams;
+    return stackParams.empty() && heapParamIndices.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+c2_status_t C2PlatformComponentStore::config_sm(
+        const std::vector<C2Param *const> &params,
+        std::vector<std::unique_ptr<C2SettingResult>> *const failures) {
+    // there are no supported configs
+    (void)failures;
+    return params.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+c2_status_t C2PlatformComponentStore::commit_sm(
+        const std::vector<C2Param *const> &params,
+        std::vector<std::unique_ptr<C2SettingResult>> *const failures) {
+    // there are no supported configs
+    (void)failures;
+    return params.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+std::vector<std::shared_ptr<const C2Component::Traits>> C2PlatformComponentStore::listComponents() {
+    // This method SHALL return within 500ms.
+    std::vector<std::shared_ptr<const C2Component::Traits>> list;
+    for (auto &it : mComponents) {
+        ComponentLoader &loader = it.second;
+        std::shared_ptr<ComponentModule> module;
+        c2_status_t res = loader.fetchModule(&module);
+        if (res == C2_OK) {
+            std::shared_ptr<const C2Component::Traits> traits = module->getTraits();
+            if (traits) {
+                list.push_back(traits);
+            }
+        }
+    }
+    return list;
+}
+
+c2_status_t C2PlatformComponentStore::findComponent(C2String name, ComponentLoader **loader) {
+    *loader = nullptr;
+    auto pos = mComponents.find(name);
+    // TODO: check aliases
+    if (pos == mComponents.end()) {
+        return C2_NOT_FOUND;
+    }
+    *loader = &pos->second;
+    return C2_OK;
+}
+
+c2_status_t C2PlatformComponentStore::createComponent(
+        C2String name, std::shared_ptr<C2Component> *const component) {
+    // This method SHALL return within 100ms.
+    component->reset();
+    ComponentLoader *loader;
+    c2_status_t res = findComponent(name, &loader);
+    if (res == C2_OK) {
+        std::shared_ptr<ComponentModule> module;
+        res = loader->fetchModule(&module);
+        if (res == C2_OK) {
+            // TODO: get a unique node ID
+            res = module->createComponent(0, component);
+        }
+    }
+    return res;
+}
+
+c2_status_t C2PlatformComponentStore::createInterface(
+        C2String name, std::shared_ptr<C2ComponentInterface> *const interface) {
+    // This method SHALL return within 100ms.
+    interface->reset();
+    ComponentLoader *loader;
+    c2_status_t res = findComponent(name, &loader);
+    if (res == C2_OK) {
+        std::shared_ptr<ComponentModule> module;
+        res = loader->fetchModule(&module);
+        if (res == C2_OK) {
+            // TODO: get a unique node ID
+            res = module->createInterface(0, interface);
+        }
+    }
+    return res;
+}
+
+c2_status_t C2PlatformComponentStore::querySupportedParams_nb(
+        std::vector<std::shared_ptr<C2ParamDescriptor>> *const params) const {
+    // there are no supported config params
+    (void)params;
+    return C2_OK;
+}
+
+c2_status_t C2PlatformComponentStore::querySupportedValues_nb(
+        std::vector<C2FieldSupportedValuesQuery> &fields) const {
+    // there are no supported config params
+    return fields.empty() ? C2_OK : C2_BAD_INDEX;
+}
+
+C2String C2PlatformComponentStore::getName() const {
+    return "android.componentStore.platform";
+}
+
+std::shared_ptr<C2ParamReflector> C2PlatformComponentStore::getParamReflector() const {
+    // TODO
+    return nullptr;
+}
+
+std::shared_ptr<C2ComponentStore> GetCodec2PlatformComponentStore() {
+    static std::mutex mutex;
+    static std::weak_ptr<C2ComponentStore> platformStore;
+    std::lock_guard<std::mutex> lock(mutex);
+    std::shared_ptr<C2ComponentStore> store = platformStore.lock();
+    if (store == nullptr) {
+        store = std::make_shared<C2PlatformComponentStore>();
+        platformStore = store;
+    }
+    return store;
+}
+
+} // namespace android
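
A minimal usage sketch for the platform component store added above; the component name "c2.google.avc.decoder" is the one the codec2.cpp test later in this change asks for, and the logging/error handling is only illustrative:

    #include <C2PlatformSupport.h>

    using namespace android;

    void useComponentStore() {
        // The store is a process-wide singleton kept alive through a weak_ptr.
        std::shared_ptr<C2ComponentStore> store = GetCodec2PlatformComponentStore();

        // listComponents() returns the traits of every module that could be loaded.
        std::vector<std::shared_ptr<const C2Component::Traits>> traits = store->listComponents();
        ALOGV("store \"%s\" reports %zu components", store->getName().c_str(), traits.size());

        // createComponent() resets the out-pointer and fills it only on success.
        std::shared_ptr<C2Component> component;
        c2_status_t err = store->createComponent("c2.google.avc.decoder", &component);
        if (err != C2_OK) {
            ALOGE("createComponent failed: %d", err);
        }
    }
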
diff --git a/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
index 78d41c4..374b0ed 100644
--- a/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
@@ -34,23 +34,23 @@
 
     virtual C2String getName() const override;
 
-    virtual std::shared_ptr<const Info> getInfo() const override {
+    virtual std::shared_ptr<const Traits> getTraits() const override {
         return nullptr; // \todo
     }
 
-    virtual C2Status newGraphicAllocation(
+    virtual c2_status_t newGraphicAllocation(
             uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
             std::shared_ptr<C2GraphicAllocation> *allocation) override;
 
-    virtual C2Status priorGraphicAllocation(
+    virtual c2_status_t priorGraphicAllocation(
             const C2Handle *handle,
             std::shared_ptr<C2GraphicAllocation> *allocation) override;
 
     C2AllocatorGralloc();
 
-    C2Status status() const;
+    c2_status_t status() const;
 
-    virtual ~C2AllocatorGralloc();
+    virtual ~C2AllocatorGralloc() override;
 
 private:
     class Impl;
diff --git a/media/libstagefright/codec2/vndk/include/C2AllocatorIon.h b/media/libstagefright/codec2/vndk/include/C2AllocatorIon.h
index 2fff91f..bb815f9 100644
--- a/media/libstagefright/codec2/vndk/include/C2AllocatorIon.h
+++ b/media/libstagefright/codec2/vndk/include/C2AllocatorIon.h
@@ -33,26 +33,26 @@
 
     virtual C2String getName() const override;
 
-    virtual std::shared_ptr<const Info> getInfo() const override {
+    virtual std::shared_ptr<const Traits> getTraits() const override {
         return nullptr; // \todo
     }
 
-    virtual C2Status newLinearAllocation(
+    virtual c2_status_t newLinearAllocation(
             uint32_t capacity, C2MemoryUsage usage,
             std::shared_ptr<C2LinearAllocation> *allocation) override;
 
-    virtual C2Status priorLinearAllocation(
+    virtual c2_status_t priorLinearAllocation(
             const C2Handle *handle,
             std::shared_ptr<C2LinearAllocation> *allocation) override;
 
     C2AllocatorIon();
 
-    C2Status status() const { return mInit; }
+    virtual c2_status_t status() const { return mInit; }
 
-    virtual ~C2AllocatorIon();
+    virtual ~C2AllocatorIon() override;
 
 private:
-    C2Status mInit;
+    c2_status_t mInit;
     int mIonFd;
     usage_mapper_fn mUsageMapper;
 };
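
For reference, a hedged sketch of how a client might exercise the renamed allocator entry points (C2Status to c2_status_t) above; it assumes the base C2Allocator interface exposes newLinearAllocation as overridden here, and uses the fetchAllocator() call shown in the codec2.cpp change below:

    std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
    std::shared_ptr<C2Allocator> allocator;
    // DEFAULT_LINEAR maps to the ion-backed allocator on the platform store.
    if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
        C2MemoryUsage usage = { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite };
        std::shared_ptr<C2LinearAllocation> allocation;
        c2_status_t err = allocator->newLinearAllocation(4096 /* capacity */, usage, &allocation);
        if (err != C2_OK) {
            ALOGE("newLinearAllocation failed: %d", err);
        }
    }
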
diff --git a/media/libstagefright/codec2/vndk/include/C2BufferPriv.h b/media/libstagefright/codec2/vndk/include/C2BufferPriv.h
index 56b0123..875a8c2 100644
--- a/media/libstagefright/codec2/vndk/include/C2BufferPriv.h
+++ b/media/libstagefright/codec2/vndk/include/C2BufferPriv.h
@@ -27,7 +27,7 @@
 public:
     explicit C2BasicLinearBlockPool(const std::shared_ptr<C2Allocator> &allocator);
 
-    virtual ~C2BasicLinearBlockPool() = default;
+    virtual ~C2BasicLinearBlockPool() override = default;
 
     virtual C2Allocator::id_t getAllocatorId() const override {
         return mAllocator->getId();
@@ -37,7 +37,7 @@
         return BASIC_LINEAR;
     }
 
-    virtual C2Status fetchLinearBlock(
+    virtual c2_status_t fetchLinearBlock(
             uint32_t capacity,
             C2MemoryUsage usage,
             std::shared_ptr<C2LinearBlock> *block /* nonnull */) override;
@@ -52,7 +52,7 @@
 public:
     explicit C2BasicGraphicBlockPool(const std::shared_ptr<C2Allocator> &allocator);
 
-    virtual ~C2BasicGraphicBlockPool() = default;
+    virtual ~C2BasicGraphicBlockPool() override = default;
 
     virtual C2Allocator::id_t getAllocatorId() const override {
         return mAllocator->getId();
@@ -62,7 +62,7 @@
         return BASIC_GRAPHIC;
     }
 
-    virtual C2Status fetchGraphicBlock(
+    virtual c2_status_t fetchGraphicBlock(
             uint32_t width,
             uint32_t height,
             uint32_t format,
diff --git a/media/libstagefright/codec2/vndk/include/C2ErrnoUtils.h b/media/libstagefright/codec2/vndk/include/C2ErrnoUtils.h
index aea3a6f..41132b9 100644
--- a/media/libstagefright/codec2/vndk/include/C2ErrnoUtils.h
+++ b/media/libstagefright/codec2/vndk/include/C2ErrnoUtils.h
@@ -23,17 +23,17 @@
 namespace android {
 
 // standard ERRNO mappings
-template<int N> constexpr C2Status _c2_errno2status_impl();
-template<> constexpr C2Status _c2_errno2status_impl<0>()       { return C2_OK; }
-template<> constexpr C2Status _c2_errno2status_impl<EINVAL>()  { return C2_BAD_VALUE; }
-template<> constexpr C2Status _c2_errno2status_impl<EACCES>()  { return C2_REFUSED; }
-template<> constexpr C2Status _c2_errno2status_impl<EPERM>()   { return C2_REFUSED; }
-template<> constexpr C2Status _c2_errno2status_impl<ENOMEM>()  { return C2_NO_MEMORY; }
+template<int N> constexpr c2_status_t _c2_errno2status_impl();
+template<> constexpr c2_status_t _c2_errno2status_impl<0>()       { return C2_OK; }
+template<> constexpr c2_status_t _c2_errno2status_impl<EINVAL>()  { return C2_BAD_VALUE; }
+template<> constexpr c2_status_t _c2_errno2status_impl<EACCES>()  { return C2_REFUSED; }
+template<> constexpr c2_status_t _c2_errno2status_impl<EPERM>()   { return C2_REFUSED; }
+template<> constexpr c2_status_t _c2_errno2status_impl<ENOMEM>()  { return C2_NO_MEMORY; }
 
-// map standard errno-s to the equivalent C2Status
+// map standard errno-s to the equivalent c2_status_t
 template<int... N> struct _c2_map_errno_impl;
 template<int E, int ... N> struct _c2_map_errno_impl<E, N...> {
-    static C2Status map(int result) {
+    static c2_status_t map(int result) {
         if (result == E) {
             return _c2_errno2status_impl <E>();
         } else {
@@ -42,13 +42,13 @@
     }
 };
 template<> struct _c2_map_errno_impl<> {
-    static C2Status map(int result) {
+    static c2_status_t map(int result) {
         return result == 0 ? C2_OK : C2_CORRUPTED;
     }
 };
 
 template<int... N>
-C2Status c2_map_errno(int result) {
+c2_status_t c2_map_errno(int result) {
     return _c2_map_errno_impl<N...>::map(result);
 }
 
diff --git a/media/libstagefright/codec2/vndk/include/C2PlatformSupport.h b/media/libstagefright/codec2/vndk/include/C2PlatformSupport.h
index 9402050..2281dab 100644
--- a/media/libstagefright/codec2/vndk/include/C2PlatformSupport.h
+++ b/media/libstagefright/codec2/vndk/include/C2PlatformSupport.h
@@ -19,15 +19,108 @@
 
 #include <C2Component.h>
 
+#include <functional>
 #include <memory>
 
 namespace android {
 
 /**
  * Returns the platform allocator store.
+ * \retval nullptr if the platform allocator store could not be obtained
  */
 std::shared_ptr<C2AllocatorStore> GetCodec2PlatformAllocatorStore();
 
+/**
+ * Retrieves a block pool for a component.
+ *
+ * \param id        the local ID of the block pool
+ * \param component the component using the block pool (must be non-null)
+ * \param pool      pointer to where the obtained block pool shall be stored on success. nullptr
+ *                  will be stored here on failure
+ *
+ * \retval C2_OK        the operation was successful
+ * \retval C2_BAD_VALUE the component is null
+ * \retval C2_NOT_FOUND if the block pool does not exist
+ * \retval C2_NO_MEMORY not enough memory to fetch the block pool (this return value is only
+ *                      possible for basic pools)
+ * \retval C2_TIMED_OUT the operation timed out (this return value is only possible for basic pools)
+ * \retval C2_REFUSED   no permission to complete any required allocation (this return value is only
+ *                      possible for basic pools)
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation (unexpected,
+ *                      this return value is only possible for basic pools)
+ */
+c2_status_t GetCodec2BlockPool(
+        C2BlockPool::local_id_t id, std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool);
+
+/**
+ * Component factory object that creates a component and/or interface from a dynamically
+ * linked library. This is needed because components/interfaces are managed objects, but we
+ * cannot safely create a managed object and pass it across a C interface.
+ *
+ * Components/interfaces typically inherit from std::enable_shared_from_this, but C requires
+ * passing a plain pointer, and the shared_ptr constructor needs to know that the class being
+ * constructed derives from enable_shared_from_this.
+ *
+ */
+class C2ComponentFactory {
+public:
+    typedef std::function<void(::android::C2Component*)> ComponentDeleter;
+    typedef std::function<void(::android::C2ComponentInterface*)> InterfaceDeleter;
+
+    /**
+     * Creates a component.
+     *
+     * This method SHALL return within 100ms.
+     *
+     * \param id        component ID for the created component
+     * \param component shared pointer where the created component is stored. Cleared on
+     *                  failure and updated on success.
+     *
+     * \retval C2_OK        the component was created successfully
+     * \retval C2_TIMED_OUT could not create the component within the time limit (unexpected)
+     * \retval C2_CORRUPTED some unknown error prevented the creation of the component (unexpected)
+     *
+     * \retval C2_NO_MEMORY not enough memory to create the component
+     */
+    virtual c2_status_t createComponent(
+            c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+            ComponentDeleter deleter = std::default_delete<C2Component>()) = 0;
+
+    /**
+     * Creates a component interface.
+     *
+     * This method SHALL return within 100ms.
+     *
+     * \param id        component interface ID for the created interface
+     * \param interface shared pointer where the created interface is stored. Cleared on
+     *                  failure and updated on success.
+     *
+     * \retval C2_OK        the component interface was created successfully
+     * \retval C2_TIMED_OUT could not create the component interface within the time limit
+     *                      (unexpected)
+     * \retval C2_CORRUPTED some unknown error prevented the creation of the component interface
+     *                      (unexpected)
+     *
+     * \retval C2_NO_MEMORY not enough memory to create the component interface
+     */
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            InterfaceDeleter deleter = std::default_delete<C2ComponentInterface>()) = 0;
+
+    virtual ~C2ComponentFactory() = default;
+
+    typedef ::android::C2ComponentFactory* (*CreateCodec2FactoryFunc)(void);
+    typedef void (*DestroyCodec2FactoryFunc)(::android::C2ComponentFactory*);
+};
+
+/**
+ * Returns the platform component store.
+ * \retval nullptr if the platform component store could not be obtained
+ */
+std::shared_ptr<C2ComponentStore> GetCodec2PlatformComponentStore();
+
+
 } // namespace android
 
 #endif // STAGEFRIGHT_CODEC2_PLATFORM_SUPPORT_H_
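
A hedged usage sketch for the GetCodec2BlockPool() helper declared above; `component` stands in for a component obtained from the platform component store, and the BASIC_GRAPHIC pool id matches the one C2SoftAvcDec falls back to below:

    std::shared_ptr<C2BlockPool> pool;
    c2_status_t err = android::GetCodec2BlockPool(
            C2BlockPool::BASIC_GRAPHIC, component, &pool);
    if (err == C2_OK) {
        // The pool wraps one of the platform allocators; its id can be queried.
        ALOGV("block pool uses allocator %u", pool->getAllocatorId());
    } else {
        // nullptr is stored in `pool` on failure, per the contract above.
        ALOGE("GetCodec2BlockPool failed: %d", err);
    }
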
diff --git a/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h b/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h
index 5f09889..5ce6071 100644
--- a/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h
+++ b/media/libstagefright/codec2/vndk/include/util/C2ParamUtils.h
@@ -222,7 +222,7 @@
     friend class C2ParamTest_ParamUtilsTest_Test;
 
 public:
-    static std::vector<C2String> getEnumValuesFromString(C2StringLiteral value) {
+    static std::vector<C2String> parseEnumValuesFromString(C2StringLiteral value) {
         std::vector<C2String> foundNames;
         size_t pos = 0, len = strlen(value);
         do {
diff --git a/media/libstagefright/codecs/avcdec/Android.bp b/media/libstagefright/codecs/avcdec/Android.bp
index 3b2602d..5dcffd5 100644
--- a/media/libstagefright/codecs/avcdec/Android.bp
+++ b/media/libstagefright/codecs/avcdec/Android.bp
@@ -74,9 +74,9 @@
         misc_undefined: [
             "signed-integer-overflow",
         ],
-        cfi: true,
+        cfi: false, // true,
         diag: {
-            cfi: true,
+            cfi: false, // true,
         },
     },
 
diff --git a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
index 7864f07..2423629 100644
--- a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
+++ b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
@@ -27,6 +27,8 @@
 #include "ih264d.h"
 #include "C2SoftAvcDec.h"
 
+#include <C2PlatformSupport.h>
+
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <utils/misc.h>
@@ -244,7 +246,7 @@
 //     { OMX_VIDEO_AVCProfileMain,     OMX_VIDEO_AVCLevel52 },
 //     { OMX_VIDEO_AVCProfileHigh,     OMX_VIDEO_AVCLevel52 },
 // };
-C2SoftAvcDecIntf::C2SoftAvcDecIntf(const char *name, node_id id)
+C2SoftAvcDecIntf::C2SoftAvcDecIntf(const char *name, c2_node_id_t id)
     : mName(name),
       mId(id),
       mDomainInfo(C2DomainVideo),
@@ -259,7 +261,7 @@
       mFrameRate(0u, 0),
       mBlocksPerSecond(0u, 0),
       mParamReflector(new ParamReflector) {
-
+    ALOGV("in %s", __func__);
     mInputPortMime = C2PortMimeConfig::input::alloc_unique(strlen(CODEC_MIME_TYPE) + 1);
     strcpy(mInputPortMime->m.mValue, CODEC_MIME_TYPE);
     mOutputPortMime = C2PortMimeConfig::output::alloc_unique(strlen(MEDIA_MIMETYPE_VIDEO_RAW) + 1);
@@ -275,6 +277,8 @@
     mMaxVideoSizeHint.mWidth = H264_MAX_FRAME_WIDTH;
     mMaxVideoSizeHint.mHeight = H264_MAX_FRAME_HEIGHT;
 
+    mOutputBlockPools = C2PortBlockPoolsTuning::output::alloc_unique({});
+
     auto insertParam = [&params = mParams] (C2Param *param) {
         params[restoreIndex(param)] = param;
     };
@@ -422,17 +426,23 @@
             false, "_video_size", &mVideoSize));
     mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
             false, "_max_video_size_hint", &mMaxVideoSizeHint));
+    mParamDescs.push_back(std::make_shared<C2ParamDescriptor>(
+            false, "_output_block_pools", mOutputBlockPools.get()));
+}
+
+C2SoftAvcDecIntf::~C2SoftAvcDecIntf() {
+    ALOGV("in %s", __func__);
 }
 
 C2String C2SoftAvcDecIntf::getName() const {
     return mName;
 }
 
-node_id C2SoftAvcDecIntf::getId() const {
+c2_node_id_t C2SoftAvcDecIntf::getId() const {
     return mId;
 }
 
-C2Status C2SoftAvcDecIntf::query_nb(
+c2_status_t C2SoftAvcDecIntf::query_nb(
         const std::vector<C2Param* const> & stackParams,
         const std::vector<C2Param::Index> & heapParamIndices,
         std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
@@ -443,6 +453,8 @@
 
         uint32_t index = restoreIndex(param);
         if (!mParams.count(index)) {
+            // TODO: add support for output-block-pools (this will be done when we move all
+            // config to shared ptr)
             continue;
         }
 
@@ -465,12 +477,19 @@
     return C2_OK;
 }
 
-C2Status C2SoftAvcDecIntf::config_nb(
+c2_status_t C2SoftAvcDecIntf::config_nb(
         const std::vector<C2Param* const> &params,
         std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
-    C2Status err = C2_OK;
+    c2_status_t err = C2_OK;
     for (C2Param *param : params) {
         uint32_t index = restoreIndex(param);
+        if (param->index() == mOutputBlockPools.get()->index()) {
+            // setting output block pools
+            mOutputBlockPools.reset(
+                    (C2PortBlockPoolsTuning::output *)C2Param::Copy(*param).release());
+            continue;
+        }
+
         if (mParams.count(index) == 0) {
             // We can't create C2SettingResult with no field, so just skipping in this case.
             err = C2_BAD_INDEX;
@@ -489,20 +508,20 @@
     return err;
 }
 
-C2Status C2SoftAvcDecIntf::commit_sm(
+c2_status_t C2SoftAvcDecIntf::commit_sm(
         const std::vector<C2Param* const> &params,
         std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
     // TODO
     return config_nb(params, failures);
 }
 
-C2Status C2SoftAvcDecIntf::createTunnel_sm(node_id targetComponent) {
+c2_status_t C2SoftAvcDecIntf::createTunnel_sm(c2_node_id_t targetComponent) {
     // Tunneling is not supported
     (void) targetComponent;
     return C2_OMITTED;
 }
 
-C2Status C2SoftAvcDecIntf::releaseTunnel_sm(node_id targetComponent) {
+c2_status_t C2SoftAvcDecIntf::releaseTunnel_sm(c2_node_id_t targetComponent) {
     // Tunneling is not supported
     (void) targetComponent;
     return C2_OMITTED;
@@ -512,15 +531,15 @@
     return mParamReflector;
 }
 
-C2Status C2SoftAvcDecIntf::getSupportedParams(
+c2_status_t C2SoftAvcDecIntf::querySupportedParams_nb(
         std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const {
     params->insert(params->begin(), mParamDescs.begin(), mParamDescs.end());
     return C2_OK;
 }
 
-C2Status C2SoftAvcDecIntf::getSupportedValues(
+c2_status_t C2SoftAvcDecIntf::querySupportedValues_nb(
         std::vector<C2FieldSupportedValuesQuery> &fields) const {
-    C2Status res = C2_OK;
+    c2_status_t res = C2_OK;
     for (C2FieldSupportedValuesQuery &query : fields) {
         if (mSupportedValues.count(query.field) == 0) {
             query.status = C2_BAD_INDEX;
@@ -627,10 +646,8 @@
 
 C2SoftAvcDec::C2SoftAvcDec(
         const char *name,
-        node_id id,
-        const std::shared_ptr<C2ComponentListener> &listener)
+        c2_node_id_t id)
     : mIntf(std::make_shared<C2SoftAvcDecIntf>(name, id)),
-      mListener(listener),
       mThread(new QueueProcessThread),
       mCodecCtx(NULL),
       mFlushOutBuffer(NULL),
@@ -640,6 +657,7 @@
       mWidth(320),
       mHeight(240),
       mInputOffset(0) {
+    ALOGV("in %s", __func__);
     GETTIME(&mTimeStart, NULL);
 
     // If input dump is enabled, then open create an empty file
@@ -648,10 +666,26 @@
 }
 
 C2SoftAvcDec::~C2SoftAvcDec() {
+    ALOGV("in %s", __func__);
     CHECK_EQ(deInitDecoder(), (status_t)OK);
 }
 
-C2Status C2SoftAvcDec::queue_nb(
+c2_status_t C2SoftAvcDec::setListener_sm(
+        const std::shared_ptr<C2Component::Listener> &listener) {
+    std::unique_lock<std::mutex> lock(mListenerLock);
+    // TODO: we really need to lock the running check as well
+    if (listener && mThread->isRunning()) {
+        return C2_BAD_STATE;
+    }
+    mListener = listener;
+    if (mActiveListener && !listener) {
+        // wait until no active listeners are in use
+        mActiveListenerChanged.wait(lock, [this]{ return !mActiveListener; });
+    }
+    return C2_OK;
+}
+
+c2_status_t C2SoftAvcDec::queue_nb(
         std::list<std::unique_ptr<C2Work>>* const items) {
     if (!mThread->isRunning()) {
         return C2_CORRUPTED;
@@ -666,16 +700,16 @@
     return C2_OK;
 }
 
-C2Status C2SoftAvcDec::announce_nb(const std::vector<C2WorkOutline> &items) {
+c2_status_t C2SoftAvcDec::announce_nb(const std::vector<C2WorkOutline> &items) {
     // Tunneling is not supported
     (void) items;
     return C2_OMITTED;
 }
 
-C2Status C2SoftAvcDec::flush_sm(
-        bool flushThrough, std::list<std::unique_ptr<C2Work>>* const flushedWork) {
+c2_status_t C2SoftAvcDec::flush_sm(
+        flush_mode_t mode, std::list<std::unique_ptr<C2Work>>* const flushedWork) {
     // Tunneling is not supported
-    (void) flushThrough;
+    (void) mode;
 
     if (!mThread->isRunning()) {
         return C2_CORRUPTED;
@@ -698,9 +732,9 @@
     return C2_OK;
 }
 
-C2Status C2SoftAvcDec::drain_nb(bool drainThrough) {
+c2_status_t C2SoftAvcDec::drain_nb(drain_mode_t mode) {
     // Tunneling is not supported
-    (void) drainThrough;
+    (void) mode;
 
     if (!mThread->isRunning()) {
         return C2_CORRUPTED;
@@ -708,20 +742,20 @@
     std::unique_lock<std::mutex> lock(mQueueLock);
     if (!mQueue.empty()) {
         C2BufferPack &lastInput = mQueue.back()->input;
-        lastInput.flags = (flags_t)(lastInput.flags | BUFFERFLAG_END_OF_STREAM);
+        lastInput.flags = (C2BufferPack::flags_t)(lastInput.flags | C2BufferPack::FLAG_END_OF_STREAM);
         mQueueCond.notify_all();
     }
     return C2_OK;
 }
 
-C2Status C2SoftAvcDec::start() {
+c2_status_t C2SoftAvcDec::start() {
     if (!mThread->isRunning()) {
         mThread->start(shared_from_this());
     }
     return C2_OK;
 }
 
-C2Status C2SoftAvcDec::stop() {
+c2_status_t C2SoftAvcDec::stop() {
     ALOGV("stop");
     std::chrono::system_clock::time_point now = std::chrono::system_clock::now();
     std::chrono::system_clock::time_point deadline = now + std::chrono::milliseconds(500);
@@ -762,6 +796,7 @@
 }
 
 void C2SoftAvcDec::processQueue() {
+#if 0
     if (mIsInFlush) {
         setFlushMode();
 
@@ -797,9 +832,10 @@
         }
         mIsInFlush = false;
     }
+#endif
 
     std::unique_ptr<C2Work> work;
-    {
+    if (!mIsInFlush) {
         std::unique_lock<std::mutex> lock(mQueueLock);
         if (mQueue.empty()) {
             mQueueCond.wait(lock);
@@ -816,7 +852,7 @@
     process(work);
 
     std::vector<std::unique_ptr<C2Work>> done;
-    {
+    if (work) {
         std::unique_lock<std::mutex> lock(mPendingLock);
         uint32_t index = work->input.ordinal.frame_index;
         mPendingWork[index].swap(work);
@@ -828,18 +864,27 @@
     }
 
     if (!done.empty()) {
-        mListener->onWorkDone(shared_from_this(), std::move(done));
+        std::unique_lock<std::mutex> lock(mListenerLock);
+        mActiveListener = mListener;
+
+        if (mActiveListener) {
+            lock.unlock();
+            mActiveListener->onWorkDone_nb(shared_from_this(), std::move(done));
+            lock.lock();
+            mActiveListener.reset();
+            mActiveListenerChanged.notify_all();
+        }
     }
 }
 
 
 static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
-    UNUSED(ctxt);
+    (void) ctxt;
     return memalign(alignment, size);
 }
 
 static void ivd_aligned_free(void *ctxt, void *buf) {
-    UNUSED(ctxt);
+    (void) ctxt;
     free(buf);
     return;
 }
@@ -964,6 +1009,7 @@
 }
 
 status_t C2SoftAvcDec::setFlushMode() {
+    ALOGV("setFlushMode");
     IV_API_CALL_STATUS_T status;
     ivd_ctl_flush_ip_t s_video_flush_ip;
     ivd_ctl_flush_op_t s_video_flush_op;
@@ -982,7 +1028,7 @@
                 s_video_flush_op.u4_error_code);
         return UNKNOWN_ERROR;
     }
-
+    mIsInFlush = true;
     return OK;
 }
 
@@ -1042,7 +1088,6 @@
 }
 
 status_t C2SoftAvcDec::deInitDecoder() {
-    size_t i;
     IV_API_CALL_STATUS_T status;
 
     if (mCodecCtx) {
@@ -1169,7 +1214,6 @@
     if (mSignalledError) {
         return;
     }
-
     if (NULL == mCodecCtx) {
         if (OK != initDecoder()) {
             ALOGE("Failed to initialize decoder");
@@ -1184,54 +1228,78 @@
         setParams(mStride);
     }
 
-    const C2ConstLinearBlock &buffer =
-            work->input.buffers[0]->data().linearBlocks().front();
-    if (buffer.capacity() == 0) {
-        // TODO: result?
+    uint32_t workIndex = 0;
+    std::unique_ptr<C2ReadView> input;
+    if (work) {
+        work->result = C2_OK;
 
-        std::vector<std::unique_ptr<C2Work>> done;
-        done.emplace_back(std::move(work));
-        mListener->onWorkDone(shared_from_this(), std::move(done));
-        if (!(work->input.flags & BUFFERFLAG_END_OF_STREAM)) {
-            return;
+        const C2ConstLinearBlock &buffer =
+                work->input.buffers[0]->data().linearBlocks().front();
+        if (buffer.capacity() == 0) {
+            // TODO: result?
+
+            std::vector<std::unique_ptr<C2Work>> done;
+            done.emplace_back(std::move(work));
+            mListener->onWorkDone_nb(shared_from_this(), std::move(done));
+            if (!(work->input.flags & C2BufferPack::FLAG_END_OF_STREAM)) {
+                return;
+            }
+
+            mReceivedEOS = true;
+            // TODO: flush
+        } else if (work->input.flags & C2BufferPack::FLAG_END_OF_STREAM) {
+            ALOGV("input EOS: %llu", work->input.ordinal.frame_index);
+            mReceivedEOS = true;
         }
 
-        mReceivedEOS = true;
-        // TODO: flush
-    } else if (work->input.flags & BUFFERFLAG_END_OF_STREAM) {
-        mReceivedEOS = true;
+        input.reset(new C2ReadView(work->input.buffers[0]->data().linearBlocks().front().map().get()));
+        workIndex = work->input.ordinal.frame_index & 0xFFFFFFFF;
     }
 
-    C2ReadView input = work->input.buffers[0]->data().linearBlocks().front().map().get();
-    uint32_t workIndex = work->input.ordinal.frame_index & 0xFFFFFFFF;
-
-    // TODO: populate --- assume display order?
-    if (!mAllocatedBlock) {
-        // TODO: error handling
-        // TODO: format & usage
-        uint32_t format = HAL_PIXEL_FORMAT_YV12;
-        C2MemoryUsage usage = { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite };
-        (void) work->worklets.front()->allocators[0]->fetchGraphicBlock(
-                mWidth, mHeight, format, usage, &mAllocatedBlock);
-        ALOGE("provided (%dx%d) required (%dx%d)", mAllocatedBlock->width(), mAllocatedBlock->height(), mWidth, mHeight);
-    }
-    C2GraphicView output = mAllocatedBlock->map().get();
-    ALOGE("mapped err = %d", output.error());
-
     size_t inOffset = 0u;
-    while (inOffset < input.capacity()) {
+    while (!input || inOffset < input->capacity()) {
+        if (!input) {
+            ALOGV("flushing");
+        }
+        // TODO: populate --- assume display order?
+        if (!mAllocatedBlock) {
+            // TODO: error handling
+            // TODO: format & usage
+            uint32_t format = HAL_PIXEL_FORMAT_YV12;
+            C2MemoryUsage usage = { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite };
+            // TODO: lock access to interface
+            C2BlockPool::local_id_t poolId =
+                mIntf->mOutputBlockPools->flexCount() ?
+                        mIntf->mOutputBlockPools->m.mValues[0] : C2BlockPool::BASIC_GRAPHIC;
+            if (!mOutputBlockPool || mOutputBlockPool->getLocalId() != poolId) {
+                c2_status_t err = GetCodec2BlockPool(poolId, shared_from_this(), &mOutputBlockPool);
+                if (err != C2_OK) {
+                    // TODO: trip
+                }
+            }
+            ALOGE("using allocator %u", mOutputBlockPool->getAllocatorId());
+
+            (void)mOutputBlockPool->fetchGraphicBlock(
+                    mWidth, mHeight, format, usage, &mAllocatedBlock);
+            ALOGE("provided (%dx%d) required (%dx%d)", mAllocatedBlock->width(), mAllocatedBlock->height(), mWidth, mHeight);
+        }
+        C2GraphicView output = mAllocatedBlock->map().get();
+        if (output.error() != OK) {
+            ALOGE("mapped err = %d", output.error());
+        }
+
         ivd_video_decode_ip_t s_dec_ip;
         ivd_video_decode_op_t s_dec_op;
         WORD32 timeDelay, timeTaken;
         size_t sizeY, sizeUV;
 
-        if (!setDecodeArgs(&s_dec_ip, &s_dec_op, &input, &output, workIndex, inOffset)) {
+        if (!setDecodeArgs(&s_dec_ip, &s_dec_op, input.get(), &output, workIndex, inOffset)) {
             ALOGE("Decoder arg setup failed");
             // TODO: notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
             mSignalledError = true;
             return;
         }
-        ALOGE("Decoder arg setup succeeded");
+        ALOGV("Decoder arg setup succeeded");
         // If input dump is enabled, then write to file
         DUMP_TO_FILE(mInFile, s_dec_ip.pv_stream_buffer, s_dec_ip.u4_num_Bytes, mInputOffset);
 
@@ -1272,15 +1340,24 @@
 
         PRINT_TIME("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
                s_dec_op.u4_num_bytes_consumed);
-        ALOGI("bytes total=%u", input.capacity());
+        if (input) {
+            ALOGI("bytes total=%u", input->capacity());
+        }
         if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
             mFlushNeeded = true;
         }
 
-        if (1 != s_dec_op.u4_frame_decoded_flag) {
-            /* If the input did not contain picture data, then ignore
-             * the associated timestamp */
-            //mTimeStampsValid[workIndex] = false;
+        if (1 != s_dec_op.u4_frame_decoded_flag && work) {
+            /* If the input did not contain picture data, return work without
+             * buffer */
+            ALOGV("no picture data");
+            std::vector<std::unique_ptr<C2Work>> done;
+            done.push_back(std::move(work));
+            done[0]->worklets.front()->output.flags = (C2BufferPack::flags_t)0;
+            done[0]->worklets.front()->output.buffers.clear();
+            done[0]->worklets.front()->output.buffers.emplace_back(nullptr);
+            done[0]->worklets.front()->output.ordinal = done[0]->input.ordinal;
+            mListener->onWorkDone_nb(shared_from_this(), std::move(done));
         }
 
         // If the decoder is in the changing resolution mode and there is no output present,
@@ -1324,15 +1401,24 @@
         }
 
         if (s_dec_op.u4_output_present) {
-            ALOGV("output_present");
-            // TODO: outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
+            ALOGV("output_present: %d", s_dec_op.u4_ts);
             std::vector<std::unique_ptr<C2Work>> done;
-            done.push_back(std::move(mPendingWork[s_dec_op.u4_ts]));
+            {
+                std::unique_lock<std::mutex> lock(mPendingLock);
+                done.push_back(std::move(mPendingWork[s_dec_op.u4_ts]));
+                mPendingWork.erase(s_dec_op.u4_ts);
+            }
+            uint32_t flags = 0;
+            if (done[0]->input.flags & C2BufferPack::FLAG_END_OF_STREAM) {
+                flags |= C2BufferPack::FLAG_END_OF_STREAM;
+                ALOGV("EOS");
+            }
+            done[0]->worklets.front()->output.flags = (C2BufferPack::flags_t)flags;
             done[0]->worklets.front()->output.buffers.clear();
             done[0]->worklets.front()->output.buffers.emplace_back(
                     std::make_shared<GraphicBuffer>(std::move(mAllocatedBlock)));
             done[0]->worklets.front()->output.ordinal = done[0]->input.ordinal;
-            mListener->onWorkDone(shared_from_this(), std::move(done));
+            mListener->onWorkDone_nb(shared_from_this(), std::move(done));
         } else if (mIsInFlush) {
             ALOGV("flush");
             /* If in flush mode and no output is returned by the codec,
@@ -1342,16 +1428,25 @@
             /* If EOS was recieved on input port and there is no output
              * from the codec, then signal EOS on output port */
             if (mReceivedEOS) {
-                // TODO
-                // outHeader->nFilledLen = 0;
-                // outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+                std::vector<std::unique_ptr<C2Work>> done;
+                {
+                    std::unique_lock<std::mutex> lock(mPendingLock);
+                    if (!mPendingWork.empty()) {
+                        done.push_back(std::move(mPendingWork.begin()->second));
+                        mPendingWork.erase(mPendingWork.begin());
+                    }
+                }
+                if (!done.empty()) {
+                    ALOGV("sending empty EOS buffer");
+                    done[0]->worklets.front()->output.flags = C2BufferPack::FLAG_END_OF_STREAM;
+                    done[0]->worklets.front()->output.buffers.clear();
+                    done[0]->worklets.front()->output.buffers.emplace_back(nullptr);
+                    done[0]->worklets.front()->output.ordinal = done[0]->input.ordinal;
+                    mListener->onWorkDone_nb(shared_from_this(), std::move(done));
+                }
 
-                // outInfo->mOwnedByUs = false;
-                // outQueue.erase(outQueue.begin());
-                // outInfo = NULL;
-                // notifyFillBufferDone(outHeader);
-                // outHeader = NULL;
                 resetPlugin();
+                return;
             }
         }
         inOffset += s_dec_op.u4_num_bytes_consumed;
@@ -1413,4 +1508,34 @@
     return C2_OK;
 }
 
+class C2SoftAvcDecFactory : public C2ComponentFactory {
+public:
+    virtual c2_status_t createComponent(
+            c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+            std::function<void(::android::C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(new C2SoftAvcDec("avc", id), deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(::android::C2ComponentInterface*)> deleter) override {
+        *interface =
+            std::shared_ptr<C2ComponentInterface>(new C2SoftAvcDecIntf("avc", id), deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftAvcDecFactory() override = default;
+};
+
 }  // namespace android
+
+extern "C" ::android::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftAvcDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::android::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
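
The extern "C" CreateCodec2Factory/DestroyCodec2Factory symbols above are what the component store's loader is expected to resolve; a hedged illustration of that binding follows (the library name reuses the libstagefright_soft_c2avcdec module name from this change, and the real ComponentLoader may differ):

    #include <dlfcn.h>

    ::android::C2ComponentFactory *loadAvcDecFactory(void **libHandle) {
        // Placeholder path; the store decides where component libraries live.
        void *handle = dlopen("libstagefright_soft_c2avcdec.so", RTLD_NOW | RTLD_NODELETE);
        if (handle == nullptr) {
            return nullptr;
        }
        auto createFactory =
                (::android::C2ComponentFactory::CreateCodec2FactoryFunc)dlsym(
                        handle, "CreateCodec2Factory");
        if (createFactory == nullptr) {
            dlclose(handle);
            return nullptr;
        }
        *libHandle = handle;
        // The matching DestroyCodec2Factory must be resolved and called on teardown.
        return createFactory();
    }
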
diff --git a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h
index cc33083..28f1dfd 100644
--- a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h
+++ b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h
@@ -58,9 +58,6 @@
 
 #define MIN(a, b) ((a) < (b)) ? (a) : (b)
 
-/** Used to remove warnings about unused parameters */
-#define UNUSED(x) ((void)(x))
-
 /** Get time */
 #define GETTIME(a, b) gettimeofday(a, b);
 
@@ -79,35 +76,36 @@
         SupportedValuesWithFields(const C2FieldSupportedValues &supported) : supported(supported) {}
     };
 
-    C2SoftAvcDecIntf(const char *name, node_id id);
-    virtual ~C2SoftAvcDecIntf() = default;
+    C2SoftAvcDecIntf(const char *name, c2_node_id_t id);
+    virtual ~C2SoftAvcDecIntf() override;
 
     // From C2ComponentInterface
     virtual C2String getName() const override;
-    virtual node_id getId() const override;
-    virtual C2Status query_nb(
+    virtual c2_node_id_t getId() const override;
+    virtual c2_status_t query_nb(
             const std::vector<C2Param* const> &stackParams,
             const std::vector<C2Param::Index> &heapParamIndices,
             std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
-    virtual C2Status config_nb(
+    virtual c2_status_t config_nb(
             const std::vector<C2Param* const> &params,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
-    virtual C2Status commit_sm(
+    virtual c2_status_t commit_sm(
             const std::vector<C2Param* const> &params,
             std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
-    virtual C2Status createTunnel_sm(node_id targetComponent) override;
-    virtual C2Status releaseTunnel_sm(node_id targetComponent) override;
-    virtual std::shared_ptr<C2ParamReflector> getParamReflector() const override;
-    virtual C2Status getSupportedParams(
+    virtual c2_status_t createTunnel_sm(c2_node_id_t targetComponent) override;
+    virtual c2_status_t releaseTunnel_sm(c2_node_id_t targetComponent) override;
+    // TODO: move this into some common store class
+    std::shared_ptr<C2ParamReflector> getParamReflector() const;
+    virtual c2_status_t querySupportedParams_nb(
             std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const override;
-    virtual C2Status getSupportedValues(
+    virtual c2_status_t querySupportedValues_nb(
             std::vector<C2FieldSupportedValuesQuery> &fields) const override;
 
 private:
     class ParamReflector;
 
     const C2String mName;
-    const node_id mId;
+    const c2_node_id_t mId;
 
     C2ComponentDomainInfo mDomainInfo;
     // TODO: config desc
@@ -118,6 +116,7 @@
     // TODO: C2StreamMimeConfig mInputStreamMime;
     // TODO: C2StreamMimeConfig mOutputStreamMime;
     C2StreamFormatConfig::input mInputStreamFormat;
+    std::unique_ptr<C2PortBlockPoolsTuning::output> mOutputBlockPools;
     C2StreamFormatConfig::output mOutputStreamFormat;
     C2VideoSizeStreamInfo::output mVideoSize;
     C2MaxVideoSizeHintPortSetting::input mMaxVideoSizeHint;
@@ -138,24 +137,25 @@
     std::vector<std::shared_ptr<C2ParamDescriptor>> mParamDescs;
 
     void updateSupportedValues();
+    friend class C2SoftAvcDec;
 };
 
 class C2SoftAvcDec
     : public C2Component,
       public std::enable_shared_from_this<C2SoftAvcDec> {
 public:
-    C2SoftAvcDec(
-            const char *name, node_id id, const std::shared_ptr<C2ComponentListener> &listener);
+    C2SoftAvcDec(const char *name, c2_node_id_t id);
     virtual ~C2SoftAvcDec();
 
     // From C2Component
-    virtual C2Status queue_nb(std::list<std::unique_ptr<C2Work>>* const items) override;
-    virtual C2Status announce_nb(const std::vector<C2WorkOutline> &items) override;
-    virtual C2Status flush_sm(
-            bool flushThrough, std::list<std::unique_ptr<C2Work>>* const flushedWork) override;
-    virtual C2Status drain_nb(bool drainThrough) override;
-    virtual C2Status start() override;
-    virtual C2Status stop() override;
+    virtual c2_status_t setListener_sm(const std::shared_ptr<Listener> &listener) override;
+    virtual c2_status_t queue_nb(std::list<std::unique_ptr<C2Work>>* const items) override;
+    virtual c2_status_t announce_nb(const std::vector<C2WorkOutline> &items) override;
+    virtual c2_status_t flush_sm(
+            flush_mode_t mode, std::list<std::unique_ptr<C2Work>>* const flushedWork) override;
+    virtual c2_status_t drain_nb(drain_mode_t mode) override;
+    virtual c2_status_t start() override;
+    virtual c2_status_t stop() override;
     virtual void reset() override;
     virtual void release() override;
     virtual std::shared_ptr<C2ComponentInterface> intf() override;
@@ -190,7 +190,12 @@
     using IndexType = decltype(C2WorkOrdinalStruct().frame_index);
 
     const std::shared_ptr<C2SoftAvcDecIntf> mIntf;
-    const std::shared_ptr<C2ComponentListener> mListener;
+    std::shared_ptr<Listener> mListener;
+    std::shared_ptr<Listener> mActiveListener;
+    std::mutex mListenerLock;
+    std::condition_variable mActiveListenerChanged;
+
+    std::shared_ptr<C2BlockPool> mOutputBlockPool;
 
     std::mutex mQueueLock;
     std::condition_variable mQueueCond;
diff --git a/media/libstagefright/codecs/cmds/Android.bp b/media/libstagefright/codecs/cmds/Android.bp
index e44e53c..ad0bd2d 100644
--- a/media/libstagefright/codecs/cmds/Android.bp
+++ b/media/libstagefright/codecs/cmds/Android.bp
@@ -22,7 +22,6 @@
         "libstagefright",
         "libstagefright_codec2",
         "libstagefright_foundation",
-        "libstagefright_soft_c2avcdec",
         "libui",
         "libutils",
     ],
diff --git a/media/libstagefright/codecs/cmds/codec2.cpp b/media/libstagefright/codecs/cmds/codec2.cpp
index 51e1420..1972a7a 100644
--- a/media/libstagefright/codecs/cmds/codec2.cpp
+++ b/media/libstagefright/codecs/cmds/codec2.cpp
@@ -58,6 +58,9 @@
 #include <C2PlatformSupport.h>
 #include <C2Work.h>
 
+extern "C" ::android::C2ComponentFactory *CreateCodec2Factory();
+extern "C" void DestroyCodec2Factory(::android::C2ComponentFactory *);
+
 #include "../avcdec/C2SoftAvcDec.h"
 
 using namespace android;
@@ -95,9 +98,7 @@
     sp<IProducerListener> mProducerListener;
 
     std::shared_ptr<C2Allocator> mAllocIon;
-    std::shared_ptr<C2Allocator> mAllocGralloc;
     std::shared_ptr<C2BlockPool> mLinearPool;
-    std::shared_ptr<C2BlockPool> mGraphicPool;
 
     std::mutex mQueueLock;
     std::condition_variable mQueueCondition;
@@ -112,22 +113,22 @@
     sp<SurfaceControl> mControl;
 };
 
-class Listener : public C2ComponentListener {
+class Listener : public C2Component::Listener {
 public:
     explicit Listener(SimplePlayer *thiz) : mThis(thiz) {}
     virtual ~Listener() = default;
 
-    virtual void onWorkDone(std::weak_ptr<C2Component> component,
+    virtual void onWorkDone_nb(std::weak_ptr<C2Component> component,
                             std::vector<std::unique_ptr<C2Work>> workItems) override {
         mThis->onWorkDone(component, std::move(workItems));
     }
 
-    virtual void onTripped(std::weak_ptr<C2Component> component,
+    virtual void onTripped_nb(std::weak_ptr<C2Component> component,
                            std::vector<std::shared_ptr<C2SettingResult>> settingResult) override {
         mThis->onTripped(component, settingResult);
     }
 
-    virtual void onError(std::weak_ptr<C2Component> component,
+    virtual void onError_nb(std::weak_ptr<C2Component> component,
                          uint32_t errorCode) override {
         mThis->onError(component, errorCode);
     }
@@ -144,11 +145,8 @@
     CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
 
     std::shared_ptr<C2AllocatorStore> store = GetCodec2PlatformAllocatorStore();
-    CHECK_EQ(store->getAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mAllocIon), C2_OK);
-    CHECK_EQ(store->getAllocator(C2AllocatorStore::DEFAULT_GRAPHIC, &mAllocGralloc), C2_OK);
-
+    CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mAllocIon), C2_OK);
     mLinearPool = std::make_shared<C2BasicLinearBlockPool>(mAllocIon);
-    mGraphicPool = std::make_shared<C2BasicGraphicBlockPool>(mAllocGralloc);
 
     mControl = mComposerClient->createSurface(
             String8("A Surface"),
@@ -213,7 +211,15 @@
         return;
     }
 
-    std::shared_ptr<C2Component> component(std::make_shared<C2SoftAvcDec>("avc", 0, mListener));
+    std::shared_ptr<C2ComponentStore> store = GetCodec2PlatformComponentStore();
+    std::shared_ptr<C2Component> component;
+    (void)store->createComponent("c2.google.avc.decoder", &component);
+
+    (void)component->setListener_sm(mListener);
+    std::unique_ptr<C2PortBlockPoolsTuning::output> pools =
+        C2PortBlockPoolsTuning::output::alloc_unique({ (uint64_t)C2BlockPool::BASIC_GRAPHIC });
+    std::vector<std::unique_ptr<C2SettingResult>> result;
+    (void)component->intf()->config_nb({pools.get()}, &result);
     component->start();
 
     for (int i = 0; i < 8; ++i) {
@@ -323,7 +329,7 @@
                 mQueueCondition.wait_for(l, 100ms);
             }
         }
-        work->input.flags = (flags_t)0;
+        work->input.flags = (C2BufferPack::flags_t)0;
         work->input.ordinal.timestamp = timestamp;
         work->input.ordinal.frame_index = numFrames;
 
@@ -343,7 +349,6 @@
         work->input.buffers.emplace_back(new LinearBuffer(block));
         work->worklets.clear();
         work->worklets.emplace_back(new C2Worklet);
-        work->worklets.front()->allocators.emplace_back(mGraphicPool);
 
         std::list<std::unique_ptr<C2Work>> items;
         items.push_back(std::move(work));
diff --git a/media/libstagefright/include/CCodecBufferChannel.h b/media/libstagefright/include/CCodecBufferChannel.h
new file mode 100644
index 0000000..354cee2
--- /dev/null
+++ b/media/libstagefright/include/CCodecBufferChannel.h
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_BUFFER_CHANNEL_H_
+
+#define A_BUFFER_CHANNEL_H_
+
+#include <map>
+#include <memory>
+#include <mutex>
+#include <vector>
+
+#include <C2Buffer.h>
+#include <C2Component.h>
+
+#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/CodecBase.h>
+#include <media/ICrypto.h>
+
+namespace android {
+
+/**
+ * BufferChannelBase implementation for CCodec.
+ */
+class CCodecBufferChannel : public BufferChannelBase {
+public:
+    class Buffers {
+    public:
+        Buffers() = default;
+        virtual ~Buffers() = default;
+
+        inline void setAlloc(const std::shared_ptr<C2BlockPool> &alloc) { mAlloc = alloc; }
+        inline void setFormat(const sp<AMessage> &format) { mFormat = format; }
+        inline const std::shared_ptr<C2BlockPool> &getAlloc() { return mAlloc; }
+
+    protected:
+        // Input: this object uses it to allocate input buffers that the
+        // client then fills.
+        // Output: this object passes it to the component.
+        std::shared_ptr<C2BlockPool> mAlloc;
+        sp<AMessage> mFormat;
+
+    private:
+        DISALLOW_EVIL_CONSTRUCTORS(Buffers);
+    };
+
+    class InputBuffers : public Buffers {
+    public:
+        using Buffers::Buffers;
+        virtual ~InputBuffers() = default;
+
+        virtual bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) = 0;
+        virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
+        virtual void flush() = 0;
+
+    private:
+        DISALLOW_EVIL_CONSTRUCTORS(InputBuffers);
+    };
+
+    class OutputBuffers : public Buffers {
+    public:
+        using Buffers::Buffers;
+        virtual ~OutputBuffers() = default;
+
+        virtual bool registerBuffer(
+                const std::shared_ptr<C2Buffer> &buffer,
+                size_t *index,
+                sp<MediaCodecBuffer> *codecBuffer) = 0;
+        virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
+        virtual void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork);
+
+    private:
+        DISALLOW_EVIL_CONSTRUCTORS(OutputBuffers);
+    };
+
+    CCodecBufferChannel(const std::function<void(status_t, enum ActionCode)> &onError);
+    virtual ~CCodecBufferChannel();
+
+    // BufferChannelBase interface
+    virtual status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    virtual status_t queueSecureInputBuffer(
+            const sp<MediaCodecBuffer> &buffer,
+            bool secure,
+            const uint8_t *key,
+            const uint8_t *iv,
+            CryptoPlugin::Mode mode,
+            CryptoPlugin::Pattern pattern,
+            const CryptoPlugin::SubSample *subSamples,
+            size_t numSubSamples,
+            AString *errorDetailMsg) override;
+    virtual status_t renderOutputBuffer(
+            const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
+    virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
+    virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+    virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
+
+    // Methods below are interface for CCodec to use.
+
+    void setComponent(const std::shared_ptr<C2Component> &component);
+    status_t setSurface(const sp<Surface> &surface);
+
+    /**
+     * Set C2BlockPool for input buffers.
+     *
+     * TODO: start timestamp?
+     */
+    void setInputBufferAllocator(const sp<C2BlockPool> &inAlloc);
+
+    /**
+     * Set C2BlockPool for output buffers. This object shall never use the
+     * allocator itself; it is just passed on to the component.
+     *
+     * TODO: start timestamp?
+     */
+    void setOutputBufferAllocator(const sp<C2BlockPool> &outAlloc);
+
+    /**
+     * Start queueing buffers to the component. This object should never queue
+     * buffers before this call.
+     */
+    void start(const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat);
+
+    /**
+     * Stop queueing buffers to the component. This object should never queue
+     * buffers after this call, until start() is called.
+     */
+    void stop();
+
+    void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork);
+
+    /**
+     * Notify MediaCodec about work done.
+     *
+     * @param workItems   finished work items.
+     */
+    void onWorkDone(std::vector<std::unique_ptr<C2Work>> workItems);
+
+private:
+    class QueueGuard;
+
+    class QueueSync {
+    public:
+        inline QueueSync() : mCount(-1) {}
+        ~QueueSync() = default;
+
+        void start();
+        void stop();
+
+    private:
+        std::mutex mMutex;
+        std::atomic_int32_t mCount;
+
+        friend class CCodecBufferChannel::QueueGuard;
+    };
+
+    class QueueGuard {
+    public:
+        QueueGuard(QueueSync &sync);
+        ~QueueGuard();
+        inline bool isRunning() { return mRunning; }
+
+    private:
+        QueueSync &mSync;
+        bool mRunning;
+    };
+
+    QueueSync mSync;
+    sp<MemoryDealer> mDealer;
+    sp<IMemory> mDecryptDestination;
+    int32_t mHeapSeqNum;
+
+    std::shared_ptr<C2Component> mComponent;
+    std::function<void(status_t, enum ActionCode)> mOnError;
+    std::shared_ptr<C2BlockPool> mInputAllocator;
+    QueueSync mQueueSync;
+    Mutexed<std::unique_ptr<InputBuffers>> mInputBuffers;
+    Mutexed<std::unique_ptr<OutputBuffers>> mOutputBuffers;
+
+    std::atomic_uint64_t mFrameIndex;
+    std::atomic_uint64_t mFirstValidFrameIndex;
+
+    sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
+    Mutexed<sp<Surface>> mSurface;
+
+    inline bool hasCryptoOrDescrambler() {
+        return mCrypto != NULL || mDescrambler != NULL;
+    }
+};
+
+}  // namespace android
+
+#endif  // A_BUFFER_CHANNEL_H_
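
A minimal sketch (an assumption about intended use, not code from this change) of how the QueueSync/QueueGuard pair declared above is presumably meant to guard buffer queueing against a concurrent stop():

    // Hypothetical body for a queueing entry point; the real implementation in
    // CCodecBufferChannel.cpp may differ.
    status_t CCodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
        QueueGuard guard(mSync);
        if (!guard.isRunning()) {
            // start() has not been called yet, or stop() already won the race.
            return -ENOSYS;
        }
        // ... turn `buffer` into a C2Work item and queue it to mComponent ...
        return OK;
    }
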
diff --git a/media/libstagefright/include/Codec2Buffer.h b/media/libstagefright/include/Codec2Buffer.h
new file mode 100644
index 0000000..0272cea
--- /dev/null
+++ b/media/libstagefright/include/Codec2Buffer.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_BUFFER_H_
+
+#define CODEC2_BUFFER_H_
+
+#include <C2Buffer.h>
+
+#include <media/MediaCodecBuffer.h>
+
+namespace android {
+
+class C2Buffer;
+
+/**
+ * MediaCodecBuffer implementation wraps around C2LinearBlock.
+ */
+class Codec2Buffer : public MediaCodecBuffer {
+public:
+    static sp<Codec2Buffer> allocate(
+            const sp<AMessage> &format, const std::shared_ptr<C2LinearBlock> &block);
+
+    virtual ~Codec2Buffer() = default;
+
+    C2ConstLinearBlock share();
+
+private:
+    Codec2Buffer(
+            const sp<AMessage> &format,
+            const sp<ABuffer> &buffer,
+            const std::shared_ptr<C2LinearBlock> &block);
+    Codec2Buffer() = delete;
+
+    std::shared_ptr<C2LinearBlock> mBlock;
+};
+
+}  // namespace android
+
+#endif  // CODEC2_BUFFER_H_
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index 4984f69..9414aab 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -40,7 +40,8 @@
             uint32_t channels,
             uint32_t outSampleRate = 0,
             uid_t uid = -1,
-            pid_t pid = -1);
+            pid_t pid = -1,
+            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
     status_t initCheck() const;
 
@@ -58,6 +59,11 @@
     status_t dataCallback(const AudioRecord::Buffer& buffer);
     virtual void signalBufferReturned(MediaBuffer *buffer);
 
+    status_t setInputDevice(audio_port_handle_t deviceId);
+    status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+    status_t addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
+    status_t removeAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
+
 protected:
     virtual ~AudioSource();
 
diff --git a/media/libstagefright/include/media/stagefright/CCodec.h b/media/libstagefright/include/media/stagefright/CCodec.h
new file mode 100644
index 0000000..3e24bbe
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/CCodec.h
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C_CODEC_H_
+#define C_CODEC_H_
+
+#include <chrono>
+
+#include <C2Component.h>
+
+#include <android/native_window.h>
+#include <media/hardware/MetadataBufferType.h>
+#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/CodecBase.h>
+#include <media/stagefright/FrameRenderTracker.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/SkipCutBuffer.h>
+#include <utils/NativeHandle.h>
+#include <hardware/gralloc.h>
+#include <nativebase/nativebase.h>
+
+namespace android {
+
+class CCodecBufferChannel;
+
+class CCodec : public CodecBase {
+public:
+    CCodec();
+
+    virtual std::shared_ptr<BufferChannelBase> getBufferChannel() override;
+    virtual void initiateAllocateComponent(const sp<AMessage> &msg) override;
+    virtual void initiateConfigureComponent(const sp<AMessage> &msg) override;
+    virtual void initiateCreateInputSurface() override;
+    virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface) override;
+    virtual void initiateStart() override;
+    virtual void initiateShutdown(bool keepComponentAllocated = false) override;
+
+    virtual status_t setSurface(const sp<Surface> &surface) override;
+
+    virtual void signalFlush() override;
+    virtual void signalResume() override;
+
+    virtual void signalSetParameters(const sp<AMessage> &msg) override;
+    virtual void signalEndOfInputStream() override;
+    virtual void signalRequestIDRFrame() override;
+
+    void initiateReleaseIfStuck();
+
+protected:
+    virtual ~CCodec();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg) override;
+
+private:
+    typedef std::chrono::time_point<std::chrono::steady_clock> TimePoint;
+
+    void initiateStop();
+    void initiateRelease(bool sendCallback = true);
+
+    void allocate(const AString &componentName);
+    void configure(const sp<AMessage> &msg);
+    void start();
+    void stop();
+    void flush();
+    void release(bool sendCallback);
+
+    void setDeadline(const TimePoint &deadline);
+
+    enum {
+        kWhatAllocate,
+        kWhatConfigure,
+        kWhatStart,
+        kWhatFlush,
+        kWhatStop,
+        kWhatRelease,
+    };
+
+    enum {
+        RELEASED,
+        ALLOCATED,
+        FLUSHED,
+        RUNNING,
+
+        ALLOCATING,  // RELEASED -> ALLOCATED
+        STARTING,    // ALLOCATED -> RUNNING
+        STOPPING,    // RUNNING -> ALLOCATED
+        FLUSHING,    // RUNNING -> FLUSHED
+        RESUMING,    // FLUSHED -> RUNNING
+        RELEASING,   // {ANY EXCEPT RELEASED} -> RELEASED
+    };
+
+    struct State {
+        inline State() : mState(RELEASED) {}
+
+        int mState;
+        std::shared_ptr<C2Component> mComp;
+    };
+
+    struct Formats {
+        sp<AMessage> mInputFormat;
+        sp<AMessage> mOutputFormat;
+    };
+
+    Mutexed<State> mState;
+    std::shared_ptr<CCodecBufferChannel> mChannel;
+    std::shared_ptr<C2Component::Listener> mListener;
+    Mutexed<TimePoint> mDeadline;
+    Mutexed<Formats> mFormats;
+
+    DISALLOW_EVIL_CONSTRUCTORS(CCodec);
+};
+
+}  // namespace android
+
+#endif  // C_CODEC_H_
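Illustrative note (not part of the patch): the transitional states declared above (ALLOCATING, STARTING, ...) guard each asynchronous step, so an operation first claims the transition under the Mutexed<State> lock and only then calls into the C2Component. A minimal sketch of that pattern, with hypothetical control flow and error handling omitted:

    // Sketch only; assumes the Mutexed<>::Locked idiom from foundation/Mutexed.h.
    void CCodec::start() {
        std::shared_ptr<C2Component> comp;
        {
            Mutexed<State>::Locked state(mState);     // scoped lock over the State struct
            if (state->mState != ALLOCATED) {
                return;                               // unexpected state: ignore the request
            }
            state->mState = STARTING;                 // transitional: ALLOCATED -> RUNNING
            comp = state->mComp;
        }
        comp->start();                                // Codec 2.0 call made outside the lock
        {
            Mutexed<State>::Locked state(mState);
            state->mState = RUNNING;
        }
    }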
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 9197f7b..268662f 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -18,6 +18,7 @@
 
 #define CODEC_BASE_H_
 
+#include <list>
 #include <memory>
 
 #include <stdint.h>
@@ -26,7 +27,6 @@
 
 #include <media/hardware/CryptoAPI.h>
 #include <media/hardware/HardwareAPI.h>
-#include <media/IOMX.h>
 #include <media/MediaCodecInfo.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/ColorUtils.h>
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index eba3b32..5d2c120 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -31,6 +31,7 @@
 struct AMessage;
 class MediaBuffer;
 class MetaData;
+struct ABuffer;
 
 class MPEG4Writer : public MediaWriter {
 public:
@@ -100,12 +101,12 @@
     bool mSendNotify;
     off64_t mOffset;
     off_t mMdatOffset;
-    uint8_t *mMoovBoxBuffer;
-    off64_t mMoovBoxBufferOffset;
-    bool  mWriteMoovBoxToMemory;
+    uint8_t *mInMemoryCache;
+    off64_t mInMemoryCacheOffset;
+    off64_t mInMemoryCacheSize;
+    bool  mWriteBoxToMemory;
     off64_t mFreeBoxOffset;
     bool mStreamableFile;
-    off64_t mEstimatedMoovBoxSize;
     off64_t mMoovExtraSize;
     uint32_t mInterleaveDurationUs;
     int32_t mTimeScale;
@@ -132,6 +133,8 @@
     status_t startTracks(MetaData *params);
     size_t numTracks();
     int64_t estimateMoovBoxSize(int32_t bitRate);
+    int64_t estimateFileLevelMetaSize();
+    void writeCachedBoxToFile(const char *type);
 
     struct Chunk {
         Track               *mTrack;        // Owner
@@ -164,6 +167,46 @@
     List<ChunkInfo> mChunkInfos;            // Chunk infos
     Condition       mChunkReadyCondition;   // Signal that chunks are available
 
+    // HEIF writing
+    typedef struct _ItemInfo {
+        bool isGrid() const { return !strcmp("grid", itemType); }
+        const char *itemType;
+        uint16_t itemId;
+        bool isPrimary;
+        bool isHidden;
+        union {
+            // image item
+            struct {
+                uint32_t offset;
+                uint32_t size;
+            };
+            // grid item
+            struct {
+                uint32_t rows;
+                uint32_t cols;
+                uint32_t width;
+                uint32_t height;
+            };
+        };
+        Vector<uint16_t> properties;
+        Vector<uint16_t> dimgRefs;
+    } ItemInfo;
+
+    typedef struct _ItemProperty {
+        uint32_t type;
+        int32_t width;
+        int32_t height;
+        sp<ABuffer> hvcc;
+    } ItemProperty;
+
+    bool mHasFileLevelMeta;
+    bool mHasMoovBox;
+    uint32_t mPrimaryItemId;
+    uint32_t mAssociationEntryCount;
+    uint32_t mNumGrids;
+    Vector<ItemInfo> mItems;
+    Vector<ItemProperty> mProperties;
+
     // Writer thread handling
     status_t startWriterThread();
     void stopWriterThread();
@@ -209,9 +252,11 @@
     void initInternal(int fd, bool isFirstSession);
 
     // Acquire lock before calling these methods
-    off64_t addSample_l(MediaBuffer *buffer);
-    off64_t addLengthPrefixedSample_l(MediaBuffer *buffer);
-    off64_t addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer);
+    off64_t addSample_l(MediaBuffer *buffer, bool usePrefix, size_t *bytesWritten);
+    void addLengthPrefixedSample_l(MediaBuffer *buffer);
+    void addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer);
+    uint16_t addProperty_l(const ItemProperty &);
+    uint16_t addItem_l(const ItemInfo &);
 
     bool exceedsFileSizeLimit();
     bool use32BitFileOffset() const;
@@ -230,10 +275,23 @@
     void finishCurrentSession();
 
     void addDeviceMeta();
-    void writeHdlr();
+    void writeHdlr(const char *handlerType);
     void writeKeys();
     void writeIlst();
-    void writeMetaBox();
+    void writeMoovLevelMetaBox();
+
+    // HEIF writing
+    void writeIlocBox();
+    void writeInfeBox(uint16_t itemId, const char *type, uint32_t flags);
+    void writeIinfBox();
+    void writeIpcoBox();
+    void writeIpmaBox();
+    void writeIprpBox();
+    void writeIdatBox();
+    void writeIrefBox();
+    void writePitmBox();
+    void writeFileLevelMetaBox();
+
     void sendSessionSummary();
     void release();
     status_t switchFd();
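Illustrative note (not part of the patch): the new HEIF members keep a flat table of items and properties that addItem_l()/addProperty_l() fill in and writeFileLevelMetaBox() serializes. ItemInfo is a private nested type, so the following is only a sketch of how a 2x2 grid entry relates to the fields declared above; the sizes and ids are made up:

    // Hypothetical population of a grid item; the real code lives in MPEG4Writer.cpp.
    ItemInfo grid;
    grid.itemType  = "grid";       // isGrid() keys off this string
    grid.itemId    = 1;            // 16-bit, assigned by addItem_l() in practice
    grid.isPrimary = true;         // becomes the 'pitm' (primary item) reference
    grid.isHidden  = false;
    grid.rows = 2;
    grid.cols = 2;
    grid.width  = 4096;            // full image size covered by the tiles
    grid.height = 3072;
    for (uint16_t tile = 2; tile <= 5; ++tile) {
        grid.dimgRefs.push(tile);  // 'dimg' references from the grid to its tile items
    }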
diff --git a/media/libstagefright/include/media/stagefright/MediaFilter.h b/media/libstagefright/include/media/stagefright/MediaFilter.h
index 0c10d11..a28c49d 100644
--- a/media/libstagefright/include/media/stagefright/MediaFilter.h
+++ b/media/libstagefright/include/media/stagefright/MediaFilter.h
@@ -57,7 +57,7 @@
             OWNED_BY_UPSTREAM,
         };
 
-        IOMX::buffer_id mBufferID;
+        uint32_t mBufferID;
         int32_t mGeneration;
         int32_t mOutputFlags;
         Status mStatus;
@@ -121,7 +121,7 @@
 
     status_t allocateBuffersOnPort(OMX_U32 portIndex);
     BufferInfo *findBufferByID(
-            uint32_t portIndex, IOMX::buffer_id bufferID,
+            uint32_t portIndex, uint32_t bufferID,
             ssize_t *index = NULL);
     void postFillThisBuffer(BufferInfo *info);
     void postDrainThisBuffer(BufferInfo *info);
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index 63c3ca5..66f4d72 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -48,6 +48,7 @@
         OUTPUT_FORMAT_MPEG_4      = 0,
         OUTPUT_FORMAT_WEBM        = 1,
         OUTPUT_FORMAT_THREE_GPP   = 2,
+        OUTPUT_FORMAT_HEIF        = 3,
         OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
     };
 
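Illustrative note (not part of the patch): with the new enumerator, native clients can request a HEIF container the same way they request MPEG-4. A hedged usage sketch, assuming the existing MediaMuxer(fd, format) constructor and omitting error handling:

    int fd = open("/sdcard/still.heic", O_CREAT | O_RDWR, 0644);
    sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    // addTrack()/start()/writeSampleData()/stop() then proceed as for OUTPUT_FORMAT_MPEG_4,
    // with the image track(s) ending up in the file-level 'meta' box written by MPEG4Writer.
    close(fd);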
diff --git a/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h b/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
index 5060dc1..6aede08 100644
--- a/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
+++ b/media/libstagefright/include/media/stagefright/SimpleDecodingSource.h
@@ -47,7 +47,8 @@
     static sp<SimpleDecodingSource> Create(
             const sp<MediaSource> &source, uint32_t flags,
             const sp<ANativeWindow> &nativeWindow,
-            const char *desiredCodec = NULL);
+            const char *desiredCodec = NULL,
+            bool skipMediaCodecList = false);
 
     static sp<SimpleDecodingSource> Create(
             const sp<MediaSource> &source, uint32_t flags = 0);
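Illustrative note (not part of the patch): the new trailing parameter lets a caller bypass the MediaCodecList query and instantiate the named codec directly. A hedged call-site sketch; the codec name is an assumption, not taken from this change:

    sp<SimpleDecodingSource> decoder = SimpleDecodingSource::Create(
            source,
            0 /* flags */,
            nullptr /* nativeWindow */,
            "OMX.google.hevc.decoder" /* desiredCodec, assumed name */,
            true /* skipMediaCodecList */);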
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 79e540a..aeb32bb 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -641,38 +641,56 @@
 
 // IAudioFlinger interface
 
-
-sp<IAudioTrack> AudioFlinger::createTrack(
-        audio_stream_type_t streamType,
-        uint32_t sampleRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        size_t *frameCount,
-        audio_output_flags_t *flags,
-        const sp<IMemory>& sharedBuffer,
-        audio_io_handle_t output,
-        pid_t pid,
-        pid_t tid,
-        audio_session_t *sessionId,
-        int clientUid,
-        status_t *status,
-        audio_port_handle_t portId)
+sp<IAudioTrack> AudioFlinger::createTrack(const CreateTrackInput& input,
+                                          CreateTrackOutput& output,
+                                          status_t *status)
 {
     sp<PlaybackThread::Track> track;
     sp<TrackHandle> trackHandle;
     sp<Client> client;
     status_t lStatus;
-    audio_session_t lSessionId;
+    audio_stream_type_t streamType;
+    audio_port_handle_t portId;
 
+    bool updatePid = (input.clientInfo.clientPid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (pid == -1 || !isTrustedCallingUid(callingUid)) {
+    uid_t clientUid = input.clientInfo.clientUid;
+    if (!isTrustedCallingUid(callingUid)) {
+        ALOGW_IF(clientUid != callingUid,
+                "%s uid %d tried to pass itself off as %d",
+                __FUNCTION__, callingUid, clientUid);
+        clientUid = callingUid;
+        updatePid = true;
+    }
+    pid_t clientPid = input.clientInfo.clientPid;
+    if (updatePid) {
         const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(pid != -1 && pid != callingPid,
+        ALOGW_IF(clientPid != -1 && clientPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, pid);
-        pid = callingPid;
+                 __func__, callingUid, callingPid, clientPid);
+        clientPid = callingPid;
     }
 
+    audio_session_t sessionId = input.sessionId;
+    if (sessionId == AUDIO_SESSION_ALLOCATE) {
+        sessionId = (audio_session_t) newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    } else if (audio_unique_id_get_use(sessionId) != AUDIO_UNIQUE_ID_USE_SESSION) {
+        lStatus = BAD_VALUE;
+        goto Exit;
+    }
+
+    output.sessionId = sessionId;
+    output.outputId = AUDIO_IO_HANDLE_NONE;
+    output.selectedDeviceId = input.selectedDeviceId;
+
+    lStatus = AudioSystem::getOutputForAttr(&input.attr, &output.outputId, sessionId, &streamType,
+                                            clientUid, &input.config, input.flags,
+                                            &output.selectedDeviceId, &portId);
+
+    if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
+        ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
+        goto Exit;
+    }
     // client AudioTrack::set already implements AUDIO_STREAM_DEFAULT => AUDIO_STREAM_MUSIC,
     // but if someone uses binder directly they could bypass that and cause us to crash
     if (uint32_t(streamType) >= AUDIO_STREAM_CNT) {
@@ -681,91 +699,76 @@
         goto Exit;
     }
 
-    // further sample rate checks are performed by createTrack_l() depending on the thread type
-    if (sampleRate == 0) {
-        ALOGE("createTrack() invalid sample rate %u", sampleRate);
-        lStatus = BAD_VALUE;
-        goto Exit;
-    }
-
     // further channel mask checks are performed by createTrack_l() depending on the thread type
-    if (!audio_is_output_channel(channelMask)) {
-        ALOGE("createTrack() invalid channel mask %#x", channelMask);
+    if (!audio_is_output_channel(input.config.channel_mask)) {
+        ALOGE("createTrack() invalid channel mask %#x", input.config.channel_mask);
         lStatus = BAD_VALUE;
         goto Exit;
     }
 
     // further format checks are performed by createTrack_l() depending on the thread type
-    if (!audio_is_valid_format(format)) {
-        ALOGE("createTrack() invalid format %#x", format);
-        lStatus = BAD_VALUE;
-        goto Exit;
-    }
-
-    if (sharedBuffer != 0 && sharedBuffer->pointer() == NULL) {
-        ALOGE("createTrack() sharedBuffer is non-0 but has NULL pointer()");
+    if (!audio_is_valid_format(input.config.format)) {
+        ALOGE("createTrack() invalid format %#x", input.config.format);
         lStatus = BAD_VALUE;
         goto Exit;
     }
 
     {
         Mutex::Autolock _l(mLock);
-        PlaybackThread *thread = checkPlaybackThread_l(output);
+        PlaybackThread *thread = checkPlaybackThread_l(output.outputId);
         if (thread == NULL) {
-            ALOGE("no playback thread found for output handle %d", output);
+            ALOGE("no playback thread found for output handle %d", output.outputId);
             lStatus = BAD_VALUE;
             goto Exit;
         }
 
-        client = registerPid(pid);
+        client = registerPid(clientPid);
 
         PlaybackThread *effectThread = NULL;
-        if (sessionId != NULL && *sessionId != AUDIO_SESSION_ALLOCATE) {
-            if (audio_unique_id_get_use(*sessionId) != AUDIO_UNIQUE_ID_USE_SESSION) {
-                ALOGE("createTrack() invalid session ID %d", *sessionId);
-                lStatus = BAD_VALUE;
-                goto Exit;
-            }
-            lSessionId = *sessionId;
-            // check if an effect chain with the same session ID is present on another
-            // output thread and move it here.
-            for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
-                sp<PlaybackThread> t = mPlaybackThreads.valueAt(i);
-                if (mPlaybackThreads.keyAt(i) != output) {
-                    uint32_t sessions = t->hasAudioSession(lSessionId);
-                    if (sessions & ThreadBase::EFFECT_SESSION) {
-                        effectThread = t.get();
-                        break;
-                    }
+        // check if an effect chain with the same session ID is present on another
+        // output thread and move it here.
+        for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+            sp<PlaybackThread> t = mPlaybackThreads.valueAt(i);
+            if (mPlaybackThreads.keyAt(i) != output.outputId) {
+                uint32_t sessions = t->hasAudioSession(sessionId);
+                if (sessions & ThreadBase::EFFECT_SESSION) {
+                    effectThread = t.get();
+                    break;
                 }
             }
-        } else {
-            // if no audio session id is provided, create one here
-            lSessionId = (audio_session_t) nextUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
-            if (sessionId != NULL) {
-                *sessionId = lSessionId;
-            }
         }
-        ALOGV("createTrack() lSessionId: %d", lSessionId);
+        ALOGV("createTrack() sessionId: %d", sessionId);
 
-        track = thread->createTrack_l(client, streamType, sampleRate, format,
-                channelMask, frameCount, sharedBuffer, lSessionId, flags, tid,
-                clientUid, &lStatus, portId);
+        output.sampleRate = input.config.sample_rate;
+        output.frameCount = input.frameCount;
+        output.notificationFrameCount = input.notificationFrameCount;
+        output.flags = input.flags;
+
+        track = thread->createTrack_l(client, streamType, &output.sampleRate, input.config.format,
+                                      input.config.channel_mask,
+                                      &output.frameCount, &output.notificationFrameCount,
+                                      input.notificationsPerBuffer, input.speed,
+                                      input.sharedBuffer, sessionId, &output.flags,
+                                      input.clientInfo.clientTid, clientUid, &lStatus, portId);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
+        output.afFrameCount = thread->frameCount();
+        output.afSampleRate = thread->sampleRate();
+        output.afLatencyMs = thread->latency();
+
         // move effect chain to this output thread if an effect on same session was waiting
         // for a track to be created
         if (lStatus == NO_ERROR && effectThread != NULL) {
             // no risk of deadlock because AudioFlinger::mLock is held
             Mutex::Autolock _dl(thread->mLock);
             Mutex::Autolock _sl(effectThread->mLock);
-            moveEffectChain_l(lSessionId, effectThread, thread, true);
+            moveEffectChain_l(sessionId, effectThread, thread, true);
         }
 
         // Look for sync events awaiting for a session to be used.
         for (size_t i = 0; i < mPendingSyncEvents.size(); i++) {
-            if (mPendingSyncEvents[i]->triggerSession() == lSessionId) {
+            if (mPendingSyncEvents[i]->triggerSession() == sessionId) {
                 if (thread->isValidSyncEvent(mPendingSyncEvents[i])) {
                     if (lStatus == NO_ERROR) {
                         (void) track->setSyncEvent(mPendingSyncEvents[i]);
@@ -778,7 +781,7 @@
             }
         }
 
-        setAudioHwSyncForSession_l(thread, lSessionId);
+        setAudioHwSyncForSession_l(thread, sessionId);
     }
 
     if (lStatus != NO_ERROR) {
@@ -798,6 +801,9 @@
     trackHandle = new TrackHandle(track);
 
 Exit:
+    if (lStatus != NO_ERROR && output.outputId != AUDIO_IO_HANDLE_NONE) {
+        AudioSystem::releaseOutput(output.outputId, streamType, sessionId);
+    }
     *status = lStatus;
     return trackHandle;
 }
@@ -1566,120 +1572,144 @@
 
 // ----------------------------------------------------------------------------
 
-sp<media::IAudioRecord> AudioFlinger::openRecord(
-        audio_io_handle_t input,
-        uint32_t sampleRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        const String16& opPackageName,
-        size_t *frameCount,
-        audio_input_flags_t *flags,
-        pid_t pid,
-        pid_t tid,
-        int clientUid,
-        audio_session_t *sessionId,
-        size_t *notificationFrames,
-        sp<IMemory>& cblk,
-        sp<IMemory>& buffers,
-        status_t *status,
-        audio_port_handle_t portId)
+sp<media::IAudioRecord> AudioFlinger::createRecord(const CreateRecordInput& input,
+                                                   CreateRecordOutput& output,
+                                                   status_t *status)
 {
     sp<RecordThread::RecordTrack> recordTrack;
     sp<RecordHandle> recordHandle;
     sp<Client> client;
     status_t lStatus;
-    audio_session_t lSessionId;
+    audio_session_t sessionId = input.sessionId;
+    audio_port_handle_t portId;
 
-    cblk.clear();
-    buffers.clear();
+    output.cblk.clear();
+    output.buffers.clear();
 
-    bool updatePid = (pid == -1);
+    bool updatePid = (input.clientInfo.clientPid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    uid_t clientUid = input.clientInfo.clientUid;
     if (!isTrustedCallingUid(callingUid)) {
-        ALOGW_IF((uid_t)clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid, clientUid);
+        ALOGW_IF(clientUid != callingUid,
+                "%s uid %d tried to pass itself off as %d",
+                __FUNCTION__, callingUid, clientUid);
         clientUid = callingUid;
         updatePid = true;
     }
-
+    pid_t clientPid = input.clientInfo.clientPid;
     if (updatePid) {
         const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(pid != -1 && pid != callingPid,
+        ALOGW_IF(clientPid != -1 && clientPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, pid);
-        pid = callingPid;
+                 __func__, callingUid, callingPid, clientPid);
+        clientPid = callingPid;
     }
 
     // check calling permissions
-    if (!recordingAllowed(opPackageName, tid, clientUid)) {
-        ALOGE("openRecord() permission denied: recording not allowed");
+    if (!recordingAllowed(input.opPackageName, input.clientInfo.clientTid, clientUid)) {
+        ALOGE("createRecord() permission denied: recording not allowed");
         lStatus = PERMISSION_DENIED;
         goto Exit;
     }
-
-    // further sample rate checks are performed by createRecordTrack_l()
-    if (sampleRate == 0) {
-        ALOGE("openRecord() invalid sample rate %u", sampleRate);
-        lStatus = BAD_VALUE;
-        goto Exit;
-    }
-
     // we don't yet support anything other than linear PCM
-    if (!audio_is_valid_format(format) || !audio_is_linear_pcm(format)) {
-        ALOGE("openRecord() invalid format %#x", format);
+    if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
+        ALOGE("createRecord() invalid format %#x", input.config.format);
         lStatus = BAD_VALUE;
         goto Exit;
     }
 
     // further channel mask checks are performed by createRecordTrack_l()
-    if (!audio_is_input_channel(channelMask)) {
-        ALOGE("openRecord() invalid channel mask %#x", channelMask);
+    if (!audio_is_input_channel(input.config.channel_mask)) {
+        ALOGE("createRecord() invalid channel mask %#x", input.config.channel_mask);
         lStatus = BAD_VALUE;
         goto Exit;
     }
 
+    if (sessionId == AUDIO_SESSION_ALLOCATE) {
+        sessionId = (audio_session_t) newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    } else if (audio_unique_id_get_use(sessionId) != AUDIO_UNIQUE_ID_USE_SESSION) {
+        lStatus = BAD_VALUE;
+        goto Exit;
+    }
+
+    output.sessionId = sessionId;
+    output.inputId = AUDIO_IO_HANDLE_NONE;
+    output.selectedDeviceId = input.selectedDeviceId;
+    output.flags = input.flags;
+
+    client = registerPid(clientPid);
+
+    // Not a conventional loop, but a retry loop for at most two iterations total.
+    // Try first, possibly with the FAST flag, then retry without the FAST flag if that fails.
+    // Exits the loop via break on success, or via goto Exit on error.
+    // The sp<> references will be dropped when the scope is re-entered at the top of the loop.
+    // The lack of indentation is deliberate, to reduce code churn and ease merges.
+    for (;;) {
+    lStatus = AudioSystem::getInputForAttr(&input.attr, &output.inputId,
+                                      sessionId,
+                                    // FIXME compare to AudioTrack
+                                      clientPid,
+                                      clientUid,
+                                      &input.config,
+                                      output.flags, &output.selectedDeviceId, &portId);
+
     {
         Mutex::Autolock _l(mLock);
-        RecordThread *thread = checkRecordThread_l(input);
+        RecordThread *thread = checkRecordThread_l(output.inputId);
         if (thread == NULL) {
-            ALOGE("openRecord() checkRecordThread_l failed");
+            ALOGE("createRecord() checkRecordThread_l failed");
             lStatus = BAD_VALUE;
             goto Exit;
         }
 
-        client = registerPid(pid);
+        ALOGV("createRecord() lSessionId: %d input %d", sessionId, output.inputId);
 
-        if (sessionId != NULL && *sessionId != AUDIO_SESSION_ALLOCATE) {
-            if (audio_unique_id_get_use(*sessionId) != AUDIO_UNIQUE_ID_USE_SESSION) {
-                lStatus = BAD_VALUE;
-                goto Exit;
-            }
-            lSessionId = *sessionId;
-        } else {
-            // if no audio session id is provided, create one here
-            lSessionId = (audio_session_t) nextUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
-            if (sessionId != NULL) {
-                *sessionId = lSessionId;
-            }
-        }
-        ALOGV("openRecord() lSessionId: %d input %d", lSessionId, input);
+        output.sampleRate = input.config.sample_rate;
+        output.frameCount = input.frameCount;
+        output.notificationFrameCount = input.notificationFrameCount;
 
-        recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask,
-                                                  frameCount, lSessionId, notificationFrames,
-                                                  clientUid, flags, tid, &lStatus, portId);
+        recordTrack = thread->createRecordTrack_l(client, &output.sampleRate,
+                                                  input.config.format, input.config.channel_mask,
+                                                  &output.frameCount, sessionId,
+                                                  &output.notificationFrameCount,
+                                                  clientUid, &output.flags,
+                                                  input.clientInfo.clientTid,
+                                                  &lStatus, portId);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (recordTrack == 0));
 
-        if (lStatus == NO_ERROR) {
-            // Check if one effect chain was awaiting for an AudioRecord to be created on this
-            // session and move it to this thread.
-            sp<EffectChain> chain = getOrphanEffectChain_l(lSessionId);
-            if (chain != 0) {
-                Mutex::Autolock _l(thread->mLock);
-                thread->addEffectChain_l(chain);
-            }
+        // lStatus == BAD_TYPE means FAST flag was rejected: request a new input from
+        // audio policy manager without FAST constraint
+        if (lStatus == BAD_TYPE) {
+            AudioSystem::releaseInput(output.inputId, sessionId);
+            recordTrack.clear();
+            continue;
         }
+
+        if (lStatus != NO_ERROR) {
+            recordTrack.clear();
+            goto Exit;
+        }
+
+        // Check if one effect chain was awaiting for an AudioRecord to be created on this
+        // session and move it to this thread.
+        sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
+        if (chain != 0) {
+            Mutex::Autolock _l(thread->mLock);
+            thread->addEffectChain_l(chain);
+        }
+        break;
+    }
+    // End of retry loop.
+    // The lack of indentation is deliberate, to reduce code churn and ease merges.
     }
 
+    output.cblk = recordTrack->getCblk();
+    output.buffers = recordTrack->getBuffers();
+
+    // return handle to client
+    recordHandle = new RecordHandle(recordTrack);
+
+Exit:
     if (lStatus != NO_ERROR) {
         // remove local strong reference to Client before deleting the RecordTrack so that the
         // Client destructor is called by the TrackBase destructor with mClientLock held
@@ -1689,17 +1719,8 @@
             Mutex::Autolock _cl(mClientLock);
             client.clear();
         }
-        recordTrack.clear();
-        goto Exit;
     }
 
-    cblk = recordTrack->getCblk();
-    buffers = recordTrack->getBuffers();
-
-    // return handle to client
-    recordHandle = new RecordHandle(recordTrack);
-
-Exit:
     *status = lStatus;
     return recordHandle;
 }
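Illustrative note (not part of the patch): createTrack() and createRecord() now take a single input struct and fill a single output struct instead of a long positional argument list. A hedged sketch of a caller, using only fields that appear in the diff above; the surrounding setup (attributes, service proxy) is assumed:

    IAudioFlinger::CreateTrackInput input;
    input.attr = attr;                                  // audio_attributes_t for routing
    input.config.sample_rate  = 48000;
    input.config.format       = AUDIO_FORMAT_PCM_FLOAT;
    input.config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
    input.clientInfo.clientUid = getuid();
    input.clientInfo.clientPid = getpid();
    input.clientInfo.clientTid = -1;
    input.flags     = AUDIO_OUTPUT_FLAG_NONE;
    input.sessionId = AUDIO_SESSION_ALLOCATE;           // server allocates the session id

    IAudioFlinger::CreateTrackOutput output;
    status_t status;
    sp<IAudioTrack> track = audioFlinger->createTrack(input, output, &status);
    // On success, output reports the negotiated sampleRate/frameCount/flags and the
    // server-side afFrameCount/afSampleRate/afLatencyMs used for client buffer sizing.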
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index dff94d2..bc73ffd 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -114,39 +114,13 @@
     virtual     status_t    dump(int fd, const Vector<String16>& args);
 
     // IAudioFlinger interface, in binder opcode order
-    virtual sp<IAudioTrack> createTrack(
-                                audio_stream_type_t streamType,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                size_t *pFrameCount,
-                                audio_output_flags_t *flags,
-                                const sp<IMemory>& sharedBuffer,
-                                audio_io_handle_t output,
-                                pid_t pid,
-                                pid_t tid,
-                                audio_session_t *sessionId,
-                                int clientUid,
-                                status_t *status /*non-NULL*/,
-                                audio_port_handle_t portId);
+    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
+                                        CreateTrackOutput& output,
+                                        status_t *status);
 
-    virtual sp<media::IAudioRecord> openRecord(
-                                audio_io_handle_t input,
-                                uint32_t sampleRate,
-                                audio_format_t format,
-                                audio_channel_mask_t channelMask,
-                                const String16& opPackageName,
-                                size_t *pFrameCount,
-                                audio_input_flags_t *flags,
-                                pid_t pid,
-                                pid_t tid,
-                                int clientUid,
-                                audio_session_t *sessionId,
-                                size_t *notificationFrames,
-                                sp<IMemory>& cblk,
-                                sp<IMemory>& buffers,
-                                status_t *status /*non-NULL*/,
-                                audio_port_handle_t portId);
+    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
+                                                 CreateRecordOutput& output,
+                                                 status_t *status);
 
     virtual     uint32_t    sampleRate(audio_io_handle_t ioHandle) const;
     virtual     audio_format_t format(audio_io_handle_t output) const;
@@ -538,6 +512,13 @@
     };
 
     // --- PlaybackThread ---
+#ifdef FLOAT_EFFECT_CHAIN
+#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
+using effect_buffer_t = float;
+#else
+#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_16_BIT
+using effect_buffer_t = int16_t;
+#endif
 
 #include "Threads.h"
 
diff --git a/services/audioflinger/Configuration.h b/services/audioflinger/Configuration.h
index 845697a..bad46dc 100644
--- a/services/audioflinger/Configuration.h
+++ b/services/audioflinger/Configuration.h
@@ -41,4 +41,7 @@
 // uncomment to log CPU statistics every n wall clock seconds
 //#define DEBUG_CPU_USAGE 10
 
+// define FLOAT_EFFECT_CHAIN to request float effects (falls back to int16_t if unavailable)
+#define FLOAT_EFFECT_CHAIN
+
 #endif // ANDROID_AUDIOFLINGER_CONFIGURATION_H
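Illustrative note (not part of the patch): enabling FLOAT_EFFECT_CHAIN switches the effect buffers to float (see the effect_buffer_t/EFFECT_BUFFER_FORMAT selection added in AudioFlinger.h), and the mixing primitives used in Effects.cpp change accordingly. The accumulate_float()/accumulate_i16() helpers called there are expected to mix one buffer into another sample-wise, roughly as in this sketch (the real helpers live in audio_utils and may handle clamping differently):

    static inline void accumulate_float_sketch(float *out, const float *in, size_t count) {
        for (size_t i = 0; i < count; ++i) {
            out[i] += in[i];                      // float chain: no clamping at accumulate time
        }
    }

    static inline void accumulate_i16_sketch(int16_t *out, const int16_t *in, size_t count) {
        for (size_t i = 0; i < count; ++i) {
            const int32_t sum = int32_t(out[i]) + int32_t(in[i]);
            out[i] = int16_t(std::min(std::max(sum, -32768), 32767));   // clamp to 16-bit range
        }
    }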
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index bd5f146..e0d0d7b 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -19,6 +19,8 @@
 #define LOG_TAG "AudioFlinger"
 //#define LOG_NDEBUG 0
 
+#include <algorithm>
+
 #include "Configuration.h"
 #include <utils/Log.h>
 #include <system/audio_effects/effect_aec.h>
@@ -47,8 +49,6 @@
 #define ALOGVV(a...) do { } while(0)
 #endif
 
-#define min(a, b) ((a) < (b) ? (a) : (b))
-
 namespace android {
 
 // ----------------------------------------------------------------------------
@@ -73,6 +73,9 @@
       // mDisableWaitCnt is set by process() and updateState() and not used before then
       mSuspended(false),
       mAudioFlinger(thread->mAudioFlinger)
+#ifdef FLOAT_EFFECT_CHAIN
+      , mSupportsFloat(false)
+#endif
 {
     ALOGV("Constructor %p pinned %d", this, pinned);
     int lStatus;
@@ -285,31 +288,114 @@
         return;
     }
 
+    // TODO: Implement multichannel effects; here outChannelCount == FCC_2 == 2
+    const uint32_t inChannelCount =
+            audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
+    const uint32_t outChannelCount =
+            audio_channel_count_from_out_mask(mConfig.outputCfg.channels);
+    const bool auxType =
+            (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY;
+
     if (isProcessEnabled()) {
-        // do 32 bit to 16 bit conversion for auxiliary effect input buffer
-        if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
-            ditherAndClamp(mConfig.inputCfg.buffer.s32,
-                                        mConfig.inputCfg.buffer.s32,
-                                        mConfig.inputCfg.buffer.frameCount/2);
-        }
         int ret;
         if (isProcessImplemented()) {
-            // do the actual processing in the effect engine
+            if (auxType) {
+                // We overwrite the aux input buffer here and clear after processing.
+                // Note that aux input buffers are format q4_27.
+#ifdef FLOAT_EFFECT_CHAIN
+                if (mSupportsFloat) {
+                    // Do in-place float conversion for auxiliary effect input buffer.
+                    static_assert(sizeof(float) <= sizeof(int32_t),
+                            "in-place conversion requires sizeof(float) <= sizeof(int32_t)");
+
+                    const int32_t * const p32 = mConfig.inputCfg.buffer.s32;
+                    float * const pFloat = mConfig.inputCfg.buffer.f32;
+                    memcpy_to_float_from_q4_27(pFloat, p32, mConfig.inputCfg.buffer.frameCount);
+                } else {
+                    memcpy_to_i16_from_q4_27(mConfig.inputCfg.buffer.s16,
+                            mConfig.inputCfg.buffer.s32,
+                            mConfig.inputCfg.buffer.frameCount);
+                }
+#else
+                memcpy_to_i16_from_q4_27(mConfig.inputCfg.buffer.s16,
+                                            mConfig.inputCfg.buffer.s32,
+                                            mConfig.inputCfg.buffer.frameCount);
+#endif
+            }
+#ifdef FLOAT_EFFECT_CHAIN
+            if (mSupportsFloat) {
+                ret = mEffectInterface->process();
+            } else {
+                {   // convert input to int16_t as effect doesn't support float.
+                    if (!auxType) {
+                        if (mInBuffer16.get() == nullptr) {
+                            ALOGW("%s: mInBuffer16 is null, bypassing", __func__);
+                            goto data_bypass;
+                        }
+                        const float * const pIn = mInBuffer->audioBuffer()->f32;
+                        int16_t * const pIn16 = mInBuffer16->audioBuffer()->s16;
+                        memcpy_to_i16_from_float(
+                                pIn16, pIn, inChannelCount * mConfig.inputCfg.buffer.frameCount);
+                    }
+                    if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+                        if (mOutBuffer16.get() == nullptr) {
+                            ALOGW("%s: mOutBuffer16 is null, bypassing", __func__);
+                            goto data_bypass;
+                        }
+                        int16_t * const pOut16 = mOutBuffer16->audioBuffer()->s16;
+                        const float * const pOut = mOutBuffer->audioBuffer()->f32;
+                        memcpy_to_i16_from_float(
+                                pOut16,
+                                pOut,
+                                outChannelCount * mConfig.outputCfg.buffer.frameCount);
+                    }
+                }
+
+                ret = mEffectInterface->process();
+
+                {   // convert output back to float.
+                    const int16_t * const pOut16 = mOutBuffer16->audioBuffer()->s16;
+                    float * const pOut = mOutBuffer->audioBuffer()->f32;
+                    memcpy_to_float_from_i16(
+                            pOut, pOut16, outChannelCount * mConfig.outputCfg.buffer.frameCount);
+                }
+            }
+#else
             ret = mEffectInterface->process();
+#endif
         } else {
-            if (mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) {
-                size_t frameCnt = mConfig.inputCfg.buffer.frameCount * FCC_2;  //always stereo here
-                int16_t *in = mConfig.inputCfg.buffer.s16;
-                int16_t *out = mConfig.outputCfg.buffer.s16;
+#ifdef FLOAT_EFFECT_CHAIN
+            data_bypass:
+#endif
+            if (!auxType  /* aux effects do not require data bypass */
+                    && mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw
+                    && inChannelCount == outChannelCount) {
+                const size_t sampleCount = std::min(
+                        mConfig.inputCfg.buffer.frameCount,
+                        mConfig.outputCfg.buffer.frameCount) * outChannelCount;
+
+#ifdef FLOAT_EFFECT_CHAIN
+                const float * const in = mConfig.inputCfg.buffer.f32;
+                float * const out = mConfig.outputCfg.buffer.f32;
 
                 if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-                    for (size_t i = 0; i < frameCnt; i++) {
-                        out[i] = clamp16((int32_t)out[i] + (int32_t)in[i]);
-                    }
+                    accumulate_float(out, in, sampleCount);
                 } else {
-                    memcpy(mConfig.outputCfg.buffer.raw, mConfig.inputCfg.buffer.raw,
-                           frameCnt * sizeof(int16_t));
+                    memcpy(mConfig.outputCfg.buffer.f32, mConfig.inputCfg.buffer.f32,
+                            sampleCount * sizeof(*mConfig.outputCfg.buffer.f32));
                 }
+
+#else
+                const int16_t * const in = mConfig.inputCfg.buffer.s16;
+                int16_t * const out = mConfig.outputCfg.buffer.s16;
+
+                if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+                    accumulate_i16(out, in, sampleCount);
+                } else {
+                    memcpy(mConfig.outputCfg.buffer.s16, mConfig.inputCfg.buffer.s16,
+                            sampleCount * sizeof(*mConfig.outputCfg.buffer.s16));
+                }
+#endif
             }
             ret = -ENODATA;
         }
@@ -319,22 +405,33 @@
         }
 
         // clear auxiliary effect input buffer for next accumulation
-        if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
-            memset(mConfig.inputCfg.buffer.raw, 0,
-                   mConfig.inputCfg.buffer.frameCount*sizeof(int32_t));
+        if (auxType) {
+            // input always q4_27 regardless of FLOAT_EFFECT_CHAIN.
+            const size_t size =
+                    mConfig.inputCfg.buffer.frameCount * inChannelCount * sizeof(int32_t);
+            memset(mConfig.inputCfg.buffer.raw, 0, size);
         }
     } else if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_INSERT &&
+                // mInBuffer->audioBuffer()->raw != mOutBuffer->audioBuffer()->raw
                 mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) {
         // If an insert effect is idle and input buffer is different from output buffer,
         // accumulate input onto output
         sp<EffectChain> chain = mChain.promote();
-        if (chain != 0 && chain->activeTrackCnt() != 0) {
-            size_t frameCnt = mConfig.inputCfg.buffer.frameCount * FCC_2;  //always stereo here
-            int16_t *in = mConfig.inputCfg.buffer.s16;
-            int16_t *out = mConfig.outputCfg.buffer.s16;
-            for (size_t i = 0; i < frameCnt; i++) {
-                out[i] = clamp16((int32_t)out[i] + (int32_t)in[i]);
-            }
+        if (chain != 0
+                && chain->activeTrackCnt() != 0
+                && inChannelCount == outChannelCount) {
+            const size_t sampleCount = std::min(
+                    mConfig.inputCfg.buffer.frameCount,
+                    mConfig.outputCfg.buffer.frameCount) * outChannelCount;
+#ifdef FLOAT_EFFECT_CHAIN
+            const float * const in = mConfig.inputCfg.buffer.f32;
+            float * const out = mConfig.outputCfg.buffer.f32;
+            accumulate_float(out, in, sampleCount);
+#else
+            const int16_t * const in = mConfig.inputCfg.buffer.s16;
+            int16_t * const out = mConfig.outputCfg.buffer.s16;
+            accumulate_i16(out, in, sampleCount);
+#endif
         }
     }
 }
@@ -349,6 +446,7 @@
 
 status_t AudioFlinger::EffectModule::configure()
 {
+    ALOGVV("configure() started");
     status_t status;
     sp<ThreadBase> thread;
     uint32_t size;
@@ -384,8 +482,8 @@
         }
     }
 
-    mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
-    mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+    mConfig.inputCfg.format = EFFECT_BUFFER_FORMAT;
+    mConfig.outputCfg.format = EFFECT_BUFFER_FORMAT;
     mConfig.inputCfg.samplingRate = thread->sampleRate();
     mConfig.outputCfg.samplingRate = mConfig.inputCfg.samplingRate;
     mConfig.inputCfg.bufferProvider.cookie = NULL;
@@ -413,12 +511,6 @@
     mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
     mConfig.inputCfg.buffer.frameCount = thread->frameCount();
     mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
-    if (mInBuffer != 0) {
-        mInBuffer->setFrameCount(mConfig.inputCfg.buffer.frameCount);
-    }
-    if (mOutBuffer != 0) {
-        mOutBuffer->setFrameCount(mConfig.outputCfg.buffer.frameCount);
-    }
 
     ALOGV("configure() %p thread %p buffer %p framecount %zu",
             this, thread.get(), mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
@@ -430,32 +522,60 @@
                                        &mConfig,
                                        &size,
                                        &cmdStatus);
-    if (status == 0) {
+    if (status == NO_ERROR) {
         status = cmdStatus;
+#ifdef FLOAT_EFFECT_CHAIN
+        mSupportsFloat = true;
+#endif
     }
-
-    if (status == 0 &&
-            (memcmp(&mDescriptor.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0)) {
-        uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
-        effect_param_t *p = (effect_param_t *)buf32;
-
-        p->psize = sizeof(uint32_t);
-        p->vsize = sizeof(uint32_t);
-        size = sizeof(int);
-        *(int32_t *)p->data = VISUALIZER_PARAM_LATENCY;
-
-        uint32_t latency = 0;
-        PlaybackThread *pbt = thread->mAudioFlinger->checkPlaybackThread_l(thread->mId);
-        if (pbt != NULL) {
-            latency = pbt->latency_l();
+#ifdef FLOAT_EFFECT_CHAIN
+    else {
+        ALOGV("EFFECT_CMD_SET_CONFIG failed with float format, retry with int16_t.");
+        mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+        mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+        status = mEffectInterface->command(EFFECT_CMD_SET_CONFIG,
+                                           sizeof(effect_config_t),
+                                           &mConfig,
+                                           &size,
+                                           &cmdStatus);
+        if (status == NO_ERROR) {
+            status = cmdStatus;
+            mSupportsFloat = false;
+            ALOGVV("config worked with 16 bit");
+        } else {
+            ALOGE("%s failed %d with int16_t (as well as float)", __func__, status);
         }
+    }
+#endif
 
-        *((int32_t *)p->data + 1)= latency;
-        mEffectInterface->command(EFFECT_CMD_SET_PARAM,
-                                  sizeof(effect_param_t) + 8,
-                                  &buf32,
-                                  &size,
-                                  &cmdStatus);
+    if (status == NO_ERROR) {
+        // Establish Buffer strategy
+        setInBuffer(mInBuffer);
+        setOutBuffer(mOutBuffer);
+
+        // Update visualizer latency
+        if (memcmp(&mDescriptor.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) {
+            uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
+            effect_param_t *p = (effect_param_t *)buf32;
+
+            p->psize = sizeof(uint32_t);
+            p->vsize = sizeof(uint32_t);
+            size = sizeof(int);
+            *(int32_t *)p->data = VISUALIZER_PARAM_LATENCY;
+
+            uint32_t latency = 0;
+            PlaybackThread *pbt = thread->mAudioFlinger->checkPlaybackThread_l(thread->mId);
+            if (pbt != NULL) {
+                latency = pbt->latency_l();
+            }
+
+            *((int32_t *)p->data + 1)= latency;
+            mEffectInterface->command(EFFECT_CMD_SET_PARAM,
+                    sizeof(effect_param_t) + 8,
+                    &buf32,
+                    &size,
+                    &cmdStatus);
+        }
     }
 
     mMaxDisableWaitCnt = (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate) /
@@ -463,6 +583,7 @@
 
 exit:
     mStatus = status;
+    ALOGVV("configure ended");
     return status;
 }
 
@@ -774,6 +895,7 @@
 }
 
 void AudioFlinger::EffectModule::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    ALOGVV("setInBuffer %p",(&buffer));
     if (buffer != 0) {
         mConfig.inputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.inputCfg.buffer.frameCount);
@@ -782,9 +904,40 @@
     }
     mInBuffer = buffer;
     mEffectInterface->setInBuffer(buffer);
+
+#ifdef FLOAT_EFFECT_CHAIN
+    // aux effects do in place conversion to float - we don't allocate mInBuffer16 for them.
+    // Theoretically insert effects can also do in-place conversions (destroying
+    // the original buffer) when the output buffer is identical to the input buffer,
+    // but we don't optimize for it here.
+    const bool auxType = (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY;
+    if (!auxType && !mSupportsFloat && mInBuffer.get() != nullptr) {
+        // we need to translate - create hidl shared buffer and intercept
+        const size_t inFrameCount = mConfig.inputCfg.buffer.frameCount;
+        const int inChannels = audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
+        const size_t size = inChannels * inFrameCount * sizeof(int16_t);
+
+        ALOGV("%s: setInBuffer updating for inChannels:%d inFrameCount:%zu total size:%zu",
+                __func__, inChannels, inFrameCount, size);
+
+        if (size > 0 && (mInBuffer16.get() == nullptr || size > mInBuffer16->getSize())) {
+            mInBuffer16.clear();
+            ALOGV("%s: allocating mInBuffer16 %zu", __func__, size);
+            (void)EffectBufferHalInterface::allocate(size, &mInBuffer16);
+        }
+        if (mInBuffer16.get() != nullptr) {
+            // FIXME: confirm buffer has enough size.
+            mInBuffer16->setFrameCount(inFrameCount);
+            mEffectInterface->setInBuffer(mInBuffer16);
+        } else if (size > 0) {
+            ALOGE("%s cannot create mInBuffer16", __func__);
+        }
+    }
+#endif
 }
 
 void AudioFlinger::EffectModule::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    ALOGVV("setOutBuffer %p",(&buffer));
     if (buffer != 0) {
         mConfig.outputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.outputCfg.buffer.frameCount);
@@ -793,6 +946,31 @@
     }
     mOutBuffer = buffer;
     mEffectInterface->setOutBuffer(buffer);
+
+#ifdef FLOAT_EFFECT_CHAIN
+    // Note: Any effect that does not accumulate does not need mOutBuffer16 and
+    // can do in-place conversion from int16_t to float.  We don't optimize here.
+    if (!mSupportsFloat && mOutBuffer.get() != nullptr) {
+        const size_t outFrameCount = mConfig.outputCfg.buffer.frameCount;
+        const int outChannels = audio_channel_count_from_out_mask(mConfig.outputCfg.channels);
+        const size_t size = outChannels * outFrameCount * sizeof(int16_t);
+
+        ALOGV("%s: setOutBuffer updating for outChannels:%d outFrameCount:%zu total size:%zu",
+                __func__, outChannels, outFrameCount, size);
+
+        if (size > 0 && (mOutBuffer16.get() == nullptr || size > mOutBuffer16->getSize())) {
+            mOutBuffer16.clear();
+            ALOGV("%s: allocating mOutBuffer16 %zu", __func__, size);
+            (void)EffectBufferHalInterface::allocate(size, &mOutBuffer16);
+        }
+        if (mOutBuffer16.get() != nullptr) {
+            mOutBuffer16->setFrameCount(outFrameCount);
+            mEffectInterface->setOutBuffer(mOutBuffer16);
+        } else if (size > 0) {
+            ALOGE("%s cannot create mOutBuffer16", __func__);
+        }
+    }
+#endif
 }
 
 status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
@@ -1126,6 +1304,22 @@
             formatToString((audio_format_t)mConfig.outputCfg.format).c_str());
     result.append(buffer);
 
+#ifdef FLOAT_EFFECT_CHAIN
+    if (!mSupportsFloat) {
+        int16_t* pIn16 = mInBuffer16 != 0 ? mInBuffer16->audioBuffer()->s16 : NULL;
+        int16_t* pOut16 = mOutBuffer16 != 0 ? mOutBuffer16->audioBuffer()->s16 : NULL;
+
+        result.append("\t\t- Float and int16 buffers\n");
+        result.append("\t\t\tIn_float   In_int16   Out_float  Out_int16\n");
+        snprintf(buffer, SIZE,"\t\t\t%p %p %p %p\n",
+                mConfig.inputCfg.buffer.raw,
+                pIn16,
+                pOut16,
+                mConfig.outputCfg.buffer.raw);
+        result.append(buffer);
+    }
+#endif
+
     snprintf(buffer, SIZE, "\t\t%zu Clients:\n", mHandles.size());
     result.append(buffer);
     result.append("\t\t\t  Pid Priority Ctrl Locked client server\n");
@@ -1602,8 +1796,11 @@
     // and sample format changes for effects.
     // Currently effects processing is only available for stereo, AUDIO_FORMAT_PCM_16_BIT
     // (4 bytes frame size)
+
     const size_t frameSize =
-            audio_bytes_per_sample(AUDIO_FORMAT_PCM_16_BIT) * min(FCC_2, thread->channelCount());
+            audio_bytes_per_sample(EFFECT_BUFFER_FORMAT)
+            * std::min((uint32_t)FCC_2, thread->channelCount());
+
     memset(mInBuffer->audioBuffer()->raw, 0, thread->frameCount() * frameSize);
     mInBuffer->commit();
 }
@@ -1718,8 +1915,13 @@
         // calling the process in effect engine
         size_t numSamples = thread->frameCount();
         sp<EffectBufferHalInterface> halBuffer;
+#ifdef FLOAT_EFFECT_CHAIN
+        status_t result = EffectBufferHalInterface::allocate(
+                numSamples * sizeof(float), &halBuffer);
+#else
         status_t result = EffectBufferHalInterface::allocate(
                 numSamples * sizeof(int32_t), &halBuffer);
+#endif
         if (result != OK) return result;
         effect->setInBuffer(halBuffer);
         // auxiliary effects output samples to chain input buffer for further processing
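Illustrative note (not part of the patch): for effects that reject the float configuration, process() above falls back to a convert-process-convert path around the existing int16_t engine, built from the audio_utils primitives the patch already calls. Condensed into a hedged sketch (the wrapper name and argument list are assumptions):

    // in16/out16 stand for the mInBuffer16/mOutBuffer16 staging buffers allocated in
    // setInBuffer()/setOutBuffer(); inFloat/outFloat are the float chain buffers.
    void processWithInt16Fallback(const sp<EffectHalInterface>& effect,
                                  const float *inFloat, int16_t *in16,
                                  int16_t *out16, float *outFloat,
                                  size_t inSamples, size_t outSamples) {
        memcpy_to_i16_from_float(in16, inFloat, inSamples);     // narrow input for the engine
        effect->process();                                      // engine runs on int16 buffers
        memcpy_to_float_from_i16(outFloat, out16, outSamples);  // widen result back to float
    }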
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index e29798b..1864e0f 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -168,6 +168,12 @@
     bool     mSuspended;            // effect is suspended: temporarily disabled by framework
     bool     mOffloaded;            // effect is currently offloaded to the audio DSP
     wp<AudioFlinger>    mAudioFlinger;
+
+#ifdef FLOAT_EFFECT_CHAIN
+    bool    mSupportsFloat;         // effect supports float processing
+    sp<EffectBufferHalInterface> mInBuffer16;  // Buffers for interacting with HAL at 16 bits
+    sp<EffectBufferHalInterface> mOutBuffer16;
+#endif
 };
 
 // The EffectHandle class implements the IEffect interface. It provides resources
@@ -308,14 +314,14 @@
     void setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
         mInBuffer = buffer;
     }
-    int16_t *inBuffer() const {
-        return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
+    effect_buffer_t *inBuffer() const {
+        return mInBuffer != 0 ? reinterpret_cast<effect_buffer_t*>(mInBuffer->ptr()) : NULL;
     }
     void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
         mOutBuffer = buffer;
     }
-    int16_t *outBuffer() const {
-        return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
+    effect_buffer_t *outBuffer() const {
+        return mOutBuffer != 0 ? reinterpret_cast<effect_buffer_t*>(mOutBuffer->ptr()) : NULL;
     }
 
     void incTrackCnt() { android_atomic_inc(&mTrackCnt); }
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 946d88f..e97bb06 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -68,8 +68,8 @@
             status_t    attachAuxEffect(int EffectId);
             void        setAuxBuffer(int EffectId, int32_t *buffer);
             int32_t     *auxBuffer() const { return mAuxBuffer; }
-            void        setMainBuffer(int16_t *buffer) { mMainBuffer = buffer; }
-            int16_t     *mainBuffer() const { return mMainBuffer; }
+            void        setMainBuffer(effect_buffer_t *buffer) { mMainBuffer = buffer; }
+            effect_buffer_t *mainBuffer() const { return mMainBuffer; }
             int         auxEffectId() const { return mAuxEffectId; }
     virtual status_t    getTimestamp(AudioTimestamp& timestamp);
             void        signal();
@@ -150,7 +150,8 @@
                                     // allocated statically at track creation time,
                                     // and is even allocated (though unused) for fast tracks
                                     // FIXME don't allocate track name for fast tracks
-    int16_t             *mMainBuffer;
+    effect_buffer_t     *mMainBuffer;
+
     int32_t             *mAuxBuffer;
     int                 mAuxEffectId;
     bool                mHasVolumeController;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 8c7c830..7636df6 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1837,10 +1837,13 @@
 sp<AudioFlinger::PlaybackThread::Track> AudioFlinger::PlaybackThread::createTrack_l(
         const sp<AudioFlinger::Client>& client,
         audio_stream_type_t streamType,
-        uint32_t sampleRate,
+        uint32_t *pSampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
         size_t *pFrameCount,
+        size_t *pNotificationFrameCount,
+        uint32_t notificationsPerBuffer,
+        float speed,
         const sp<IMemory>& sharedBuffer,
         audio_session_t sessionId,
         audio_output_flags_t *flags,
@@ -1850,9 +1853,16 @@
         audio_port_handle_t portId)
 {
     size_t frameCount = *pFrameCount;
+    size_t notificationFrameCount = *pNotificationFrameCount;
     sp<Track> track;
     status_t lStatus;
     audio_output_flags_t outputFlags = mOutput->flags;
+    audio_output_flags_t requestedFlags = *flags;
+
+    if (*pSampleRate == 0) {
+        *pSampleRate = mSampleRate;
+    }
+    uint32_t sampleRate = *pSampleRate;
 
     // special case for FAST flag considered OK if fast mixer is present
     if (hasFastMixer()) {
@@ -1929,36 +1939,114 @@
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_FAST);
       }
     }
-    // For normal PCM streaming tracks, update minimum frame count.
-    // For compatibility with AudioTrack calculation, buffer depth is forced
-    // to be at least 2 x the normal mixer frame count and cover audio hardware latency.
-    // This is probably too conservative, but legacy application code may depend on it.
-    // If you change this calculation, also review the start threshold which is related.
-    if (!(*flags & AUDIO_OUTPUT_FLAG_FAST)
-            && audio_has_proportional_frames(format) && sharedBuffer == 0) {
-        // this must match AudioTrack.cpp calculateMinFrameCount().
-        // TODO: Move to a common library
-        uint32_t latencyMs = 0;
-        lStatus = mOutput->stream->getLatency(&latencyMs);
-        if (lStatus != OK) {
-            ALOGE("Error when retrieving output stream latency: %d", lStatus);
+
+    if (!audio_has_proportional_frames(format)) {
+        if (sharedBuffer != 0) {
+            // Same comment as below about ignoring frameCount parameter for set()
+            frameCount = sharedBuffer->size();
+        } else if (frameCount == 0) {
+            frameCount = mNormalFrameCount;
+        }
+        if (notificationFrameCount != frameCount) {
+            notificationFrameCount = frameCount;
+        }
+    } else if (sharedBuffer != 0) {
+        // FIXME: Ensure client side memory buffers need
+        // not have additional alignment beyond sample
+        // (e.g. 16 bit stereo accessed as 32 bit frame).
+        size_t alignment = audio_bytes_per_sample(format);
+        if (alignment & 1) {
+            // for AUDIO_FORMAT_PCM_24_BIT_PACKED (not exposed through Java).
+            alignment = 1;
+        }
+        uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
+        size_t frameSize = channelCount * audio_bytes_per_sample(format);
+        if (channelCount > 1) {
+            // More than 2 channels does not require stronger alignment than stereo
+            alignment <<= 1;
+        }
+        if (((uintptr_t)sharedBuffer->pointer() & (alignment - 1)) != 0) {
+            ALOGE("Invalid buffer alignment: address %p, channel count %u",
+                  sharedBuffer->pointer(), channelCount);
+            lStatus = BAD_VALUE;
             goto Exit;
         }
-        uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate);
-        if (minBufCount < 2) {
-            minBufCount = 2;
+
+        // When initializing a shared buffer AudioTrack via constructors,
+        // there's no frameCount parameter.
+        // But when initializing a shared buffer AudioTrack via set(),
+        // there _is_ a frameCount parameter.  We silently ignore it.
+        frameCount = sharedBuffer->size() / frameSize;
+    } else {
+        size_t minFrameCount = 0;
+        // For fast tracks we try to respect the application's request for notifications per buffer.
+        if (*flags & AUDIO_OUTPUT_FLAG_FAST) {
+            if (notificationsPerBuffer > 0) {
+                // Avoid possible arithmetic overflow during multiplication.
+                if (notificationsPerBuffer > SIZE_MAX / mFrameCount) {
+                    ALOGE("Requested notificationsPerBuffer=%u ignored for HAL frameCount=%zu",
+                          notificationsPerBuffer, mFrameCount);
+                } else {
+                    minFrameCount = mFrameCount * notificationsPerBuffer;
+                }
+            }
+        } else {
+            // For normal PCM streaming tracks, update minimum frame count.
+            // Buffer depth is forced to be at least 2 x the normal mixer frame count and
+            // cover audio hardware latency.
+            // This is probably too conservative, but legacy application code may depend on it.
+            // If you change this calculation, also review the start threshold which is related.
+            uint32_t latencyMs = latency_l();
+            if (latencyMs == 0) {
+                ALOGE("Error when retrieving output stream latency");
+                lStatus = UNKNOWN_ERROR;
+                goto Exit;
+            }
+
+            minFrameCount = AudioSystem::calculateMinFrameCount(latencyMs, mNormalFrameCount,
+                                mSampleRate, sampleRate, speed /*, 0 mNotificationsPerBufferReq*/);
+
         }
-        // For normal mixing tracks, if speed is > 1.0f (normal), AudioTrack
-        // or the client should compute and pass in a larger buffer request.
-        size_t minFrameCount =
-                minBufCount * sourceFramesNeededWithTimestretch(
-                        sampleRate, mNormalFrameCount,
-                        mSampleRate, AUDIO_TIMESTRETCH_SPEED_NORMAL /*speed*/);
-        if (frameCount < minFrameCount) { // including frameCount == 0
+        if (frameCount < minFrameCount) {
             frameCount = minFrameCount;
         }
     }
+
+    // Make sure that the application is notified with sufficient margin before underrun.
+    // The client can divide the AudioTrack buffer into sub-buffers,
+    // and expresses its preference to the server as the notification frame count.
+    if (sharedBuffer == 0 && audio_is_linear_pcm(format)) {
+        size_t maxNotificationFrames;
+        if (*flags & AUDIO_OUTPUT_FLAG_FAST) {
+            // notify every HAL buffer, regardless of the size of the track buffer
+            maxNotificationFrames = mFrameCount;
+        } else {
+            // For normal tracks, use at least double-buffering if no sample rate conversion,
+            // or at least triple-buffering if there is sample rate conversion
+            const int nBuffering = sampleRate == mSampleRate ? 2 : 3;
+            maxNotificationFrames = frameCount / nBuffering;
+            // If client requested a fast track but this was denied, then use the smaller maximum.
+            if (requestedFlags & AUDIO_OUTPUT_FLAG_FAST) {
+                size_t maxNotificationFramesFastDenied = FMS_20 * sampleRate / 1000;
+                if (maxNotificationFrames > maxNotificationFramesFastDenied) {
+                    maxNotificationFrames = maxNotificationFramesFastDenied;
+                }
+            }
+        }
+        if (notificationFrameCount == 0 || notificationFrameCount > maxNotificationFrames) {
+            if (notificationFrameCount == 0) {
+                ALOGD("Client defaulted notificationFrames to %zu for frameCount %zu",
+                    maxNotificationFrames, frameCount);
+            } else {
+                ALOGW("Client adjusted notificationFrames from %zu to %zu for frameCount %zu",
+                      notificationFrameCount, maxNotificationFrames, frameCount);
+            }
+            notificationFrameCount = maxNotificationFrames;
+        }
+    }
+
     *pFrameCount = frameCount;
+    *pNotificationFrameCount = notificationFrameCount;
 
     switch (mType) {
 
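The hunk above also introduces the server-side clamp on the client's notification period. The fragment below is a minimal standalone model of that clamp, not AudioFlinger code: halFrameCount stands in for mFrameCount, and the 20 ms cap assumes FMS_20 is a 20-millisecond constant.

// Standalone sketch of the notification clamp above; all names are local stand-ins.
#include <cstddef>
#include <cstdint>
#include <cstdio>

static size_t clampNotificationFrames(size_t requested, size_t frameCount,
                                      uint32_t trackRate, uint32_t mixerRate,
                                      size_t halFrameCount,
                                      bool fastGranted, bool fastRequested) {
    size_t maxFrames;
    if (fastGranted) {
        // fast track: one notification per HAL buffer
        maxFrames = halFrameCount;
    } else {
        // double-buffer without sample rate conversion, triple-buffer with it
        const int nBuffering = (trackRate == mixerRate) ? 2 : 3;
        maxFrames = frameCount / nBuffering;
        if (fastRequested) {
            // fast was requested but denied: cap the period at roughly 20 ms
            const size_t fastDeniedCap = 20 * trackRate / 1000;
            if (maxFrames > fastDeniedCap) {
                maxFrames = fastDeniedCap;
            }
        }
    }
    return (requested == 0 || requested > maxFrames) ? maxFrames : requested;
}

int main() {
    // 48 kHz track on a 48 kHz mixer, 3840-frame track buffer, 192-frame HAL buffer;
    // the client asked for FAST, was denied, and gave no explicit notification size.
    printf("%zu\n", clampNotificationFrames(0, 3840, 48000, 48000, 192,
                                            false /*fastGranted*/, true /*fastRequested*/));
    return 0;   // prints 960, i.e. 20 ms at 48 kHz
}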
@@ -2449,7 +2537,7 @@
     free(mEffectBuffer);
     mEffectBuffer = NULL;
     if (mEffectBufferEnabled) {
-        mEffectBufferFormat = AUDIO_FORMAT_PCM_16_BIT; // Note: Effects support 16b only
+        mEffectBufferFormat = EFFECT_BUFFER_FORMAT;
         mEffectBufferSize = mNormalFrameCount * mChannelCount
                 * audio_bytes_per_sample(mEffectBufferFormat);
         (void)posix_memalign(&mEffectBuffer, 32, mEffectBufferSize);
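EFFECT_BUFFER_FORMAT and effect_buffer_t are referenced here but not defined in this diff; presumably they are introduced alongside it (e.g. in AudioFlinger.h). Below is a hedged sketch of the shape such definitions would take, keyed off the same FLOAT_EFFECT_CHAIN macro used in a later hunk of this file; this is an assumption, not the patch's actual code.

// Assumed shape of the compile-time switch these hunks rely on (not part of this diff).
#include <cstdint>
#include <system/audio.h>   // audio_format_t, AUDIO_FORMAT_PCM_*

#ifdef FLOAT_EFFECT_CHAIN
using effect_buffer_t = float;
static constexpr audio_format_t EFFECT_BUFFER_FORMAT = AUDIO_FORMAT_PCM_FLOAT;
#else
using effect_buffer_t = int16_t;
static constexpr audio_format_t EFFECT_BUFFER_FORMAT = AUDIO_FORMAT_PCM_16_BIT;
#endif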
@@ -2796,8 +2884,7 @@
             &halInBuffer);
     if (result != OK) return result;
     halOutBuffer = halInBuffer;
-    int16_t *buffer = reinterpret_cast<int16_t*>(halInBuffer->externalData());
-
+    effect_buffer_t *buffer = reinterpret_cast<effect_buffer_t*>(halInBuffer->externalData());
     ALOGV("addEffectChain_l() %p on thread %p for session %d", chain.get(), this, session);
     if (session > AUDIO_SESSION_OUTPUT_MIX) {
         // Only one effect chain can be present in direct output thread and it uses
@@ -2805,10 +2892,14 @@
         if (mType != DIRECT) {
             size_t numSamples = mNormalFrameCount * mChannelCount;
             status_t result = EffectBufferHalInterface::allocate(
-                    numSamples * sizeof(int16_t),
+                    numSamples * sizeof(effect_buffer_t),
                     &halInBuffer);
             if (result != OK) return result;
+#ifdef FLOAT_EFFECT_CHAIN
+            buffer = halInBuffer->audioBuffer()->f32;
+#else
             buffer = halInBuffer->audioBuffer()->s16;
+#endif
             ALOGV("addEffectChain_l() creating new input buffer %p session %d",
                     buffer, session);
         }
@@ -2883,7 +2974,7 @@
             for (size_t i = 0; i < mTracks.size(); ++i) {
                 sp<Track> track = mTracks[i];
                 if (session == track->sessionId()) {
-                    track->setMainBuffer(reinterpret_cast<int16_t*>(mSinkBuffer));
+                    track->setMainBuffer(reinterpret_cast<effect_buffer_t*>(mSinkBuffer));
                     chain->decTrackCnt();
                 }
             }
@@ -4466,7 +4557,7 @@
                 mAudioMixer->setParameter(
                         name,
                         AudioMixer::TRACK,
-                        AudioMixer::MIXER_FORMAT, (void *)AUDIO_FORMAT_PCM_16_BIT);
+                        AudioMixer::MIXER_FORMAT, (void *)EFFECT_BUFFER_FORMAT);
                 mAudioMixer->setParameter(
                         name,
                         AudioMixer::TRACK,
@@ -6617,12 +6708,12 @@
 // RecordThread::createRecordTrack_l() must be called with AudioFlinger::mLock held
 sp<AudioFlinger::RecordThread::RecordTrack> AudioFlinger::RecordThread::createRecordTrack_l(
         const sp<AudioFlinger::Client>& client,
-        uint32_t sampleRate,
+        uint32_t *pSampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
         size_t *pFrameCount,
         audio_session_t sessionId,
-        size_t *notificationFrames,
+        size_t *pNotificationFrameCount,
         uid_t uid,
         audio_input_flags_t *flags,
         pid_t tid,
@@ -6630,16 +6721,30 @@
         audio_port_handle_t portId)
 {
     size_t frameCount = *pFrameCount;
+    size_t notificationFrameCount = *pNotificationFrameCount;
     sp<RecordTrack> track;
     status_t lStatus;
     audio_input_flags_t inputFlags = mInput->flags;
+    audio_input_flags_t requestedFlags = *flags;
+    uint32_t sampleRate;
+
+    lStatus = initCheck();
+    if (lStatus != NO_ERROR) {
+        ALOGE("createRecordTrack_l() audio driver not initialized");
+        goto Exit;
+    }
+
+    if (*pSampleRate == 0) {
+        *pSampleRate = mSampleRate;
+    }
+    sampleRate = *pSampleRate;
 
     // special case for FAST flag considered OK if fast capture is present
     if (hasFastCapture()) {
         inputFlags = (audio_input_flags_t)(inputFlags | AUDIO_INPUT_FLAG_FAST);
     }
 
-    // Check if requested flags are compatible with output stream flags
+    // Check if requested flags are compatible with input stream flags
     if ((*flags & inputFlags) != *flags) {
         ALOGW("createRecordTrack_l(): mismatch between requested flags (%08x) and"
                 " input flags (%08x)",
@@ -6694,12 +6799,20 @@
       }
     }
 
+    // If FAST or RAW flags were corrected, ask caller to request new input from audio policy
+    if ((*flags & AUDIO_INPUT_FLAG_FAST) !=
+            (requestedFlags & AUDIO_INPUT_FLAG_FAST)) {
+        *flags = (audio_input_flags_t) (*flags & ~(AUDIO_INPUT_FLAG_FAST | AUDIO_INPUT_FLAG_RAW));
+        lStatus = BAD_TYPE;
+        goto Exit;
+    }
+
     // compute track buffer size in frames, and suggest the notification frame count
     if (*flags & AUDIO_INPUT_FLAG_FAST) {
         // fast track: frame count is exactly the pipe depth
         frameCount = mPipeFramesP2;
         // ignore requested notificationFrames, and always notify exactly once every HAL buffer
-        *notificationFrames = mFrameCount;
+        notificationFrameCount = mFrameCount;
     } else {
         // not fast track: max notification period is resampled equivalent of one HAL buffer time
         //                 or 20 ms if there is a fast capture
@@ -6718,17 +6831,12 @@
         const size_t minFrameCount = maxNotificationFrames *
                 max(kMinNotifications, minNotificationsByMs);
         frameCount = max(frameCount, minFrameCount);
-        if (*notificationFrames == 0 || *notificationFrames > maxNotificationFrames) {
-            *notificationFrames = maxNotificationFrames;
+        if (notificationFrameCount == 0 || notificationFrameCount > maxNotificationFrames) {
+            notificationFrameCount = maxNotificationFrames;
         }
     }
     *pFrameCount = frameCount;
-
-    lStatus = initCheck();
-    if (lStatus != NO_ERROR) {
-        ALOGE("createRecordTrack_l() audio driver not initialized");
-        goto Exit;
-    }
+    *pNotificationFrameCount = notificationFrameCount;
 
     { // scope for mLock
         Mutex::Autolock _l(mLock);
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index b685e1b..17f26c5 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -706,10 +706,13 @@
                 sp<Track>   createTrack_l(
                                 const sp<AudioFlinger::Client>& client,
                                 audio_stream_type_t streamType,
-                                uint32_t sampleRate,
+                                uint32_t *sampleRate,
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
                                 size_t *pFrameCount,
+                                size_t *pNotificationFrameCount,
+                                uint32_t notificationsPerBuffer,
+                                float speed,
                                 const sp<IMemory>& sharedBuffer,
                                 audio_session_t sessionId,
                                 audio_output_flags_t *flags,
@@ -738,11 +741,10 @@
     virtual     String8     getParameters(const String8& keys);
     virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0);
                 status_t    getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames);
-                // FIXME rename mixBuffer() to sinkBuffer() and remove int16_t* dependency.
                 // Consider also removing and passing an explicit mMainBuffer initialization
                 // parameter to AF::PlaybackThread::Track::Track().
-                int16_t     *mixBuffer() const {
-                    return reinterpret_cast<int16_t *>(mSinkBuffer); };
+                effect_buffer_t *sinkBuffer() const {
+                    return reinterpret_cast<effect_buffer_t *>(mSinkBuffer); };
 
     virtual     void detachAuxEffect_l(int effectId);
                 status_t attachAuxEffect(const sp<AudioFlinger::PlaybackThread::Track>& track,
@@ -1325,12 +1327,12 @@
 
             sp<AudioFlinger::RecordThread::RecordTrack>  createRecordTrack_l(
                     const sp<AudioFlinger::Client>& client,
-                    uint32_t sampleRate,
+                    uint32_t *pSampleRate,
                     audio_format_t format,
                     audio_channel_mask_t channelMask,
                     size_t *pFrameCount,
                     audio_session_t sessionId,
-                    size_t *notificationFrames,
+                    size_t *pNotificationFrameCount,
                     uid_t uid,
                     audio_input_flags_t *flags,
                     pid_t tid,
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index d4ce0b4..a3ea756 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -192,7 +192,7 @@
                                     // where for AudioTrack (but not AudioRecord),
                                     // 8-bit PCM samples are stored as 16-bit
     const size_t        mFrameCount;// size of track buffer given at createTrack() or
-                                    // openRecord(), and then adjusted as needed
+                                    // createRecord(), and then adjusted as needed
 
     const audio_session_t mSessionId;
     uid_t               mUid;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 50c0e23..1445572 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -395,7 +395,7 @@
     mSharedBuffer(sharedBuffer),
     mStreamType(streamType),
     mName(-1),  // see note below
-    mMainBuffer(thread->mixBuffer()),
+    mMainBuffer(thread->sinkBuffer()),
     mAuxBuffer(NULL),
     mAuxEffectId(0), mHasVolumeController(false),
     mPresentationCompleteFrames(0),
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index b169bac..d9cd121 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -34,8 +34,8 @@
 class AudioInputDescriptor: public AudioPortConfig, public AudioSessionInfoProvider
 {
 public:
-    explicit AudioInputDescriptor(const sp<IOProfile>& profile);
-    void setIoHandle(audio_io_handle_t ioHandle);
+    explicit AudioInputDescriptor(const sp<IOProfile>& profile,
+                                  AudioPolicyClientInterface *clientInterface);
     audio_port_handle_t getId() const;
     audio_module_handle_t getModuleHandle() const;
     uint32_t getOpenRefCount() const;
@@ -73,6 +73,14 @@
 
     void setPatchHandle(audio_patch_handle_t handle);
 
+    status_t open(const audio_config_t *config,
+                  audio_devices_t device,
+                  const String8& address,
+                  audio_source_t source,
+                  audio_input_flags_t flags,
+                  audio_io_handle_t *input);
+    void close();
+
 private:
     audio_patch_handle_t          mPatchHandle;
     audio_port_handle_t           mId;
@@ -85,6 +93,7 @@
     // a particular input started and prevent preemption of this active input by this session.
     // We also inherit sessions from the preempted input to avoid a 3 way preemption loop etc...
     SortedVector<audio_session_t> mPreemptedSessions;
+    AudioPolicyClientInterface *mClientInterface;
 };
 
 class AudioInputCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index c09cb5a..0be8fc1 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -101,8 +101,6 @@
 
     status_t    dump(int fd);
 
-    void setIoHandle(audio_io_handle_t ioHandle);
-
     virtual audio_devices_t device() const;
     virtual bool sharesHwModuleWith(const sp<AudioOutputDescriptor>& outputDesc);
     virtual audio_devices_t supportedDevices();
@@ -122,6 +120,14 @@
                            const struct audio_port_config *srcConfig = NULL) const;
     virtual void toAudioPort(struct audio_port *port) const;
 
+            status_t open(const audio_config_t *config,
+                          audio_devices_t device,
+                          const String8& address,
+                          audio_stream_type_t stream,
+                          audio_output_flags_t flags,
+                          audio_io_handle_t *output);
+            void close();
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index ec04ef7..118f0d2 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -34,7 +34,11 @@
 {
 public:
     IOProfile(const String8 &name, audio_port_role_t role)
-        : AudioPort(name, AUDIO_PORT_TYPE_MIX, role) {}
+        : AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
+          maxOpenCount((role == AUDIO_PORT_ROLE_SOURCE) ? 1 : 0),
+          curOpenCount(0),
+          maxActiveCount(1),
+          curActiveCount(0) {}
 
     // For a Profile aka MixPort, tag name and name are equivalent.
     virtual const String8 getTagName() const { return getName(); }
@@ -103,6 +107,34 @@
 
     const DeviceVector &getSupportedDevices() const { return mSupportedDevices; }
 
+    bool canOpenNewIo() {
+        if (maxOpenCount == 0 || curOpenCount < maxOpenCount) {
+            return true;
+        }
+        return false;
+    }
+
+    bool canStartNewIo() {
+        if (maxActiveCount == 0 || curActiveCount < maxActiveCount) {
+            return true;
+        }
+        return false;
+    }
+
+    // Maximum number of input or output streams that can be simultaneously opened for this profile.
+    // By convention 0 means no limit. To respect legacy behavior, initialized to 1 for output
+    // profiles and 0 for input profiles
+    uint32_t     maxOpenCount;
+    // Number of streams currently opened for this profile.
+    uint32_t     curOpenCount;
+    // Maximum number of input or output streams that can be simultaneously active for this profile.
+    // By convention 0 means no limit. To respect legacy behavior, initialized to 1 for
+    // both output and input profiles (see the constructor above).
+    uint32_t     maxActiveCount;
+    // Number of streams currently active for this profile. This is not the number of active clients
+    // (AudioTrack or AudioRecord) but the number of active HAL streams.
+    uint32_t     curActiveCount;
+
 private:
     DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
 };
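Together with the descriptor open()/close() changes below, these counters follow a simple lifecycle: open() increments curOpenCount, start increments curActiveCount, and both are checked before the corresponding transition. A minimal standalone model follows (a stand-in struct, not the real IOProfile).

// Minimal model of the open/active gating; ProfileCounters stands in for IOProfile.
#include <cassert>
#include <cstdint>

struct ProfileCounters {
    uint32_t maxOpenCount = 1,   curOpenCount = 0;    // 0 means no limit
    uint32_t maxActiveCount = 1, curActiveCount = 0;

    bool canOpenNewIo() const  { return maxOpenCount == 0   || curOpenCount < maxOpenCount; }
    bool canStartNewIo() const { return maxActiveCount == 0 || curActiveCount < maxActiveCount; }
};

int main() {
    ProfileCounters p;             // legacy defaults for an output profile
    assert(p.canOpenNewIo());
    p.curOpenCount++;              // SwAudioOutputDescriptor::open() succeeded
    assert(!p.canOpenNewIo());     // a second open on this profile would be refused
    assert(p.canStartNewIo());
    p.curActiveCount++;            // startOutput() on the opened stream
    assert(!p.canStartNewIo());
    p.curActiveCount--;            // stopOutput()
    p.curOpenCount--;              // close()
    return 0;
}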
diff --git a/services/audiopolicy/common/managerdefinitions/include/Serializer.h b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
index 078b582..3b0e209 100644
--- a/services/audiopolicy/common/managerdefinitions/include/Serializer.h
+++ b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
@@ -92,6 +92,8 @@
         static const char name[];
         static const char role[];
         static const char flags[];
+        static const char maxOpenCount[];
+        static const char maxActiveCount[];
     };
 
     typedef IOProfile Element;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 2492ed6..737872d 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "APM::AudioInputDescriptor"
 //#define LOG_NDEBUG 0
 
+#include <AudioPolicyInterface.h>
 #include "AudioInputDescriptor.h"
 #include "IOProfile.h"
 #include "AudioGain.h"
@@ -26,10 +27,12 @@
 
 namespace android {
 
-AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile)
+AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile,
+                                           AudioPolicyClientInterface *clientInterface)
     : mIoHandle(0),
       mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL),
-      mProfile(profile), mPatchHandle(AUDIO_PATCH_HANDLE_NONE), mId(0)
+      mProfile(profile), mPatchHandle(AUDIO_PATCH_HANDLE_NONE), mId(0),
+      mClientInterface(clientInterface)
 {
     if (profile != NULL) {
         profile->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
@@ -39,12 +42,6 @@
     }
 }
 
-void AudioInputDescriptor::setIoHandle(audio_io_handle_t ioHandle)
-{
-    mId = AudioPort::getNextUniqueId();
-    mIoHandle = ioHandle;
-}
-
 audio_module_handle_t AudioInputDescriptor::getModuleHandle() const
 {
     if (mProfile == 0) {
@@ -192,6 +189,71 @@
     return config;
 }
 
+status_t AudioInputDescriptor::open(const audio_config_t *config,
+                                       audio_devices_t device,
+                                       const String8& address,
+                                       audio_source_t source,
+                                       audio_input_flags_t flags,
+                                       audio_io_handle_t *input)
+{
+    audio_config_t lConfig;
+    if (config == nullptr) {
+        lConfig = AUDIO_CONFIG_INITIALIZER;
+        lConfig.sample_rate = mSamplingRate;
+        lConfig.channel_mask = mChannelMask;
+        lConfig.format = mFormat;
+    } else {
+        lConfig = *config;
+    }
+
+    String8 lAddress = address;
+    if (lAddress == "") {
+        const DeviceVector& supportedDevices = mProfile->getSupportedDevices();
+        const DeviceVector& devicesForType = supportedDevices.getDevicesFromType(device);
+        lAddress = devicesForType.size() > 0 ? devicesForType.itemAt(0)->mAddress
+                  : String8("");
+    }
+
+    mDevice = device;
+
+    ALOGV("opening input for device %08x address %s profile %p name %s",
+          mDevice, lAddress.string(), mProfile.get(), mProfile->getName().string());
+
+    status_t status = mClientInterface->openInput(mProfile->getModuleHandle(),
+                                                  input,
+                                                  &lConfig,
+                                                  &mDevice,
+                                                  lAddress,
+                                                  source,
+                                                  flags);
+    LOG_ALWAYS_FATAL_IF(mDevice != device,
+                        "%s openInput returned device %08x when given device %08x",
+                        __FUNCTION__, mDevice, device);
+
+    if (status == NO_ERROR) {
+        mSamplingRate = lConfig.sample_rate;
+        mChannelMask = lConfig.channel_mask;
+        mFormat = lConfig.format;
+        mId = AudioPort::getNextUniqueId();
+        mIoHandle = *input;
+        mProfile->curOpenCount++;
+    }
+
+    return status;
+}
+
+
+void AudioInputDescriptor::close()
+{
+    if (mIoHandle != AUDIO_IO_HANDLE_NONE) {
+        mClientInterface->closeInput(mIoHandle);
+        LOG_ALWAYS_FATAL_IF(mProfile->curOpenCount < 1, "%s profile open count %u",
+                            __FUNCTION__, mProfile->curOpenCount);
+        mProfile->curOpenCount--;
+        mIoHandle = AUDIO_IO_HANDLE_NONE;
+    }
+}
+
 status_t AudioInputDescriptor::dump(int fd)
 {
     const size_t SIZE = 256;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 4d3c3b5..be5a1c1 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -23,6 +23,7 @@
 #include "AudioGain.h"
 #include "Volume.h"
 #include "HwModule.h"
+#include <media/AudioParameter.h>
 #include <media/AudioPolicy.h>
 
 // A device mask for all audio output devices that are considered "remote" when evaluating
@@ -231,13 +232,6 @@
     }
 }
 
-void SwAudioOutputDescriptor::setIoHandle(audio_io_handle_t ioHandle)
-{
-    mId = AudioPort::getNextUniqueId();
-    mIoHandle = ioHandle;
-}
-
-
 status_t SwAudioOutputDescriptor::dump(int fd)
 {
     const size_t SIZE = 256;
@@ -387,6 +381,93 @@
     return changed;
 }
 
+status_t SwAudioOutputDescriptor::open(const audio_config_t *config,
+                                       audio_devices_t device,
+                                       const String8& address,
+                                       audio_stream_type_t stream,
+                                       audio_output_flags_t flags,
+                                       audio_io_handle_t *output)
+{
+    audio_config_t lConfig;
+    if (config == nullptr) {
+        lConfig = AUDIO_CONFIG_INITIALIZER;
+        lConfig.sample_rate = mSamplingRate;
+        lConfig.channel_mask = mChannelMask;
+        lConfig.format = mFormat;
+    } else {
+        lConfig = *config;
+    }
+
+    String8 lAddress = address;
+    if (lAddress == "") {
+        const DeviceVector& supportedDevices = mProfile->getSupportedDevices();
+        const DeviceVector& devicesForType = supportedDevices.getDevicesFromType(device);
+        lAddress = devicesForType.size() > 0 ? devicesForType.itemAt(0)->mAddress
+                  : String8("");
+    }
+
+    mDevice = device;
+    // if the selected profile is offloaded and no offload info was specified,
+    // create a default one
+    if ((mProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) &&
+            lConfig.offload_info.format == AUDIO_FORMAT_DEFAULT) {
+        flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+        lConfig.offload_info = AUDIO_INFO_INITIALIZER;
+        lConfig.offload_info.sample_rate = lConfig.sample_rate;
+        lConfig.offload_info.channel_mask = lConfig.channel_mask;
+        lConfig.offload_info.format = lConfig.format;
+        lConfig.offload_info.stream_type = stream;
+        lConfig.offload_info.duration_us = -1;
+        lConfig.offload_info.has_video = true; // conservative
+        lConfig.offload_info.is_streaming = true; // likely
+    }
+
+    mFlags = (audio_output_flags_t)(mFlags | flags);
+
+    ALOGV("opening output for device %08x address %s profile %p name %s",
+          mDevice, lAddress.string(), mProfile.get(), mProfile->getName().string());
+
+    status_t status = mClientInterface->openOutput(mProfile->getModuleHandle(),
+                                                   output,
+                                                   &lConfig,
+                                                   &mDevice,
+                                                   lAddress,
+                                                   &mLatency,
+                                                   mFlags);
+    LOG_ALWAYS_FATAL_IF(mDevice != device,
+                        "%s openOutput returned device %08x when given device %08x",
+                        __FUNCTION__, mDevice, device);
+
+    if (status == NO_ERROR) {
+        mSamplingRate = lConfig.sample_rate;
+        mChannelMask = lConfig.channel_mask;
+        mFormat = lConfig.format;
+        mId = AudioPort::getNextUniqueId();
+        mIoHandle = *output;
+        mProfile->curOpenCount++;
+    }
+
+    return status;
+}
+
+
+void SwAudioOutputDescriptor::close()
+{
+    if (mIoHandle != AUDIO_IO_HANDLE_NONE) {
+        AudioParameter param;
+        param.add(String8("closing"), String8("true"));
+        mClientInterface->setParameters(mIoHandle, param.toString());
+
+        mClientInterface->closeOutput(mIoHandle);
+
+        LOG_ALWAYS_FATAL_IF(mProfile->curOpenCount < 1, "%s profile open count %u",
+                            __FUNCTION__, mProfile->curOpenCount);
+        mProfile->curOpenCount--;
+        mIoHandle = AUDIO_IO_HANDLE_NONE;
+    }
+}
+
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<AudioSourceDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
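When the selected profile is offloaded and the caller supplied no offload info, open() synthesizes a default one and forces the COMPRESS_OFFLOAD flag. The standalone restatement below only mirrors that rule for clarity; isOffloadProfile replaces the mProfile->getFlags() check and nothing here is new behavior.

// Standalone restatement of the default offload-info rule in open() above.
#include <system/audio.h>

static void applyDefaultOffloadInfo(bool isOffloadProfile,
                                    audio_stream_type_t stream,
                                    audio_output_flags_t *flags,
                                    audio_config_t *cfg) {
    if (isOffloadProfile && cfg->offload_info.format == AUDIO_FORMAT_DEFAULT) {
        *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
        cfg->offload_info = AUDIO_INFO_INITIALIZER;
        cfg->offload_info.sample_rate  = cfg->sample_rate;
        cfg->offload_info.channel_mask = cfg->channel_mask;
        cfg->offload_info.format       = cfg->format;
        cfg->offload_info.stream_type  = stream;
        cfg->offload_info.duration_us  = -1;
        cfg->offload_info.has_video    = true;    // conservative
        cfg->offload_info.is_streaming = true;    // likely
    }
}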
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 74ef4ec..fc89672 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -122,6 +122,16 @@
     result.append("\n");
     write(fd, result.string(), result.size());
     mSupportedDevices.dump(fd, String8("Supported"), 4, false);
+
+    result.clear();
+    snprintf(buffer, SIZE, "\n    - maxOpenCount: %u - curOpenCount: %u\n",
+             maxOpenCount, curOpenCount);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "    - maxActiveCount: %u - curActiveCount: %u\n",
+             maxActiveCount, curActiveCount);
+    result.append(buffer);
+
+    write(fd, result.string(), result.size());
 }
 
 void IOProfile::log()
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 0908ffc..aa589f4 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -217,6 +217,8 @@
 const char MixPortTraits::Attributes::name[] = "name";
 const char MixPortTraits::Attributes::role[] = "role";
 const char MixPortTraits::Attributes::flags[] = "flags";
+const char MixPortTraits::Attributes::maxOpenCount[] = "maxOpenCount";
+const char MixPortTraits::Attributes::maxActiveCount[] = "maxActiveCount";
 
 status_t MixPortTraits::deserialize(_xmlDoc *doc, const _xmlNode *child, PtrElement &mixPort,
                                     PtrSerializingCtx /*serializingContext*/)
@@ -259,6 +261,14 @@
             mixPort->setFlags(InputFlagConverter::maskFromString(flags));
         }
     }
+    string maxOpenCount = getXmlAttribute(child, Attributes::maxOpenCount);
+    if (!maxOpenCount.empty()) {
+        convertTo(maxOpenCount, mixPort->maxOpenCount);
+    }
+    string maxActiveCount = getXmlAttribute(child, Attributes::maxActiveCount);
+    if (!maxActiveCount.empty()) {
+        convertTo(maxActiveCount, mixPort->maxActiveCount);
+    }
     // Deserialize children
     AudioGainTraits::Collection gains;
     deserializeCollection<AudioGainTraits>(doc, child, gains, NULL);
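The two new mixPort attributes are optional and simply override the IOProfile defaults. A self-contained sketch of the parse step follows, with the XML usage shown only as a comment; convertToUint32 is a stand-in for the convertTo utility used above, and the attribute values in main() are illustrative.

// Standalone model of the new attribute handling; an audio_policy_configuration.xml
// mixPort could carry the attributes like this (illustrative only):
//   <mixPort name="primary output" role="source" maxOpenCount="1" maxActiveCount="2">
#include <cstdint>
#include <cstdio>
#include <string>

struct MixPortLimits {                 // stand-in for the new IOProfile fields
    uint32_t maxOpenCount = 1;         // legacy default for an output profile
    uint32_t maxActiveCount = 1;
};

static bool convertToUint32(const std::string &s, uint32_t &out) {
    if (s.empty()) return false;       // attribute absent: keep the default
    out = static_cast<uint32_t>(std::stoul(s));
    return true;
}

int main() {
    MixPortLimits port;
    const std::string maxOpen = "1";   // as returned by getXmlAttribute()
    const std::string maxActive = "2";
    convertToUint32(maxOpen, port.maxOpenCount);
    convertToUint32(maxActive, port.maxActiveCount);
    printf("maxOpenCount=%u maxActiveCount=%u\n", port.maxOpenCount, port.maxActiveCount);
    return 0;
}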
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 80a8dc6..b363779 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -843,12 +843,10 @@
         flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_HW_AV_SYNC);
     }
 
-    ALOGV("getOutputForAttr() device 0x%x, samplingRate %d, format %x, channelMask %x, flags %x",
+    ALOGV("getOutputForAttr() device 0x%x, sampling rate %d, format %x, channel mask %x, flags %x",
           device, config->sample_rate, config->format, config->channel_mask, flags);
 
-    *output = getOutputForDevice(device, session, *stream,
-                                 config->sample_rate, config->format, config->channel_mask,
-                                 flags, &config->offload_info);
+    *output = getOutputForDevice(device, session, *stream, config, flags);
     if (*output == AUDIO_IO_HANDLE_NONE) {
         mOutputRoutes.removeRoute(session);
         return INVALID_OPERATION;
@@ -867,11 +865,8 @@
         audio_devices_t device,
         audio_session_t session,
         audio_stream_type_t stream,
-        uint32_t samplingRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        audio_output_flags_t flags,
-        const audio_offload_info_t *offloadInfo)
+        const audio_config_t *config,
+        audio_output_flags_t flags)
 {
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
     status_t status;
@@ -898,7 +893,7 @@
     if (stream == AUDIO_STREAM_TTS) {
         flags = AUDIO_OUTPUT_FLAG_TTS;
     } else if (stream == AUDIO_STREAM_VOICE_CALL &&
-               audio_is_linear_pcm(format)) {
+               audio_is_linear_pcm(config->format)) {
         flags = (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_VOIP_RX |
                                        AUDIO_OUTPUT_FLAG_DIRECT);
         ALOGV("Set VoIP and Direct output flags for PCM format");
@@ -909,8 +904,8 @@
     // skip direct output selection if the request can obviously be attached to a mixed output
     // and not explicitly requested
     if (((flags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) &&
-            audio_is_linear_pcm(format) && samplingRate <= SAMPLE_RATE_HZ_MAX &&
-            audio_channel_count_from_out_mask(channelMask) <= 2) {
+            audio_is_linear_pcm(config->format) && config->sample_rate <= SAMPLE_RATE_HZ_MAX &&
+            audio_channel_count_from_out_mask(config->channel_mask) <= 2) {
         goto non_direct_output;
     }
 
@@ -924,102 +919,58 @@
     if (((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) ||
             !(mEffects.isNonOffloadableEffectEnabled() || mMasterMono)) {
         profile = getProfileForDirectOutput(device,
-                                           samplingRate,
-                                           format,
-                                           channelMask,
+                                           config->sample_rate,
+                                           config->format,
+                                           config->channel_mask,
                                            (audio_output_flags_t)flags);
     }
 
     if (profile != 0) {
-        sp<SwAudioOutputDescriptor> outputDesc = NULL;
-
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (!desc->isDuplicated() && (profile == desc->mProfile)) {
-                outputDesc = desc;
                 // reuse direct output if currently open by the same client
                 // and configured with same parameters
-                if ((samplingRate == outputDesc->mSamplingRate) &&
-                    audio_formats_match(format, outputDesc->mFormat) &&
-                    (channelMask == outputDesc->mChannelMask)) {
-                  if (session == outputDesc->mDirectClientSession) {
-                      outputDesc->mDirectOpenCount++;
-                      ALOGV("getOutputForDevice() reusing direct output %d for session %d",
-                            mOutputs.keyAt(i), session);
-                      return mOutputs.keyAt(i);
-                  } else {
-                      ALOGV("getOutputForDevice() do not reuse direct output because"
-                              "current client (%d) is not the same as requesting client (%d)",
-                            outputDesc->mDirectClientSession, session);
-                      goto non_direct_output;
-                  }
+                if ((config->sample_rate == desc->mSamplingRate) &&
+                    audio_formats_match(config->format, desc->mFormat) &&
+                    (config->channel_mask == desc->mChannelMask) &&
+                    (session == desc->mDirectClientSession)) {
+                    desc->mDirectOpenCount++;
+                    ALOGV("getOutputForDevice() reusing direct output %d for session %d",
+                        mOutputs.keyAt(i), session);
+                    return mOutputs.keyAt(i);
                 }
             }
         }
-        // close direct output if currently open and configured with different parameters
-        if (outputDesc != NULL) {
-            closeOutput(outputDesc->mIoHandle);
+
+        if (!profile->canOpenNewIo()) {
+            goto non_direct_output;
         }
 
-        // if the selected profile is offloaded and no offload info was specified,
-        // create a default one
-        audio_offload_info_t defaultOffloadInfo = AUDIO_INFO_INITIALIZER;
-        if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) && !offloadInfo) {
-            flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
-            defaultOffloadInfo.sample_rate = samplingRate;
-            defaultOffloadInfo.channel_mask = channelMask;
-            defaultOffloadInfo.format = format;
-            defaultOffloadInfo.stream_type = stream;
-            defaultOffloadInfo.bit_rate = 0;
-            defaultOffloadInfo.duration_us = -1;
-            defaultOffloadInfo.has_video = true; // conservative
-            defaultOffloadInfo.is_streaming = true; // likely
-            offloadInfo = &defaultOffloadInfo;
-        }
-
-        outputDesc = new SwAudioOutputDescriptor(profile, mpClientInterface);
-        outputDesc->mDevice = device;
-        outputDesc->mLatency = 0;
-        outputDesc->mFlags = (audio_output_flags_t)(outputDesc->mFlags | flags);
-        audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-        config.sample_rate = samplingRate;
-        config.channel_mask = channelMask;
-        config.format = format;
-        if (offloadInfo != NULL) {
-            config.offload_info = *offloadInfo;
-        }
-        DeviceVector outputDevices = mAvailableOutputDevices.getDevicesFromType(device);
-        String8 address = outputDevices.size() > 0 ? outputDevices.itemAt(0)->mAddress
-                : String8("");
-        status = mpClientInterface->openOutput(profile->getModuleHandle(),
-                                               &output,
-                                               &config,
-                                               &outputDesc->mDevice,
-                                               address,
-                                               &outputDesc->mLatency,
-                                               outputDesc->mFlags);
+        sp<SwAudioOutputDescriptor> outputDesc =
+                new SwAudioOutputDescriptor(profile, mpClientInterface);
+        status = outputDesc->open(config, device, String8(""), stream, flags, &output);
 
         // only accept an output with the requested parameters
         if (status != NO_ERROR ||
-            (samplingRate != 0 && samplingRate != config.sample_rate) ||
-            (format != AUDIO_FORMAT_DEFAULT && !audio_formats_match(format, config.format)) ||
-            (channelMask != 0 && channelMask != config.channel_mask)) {
-            ALOGV("getOutputForDevice() failed opening direct output: output %d samplingRate %d %d,"
-                    "format %d %d, channelMask %04x %04x", output, samplingRate,
-                    outputDesc->mSamplingRate, format, outputDesc->mFormat, channelMask,
-                    outputDesc->mChannelMask);
+            (config->sample_rate != 0 && config->sample_rate != outputDesc->mSamplingRate) ||
+            (config->format != AUDIO_FORMAT_DEFAULT &&
+                    !audio_formats_match(config->format, outputDesc->mFormat)) ||
+            (config->channel_mask != 0 && config->channel_mask != outputDesc->mChannelMask)) {
+            ALOGV("getOutputForDevice() failed opening direct output: output %d sample rate %d %d, "
+                    "format %d %d, channel mask %04x %04x", output, config->sample_rate,
+                    outputDesc->mSamplingRate, config->format, outputDesc->mFormat,
+                    config->channel_mask, outputDesc->mChannelMask);
             if (output != AUDIO_IO_HANDLE_NONE) {
-                mpClientInterface->closeOutput(output);
+                outputDesc->close();
             }
             // fall back to mixer output if possible when the direct output could not be open
-            if (audio_is_linear_pcm(format) && samplingRate <= SAMPLE_RATE_HZ_MAX) {
+            if (audio_is_linear_pcm(config->format) &&
+                    config->sample_rate  <= SAMPLE_RATE_HZ_MAX) {
                 goto non_direct_output;
             }
             return AUDIO_IO_HANDLE_NONE;
         }
-        outputDesc->mSamplingRate = config.sample_rate;
-        outputDesc->mChannelMask = config.channel_mask;
-        outputDesc->mFormat = config.format;
         outputDesc->mRefCount[stream] = 0;
         outputDesc->mStopTime[stream] = 0;
         outputDesc->mDirectOpenCount = 1;
@@ -1045,18 +996,18 @@
     // open a non direct output
 
     // for non direct outputs, only PCM is supported
-    if (audio_is_linear_pcm(format)) {
+    if (audio_is_linear_pcm(config->format)) {
         // get which output is suitable for the specified stream. The actual
         // routing change will happen when startOutput() will be called
         SortedVector<audio_io_handle_t> outputs = getOutputsForDevice(device, mOutputs);
 
         // at this stage we should ignore the DIRECT flag as no direct output could be found earlier
         flags = (audio_output_flags_t)(flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
-        output = selectOutput(outputs, flags, format);
+        output = selectOutput(outputs, flags, config->format);
     }
     ALOGW_IF((output == 0), "getOutputForDevice() could not find output for stream %d, "
-            "samplingRate %d, format %d, channels %x, flags %x",
-            stream, samplingRate, format, channelMask, flags);
+            "sampling rate %d, format %d, channels %x, flags %x",
+            stream, config->sample_rate, config->format, config->channel_mask, flags);
 
     return output;
 }
@@ -1155,6 +1106,13 @@
 
     sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(index);
 
+    if (!outputDesc->isActive()) {
+        if (!outputDesc->mProfile->canStartNewIo()) {
+            return INVALID_OPERATION;
+        }
+        outputDesc->mProfile->curActiveCount++;
+    }
+
     // Routing?
     mOutputRoutes.incRouteActivity(session);
 
@@ -1182,6 +1140,12 @@
 
     if (status != NO_ERROR) {
         mOutputRoutes.decRouteActivity(session);
+        if (!outputDesc->isActive()) {
+            LOG_ALWAYS_FATAL_IF(outputDesc->mProfile->curActiveCount < 1,
+                                "%s invalid profile active count %u",
+                                __FUNCTION__, outputDesc->mProfile->curActiveCount);
+            outputDesc->mProfile->curActiveCount--;
+        }
         return status;
     }
     // Automatically enable the remote submix input when output is started on a re routing mix
@@ -1370,7 +1334,15 @@
         }
     }
 
-    return stopSource(outputDesc, stream, forceDeviceUpdate);
+    status_t status = stopSource(outputDesc, stream, forceDeviceUpdate);
+
+    if (status == NO_ERROR && !outputDesc->isActive()) {
+        LOG_ALWAYS_FATAL_IF(outputDesc->mProfile->curActiveCount < 1,
+                            "%s invalid profile active count %u",
+                            __FUNCTION__, outputDesc->mProfile->curActiveCount);
+        outputDesc->mProfile->curActiveCount--;
+    }
+    return status;
 }
 
 status_t AudioPolicyManager::stopSource(const sp<AudioOutputDescriptor>& outputDesc,
@@ -1473,7 +1445,7 @@
                                              input_type_t *inputType,
                                              audio_port_handle_t *portId)
 {
-    ALOGV("getInputForAttr() source %d, samplingRate %d, format %d, channelMask %x,"
+    ALOGV("getInputForAttr() source %d, sampling rate %d, format %d, channel mask %x, "
             "session %d, flags %#x",
           attr->source, config->sample_rate, config->format, config->channel_mask, session, flags);
 
@@ -1485,6 +1457,10 @@
     AudioMix *policyMix = NULL;
     DeviceVector inputDevices;
 
+    if (inputSource == AUDIO_SOURCE_DEFAULT) {
+        inputSource = AUDIO_SOURCE_MIC;
+    }
+
     // Explicit routing?
     sp<DeviceDescriptor> deviceDesc;
     if (*selectedDeviceId != AUDIO_PORT_HANDLE_NONE) {
@@ -1541,9 +1517,6 @@
     *input = AUDIO_IO_HANDLE_NONE;
     *inputType = API_INPUT_INVALID;
 
-    if (inputSource == AUDIO_SOURCE_DEFAULT) {
-        inputSource = AUDIO_SOURCE_MIC;
-    }
     halInputSource = inputSource;
 
     // TODO: check for existing client for this port ID
@@ -1593,7 +1566,7 @@
     }
 
     *input = getInputForDevice(device, address, session, uid, inputSource,
-                               config->sample_rate, config->format, config->channel_mask, flags,
+                               config, flags,
                                policyMix);
     if (*input == AUDIO_IO_HANDLE_NONE) {
         status = INVALID_OPERATION;
@@ -1620,9 +1593,7 @@
                                                         audio_session_t session,
                                                         uid_t uid,
                                                         audio_source_t inputSource,
-                                                        uint32_t samplingRate,
-                                                        audio_format_t format,
-                                                        audio_channel_mask_t channelMask,
+                                                        const audio_config_base_t *config,
                                                         audio_input_flags_t flags,
                                                         AudioMix *policyMix)
 {
@@ -1641,16 +1612,17 @@
             halInputSource = AUDIO_SOURCE_VOICE_RECOGNITION;
         }
     } else if (inputSource == AUDIO_SOURCE_VOICE_COMMUNICATION &&
-               audio_is_linear_pcm(format)) {
+               audio_is_linear_pcm(config->format)) {
         flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_VOIP_TX);
     }
 
     // find a compatible input profile (not necessarily identical in parameters)
     sp<IOProfile> profile;
-    // samplingRate and flags may be updated by getInputProfile
-    uint32_t profileSamplingRate = (samplingRate == 0) ? SAMPLE_RATE_HZ_DEFAULT : samplingRate;
-    audio_format_t profileFormat = format;
-    audio_channel_mask_t profileChannelMask = channelMask;
+    // sampling rate and flags may be updated by getInputProfile
+    uint32_t profileSamplingRate = (config->sample_rate == 0) ?
+            SAMPLE_RATE_HZ_DEFAULT : config->sample_rate;
+    audio_format_t profileFormat = config->format;
+    audio_channel_mask_t profileChannelMask = config->channel_mask;
     audio_input_flags_t profileFlags = flags;
     for (;;) {
         profile = getInputProfile(device, address,
@@ -1664,12 +1636,13 @@
             profileFlags = AUDIO_INPUT_FLAG_NONE; // retry
         } else { // fail
             ALOGW("getInputForDevice() could not find profile for device 0x%X, "
-                  "samplingRate %u, format %#x, channelMask 0x%X, flags %#x",
-                    device, samplingRate, format, channelMask, flags);
+                  "sampling rate %u, format %#x, channel mask 0x%X, flags %#x",
+                    device, config->sample_rate, config->format, config->channel_mask, flags);
             return input;
         }
     }
     // Pick input sampling rate if not specified by client
+    uint32_t samplingRate = config->sample_rate;
     if (samplingRate == 0) {
         samplingRate = profileSamplingRate;
     }
@@ -1680,14 +1653,14 @@
     }
 
     sp<AudioSession> audioSession = new AudioSession(session,
-                                                              inputSource,
-                                                              format,
-                                                              samplingRate,
-                                                              channelMask,
-                                                              flags,
-                                                              uid,
-                                                              isSoundTrigger,
-                                                              policyMix, mpClientInterface);
+                                                     inputSource,
+                                                     config->format,
+                                                     samplingRate,
+                                                     config->channel_mask,
+                                                     flags,
+                                                     uid,
+                                                     isSoundTrigger,
+                                                     policyMix, mpClientInterface);
 
 // FIXME: disable concurrent capture until UI is ready
 #if 0
@@ -1731,8 +1704,8 @@
             // can be selected.
             if (!isConcurrentSource(inputSource) &&
                     ((desc->mSamplingRate != samplingRate ||
-                    desc->mChannelMask != channelMask ||
-                    !audio_formats_match(desc->mFormat, format)) &&
+                    desc->mChannelMask != config->channel_mask ||
+                    !audio_formats_match(desc->mFormat, config->format)) &&
                     (source_priority(desc->getHighestPrioritySource(false /*activeOnly*/)) <
                      source_priority(inputSource)))) {
                 reusedInputDesc = desc;
@@ -1755,44 +1728,34 @@
     }
 #endif
 
-    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-    config.sample_rate = profileSamplingRate;
-    config.channel_mask = profileChannelMask;
-    config.format = profileFormat;
-
-    if (address == "") {
-        DeviceVector inputDevices = mAvailableInputDevices.getDevicesFromType(device);
-        //   the inputs vector must be of size 1, but we don't want to crash here
-        address = inputDevices.size() > 0 ? inputDevices.itemAt(0)->mAddress : String8("");
+    if (!profile->canOpenNewIo()) {
+        return AUDIO_IO_HANDLE_NONE;
     }
 
-    status_t status = mpClientInterface->openInput(profile->getModuleHandle(),
-                                                   &input,
-                                                   &config,
-                                                   &device,
-                                                   address,
-                                                   halInputSource,
-                                                   profileFlags);
+    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(profile, mpClientInterface);
+
+    audio_config_t lConfig = AUDIO_CONFIG_INITIALIZER;
+    lConfig.sample_rate = profileSamplingRate;
+    lConfig.channel_mask = profileChannelMask;
+    lConfig.format = profileFormat;
+
+    status_t status = inputDesc->open(&lConfig, device, address,
+            halInputSource, profileFlags, &input);
 
     // only accept input with the exact requested set of parameters
     if (status != NO_ERROR || input == AUDIO_IO_HANDLE_NONE ||
-        (profileSamplingRate != config.sample_rate) ||
-        !audio_formats_match(profileFormat, config.format) ||
-        (profileChannelMask != config.channel_mask)) {
-        ALOGW("getInputForAttr() failed opening input: samplingRate %d"
-              ", format %d, channelMask %x",
-                samplingRate, format, channelMask);
+        (profileSamplingRate != lConfig.sample_rate) ||
+        !audio_formats_match(profileFormat, lConfig.format) ||
+        (profileChannelMask != lConfig.channel_mask)) {
+        ALOGW("getInputForAttr() failed opening input: sampling rate %d"
+              ", format %d, channel mask %x",
+              profileSamplingRate, profileFormat, profileChannelMask);
         if (input != AUDIO_IO_HANDLE_NONE) {
-            mpClientInterface->closeInput(input);
+            inputDesc->close();
         }
         return AUDIO_IO_HANDLE_NONE;
     }
 
-    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(profile);
-    inputDesc->mSamplingRate = profileSamplingRate;
-    inputDesc->mFormat = profileFormat;
-    inputDesc->mChannelMask = profileChannelMask;
-    inputDesc->mDevice = device;
     inputDesc->mPolicyMix = policyMix;
     inputDesc->addAudioSession(session, audioSession);
 
@@ -2006,6 +1969,13 @@
         setInputDevice(input, device, true /* force */);
 
         if (inputDesc->getAudioSessionCount(true/*activeOnly*/) == 1) {
+            if (!inputDesc->mProfile->canStartNewIo()) {
+                mInputRoutes.decRouteActivity(session);
+                audioSession->changeActiveCount(-1);
+                return INVALID_OPERATION;
+            }
+            inputDesc->mProfile->curActiveCount++;
+
             // if input maps to a dynamic policy with an activity listener, notify of state change
             if ((inputDesc->mPolicyMix != NULL)
                     && ((inputDesc->mPolicyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
@@ -2075,6 +2045,11 @@
         if (inputDesc->isActive()) {
             setInputDevice(input, getNewInputDevice(inputDesc), false /* force */);
         } else {
+            LOG_ALWAYS_FATAL_IF(inputDesc->mProfile->curActiveCount < 1,
+                                "%s invalid profile active count %u",
+                                __FUNCTION__, inputDesc->mProfile->curActiveCount);
+            inputDesc->mProfile->curActiveCount--;
+
             // if input maps to a dynamic policy with an activity listener, notify of state change
             if ((inputDesc->mPolicyMix != NULL)
                     && ((inputDesc->mPolicyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
@@ -2169,7 +2144,7 @@
             mAudioPatches.removeItemsAt(patch_index);
             patchRemoved = true;
         }
-        mpClientInterface->closeInput(mInputs.keyAt(input_index));
+        inputDesc->close();
     }
     mInputs.clear();
     SoundTrigger::setCaptureState(false);
@@ -3632,6 +3607,12 @@
         {
             const sp<IOProfile> outProfile = mHwModules[i]->mOutputProfiles[j];
 
+            if (!outProfile->canOpenNewIo()) {
+                ALOGE("Invalid Output profile max open count %u for profile %s",
+                      outProfile->maxOpenCount, outProfile->getTagName().c_str());
+                continue;
+            }
+
             if (!outProfile->hasSupportedDevices()) {
                 ALOGW("Output profile contains no device on module %s", mHwModules[i]->getName());
                 continue;
@@ -3660,30 +3641,15 @@
             const DeviceVector &devicesForType = supportedDevices.getDevicesFromType(profileType);
             String8 address = devicesForType.size() > 0 ? devicesForType.itemAt(0)->mAddress
                     : String8("");
-
-            outputDesc->mDevice = profileType;
-            audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-            config.sample_rate = outputDesc->mSamplingRate;
-            config.channel_mask = outputDesc->mChannelMask;
-            config.format = outputDesc->mFormat;
             audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-            status_t status = mpClientInterface->openOutput(outProfile->getModuleHandle(),
-                                                            &output,
-                                                            &config,
-                                                            &outputDesc->mDevice,
-                                                            address,
-                                                            &outputDesc->mLatency,
-                                                            outputDesc->mFlags);
+            status_t status = outputDesc->open(nullptr, profileType, address,
+                                           AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
 
             if (status != NO_ERROR) {
                 ALOGW("Cannot open output stream for device %08x on hw module %s",
                       outputDesc->mDevice,
                       mHwModules[i]->getName());
             } else {
-                outputDesc->mSamplingRate = config.sample_rate;
-                outputDesc->mChannelMask = config.channel_mask;
-                outputDesc->mFormat = config.format;
-
                 for (size_t k = 0; k  < supportedDevices.size(); k++) {
                     ssize_t index = mAvailableOutputDevices.indexOf(supportedDevices[k]);
                     // give a valid ID to an attached device once confirmed it is reachable
@@ -3697,11 +3663,11 @@
                 }
                 addOutput(output, outputDesc);
                 setOutputDevice(outputDesc,
-                                outputDesc->mDevice,
+                                profileType,
                                 true,
                                 0,
                                 NULL,
-                                address.string());
+                                address);
             }
         }
         // open input streams needed to access attached devices to validate
@@ -3710,6 +3676,12 @@
         {
             const sp<IOProfile> inProfile = mHwModules[i]->mInputProfiles[j];
 
+            if (!inProfile->canOpenNewIo()) {
+                ALOGE("Invalid Input profile max open count %u for profile %s",
+                      inProfile->maxOpenCount, inProfile->getTagName().c_str());
+                continue;
+            }
+
             if (!inProfile->hasSupportedDevices()) {
                 ALOGW("Input profile contains no device on module %s", mHwModules[i]->getName());
                 continue;
@@ -3722,30 +3694,15 @@
                 continue;
             }
             sp<AudioInputDescriptor> inputDesc =
-                    new AudioInputDescriptor(inProfile);
+                    new AudioInputDescriptor(inProfile, mpClientInterface);
 
-            inputDesc->mDevice = profileType;
-
-            // find the address
-            DeviceVector inputDevices = mAvailableInputDevices.getDevicesFromType(profileType);
-            //   the inputs vector must be of size 1, but we don't want to crash here
-            String8 address = inputDevices.size() > 0 ? inputDevices.itemAt(0)->mAddress
-                    : String8("");
-            ALOGV("  for input device 0x%x using address %s", profileType, address.string());
-            ALOGE_IF(inputDevices.size() == 0, "Input device list is empty!");
-
-            audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-            config.sample_rate = inputDesc->mSamplingRate;
-            config.channel_mask = inputDesc->mChannelMask;
-            config.format = inputDesc->mFormat;
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-            status_t status = mpClientInterface->openInput(inProfile->getModuleHandle(),
-                                                           &input,
-                                                           &config,
-                                                           &inputDesc->mDevice,
-                                                           address,
-                                                           AUDIO_SOURCE_MIC,
-                                                           AUDIO_INPUT_FLAG_NONE);
+            status_t status = inputDesc->open(nullptr,
+                                              profileType,
+                                              String8(""),
+                                              AUDIO_SOURCE_MIC,
+                                              AUDIO_INPUT_FLAG_NONE,
+                                              &input);
 
             if (status == NO_ERROR) {
                 const DeviceVector &supportedDevices = inProfile->getSupportedDevices();
@@ -3760,10 +3717,10 @@
                         }
                     }
                 }
-                mpClientInterface->closeInput(input);
+                inputDesc->close();
             } else {
                 ALOGW("Cannot open input stream for device %08x on hw module %s",
-                      inputDesc->mDevice,
+                      profileType,
                       mHwModules[i]->getName());
             }
         }
@@ -3804,10 +3761,10 @@
 AudioPolicyManager::~AudioPolicyManager()
 {
    for (size_t i = 0; i < mOutputs.size(); i++) {
-        mpClientInterface->closeOutput(mOutputs.keyAt(i));
+        mOutputs.valueAt(i)->close();
    }
    for (size_t i = 0; i < mInputs.size(); i++) {
-        mpClientInterface->closeInput(mInputs.keyAt(i));
+        mInputs.valueAt(i)->close();
    }
    mAvailableOutputDevices.clear();
    mAvailableInputDevices.clear();
@@ -3825,7 +3782,6 @@
 
 void AudioPolicyManager::addOutput(audio_io_handle_t output, const sp<SwAudioOutputDescriptor>& outputDesc)
 {
-    outputDesc->setIoHandle(output);
     mOutputs.add(output, outputDesc);
     updateMono(output); // update mono status when adding to output list
     selectOutputForMusicEffects();
@@ -3840,7 +3796,6 @@
 
 void AudioPolicyManager::addInput(audio_io_handle_t input, const sp<AudioInputDescriptor>& inputDesc)
 {
-    inputDesc->setIoHandle(input);
     mInputs.add(input, inputDesc);
     nextAudioPortGeneration();
 }
@@ -3934,30 +3889,20 @@
                 continue;
             }
 
+            if (!profile->canOpenNewIo()) {
+                ALOGW("Max Output number %u already opened for this profile %s",
+                      profile->maxOpenCount, profile->getTagName().c_str());
+                continue;
+            }
+
             ALOGV("opening output for device %08x with params %s profile %p name %s",
                   device, address.string(), profile.get(), profile->getName().string());
             desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
-            desc->mDevice = device;
-            audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-            config.sample_rate = desc->mSamplingRate;
-            config.channel_mask = desc->mChannelMask;
-            config.format = desc->mFormat;
-            config.offload_info.sample_rate = desc->mSamplingRate;
-            config.offload_info.channel_mask = desc->mChannelMask;
-            config.offload_info.format = desc->mFormat;
             audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-            status_t status = mpClientInterface->openOutput(profile->getModuleHandle(),
-                                                            &output,
-                                                            &config,
-                                                            &desc->mDevice,
-                                                            address,
-                                                            &desc->mLatency,
-                                                            desc->mFlags);
-            if (status == NO_ERROR) {
-                desc->mSamplingRate = config.sample_rate;
-                desc->mChannelMask = config.channel_mask;
-                desc->mFormat = config.format;
+            status_t status = desc->open(nullptr, device, address,
+                                         AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
 
+            if (status == NO_ERROR) {
                 // Here is where the out_set_parameters() for card & device gets called
                 if (!address.isEmpty()) {
                     char *param = audio_device_address_to_parameter(device, address);
@@ -3967,27 +3912,21 @@
                 updateAudioProfiles(device, output, profile->getAudioProfiles());
                 if (!profile->hasValidAudioProfile()) {
                     ALOGW("checkOutputsForDevice() missing param");
-                    mpClientInterface->closeOutput(output);
+                    desc->close();
                     output = AUDIO_IO_HANDLE_NONE;
                 } else if (profile->hasDynamicAudioProfile()) {
-                    mpClientInterface->closeOutput(output);
+                    desc->close();
                     output = AUDIO_IO_HANDLE_NONE;
-                    profile->pickAudioProfile(config.sample_rate, config.channel_mask, config.format);
+                    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+                    profile->pickAudioProfile(
+                            config.sample_rate, config.channel_mask, config.format);
                     config.offload_info.sample_rate = config.sample_rate;
                     config.offload_info.channel_mask = config.channel_mask;
                     config.offload_info.format = config.format;
-                    status = mpClientInterface->openOutput(profile->getModuleHandle(),
-                                                           &output,
-                                                           &config,
-                                                           &desc->mDevice,
-                                                           address,
-                                                           &desc->mLatency,
-                                                           desc->mFlags);
-                    if (status == NO_ERROR) {
-                        desc->mSamplingRate = config.sample_rate;
-                        desc->mChannelMask = config.channel_mask;
-                        desc->mFormat = config.format;
-                    } else {
+
+                    status_t status = desc->open(&config, device, address, AUDIO_STREAM_DEFAULT,
+                                                 AUDIO_OUTPUT_FLAG_NONE, &output);
+                    if (status != NO_ERROR) {
                         output = AUDIO_IO_HANDLE_NONE;
                     }
                 }
@@ -4033,7 +3972,7 @@
                         } else {
                             ALOGW("checkOutputsForDevice() could not open dup output for %d and %d",
                                     mPrimaryOutput->mIoHandle, output);
-                            mpClientInterface->closeOutput(output);
+                            desc->close();
                             removeOutput(output);
                             nextAudioPortGeneration();
                             output = AUDIO_IO_HANDLE_NONE;
@@ -4161,6 +4100,7 @@
         for (ssize_t profile_index = 0; profile_index < (ssize_t)profiles.size(); profile_index++) {
 
             sp<IOProfile> profile = profiles[profile_index];
+
             // nothing to do if one input is already opened for this profile
             size_t input_index;
             for (input_index = 0; input_index < mInputs.size(); input_index++) {
@@ -4176,31 +4116,22 @@
                 continue;
             }
 
-            ALOGV("opening input for device 0x%X with params %s", device, address.string());
-            desc = new AudioInputDescriptor(profile);
-            desc->mDevice = device;
-            audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-            config.sample_rate = desc->mSamplingRate;
-            config.channel_mask = desc->mChannelMask;
-            config.format = desc->mFormat;
+            if (!profile->canOpenNewIo()) {
+                ALOGW("Max Input number %u already opened for this profile %s",
+                      profile->maxOpenCount, profile->getTagName().c_str());
+                continue;
+            }
+
+            desc = new AudioInputDescriptor(profile, mpClientInterface);
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-
-            ALOGV("opening inputput for device %08x with params %s profile %p name %s",
-                  desc->mDevice, address.string(), profile.get(), profile->getName().string());
-
-            status_t status = mpClientInterface->openInput(profile->getModuleHandle(),
-                                                           &input,
-                                                           &config,
-                                                           &desc->mDevice,
-                                                           address,
-                                                           AUDIO_SOURCE_MIC,
-                                                           AUDIO_INPUT_FLAG_NONE /*FIXME*/);
+            status_t status = desc->open(nullptr,
+                                         device,
+                                         address,
+                                         AUDIO_SOURCE_MIC,
+                                         AUDIO_INPUT_FLAG_NONE,
+                                         &input);
 
             if (status == NO_ERROR) {
-                desc->mSamplingRate = config.sample_rate;
-                desc->mChannelMask = config.channel_mask;
-                desc->mFormat = config.format;
-
                 if (!address.isEmpty()) {
                     char *param = audio_device_address_to_parameter(device, address);
                     mpClientInterface->setParameters(input, String8(param));
@@ -4209,7 +4140,7 @@
                 updateAudioProfiles(device, input, profile->getAudioProfiles());
                 if (!profile->hasValidAudioProfile()) {
                     ALOGW("checkInputsForDevice() direct input missing param");
-                    mpClientInterface->closeInput(input);
+                    desc->close();
                     input = AUDIO_IO_HANDLE_NONE;
                 }
 
@@ -4317,11 +4248,8 @@
         mpClientInterface->onAudioPatchListUpdate();
     }
 
-    AudioParameter param;
-    param.add(String8("closing"), String8("true"));
-    mpClientInterface->setParameters(output, param.toString());
+    outputDesc->close();
 
-    mpClientInterface->closeOutput(output);
     removeOutput(output);
     mPreviousOutputs = mOutputs;
 }
@@ -4346,7 +4274,7 @@
         mpClientInterface->onAudioPatchListUpdate();
     }
 
-    mpClientInterface->closeInput(input);
+    inputDesc->close();
     mInputs.removeItem(input);
 }
 
@@ -5595,6 +5523,11 @@
             case AUDIO_FORMAT_E_AC3:
             case AUDIO_FORMAT_DTS:
             case AUDIO_FORMAT_DTS_HD:
+                // If ALWAYS, remove all other surround formats here since we will add them later.
+                if (forceUse == AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS) {
+                    formats.removeAt(formatIndex);
+                    formatIndex--;
+                }
                 supportsOtherSurround = true;
                 break;
             case AUDIO_FORMAT_IEC61937:
@@ -5640,8 +5573,7 @@
         // If ALWAYS, add support for raw surround formats if all are missing.
         // This assumes that if any of these formats are reported by the HAL
         // then the report is valid and should not be modified.
-        if ((forceUse == AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS)
-                && !supportsOtherSurround) {
+        if (forceUse == AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS) {
             formats.add(AUDIO_FORMAT_E_AC3);
             formats.add(AUDIO_FORMAT_DTS);
             formats.add(AUDIO_FORMAT_DTS_HD);
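The hunks above gate every new stream on profile->canOpenNewIo() and route opening and closing through the descriptor's own open()/close(), with curActiveCount asserted to stay non-negative on stop. A minimal sketch of the bookkeeping this implies; the counter names mirror the diff, but the real IOProfile implementation lives elsewhere in the tree and may differ:

#include <cstdint>

// Sketch only: per-profile counters that descriptor open()/close() and
// start/stop would maintain, matching the checks used above.
struct IOProfileCounters {
    uint32_t maxOpenCount = 1;    // maximum concurrently opened streams for this profile
    uint32_t curOpenCount = 0;    // incremented by open(), decremented by close()
    uint32_t curActiveCount = 0;  // incremented on start, decremented on stop

    bool canOpenNewIo() const { return curOpenCount < maxOpenCount; }
};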
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 11894dc..2d41bd1 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -601,20 +601,15 @@
                 audio_devices_t device,
                 audio_session_t session,
                 audio_stream_type_t stream,
-                uint32_t samplingRate,
-                audio_format_t format,
-                audio_channel_mask_t channelMask,
-                audio_output_flags_t flags,
-                const audio_offload_info_t *offloadInfo);
+                const audio_config_t *config,
+                audio_output_flags_t flags);
         // internal method to return the input handle for the given device and format
         audio_io_handle_t getInputForDevice(audio_devices_t device,
                 String8 address,
                 audio_session_t session,
                 uid_t uid,
                 audio_source_t inputSource,
-                uint32_t samplingRate,
-                audio_format_t format,
-                audio_channel_mask_t channelMask,
+                const audio_config_base_t *config,
                 audio_input_flags_t flags,
                 AudioMix *policyMix);
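getOutputForDevice() and getInputForDevice() now take a single config struct instead of separate rate/format/mask arguments. A hedged sketch of how a caller can pack the legacy fields, following the openOutput() code removed above (the helper itself is hypothetical):

#include <system/audio.h>

// Hypothetical helper: build the audio_config_t now expected by getOutputForDevice().
static audio_config_t makeOutputConfig(uint32_t samplingRate, audio_format_t format,
                                        audio_channel_mask_t channelMask,
                                        const audio_offload_info_t *offloadInfo) {
    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
    config.sample_rate  = samplingRate;
    config.channel_mask = channelMask;
    config.format       = format;
    if (offloadInfo != nullptr) {
        config.offload_info = *offloadInfo;  // only meaningful for offloaded outputs
    }
    return config;
}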
 
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index bd94e3e..1ee5ccf 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -278,8 +278,8 @@
         return NO_INIT;
     }
     // already checked by client, but double-check in case the client wrapper is bypassed
-    if (attr->source >= AUDIO_SOURCE_CNT && attr->source != AUDIO_SOURCE_HOTWORD &&
-        attr->source != AUDIO_SOURCE_FM_TUNER) {
+    if (attr->source < AUDIO_SOURCE_DEFAULT || (attr->source >= AUDIO_SOURCE_CNT &&
+            attr->source != AUDIO_SOURCE_HOTWORD && attr->source != AUDIO_SOURCE_FM_TUNER)) {
         return BAD_VALUE;
     }
 
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 7ec3ccb..1fbba58 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -91,9 +91,6 @@
 
 LOCAL_CFLAGS += -Wall -Wextra -Werror
 
-# Workaround for invalid unused-lambda-capture warning http://b/38349491
-LOCAL_CLANG_CFLAGS += -Wno-error=unused-lambda-capture
-
 LOCAL_MODULE:= libcameraservice
 
 include $(BUILD_SHARED_LIBRARY)
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 2cf648f..585d2eb 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -859,6 +859,12 @@
 
     outputStreams.push(getPreviewStreamId());
 
+    if (params.isDeviceZslSupported) {
+        // If device ZSL is supported, resume preview buffers that may be paused
+        // during last takePicture().
+        mDevice->dropStreamBuffers(false, getPreviewStreamId());
+    }
+
     if (!params.recordingHint) {
         if (!restart) {
             res = mStreamingProcessor->updatePreviewRequest(params);
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index a407d0b..910dd78 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -136,7 +136,12 @@
     const char *enddump = "\n\n";
     write(fd, enddump, strlen(enddump));
 
-    return mHardware->dump(fd, args);
+    sp<CameraHardwareInterface> hardware = mHardware;
+    if (hardware != nullptr) {
+        return hardware->dump(fd, args);
+    }
+    ALOGI("%s: camera device closed already, skip dumping", __FUNCTION__);
+    return OK;
 }
 
 // ----------------------------------------------------------------------------
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index b65f1c7..1ee216f 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -553,6 +553,12 @@
         return DONE;
     }
 
+    if (l.mParameters.isDeviceZslSupported) {
+        // If device ZSL is supported, drop all pending preview buffers to reduce the chance of
+        // rendering preview frames newer than the still frame.
+        client->getCameraDevice()->dropStreamBuffers(true, client->getPreviewStreamId());
+    }
+
     /**
      * Clear the streaming request for still-capture pictures
      *   (as opposed to i.e. video snapshots)
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 68384b0..f1f96c3 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -359,6 +359,12 @@
             const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
             const std::vector<size_t> &removedSurfaceIds,
             KeyedVector<sp<Surface>, size_t> *outputMap/*out*/) = 0;
+
+    /**
+     * Drop buffers for the stream with the given streamId if dropping is true. If dropping is
+     * false, stop dropping buffers for that stream.
+     */
+    virtual status_t dropStreamBuffers(bool /*dropping*/, int /*streamId*/) = 0;
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index e0a2dd4..c0db8e7 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -2039,6 +2039,20 @@
     return res;
 }
 
+status_t Camera3Device::dropStreamBuffers(bool dropping, int streamId) {
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    int idx = mOutputStreams.indexOfKey(streamId);
+    if (idx == NAME_NOT_FOUND) {
+        ALOGE("%s: Stream %d is not found.", __FUNCTION__, streamId);
+        return BAD_VALUE;
+    }
+
+    sp<Camera3OutputStreamInterface> stream = mOutputStreams.editValueAt(idx);
+    return stream->dropBuffers(dropping);
+}
+
 /**
  * Camera3Device private methods
  */
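dropStreamBuffers() only looks up the output stream and toggles its dropping flag. A hedged usage sketch of how the API1 path drives it around a still capture, mirroring the Camera2Client and CaptureSequencer hunks earlier; the helper names are illustrative only:

// Illustrative helpers, not methods that exist in the tree.
static void pausePreviewForStillCapture(android::CameraDeviceBase *device, int previewStreamId) {
    // Cancel pending preview buffers so frames newer than the still are not rendered.
    device->dropStreamBuffers(true, previewStreamId);
}

static void resumePreviewAfterStillCapture(android::CameraDeviceBase *device, int previewStreamId) {
    // Let preview buffers flow to the display again.
    device->dropStreamBuffers(false, previewStreamId);
}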
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 357b893..e9466ab 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -192,6 +192,12 @@
             const std::vector<size_t> &removedSurfaceIds,
             KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
 
+    /**
+     * Drop buffers for the stream with the given streamId if dropping is true. If dropping is
+     * false, stop dropping buffers for that stream.
+     */
+    status_t dropStreamBuffers(bool dropping, int streamId) override;
+
   private:
 
     // internal typedefs
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index 4b36ea2..0a245c4 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -108,6 +108,10 @@
     return false;
 }
 
+status_t Camera3DummyStream::dropBuffers(bool /*dropping*/) {
+    return OK;
+}
+
 status_t Camera3DummyStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
     ALOGE("%s: Stream %d: Dummy stream doesn't support set consumer surface!",
             __FUNCTION__, mId);
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index 3212031..684f4b0 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -57,6 +57,12 @@
     virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
 
     /**
+     * Drop buffers if dropping is true. If dropping is false, do not drop buffers.
+     */
+    virtual status_t dropBuffers(bool /*dropping*/) override;
+
+    /**
      * Return if this output stream is for video encoding.
      */
     bool isVideoStream() const;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 329172a..e79eecc 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -44,6 +44,7 @@
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
         mConsumerUsage(0),
+        mDropBuffers(false),
         mDequeueBufferLatency(kDequeueLatencyBinSize) {
 
     if (mConsumer == NULL) {
@@ -70,6 +71,7 @@
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
         mConsumerUsage(0),
+        mDropBuffers(false),
         mDequeueBufferLatency(kDequeueLatencyBinSize) {
 
     if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
@@ -100,6 +102,7 @@
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
         mConsumerUsage(consumerUsage),
+        mDropBuffers(false),
         mDequeueBufferLatency(kDequeueLatencyBinSize) {
     // Deferred consumer only support preview surface format now.
     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
@@ -139,6 +142,7 @@
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
         mConsumerUsage(consumerUsage),
+        mDropBuffers(false),
         mDequeueBufferLatency(kDequeueLatencyBinSize) {
 
     if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
@@ -227,9 +231,14 @@
     /**
      * Return buffer back to ANativeWindow
      */
-    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
+    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR || mDropBuffers) {
         // Cancel buffer
-        ALOGW("A frame is dropped for stream %d", mId);
+        if (mDropBuffers) {
+            ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
+        } else {
+            ALOGW("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
+        }
+
         res = currentConsumer->cancelBuffer(currentConsumer.get(),
                 anwBuffer,
                 anwReleaseFence);
@@ -785,6 +794,12 @@
     return res;
 }
 
+status_t Camera3OutputStream::dropBuffers(bool dropping) {
+    Mutex::Autolock l(mLock);
+    mDropBuffers = dropping;
+    return OK;
+}
+
 status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
     return OK;
 }
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index fbb14fe..18b1901 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -166,6 +166,11 @@
     virtual status_t notifyBufferReleased(ANativeWindowBuffer *anwBuffer);
 
     /**
+     * Drop buffers if dropping is true. If dropping is false, do not drop buffers.
+     */
+    virtual status_t dropBuffers(bool dropping) override;
+
+    /**
      * Set the graphic buffer manager to get/return the stream buffers.
      *
      * It is only legal to call this method when stream is in STATE_CONSTRUCTED state.
@@ -260,6 +265,9 @@
      */
     uint64_t    mConsumerUsage;
 
+    // Whether to drop valid buffers.
+    bool mDropBuffers;
+
     /**
      * Internal Camera3Stream interface
      */
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index edfbab1..08fcf38 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -73,6 +73,11 @@
             const std::vector<OutputStreamInfo> &outputInfo,
             const std::vector<size_t> &removedSurfaceIds,
             KeyedVector<sp<Surface>, size_t> *outputMap/*out*/) = 0;
+
+    /**
+     * Drop buffers if dropping is true. If dropping is false, do not drop buffers.
+     */
+    virtual status_t dropBuffers(bool /*dropping*/) = 0;
 };
 
 } // namespace camera3
diff --git a/services/mediaanalytics/MediaAnalyticsService.cpp b/services/mediaanalytics/MediaAnalyticsService.cpp
index 83992aa..7f42b1b 100644
--- a/services/mediaanalytics/MediaAnalyticsService.cpp
+++ b/services/mediaanalytics/MediaAnalyticsService.cpp
@@ -159,7 +159,8 @@
           mMaxRecordAgeNs(kMaxRecordAgeNs),
           mMaxRecordSets(kMaxRecordSets),
           mNewSetInterval(kNewSetIntervalNs),
-          mDumpProto(MediaAnalyticsItem::PROTO_V0) {
+          mDumpProto(MediaAnalyticsItem::PROTO_V1),
+          mDumpProtoDefault(MediaAnalyticsItem::PROTO_V1) {
 
     ALOGD("MediaAnalyticsService created");
     // clear our queues
@@ -271,7 +272,7 @@
     }
 
     ALOGV("given uid %d; sanitized uid: %d sanitized pkg: %s "
-          "sanitized pkg version: %d",
+          "sanitized pkg version: %"  PRId64,
           uid_given, item->getUid(),
           item->getPkgName().c_str(),
           item->getPkgVersionCode());
@@ -381,6 +382,7 @@
     String16 summaryOption("-summary");
     bool summary = false;
     String16 protoOption("-proto");
+    int chosenProto = mDumpProtoDefault;
     String16 clearOption("-clear");
     bool clear = false;
     String16 sinceOption("-since");
@@ -400,7 +402,7 @@
             i++;
             if (i < n) {
                 String8 value(args[i]);
-                int proto = MediaAnalyticsItem::PROTO_V0;       // default to original
+                int proto = MediaAnalyticsItem::PROTO_V0;
                 char *endp;
                 const char *p = value.string();
                 proto = strtol(p, &endp, 10);
@@ -410,8 +412,12 @@
                     } else if (proto > MediaAnalyticsItem::PROTO_LAST) {
                         proto = MediaAnalyticsItem::PROTO_LAST;
                     }
-                    mDumpProto = proto;
+                    chosenProto = proto;
+                } else {
+                    result.append("unable to parse value for -proto\n\n");
                 }
+            } else {
+                result.append("missing value for -proto\n\n");
             }
         } else if (args[i] == sinceOption) {
             i++;
@@ -437,7 +443,7 @@
         } else if (args[i] == helpOption) {
             result.append("Recognized parameters:\n");
             result.append("-help        this help message\n");
+            result.append("-proto #     dump using protocol #\n");
+            result.append("-proto #     dump using protocol #");
             result.append("-summary     show summary info\n");
             result.append("-clear       clears out saved records\n");
             result.append("-only X      process records for component X\n");
@@ -450,6 +456,8 @@
 
     Mutex::Autolock _l(mLock);
 
+    mDumpProto = chosenProto;
+
     // we ALWAYS dump this piece
     snprintf(buffer, SIZE, "Dump of the %s process:\n", kServiceName);
     result.append(buffer);
@@ -848,7 +856,7 @@
     } else {
         AString pkg;
         std::string installer = "";
-        int32_t versionCode = 0;
+        int64_t versionCode = 0;
 
         struct passwd *pw = getpwuid(uid);
         if (pw) {
@@ -918,7 +926,7 @@
                 }
 
 
-                ALOGV("package '%s' installed by '%s' versioncode %d / %08x",
+                ALOGV("package '%s' installed by '%s' versioncode %"  PRId64 " / %" PRIx64,
                       pkg.c_str(), installer.c_str(), versionCode, versionCode);
 
                 if (strncmp(installer.c_str(), "com.android.", 12) == 0) {
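The -proto handling now parses into a local, clamps it, and only commits to mDumpProto once the dump lock is held, so one bad dump invocation no longer changes the service-wide default. A condensed sketch of that flow; dump() locals such as value, result, mDumpProtoDefault and mLock are assumed in scope, and the lower-bound clamp is elided as in the visible hunks:

int chosenProto = mDumpProtoDefault;
char *endp = nullptr;
const char *p = value.string();
long proto = strtol(p, &endp, 10);
if (endp != p && *endp == '\0') {
    if (proto > MediaAnalyticsItem::PROTO_LAST) {
        proto = MediaAnalyticsItem::PROTO_LAST;   // clamp to the newest protocol
    }
    chosenProto = (int) proto;
} else {
    result.append("unable to parse value for -proto\n\n");
}
// ... later, once the lock is held, commit for this dump only:
Mutex::Autolock _l(mLock);
mDumpProto = chosenProto;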
diff --git a/services/mediaanalytics/MediaAnalyticsService.h b/services/mediaanalytics/MediaAnalyticsService.h
index 52e4631..fce7d08 100644
--- a/services/mediaanalytics/MediaAnalyticsService.h
+++ b/services/mediaanalytics/MediaAnalyticsService.h
@@ -125,6 +125,7 @@
 
     // support for generating output
     int mDumpProto;
+    int mDumpProtoDefault;
     String8 dumpQueue(List<MediaAnalyticsItem*> *);
     String8 dumpQueue(List<MediaAnalyticsItem*> *, nsecs_t, const char *only);
 
@@ -137,7 +138,7 @@
         uid_t uid;
         AString pkg;
         AString installer;
-        int32_t versionCode;
+        int64_t versionCode;
         nsecs_t expiration;
     };
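With versionCode widened to int64_t, a plain %d in the format strings would be wrong on both 32- and 64-bit builds; the PRId64/PRIx64 macros used in the .cpp hunks are the portable spelling. A minimal standalone example, assuming nothing beyond the standard headers:

#include <cinttypes>
#include <cstdio>

void logVersion(const char *pkg, int64_t versionCode) {
    // PRId64/PRIx64 expand to the correct length modifiers for 64-bit values.
    printf("package '%s' versioncode %" PRId64 " / %" PRIx64 "\n",
           pkg, versionCode, (uint64_t) versionCode);
}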
 
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index 57241a1..b031888 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -49,7 +49,7 @@
     memset(mOutputBuffer, 0, mBufferSizeInBytes);
 }
 
-bool AAudioMixer::mix(int streamIndex, FifoBuffer *fifo, bool allowUnderflow) {
+int32_t AAudioMixer::mix(int streamIndex, FifoBuffer *fifo, bool allowUnderflow) {
     WrappingBuffer wrappingBuffer;
     float *destination = mOutputBuffer;
 
@@ -105,7 +105,7 @@
     ATRACE_END();
 #endif /* AAUDIO_MIXER_ATRACE_ENABLED */
 
-    return (framesLeft > 0); // did not get all the frames we needed, ie. "underflow"
+    return (framesDesired - framesLeft); // framesRead
 }
 
 void AAudioMixer::mixPart(float *destination, float *source, int32_t numFrames) {
diff --git a/services/oboeservice/AAudioMixer.h b/services/oboeservice/AAudioMixer.h
index 5625d4d..d5abc5b 100644
--- a/services/oboeservice/AAudioMixer.h
+++ b/services/oboeservice/AAudioMixer.h
@@ -36,15 +36,17 @@
      * @param streamIndex for marking stream variables in systrace
      * @param fifo to read from
      * @param allowUnderflow if true then allow mixer to advance read index past the write index
-     * @return true if actually underflowed
+     * @return frames read from this stream
      */
-    bool mix(int streamIndex, android::FifoBuffer *fifo, bool allowUnderflow);
-
-    void mixPart(float *destination, float *source, int32_t numFrames);
+    int32_t mix(int streamIndex, android::FifoBuffer *fifo, bool allowUnderflow);
 
     float *getOutputBuffer();
 
+    int32_t getFramesPerBurst() const { return mFramesPerBurst; }
+
 private:
+    void mixPart(float *destination, float *source, int32_t numFrames);
+
     float   *mOutputBuffer = nullptr;
     int32_t  mSamplesPerFrame = 0;
     int32_t  mFramesPerBurst = 0;
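mix() now reports how many frames it actually consumed instead of a bare underflow flag, so the caller decides what counts as an underrun. A hedged caller-side sketch; mixer, fifo and streamShared stand in for the endpoint's members, and the real call site (AAudioServiceEndpointPlay.cpp below) additionally waits for data to start flowing before counting:

int32_t framesMixed = mixer.mix(streamIndex, fifo, allowUnderflow);
// Less than a full burst only counts as an underrun when the mixer was allowed
// to advance past the write index, i.e. the stream was expected to keep up.
if (allowUnderflow && framesMixed < mixer.getFramesPerBurst()) {
    streamShared->incrementXRunCount();
}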
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index f902bef..efac788 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -58,7 +58,6 @@
 // Read data from the shared MMAP stream and then distribute it to the client streams.
 void *AAudioServiceEndpointCapture::callbackLoop() {
     ALOGD("callbackLoop() entering");
-    int32_t underflowCount = 0;
     aaudio_result_t result = AAUDIO_OK;
     int64_t timeoutNanos = getStreamInternal()->calculateReasonableTimeout();
 
@@ -102,9 +101,10 @@
                             int64_t positionOffset = mmapFramesRead - clientFramesWritten;
                             streamShared->setTimestampPositionOffset(positionOffset);
 
+                            // Is the buffer too full to write a burst?
                             if (fifo->getFifoControllerBase()->getEmptyFramesAvailable() <
-                                getFramesPerBurst()) {
-                                underflowCount++;
+                                    getFramesPerBurst()) {
+                                streamShared->incrementXRunCount();
                             } else {
                                 fifo->write(mDistributionBuffer, getFramesPerBurst());
                             }
@@ -125,6 +125,6 @@
         }
     }
 
-    ALOGD("callbackLoop() exiting, %d underflows", underflowCount);
+    ALOGD("callbackLoop() exiting");
     return NULL; // TODO review
 }
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index c2feb6b..2601f3f 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -34,6 +34,7 @@
 #include "AAudioServiceStreamShared.h"
 #include "AAudioServiceEndpointPlay.h"
 #include "AAudioServiceEndpointShared.h"
+#include "AAudioServiceStreamBase.h"
 
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
@@ -108,9 +109,19 @@
                         int64_t positionOffset = mmapFramesWritten - clientFramesRead;
                         streamShared->setTimestampPositionOffset(positionOffset);
 
-                        bool underflowed = mMixer.mix(index, fifo, allowUnderflow);
-                        if (underflowed) {
-                            streamShared->incrementXRunCount();
+                        int32_t framesMixed = mMixer.mix(index, fifo, allowUnderflow);
+
+                        if (streamShared->isFlowing()) {
+                            // Consider it an underflow if we got less than a burst
+                            // after the data started flowing.
+                            bool underflowed = allowUnderflow
+                                               && framesMixed < mMixer.getFramesPerBurst();
+                            if (underflowed) {
+                                streamShared->incrementXRunCount();
+                            }
+                        } else if (framesMixed > 0) {
+                            // Mark beginning of data flow after a start.
+                            streamShared->setFlowing(true);
                         }
                         clientFramesRead = fifo->getReadCounter();
                     }
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index 820ed28..6af9e7e 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -47,6 +47,7 @@
            << std::setfill('0') << std::setw(8)
            << std::hex << mStreamInternal->getServiceHandle()
            << std::dec << std::setfill(' ');
+    result << ", XRuns = " << mStreamInternal->getXRunCount();
     result << "\n";
     result << "    Running Stream Count: " << mRunningStreamCount << "\n";
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 6652cc9..635b45c 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -172,6 +172,8 @@
         goto error;
     }
 
+    setFlowing(false);
+
     // Start with fresh presentation timestamps.
     mAtomicTimestamp.clear();
 
@@ -311,12 +313,19 @@
 }
 
 aaudio_result_t AAudioServiceStreamBase::sendServiceEvent(aaudio_service_event_t event,
-                                               double  dataDouble,
-                                               int64_t dataLong) {
+                                                          double  dataDouble) {
     AAudioServiceMessage command;
     command.what = AAudioServiceMessage::code::EVENT;
     command.event.event = event;
     command.event.dataDouble = dataDouble;
+    return writeUpMessageQueue(&command);
+}
+
+aaudio_result_t AAudioServiceStreamBase::sendServiceEvent(aaudio_service_event_t event,
+                                                          int64_t dataLong) {
+    AAudioServiceMessage command;
+    command.what = AAudioServiceMessage::code::EVENT;
+    command.event.event = event;
     command.event.dataLong = dataLong;
     return writeUpMessageQueue(&command);
 }
@@ -336,6 +345,10 @@
     }
 }
 
+aaudio_result_t AAudioServiceStreamBase::sendXRunCount(int32_t xRunCount) {
+    return sendServiceEvent(AAUDIO_SERVICE_EVENT_XRUN, (int64_t) xRunCount);
+}
+
 aaudio_result_t AAudioServiceStreamBase::sendCurrentTimestamp() {
     AAudioServiceMessage command;
     // Send a timestamp for the clock model.
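Splitting sendServiceEvent() into separate double and int64_t overloads means a caller that only wants to send a long payload no longer has to thread it past a defaulted double argument. A small illustration of the two call shapes; the event names are the ones used elsewhere in this service, and the surrounding variables are assumed:

// Volume updates carry a double payload; XRun counts carry an int64_t payload.
sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, (double) volume);
sendServiceEvent(AAUDIO_SERVICE_EVENT_XRUN, (int64_t) xRunCount);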
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index af435b4..29987f6 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -129,11 +129,15 @@
     // -------------------------------------------------------------------
 
     /**
-     * Send a message to the client.
+     * Send a message to the client with an int64_t data value.
      */
     aaudio_result_t sendServiceEvent(aaudio_service_event_t event,
-                                     double  dataDouble = 0.0,
                                      int64_t dataLong = 0);
+    /**
+     * Send a message to the client with a double data value.
+     */
+    aaudio_result_t sendServiceEvent(aaudio_service_event_t event,
+                                     double  dataDouble);
 
     /**
      * Fill in a parcelable description of stream.
@@ -182,6 +186,19 @@
 
     void onVolumeChanged(float volume);
 
+    /**
+     * Set false when the stream is started.
+     * Set true when data is first read from the stream.
+     * @param b true once data has started flowing after a start
+     */
+    void setFlowing(bool b) {
+        mFlowing = b;
+    }
+
+    bool isFlowing() const {
+        return mFlowing;
+    }
+
 protected:
 
     /**
@@ -204,6 +221,8 @@
 
     aaudio_result_t sendCurrentTimestamp();
 
+    aaudio_result_t sendXRunCount(int32_t xRunCount);
+
     /**
      * @param positionFrames
      * @param timeNanos
@@ -237,6 +256,8 @@
 
 private:
     aaudio_handle_t         mHandle = -1;
+
+    bool                    mFlowing = false;
 };
 
 } /* namespace aaudio */
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 8499ea5..3b12e61 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -1,4 +1,4 @@
-/*
+ /*
  * Copyright (C) 2017 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -80,7 +80,7 @@
     }
 
     void incrementXRunCount() {
-        mXRunCount++;
+        sendXRunCount(++mXRunCount);
     }
 
     int32_t getXRunCount() const {