Merge "Added writer fuzzers"
diff --git a/METADATA b/METADATA
index 1fbda08..aabda36 100644
--- a/METADATA
+++ b/METADATA
@@ -2,6 +2,22 @@
 #     CONSULT THE OWNERS AND opensource-licensing@google.com BEFORE
 #     DEPENDING ON IT IN YOUR PROJECT. ***
 third_party {
-  # would be NOTICE save for drm/mediadrm/plugins/clearkey/hidl/
+  # would be NOTICE save for Widevine Master License Agreement in:
+  #   drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
+  #   drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+  #   drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
+  #   drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto
+  #   drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
+  # and patent disclaimers in:
+  #   media/codec2/components/aac/patent_disclaimer.txt
+  #   media/codec2/components/amr_nb_wb/patent_disclaimer.txt
+  #   media/codec2/components/mp3/patent_disclaimer.txt
+  #   media/codec2/components/mpeg4_h263/patent_disclaimer.txt
+  #   media/codecs/amrnb/patent_disclaimer.txt
+  #   media/codecs/amrwb/dec/patent_disclaimer.txt
+  #   media/codecs/amrwb/enc/patent_disclaimer.txt
+  #   media/codecs/m4v_h263/patent_disclaimer.txt
+  #   media/codecs/mp3dec/patent_disclaimer.txt
+  #   media/libstagefright/codecs/aacenc/patent_disclaimer.txt
   license_type: BY_EXCEPTION_ONLY
 }
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 07176cf..6c1cf33 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -45,8 +45,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 /**
  * ACameraCaptureSession is an opaque type that manages frame captures of a camera device.
  *
@@ -593,10 +591,6 @@
 camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* session)
         __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 28
-
 typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
@@ -641,9 +635,7 @@
  */
 camera_status_t ACameraCaptureSession_updateSharedOutput(ACameraCaptureSession* session,
         ACaptureSessionOutput* output) __INTRODUCED_IN(28);
-#endif /* __ANDROID_API__ >= 28 */
 
-#if __ANDROID_API__ >= 29
 /**
  * The definition of final capture result callback with logical multi-camera support.
  *
@@ -788,8 +780,6 @@
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 1537bde..f72fe8d 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -44,8 +44,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 /**
  * ACameraDevice is opaque type that provides access to a camera device.
  *
@@ -687,10 +685,6 @@
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 28
-
 /**
  * Create a shared ACaptureSessionOutput object.
  *
@@ -782,10 +776,6 @@
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Create a ACaptureSessionOutput object used for streaming from a physical
  * camera as part of a logical camera device.
@@ -890,8 +880,6 @@
         const ACameraDevice* device,
         const ACaptureSessionOutputContainer* sessionOutputContainer) __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_DEVICE_H */
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index fc618ee..9d77eb4 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -40,8 +40,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 typedef enum {
     ACAMERA_OK = 0,
 
@@ -138,8 +136,6 @@
     ACAMERA_ERROR_UNSUPPORTED_OPERATION = ACAMERA_ERROR_BASE - 14,
 } camera_status_t;
 
-#endif /* __ANDROID_API__ >= 24 */
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_ERROR_H */
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 0a2ee57..be32b11 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -44,8 +44,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 /**
  * ACameraManager is opaque type that provides access to camera service.
  *
@@ -293,10 +291,6 @@
         ACameraDevice_StateCallbacks* callback,
         /*out*/ACameraDevice** device) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Definition of camera access permission change callback.
  *
@@ -419,8 +413,6 @@
         __INTRODUCED_IN(29);
 #endif
 
-#endif /* __ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_MANAGER_H */
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index a840bd1..0d5e6c4 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -41,9 +41,7 @@
 #include <sys/cdefs.h>
 
 #ifndef __ANDROID_VNDK__
-#if __ANDROID_API__ >= 30
 #include "jni.h"
-#endif  /* __ANDROID_API__ >= 30 */
 #endif  /* __ANDROID_VNDK__ */
 
 #include "NdkCameraError.h"
@@ -51,8 +49,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 /**
  * ACameraMetadata is opaque type that provides access to read-only camera metadata like camera
  * characteristics (via {@link ACameraManager_getCameraCharacteristics}) or capture results (via
@@ -238,10 +234,6 @@
  */
 void ACameraMetadata_free(ACameraMetadata* metadata) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Helper function to check if a camera is logical multi-camera.
  *
@@ -260,10 +252,7 @@
         /*out*/size_t* numPhysicalCameras, /*out*/const char* const** physicalCameraIds)
         __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
 #ifndef __ANDROID_VNDK__
-#if __ANDROID_API__ >= 30
 
 /**
  * Return a {@link ACameraMetadata} that references the same data as
@@ -290,7 +279,6 @@
 ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata)
         __INTRODUCED_IN(30);
 
-#endif /* __ANDROID_API__ >= 30 */
 #endif  /* __ANDROID_VNDK__ */
 
 __END_DECLS
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 1354fce..af12354 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -40,8 +40,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 typedef enum acamera_metadata_section {
     ACAMERA_COLOR_CORRECTION,
     ACAMERA_CONTROL,
@@ -8993,8 +8991,6 @@
 
 
 
-#endif /* __ANDROID_API__ >= 24 */
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_TAGS_H */
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index d3f8826..a4dc374 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -44,8 +44,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 // Container for output targets
 typedef struct ACameraOutputTargets ACameraOutputTargets;
 
@@ -304,10 +302,6 @@
  */
 void ACaptureRequest_free(ACaptureRequest* request) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 28
-
 /**
  * Associate an arbitrary user context pointer to the {@link ACaptureRequest}
  *
@@ -356,10 +350,6 @@
  */
 ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Get a metadata entry from input {@link ACaptureRequest} for
  * a physical camera backing a logical multi-camera device.
@@ -569,8 +559,6 @@
         ACaptureRequest* request, const char* physicalId, uint32_t tag,
         uint32_t count, const ACameraMetadata_rational* data) __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif /* _NDK_CAPTURE_REQUEST_H */
diff --git a/media/bufferpool/2.0/AccessorImpl.cpp b/media/bufferpool/2.0/AccessorImpl.cpp
index 6111fea..1d2562e 100644
--- a/media/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/bufferpool/2.0/AccessorImpl.cpp
@@ -39,6 +39,8 @@
 
     static constexpr size_t kMinAllocBytesForEviction = 1024*1024*15;
     static constexpr size_t kMinBufferCountForEviction = 25;
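+    // Eviction hysteresis for unused buffers: once more than kMaxUnusedBufferCount
+    // buffers are cached but not in use, cleanUp() evicts free buffers until the
+    // count drops to kUnusedBufferCountTarget.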
+    static constexpr size_t kMaxUnusedBufferCount = 64;
+    static constexpr size_t kUnusedBufferCountTarget = kMaxUnusedBufferCount - 16;
 
     static constexpr nsecs_t kEvictGranularityNs = 1000000000; // 1 sec
     static constexpr nsecs_t kEvictDurationNs = 5000000000; // 5 secs
@@ -724,9 +726,11 @@
 }
 
 void Accessor::Impl::BufferPool::cleanUp(bool clearCache) {
-    if (clearCache || mTimestampUs > mLastCleanUpUs + kCleanUpDurationUs) {
+    if (clearCache || mTimestampUs > mLastCleanUpUs + kCleanUpDurationUs ||
+            mStats.buffersNotInUse() > kMaxUnusedBufferCount) {
         mLastCleanUpUs = mTimestampUs;
-        if (mTimestampUs > mLastLogUs + kLogDurationUs) {
+        if (mTimestampUs > mLastLogUs + kLogDurationUs ||
+                mStats.buffersNotInUse() > kMaxUnusedBufferCount) {
             mLastLogUs = mTimestampUs;
             ALOGD("bufferpool2 %p : %zu(%zu size) total buffers - "
                   "%zu(%zu size) used buffers - %zu/%zu (recycle/alloc) - "
@@ -737,8 +741,9 @@
                   mStats.mTotalFetches, mStats.mTotalTransfers);
         }
         for (auto freeIt = mFreeBuffers.begin(); freeIt != mFreeBuffers.end();) {
-            if (!clearCache && (mStats.mSizeCached < kMinAllocBytesForEviction
-                    || mBuffers.size() < kMinBufferCountForEviction)) {
+            if (!clearCache && mStats.buffersNotInUse() <= kUnusedBufferCountTarget &&
+                    (mStats.mSizeCached < kMinAllocBytesForEviction ||
+                     mBuffers.size() < kMinBufferCountForEviction)) {
                 break;
             }
             auto it = mBuffers.find(*freeIt);
diff --git a/media/bufferpool/2.0/AccessorImpl.h b/media/bufferpool/2.0/AccessorImpl.h
index cd1b4d0..3d39941 100644
--- a/media/bufferpool/2.0/AccessorImpl.h
+++ b/media/bufferpool/2.0/AccessorImpl.h
@@ -193,6 +193,12 @@
                 : mSizeCached(0), mBuffersCached(0), mSizeInUse(0), mBuffersInUse(0),
                   mTotalAllocations(0), mTotalRecycles(0), mTotalTransfers(0), mTotalFetches(0) {}
 
+            /// # of currently unused buffers
+            size_t buffersNotInUse() const {
+                ALOG_ASSERT(mBuffersCached >= mBuffersInUse);
+                return mBuffersCached - mBuffersInUse;
+            }
+
             /// A new buffer is allocated on an allocation request.
             void onBufferAllocated(size_t allocSize) {
                 mSizeCached += allocSize;
diff --git a/media/bufferpool/2.0/BufferPoolClient.cpp b/media/bufferpool/2.0/BufferPoolClient.cpp
index 342fef6..9308b81 100644
--- a/media/bufferpool/2.0/BufferPoolClient.cpp
+++ b/media/bufferpool/2.0/BufferPoolClient.cpp
@@ -32,6 +32,8 @@
 static constexpr int64_t kReceiveTimeoutUs = 1000000; // 100ms
 static constexpr int kPostMaxRetry = 3;
 static constexpr int kCacheTtlUs = 1000000; // TODO: tune
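+// Client-side cache hysteresis: once more than kMaxCachedBufferCount buffers are
+// cached but inactive, evictCaches() trims the cache down toward
+// kCachedBufferCountTarget.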
+static constexpr size_t kMaxCachedBufferCount = 64;
+static constexpr size_t kCachedBufferCountTarget = kMaxCachedBufferCount - 16;
 
 class BufferPoolClient::Impl
         : public std::enable_shared_from_this<BufferPoolClient::Impl> {
@@ -136,6 +138,10 @@
             --mActive;
             mLastChangeUs = getTimestampNow();
         }
+
+        int cachedBufferCount() const {
+            return mBuffers.size() - mActive;
+        }
     } mCache;
 
     // FMQ - release notifier
@@ -668,10 +674,12 @@
 // should have mCache.mLock
 void BufferPoolClient::Impl::evictCaches(bool clearCache) {
     int64_t now = getTimestampNow();
-    if (now >= mLastEvictCacheUs + kCacheTtlUs || clearCache) {
+    if (now >= mLastEvictCacheUs + kCacheTtlUs ||
+            clearCache || mCache.cachedBufferCount() > kMaxCachedBufferCount) {
         size_t evicted = 0;
         for (auto it = mCache.mBuffers.begin(); it != mCache.mBuffers.end();) {
-            if (!it->second->hasCache() && (it->second->expire() || clearCache)) {
+            if (!it->second->hasCache() && (it->second->expire() ||
+                        clearCache || mCache.cachedBufferCount() > kCachedBufferCountTarget)) {
                 it = mCache.mBuffers.erase(it);
                 ++evicted;
             } else {
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 82c061a..b1cf388 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -30,6 +30,7 @@
 
 namespace android {
 constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+constexpr size_t kMaxDimension = 1920;
 constexpr char COMPONENT_NAME[] = "c2.android.mpeg2.decoder";
 
 class C2SoftMpeg2Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -64,8 +65,8 @@
                 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                 .withFields({
-                    C2F(mSize, width).inRange(16, 1920, 4),
-                    C2F(mSize, height).inRange(16, 1088, 4),
+                    C2F(mSize, width).inRange(16, kMaxDimension, 2),
+                    C2F(mSize, height).inRange(16, kMaxDimension, 2),
                 })
                 .withSetter(SizeSetter)
                 .build());
@@ -91,8 +92,8 @@
                 DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
                 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
                 .withFields({
-                    C2F(mSize, width).inRange(2, 1920, 2),
-                    C2F(mSize, height).inRange(2, 1088, 2),
+                    C2F(mSize, width).inRange(2, kMaxDimension, 2),
+                    C2F(mSize, height).inRange(2, kMaxDimension, 2),
                 })
                 .withSetter(MaxPictureSizeSetter, mSize)
                 .build());
@@ -204,8 +205,8 @@
                                     const C2P<C2StreamPictureSizeInfo::output> &size) {
         (void)mayBlock;
         // TODO: get max width/height from the size's field helpers vs. hardcoding
-        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 1920u);
-        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 1088u);
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), kMaxDimension);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), kMaxDimension);
         return C2R::Ok();
     }
 
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index a7cc037..ddd312f 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -35,8 +35,10 @@
 namespace android {
 constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
 #ifdef MPEG4
+constexpr size_t kMaxDimension = 1920;
 constexpr char COMPONENT_NAME[] = "c2.android.mpeg4.decoder";
 #else
+constexpr size_t kMaxDimension = 352;
 constexpr char COMPONENT_NAME[] = "c2.android.h263.decoder";
 #endif
 
@@ -75,13 +77,8 @@
                 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::output(0u, 176, 144))
                 .withFields({
-#ifdef MPEG4
-                    C2F(mSize, width).inRange(2, 1920, 2),
-                    C2F(mSize, height).inRange(2, 1088, 2),
-#else
-                    C2F(mSize, width).inRange(2, 352, 2),
-                    C2F(mSize, height).inRange(2, 288, 2),
-#endif
+                    C2F(mSize, width).inRange(2, kMaxDimension, 2),
+                    C2F(mSize, height).inRange(2, kMaxDimension, 2),
                 })
                 .withSetter(SizeSetter)
                 .build());
@@ -130,19 +127,10 @@
 
         addParameter(
                 DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
-#ifdef MPEG4
-                .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 1920, 1088))
-#else
                 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 352, 288))
-#endif
                 .withFields({
-#ifdef MPEG4
-                    C2F(mSize, width).inRange(2, 1920, 2),
-                    C2F(mSize, height).inRange(2, 1088, 2),
-#else
-                    C2F(mSize, width).inRange(2, 352, 2),
-                    C2F(mSize, height).inRange(2, 288, 2),
-#endif
+                    C2F(mSize, width).inRange(2, kMaxDimension, 2),
+                    C2F(mSize, height).inRange(2, kMaxDimension, 2),
                 })
                 .withSetter(MaxPictureSizeSetter, mSize)
                 .build());
@@ -200,13 +188,8 @@
                                     const C2P<C2StreamPictureSizeInfo::output> &size) {
         (void)mayBlock;
         // TODO: get max width/height from the size's field helpers vs. hardcoding
-#ifdef MPEG4
-        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 1920u);
-        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 1088u);
-#else
-        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 352u);
-        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 288u);
-#endif
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), kMaxDimension);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), kMaxDimension);
         return C2R::Ok();
     }
 
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 38f7389..752140a 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -151,6 +151,7 @@
 
     /* protected content */
     kParamIndexSecureMode,
+    kParamIndexEncryptedBuffer, // info-buffer, used with SM_READ_PROTECTED_WITH_ENCRYPTED
 
     // deprecated
     kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
@@ -221,6 +222,7 @@
     kParamIndexDrcEffectType, // drc, enum
     kParamIndexDrcOutputLoudness, // drc, float (dBFS)
     kParamIndexDrcAlbumMode, // drc, enum
+    kParamIndexAudioFrameSize, // int
 
     /* ============================== platform-defined parameters ============================== */
 
@@ -1144,6 +1146,8 @@
 C2ENUM(C2Config::secure_mode_t, uint32_t,
     SM_UNPROTECTED,    ///< no content protection
     SM_READ_PROTECTED, ///< input and output buffers shall be protected from reading
+    /// both read-protected and readable encrypted buffers are used
+    SM_READ_PROTECTED_WITH_ENCRYPTED,
 )
 
 typedef C2GlobalParam<C2Tuning, C2SimpleValueStruct<C2Config::secure_mode_t>, kParamIndexSecureMode>
@@ -1969,9 +1973,20 @@
 /**
  * DRC output loudness in dBFS. Retrieved during decoding
  */
- typedef C2StreamParam<C2Info, C2FloatValue, kParamIndexDrcOutputLoudness>
+typedef C2StreamParam<C2Info, C2FloatValue, kParamIndexDrcOutputLoudness>
         C2StreamDrcOutputLoudnessTuning;
- constexpr char C2_PARAMKEY_DRC_OUTPUT_LOUDNESS[] = "output.drc.output-loudness";
+constexpr char C2_PARAMKEY_DRC_OUTPUT_LOUDNESS[] = "output.drc.output-loudness";
+
+/**
+ * Audio frame size in samples.
+ *
+ * Audio encoders can expose this parameter to signal the desired audio frame
+ * size that corresponds to a single coded access unit.
+ * Default value is 0, meaning that the encoder accepts input buffers of any size.
+ */
+typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexAudioFrameSize>
+        C2StreamAudioFrameSizeInfo;
+constexpr char C2_PARAMKEY_AUDIO_FRAME_SIZE[] = "raw.audio-frame-size";
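+//
+// Illustrative sketch only (not part of this change): an audio encoder that
+// consumes fixed 1024-sample access units might advertise this parameter from
+// its interface roughly as follows (mAudioFrameSize is an assumed member):
+//
+//     addParameter(
+//             DefineParam(mAudioFrameSize, C2_PARAMKEY_AUDIO_FRAME_SIZE)
+//             .withConstValue(new C2StreamAudioFrameSizeInfo::input(0u, 1024))
+//             .build());
+//
+// CCodecBufferChannel queries the input-port C2StreamAudioFrameSizeInfo on audio
+// encoders and, when set, uses FrameReassembler to repackage client input into
+// frames of that size.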
 
 /* --------------------------------------- AAC components --------------------------------------- */
 
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 1f0c856..72f7c43 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -895,13 +895,12 @@
         BufferPoolSender* bufferPoolSender,
         std::list<BaseBlock>* baseBlocks,
         std::map<const void*, uint32_t>* baseBlockIndices) {
-    // TODO: C2InfoBuffer is not implemented.
-    (void)d;
-    (void)s;
-    (void)bufferPoolSender;
-    (void)baseBlocks;
-    (void)baseBlockIndices;
-    LOG(INFO) << "InfoBuffer not implemented.";
+    d->index = static_cast<ParamIndex>(s.index());
+    Buffer& dBuffer = d->buffer;
+    if (!objcpy(&dBuffer, s.data(), bufferPoolSender, baseBlocks, baseBlockIndices)) {
+        LOG(ERROR) << "Invalid C2InfoBuffer::data";
+        return false;
+    }
     return true;
 }
 
@@ -1336,6 +1335,68 @@
     return true;
 }
 
+// InfoBuffer -> C2InfoBuffer
+bool objcpy(std::vector<C2InfoBuffer> *d, const InfoBuffer& s,
+        const std::vector<C2BaseBlock>& baseBlocks) {
+
+    // Currently, a non-empty C2InfoBuffer must contain exactly 1 block.
+    if (s.buffer.blocks.size() == 0) {
+        return true;
+    } else if (s.buffer.blocks.size() != 1) {
+        LOG(ERROR) << "Invalid InfoBuffer::Buffer: "
+                      "a C2InfoBuffer must currently contain exactly 1 block.";
+        return false;
+    }
+
+    const Block &sBlock = s.buffer.blocks[0];
+    if (sBlock.index >= baseBlocks.size()) {
+        LOG(ERROR) << "Invalid InfoBuffer::Buffer::blocks[0].index: "
+                      "Array index out of range.";
+        return false;
+    }
+    const C2BaseBlock &baseBlock = baseBlocks[sBlock.index];
+
+    // Parse meta.
+    std::vector<C2Param*> sBlockMeta;
+    if (!parseParamsBlob(&sBlockMeta, sBlock.meta)) {
+        LOG(ERROR) << "Invalid InfoBuffer::Buffer::blocks[0].meta.";
+        return false;
+    }
+
+    // Copy fence.
+    C2Fence dFence;
+    if (!objcpy(&dFence, sBlock.fence)) {
+        LOG(ERROR) << "Invalid InfoBuffer::Buffer::blocks[0].fence.";
+        return false;
+    }
+
+    // Construct a block.
+    switch (baseBlock.type) {
+    case C2BaseBlock::LINEAR:
+        if (sBlockMeta.size() == 1 && sBlockMeta[0] != nullptr &&
+            sBlockMeta[0]->size() == sizeof(C2Hidl_RangeInfo)) {
+            C2Hidl_RangeInfo *rangeInfo =
+                    reinterpret_cast<C2Hidl_RangeInfo*>(sBlockMeta[0]);
+            d->emplace_back(C2InfoBuffer::CreateLinearBuffer(
+                    s.index,
+                    baseBlock.linear->share(
+                            rangeInfo->offset, rangeInfo->length, dFence)));
+            return true;
+        }
+        LOG(ERROR) << "Invalid Meta for C2BaseBlock::Linear InfoBuffer.";
+        break;
+    case C2BaseBlock::GRAPHIC:
+        // GRAPHIC blocks are not used for info buffers at the moment
+        LOG(ERROR) << "Unsupported C2BaseBlock::GRAPHIC for InfoBuffer.";
+        break;
+    default:
+        LOG(ERROR) << "Invalid C2BaseBlock::type for InfoBuffer.";
+        break;
+    }
+
+    return false;
+}
+
 // FrameData -> C2FrameData
 bool objcpy(C2FrameData* d, const FrameData& s,
         const std::vector<C2BaseBlock>& baseBlocks) {
@@ -1370,8 +1431,18 @@
         }
     }
 
-    // TODO: Implement this once C2InfoBuffer has constructors.
     d->infoBuffers.clear();
+    if (s.infoBuffers.size() == 0) {
+        // InfoBuffer is optional
+        return true;
+    }
+    d->infoBuffers.reserve(s.infoBuffers.size());
+    for (const InfoBuffer &sInfoBuffer: s.infoBuffers) {
+        if (!objcpy(&(d->infoBuffers), sInfoBuffer, baseBlocks)) {
+            LOG(ERROR) << "Invalid FrameData::infoBuffers.";
+            return false;
+        }
+    }
     return true;
 }
 
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 94034b5..c3cfcce 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -11,6 +11,7 @@
         "CCodecConfig.cpp",
         "Codec2Buffer.cpp",
         "Codec2InfoBuilder.cpp",
+        "FrameReassembler.cpp",
         "PipelineWatcher.cpp",
         "ReflectedParamUpdater.cpp",
     ],
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index ab7c9af..6e9413a 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -518,9 +518,24 @@
     virtual void onError(
             const std::weak_ptr<Codec2Client::Component>& component,
             uint32_t errorCode) override {
-        // TODO
-        (void)component;
-        (void)errorCode;
+        {
+            // Component is only used for reporting as we use a separate listener for each instance
+            std::shared_ptr<Codec2Client::Component> comp = component.lock();
+            if (!comp) {
+                ALOGD("Component died with error: 0x%x", errorCode);
+            } else {
+                ALOGD("Component \"%s\" returned error: 0x%x", comp->getName().c_str(), errorCode);
+            }
+        }
+
+        // Report to MediaCodec
+        // Note: for now we do not propagate the error code to MediaCodec as we would need
+        // to translate to a MediaCodec error.
+        sp<CCodec> codec(mCodec.promote());
+        if (!codec || !codec->mCallback) {
+            return;
+        }
+        codec->mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
     }
 
     virtual void onDeath(
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 05c1182..ba1d178 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -143,7 +143,8 @@
       mFrameIndex(0u),
       mFirstValidFrameIndex(0u),
       mMetaMode(MODE_NONE),
-      mInputMetEos(false) {
+      mInputMetEos(false),
+      mSendEncryptedInfoBuffer(false) {
     mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
     {
         Mutexed<Input>::Locked input(mInput);
@@ -188,7 +189,10 @@
     return mInputSurface->signalEndOfInputStream();
 }
 
-status_t CCodecBufferChannel::queueInputBufferInternal(sp<MediaCodecBuffer> buffer) {
+status_t CCodecBufferChannel::queueInputBufferInternal(
+        sp<MediaCodecBuffer> buffer,
+        std::shared_ptr<C2LinearBlock> encryptedBlock,
+        size_t blockSize) {
     int64_t timeUs;
     CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
@@ -209,6 +213,7 @@
         flags |= C2FrameData::FLAG_CODEC_CONFIG;
     }
     ALOGV("[%s] queueInputBuffer: buffer->size() = %zu", mName, buffer->size());
+    std::list<std::unique_ptr<C2Work>> items;
     std::unique_ptr<C2Work> work(new C2Work);
     work->input.ordinal.timestamp = timeUs;
     work->input.ordinal.frameIndex = mFrameIndex++;
@@ -218,9 +223,8 @@
     work->input.ordinal.customOrdinal = timeUs;
     work->input.buffers.clear();
 
-    uint64_t queuedFrameIndex = work->input.ordinal.frameIndex.peeku();
-    std::vector<std::shared_ptr<C2Buffer>> queuedBuffers;
     sp<Codec2Buffer> copy;
+    bool usesFrameReassembler = false;
 
     if (buffer->size() > 0u) {
         Mutexed<Input>::Locked input(mInput);
@@ -245,30 +249,38 @@
                       "buffer starvation on component.", mName);
             }
         }
-        work->input.buffers.push_back(c2buffer);
-        queuedBuffers.push_back(c2buffer);
+        if (input->frameReassembler) {
+            usesFrameReassembler = true;
+            input->frameReassembler.process(buffer, &items);
+        } else {
+            work->input.buffers.push_back(c2buffer);
+            if (encryptedBlock) {
+                work->input.infoBuffers.emplace_back(C2InfoBuffer::CreateLinearBuffer(
+                        kParamIndexEncryptedBuffer,
+                        encryptedBlock->share(0, blockSize, C2Fence())));
+            }
+        }
     } else if (eos) {
         flags |= C2FrameData::FLAG_END_OF_STREAM;
     }
-    work->input.flags = (C2FrameData::flags_t)flags;
-    // TODO: fill info's
+    if (usesFrameReassembler) {
+        if (!items.empty()) {
+            items.front()->input.configUpdate = std::move(mParamsToBeSet);
+            mFrameIndex = (items.back()->input.ordinal.frameIndex + 1).peek();
+        }
+    } else {
+        work->input.flags = (C2FrameData::flags_t)flags;
+        // TODO: fill info's
 
-    work->input.configUpdate = std::move(mParamsToBeSet);
-    work->worklets.clear();
-    work->worklets.emplace_back(new C2Worklet);
+        work->input.configUpdate = std::move(mParamsToBeSet);
+        work->worklets.clear();
+        work->worklets.emplace_back(new C2Worklet);
 
-    std::list<std::unique_ptr<C2Work>> items;
-    items.push_back(std::move(work));
-    mPipelineWatcher.lock()->onWorkQueued(
-            queuedFrameIndex,
-            std::move(queuedBuffers),
-            PipelineWatcher::Clock::now());
-    c2_status_t err = mComponent->queue(&items);
-    if (err != C2_OK) {
-        mPipelineWatcher.lock()->onWorkDone(queuedFrameIndex);
+        items.push_back(std::move(work));
+
+        eos = eos && buffer->size() > 0u;
     }
-
-    if (err == C2_OK && eos && buffer->size() > 0u) {
+    if (eos) {
         work.reset(new C2Work);
         work->input.ordinal.timestamp = timeUs;
         work->input.ordinal.frameIndex = mFrameIndex++;
@@ -277,23 +289,28 @@
         work->input.buffers.clear();
         work->input.flags = C2FrameData::FLAG_END_OF_STREAM;
         work->worklets.emplace_back(new C2Worklet);
-
-        queuedFrameIndex = work->input.ordinal.frameIndex.peeku();
-        queuedBuffers.clear();
-
-        items.clear();
         items.push_back(std::move(work));
-
-        mPipelineWatcher.lock()->onWorkQueued(
-                queuedFrameIndex,
-                std::move(queuedBuffers),
-                PipelineWatcher::Clock::now());
-        err = mComponent->queue(&items);
-        if (err != C2_OK) {
-            mPipelineWatcher.lock()->onWorkDone(queuedFrameIndex);
-        }
     }
-    if (err == C2_OK) {
+    c2_status_t err = C2_OK;
+    if (!items.empty()) {
+        {
+            Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+            PipelineWatcher::Clock::time_point now = PipelineWatcher::Clock::now();
+            for (const std::unique_ptr<C2Work> &work : items) {
+                watcher->onWorkQueued(
+                        work->input.ordinal.frameIndex.peeku(),
+                        std::vector(work->input.buffers),
+                        now);
+            }
+        }
+        err = mComponent->queue(&items);
+    }
+    if (err != C2_OK) {
+        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+        for (const std::unique_ptr<C2Work> &work : items) {
+            watcher->onWorkDone(work->input.ordinal.frameIndex.peeku());
+        }
+    } else {
         Mutexed<Input>::Locked input(mInput);
         bool released = false;
         if (buffer) {
@@ -514,6 +531,40 @@
     }
     sp<EncryptedLinearBlockBuffer> encryptedBuffer((EncryptedLinearBlockBuffer *)buffer.get());
 
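+    // When the component runs in SM_READ_PROTECTED_WITH_ENCRYPTED mode, also
+    // stage a readable copy of the (still encrypted) input in a linear block;
+    // it is later attached to the work item as a kParamIndexEncryptedBuffer
+    // C2InfoBuffer alongside the decrypted buffer.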
+    std::shared_ptr<C2LinearBlock> block;
+    size_t allocSize = buffer->size();
+    size_t bufferSize = 0;
+    c2_status_t blockRes = C2_OK;
+    bool copied = false;
+    if (mSendEncryptedInfoBuffer) {
+        static const C2MemoryUsage kDefaultReadWriteUsage{
+            C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+        constexpr int kAllocGranule0 = 1024 * 64;
+        constexpr int kAllocGranule1 = 1024 * 1024;
+        std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+        // round up encrypted sizes to limit fragmentation and encourage buffer reuse
+        if (allocSize <= kAllocGranule1) {
+            bufferSize = align(allocSize, kAllocGranule0);
+        } else {
+            bufferSize = align(allocSize, kAllocGranule1);
+        }
+        blockRes = pool->fetchLinearBlock(
+                bufferSize, kDefaultReadWriteUsage, &block);
+
+        if (blockRes == C2_OK) {
+            C2WriteView view = block->map().get();
+            if (view.error() == C2_OK && view.size() == bufferSize) {
+                copied = true;
+                // TODO: only copy clear sections
+                memcpy(view.data(), buffer->data(), allocSize);
+            }
+        }
+    }
+
+    if (!copied) {
+        block.reset();
+    }
+
     ssize_t result = -1;
     ssize_t codecDataOffset = 0;
     if (numSubSamples == 1
@@ -605,7 +656,8 @@
     }
 
     buffer->setRange(codecDataOffset, result - codecDataOffset);
-    return queueInputBufferInternal(buffer);
+
+    return queueInputBufferInternal(buffer, block, bufferSize);
 }
 
 void CCodecBufferChannel::feedInputBufferIfAvailable() {
@@ -882,27 +934,31 @@
         bool buffersBoundToCodec) {
     C2StreamBufferTypeSetting::input iStreamFormat(0u);
     C2StreamBufferTypeSetting::output oStreamFormat(0u);
+    C2ComponentKindSetting kind;
     C2PortReorderBufferDepthTuning::output reorderDepth;
     C2PortReorderKeySetting::output reorderKey;
     C2PortActualDelayTuning::input inputDelay(0);
     C2PortActualDelayTuning::output outputDelay(0);
     C2ActualPipelineDelayTuning pipelineDelay(0);
+    C2SecureModeTuning secureMode(C2Config::SM_UNPROTECTED);
 
     c2_status_t err = mComponent->query(
             {
                 &iStreamFormat,
                 &oStreamFormat,
+                &kind,
                 &reorderDepth,
                 &reorderKey,
                 &inputDelay,
                 &pipelineDelay,
                 &outputDelay,
+                &secureMode,
             },
             {},
             C2_DONT_BLOCK,
             nullptr);
     if (err == C2_BAD_INDEX) {
-        if (!iStreamFormat || !oStreamFormat) {
+        if (!iStreamFormat || !oStreamFormat || !kind) {
             return UNKNOWN_ERROR;
         }
     } else if (err != C2_OK) {
@@ -919,18 +975,26 @@
     // TODO: get this from input format
     bool secure = mComponent->getName().find(".secure") != std::string::npos;
 
+    // secure mode is a static parameter (shall not change in the executing state)
+    mSendEncryptedInfoBuffer = secureMode.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED;
+
     std::shared_ptr<C2AllocatorStore> allocatorStore = GetCodec2PlatformAllocatorStore();
     int poolMask = GetCodec2PoolMask();
     C2PlatformAllocatorStore::id_t preferredLinearId = GetPreferredLinearAllocatorId(poolMask);
 
     if (inputFormat != nullptr) {
         bool graphic = (iStreamFormat.value == C2BufferData::GRAPHIC);
+        bool audioEncoder = !graphic && (kind.value == C2Component::KIND_ENCODER);
         C2Config::api_feature_t apiFeatures = C2Config::api_feature_t(
                 API_REFLECTION |
                 API_VALUES |
                 API_CURRENT_VALUES |
                 API_DEPENDENCY |
                 API_SAME_INPUT_BUFFER);
+        C2StreamAudioFrameSizeInfo::input encoderFrameSize(0u);
+        C2StreamSampleRateInfo::input sampleRate(0u);
+        C2StreamChannelCountInfo::input channelCount(0u);
+        C2StreamPcmEncodingInfo::input pcmEncoding(0u);
         std::shared_ptr<C2BlockPool> pool;
         {
             Mutexed<BlockPools>::Locked pools(mBlockPools);
@@ -943,7 +1007,19 @@
             // from component, create the input block pool with given ID. Otherwise, use default IDs.
             std::vector<std::unique_ptr<C2Param>> params;
             C2ApiFeaturesSetting featuresSetting{apiFeatures};
-            err = mComponent->query({ &featuresSetting },
+            std::vector<C2Param *> stackParams({&featuresSetting});
+            if (audioEncoder) {
+                stackParams.push_back(&encoderFrameSize);
+                stackParams.push_back(&sampleRate);
+                stackParams.push_back(&channelCount);
+                stackParams.push_back(&pcmEncoding);
+            } else {
+                encoderFrameSize.invalidate();
+                sampleRate.invalidate();
+                channelCount.invalidate();
+                pcmEncoding.invalidate();
+            }
+            err = mComponent->query(stackParams,
                                     { C2PortAllocatorsTuning::input::PARAM_TYPE },
                                     C2_DONT_BLOCK,
                                     &params);
@@ -1001,10 +1077,21 @@
         input->numSlots = numInputSlots;
         input->extraBuffers.flush();
         input->numExtraSlots = 0u;
+        if (audioEncoder && encoderFrameSize && sampleRate && channelCount) {
+            input->frameReassembler.init(
+                    pool,
+                    {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE},
+                    encoderFrameSize.value,
+                    sampleRate.value,
+                    channelCount.value,
+                    pcmEncoding ? pcmEncoding.value : C2Config::PCM_16);
+        }
         bool conforming = (apiFeatures & API_SAME_INPUT_BUFFER);
         // For encrypted content, framework decrypts source buffer (ashmem) into
         // C2Buffers. Thus non-conforming codecs can process these.
-        if (!buffersBoundToCodec && (hasCryptoOrDescrambler() || conforming)) {
+        if (!buffersBoundToCodec
+                && !input->frameReassembler
+                && (hasCryptoOrDescrambler() || conforming)) {
             input->buffers.reset(new SlotInputBuffers(mName));
         } else if (graphic) {
             if (mInputSurface) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index e2c9aaa..b9e8d39 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -31,6 +31,7 @@
 #include <media/stagefright/CodecBase.h>
 
 #include "CCodecBuffers.h"
+#include "FrameReassembler.h"
 #include "InputSurfaceWrapper.h"
 #include "PipelineWatcher.h"
 
@@ -238,7 +239,9 @@
 
     void feedInputBufferIfAvailable();
     void feedInputBufferIfAvailableInternal();
-    status_t queueInputBufferInternal(sp<MediaCodecBuffer> buffer);
+    status_t queueInputBufferInternal(sp<MediaCodecBuffer> buffer,
+                                      std::shared_ptr<C2LinearBlock> encryptedBlock = nullptr,
+                                      size_t blockSize = 0);
     bool handleWork(
             std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
             const C2StreamInitDataInfo::output *initData);
@@ -269,6 +272,8 @@
         size_t numExtraSlots;
         uint32_t inputDelay;
         uint32_t pipelineDelay;
+
+        FrameReassembler frameReassembler;
     };
     Mutexed<Input> mInput;
     struct Output {
@@ -315,6 +320,7 @@
     inline bool hasCryptoOrDescrambler() {
         return mCrypto != nullptr || mDescrambler != nullptr;
     }
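+    // Whether the component runs in SM_READ_PROTECTED_WITH_ENCRYPTED mode; if so,
+    // a readable copy of the encrypted input is queued as a C2InfoBuffer
+    // (kParamIndexEncryptedBuffer) alongside the decrypted buffer.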
+    std::atomic_bool mSendEncryptedInfoBuffer;
 };
 
 // Conversion of a c2_status_t value to a status_t value may depend on the
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index b112249..a26f89e 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -103,11 +103,16 @@
     c2_status_t err1 = intf->querySupportedParams(&paramDescs);
     if (err1 == C2_OK) {
         for (const std::shared_ptr<C2ParamDescriptor> &desc : paramDescs) {
-            switch ((uint32_t)desc->index()) {
-            case C2StreamHdr10PlusInfo::output::PARAM_TYPE:
+            C2Param::Type type = desc->index();
+            // only consider supported parameters on raw ports
+            if (!(encoder ? type.forInput() : type.forOutput())) {
+                continue;
+            }
+            switch (type.coreIndex()) {
+            case C2StreamHdr10PlusInfo::CORE_INDEX:
                 supportsHdr10Plus = true;
                 break;
-            case C2StreamHdrStaticInfo::output::PARAM_TYPE:
+            case C2StreamHdrStaticInfo::CORE_INDEX:
                 supportsHdr = true;
                 break;
             default:
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
new file mode 100644
index 0000000..f8e6937
--- /dev/null
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -0,0 +1,226 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FrameReassembler"
+
+#include <log/log.h>
+
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "FrameReassembler.h"
+
+namespace android {
+
+static constexpr uint64_t kToleranceUs = 1000;  // 1ms
+
+FrameReassembler::FrameReassembler()
+    : mUsage{0, 0},
+      mSampleRate(0u),
+      mChannelCount(0u),
+      mEncoding(C2Config::PCM_16),
+      mCurrentOrdinal({0, 0, 0}) {
+}
+
+void FrameReassembler::init(
+        const std::shared_ptr<C2BlockPool> &pool,
+        C2MemoryUsage usage,
+        uint32_t frameSize,
+        uint32_t sampleRate,
+        uint32_t channelCount,
+        C2Config::pcm_encoding_t encoding) {
+    mBlockPool = pool;
+    mUsage = usage;
+    mFrameSize = frameSize;
+    mSampleRate = sampleRate;
+    mChannelCount = channelCount;
+    mEncoding = encoding;
+}
+
+void FrameReassembler::updateFrameSize(uint32_t frameSize) {
+    finishCurrentBlock(&mPendingWork);
+    mFrameSize = frameSize;
+}
+
+void FrameReassembler::updateSampleRate(uint32_t sampleRate) {
+    finishCurrentBlock(&mPendingWork);
+    mSampleRate = sampleRate;
+}
+
+void FrameReassembler::updateChannelCount(uint32_t channelCount) {
+    finishCurrentBlock(&mPendingWork);
+    mChannelCount = channelCount;
+}
+
+void FrameReassembler::updatePcmEncoding(C2Config::pcm_encoding_t encoding) {
+    finishCurrentBlock(&mPendingWork);
+    mEncoding = encoding;
+}
+
+void FrameReassembler::reset() {
+    flush();
+    mCurrentOrdinal = {0, 0, 0};
+    mBlockPool.reset();
+    mFrameSize.reset();
+    mSampleRate = 0u;
+    mChannelCount = 0u;
+    mEncoding = C2Config::PCM_16;
+}
+
+FrameReassembler::operator bool() const {
+    return mFrameSize.has_value();
+}
+
+c2_status_t FrameReassembler::process(
+        const sp<MediaCodecBuffer> &buffer,
+        std::list<std::unique_ptr<C2Work>> *items) {
+    int64_t timeUs;
+    if (buffer->size() == 0u
+            || !buffer->meta()->findInt64("timeUs", &timeUs)) {
+        return C2_BAD_VALUE;
+    }
+
+    items->splice(items->end(), mPendingWork);
+
+    // Fill mCurrentBlock
+    if (mCurrentBlock) {
+        // First check the timestamp
+        c2_cntr64_t endTimestampUs = mCurrentOrdinal.timestamp;
+        endTimestampUs += bytesToSamples(mWriteView->size()) * 1000000 / mSampleRate;
+        if (timeUs < endTimestampUs.peek()) {
+            uint64_t diffUs = (endTimestampUs - timeUs).peeku();
+            if (diffUs > kToleranceUs) {
+                // The timestamp moved backward by a large amount.
+                // TODO: b/145702136
+                ALOGW("timestamp going back in time! from %lld to %lld",
+                        endTimestampUs.peekll(), (long long)timeUs);
+            }
+        } else {  // timeUs >= endTimestampUs.peek()
+            uint64_t diffUs = (timeUs - endTimestampUs).peeku();
+            if (diffUs > kToleranceUs) {
+                // The timestamp is going forward; add silence as necessary.
+                size_t gapSamples = usToSamples(diffUs);
+                size_t remainingSamples =
+                    (mWriteView->capacity() - mWriteView->size())
+                    / mChannelCount / bytesPerSample();
+                if (gapSamples < remainingSamples) {
+                    size_t gapBytes = gapSamples * mChannelCount * bytesPerSample();
+                    memset(mWriteView->base() + mWriteView->size(), 0u, gapBytes);
+                    mWriteView->setSize(mWriteView->size() + gapBytes);
+                } else {
+                    finishCurrentBlock(items);
+                }
+            }
+        }
+    }
+
+    if (mCurrentBlock) {
+        // Append the data at the end of the current block
+        size_t copySize = std::min(
+                buffer->size(),
+                size_t(mWriteView->capacity() - mWriteView->size()));
+        memcpy(mWriteView->base() + mWriteView->size(), buffer->data(), copySize);
+        buffer->setRange(buffer->offset() + copySize, buffer->size() - copySize);
+        mWriteView->setSize(mWriteView->size() + copySize);
+        if (mWriteView->size() == mWriteView->capacity()) {
+            finishCurrentBlock(items);
+        }
+        timeUs += bytesToSamples(copySize) * 1000000 / mSampleRate;
+    }
+
+    if (buffer->size() > 0) {
+        mCurrentOrdinal.timestamp = timeUs;
+    }
+
+    size_t frameSizeBytes = mFrameSize.value() * mChannelCount * bytesPerSample();
+    while (buffer->size() > 0) {
+        LOG_ALWAYS_FATAL_IF(
+                mCurrentBlock,
+                "There's remaining data but the pending block is not filled & finished");
+        std::unique_ptr<C2Work> work(new C2Work);
+        c2_status_t err = mBlockPool->fetchLinearBlock(frameSizeBytes, mUsage, &mCurrentBlock);
+        if (err != C2_OK) {
+            return err;
+        }
+        size_t copySize = std::min(buffer->size(), frameSizeBytes);
+        mWriteView = mCurrentBlock->map().get();
+        if (mWriteView->error() != C2_OK) {
+            return mWriteView->error();
+        }
+        ALOGV("buffer={offset=%zu size=%zu} copySize=%zu",
+                buffer->offset(), buffer->size(), copySize);
+        memcpy(mWriteView->base(), buffer->data(), copySize);
+        mWriteView->setOffset(0u);
+        mWriteView->setSize(copySize);
+        buffer->setRange(buffer->offset() + copySize, buffer->size() - copySize);
+        if (copySize == frameSizeBytes) {
+            finishCurrentBlock(items);
+        }
+    }
+
+    int32_t eos = 0;
+    if (buffer->meta()->findInt32("eos", &eos) && eos) {
+        finishCurrentBlock(items);
+    }
+
+    return C2_OK;
+}
+
+void FrameReassembler::flush() {
+    mPendingWork.clear();
+    mWriteView.reset();
+    mCurrentBlock.reset();
+}
+
+uint64_t FrameReassembler::bytesToSamples(size_t numBytes) const {
+    return numBytes / mChannelCount / bytesPerSample();
+}
+
+size_t FrameReassembler::usToSamples(uint64_t us) const {
+    return (us * mChannelCount * mSampleRate / 1000000);
+}
+
+uint32_t FrameReassembler::bytesPerSample() const {
+    return (mEncoding == C2Config::PCM_8) ? 1
+         : (mEncoding == C2Config::PCM_16) ? 2
+         : (mEncoding == C2Config::PCM_FLOAT) ? 4 : 0;
+}
+
+void FrameReassembler::finishCurrentBlock(std::list<std::unique_ptr<C2Work>> *items) {
+    if (!mCurrentBlock) {
+        // No-op
+        return;
+    }
+    if (mWriteView->size() < mWriteView->capacity()) {
+        memset(mWriteView->base() + mWriteView->size(), 0u,
+                mWriteView->capacity() - mWriteView->size());
+        mWriteView->setSize(mWriteView->capacity());
+    }
+    std::unique_ptr<C2Work> work{std::make_unique<C2Work>()};
+    work->input.ordinal = mCurrentOrdinal;
+    work->input.buffers.push_back(C2Buffer::CreateLinearBuffer(
+            mCurrentBlock->share(0, mCurrentBlock->capacity(), C2Fence())));
+    work->worklets.clear();
+    work->worklets.emplace_back(new C2Worklet);
+    items->push_back(std::move(work));
+
+    ++mCurrentOrdinal.frameIndex;
+    mCurrentOrdinal.timestamp += mFrameSize.value() * 1000000 / mSampleRate;
+    mCurrentBlock.reset();
+    mWriteView.reset();
+}
+
+}  // namespace android
diff --git a/media/codec2/sfplugin/FrameReassembler.h b/media/codec2/sfplugin/FrameReassembler.h
new file mode 100644
index 0000000..17ac06d
--- /dev/null
+++ b/media/codec2/sfplugin/FrameReassembler.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAME_REASSEMBLER_H_
+#define FRAME_REASSEMBLER_H_
+
+#include <set>
+#include <memory>
+
+#include <media/MediaCodecBuffer.h>
+
+#include <C2Config.h>
+#include <C2Work.h>
+
+namespace android {
+
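+/**
+ * FrameReassembler repackages arbitrarily-sized PCM input buffers into the
+ * fixed frame size (in samples) requested by an audio encoder. Forward
+ * timestamp gaps and the final partial frame are padded with silence, and one
+ * C2Work item is emitted per completed frame.
+ */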
+class FrameReassembler {
+public:
+    FrameReassembler();
+
+    void init(
+            const std::shared_ptr<C2BlockPool> &pool,
+            C2MemoryUsage usage,
+            uint32_t frameSize,
+            uint32_t sampleRate,
+            uint32_t channelCount,
+            C2Config::pcm_encoding_t encoding);
+    void updateFrameSize(uint32_t frameSize);
+    void updateSampleRate(uint32_t sampleRate);
+    void updateChannelCount(uint32_t channelCount);
+    void updatePcmEncoding(C2Config::pcm_encoding_t encoding);
+    void reset();
+    void flush();
+
+    explicit operator bool() const;
+
+    c2_status_t process(
+            const sp<MediaCodecBuffer> &buffer,
+            std::list<std::unique_ptr<C2Work>> *items);
+
+private:
+    std::shared_ptr<C2BlockPool> mBlockPool;
+    C2MemoryUsage mUsage;
+    std::optional<uint32_t> mFrameSize;
+    uint32_t mSampleRate;
+    uint32_t mChannelCount;
+    C2Config::pcm_encoding_t mEncoding;
+    std::list<std::unique_ptr<C2Work>> mPendingWork;
+    C2WorkOrdinalStruct mCurrentOrdinal;
+    std::shared_ptr<C2LinearBlock> mCurrentBlock;
+    std::optional<C2WriteView> mWriteView;
+
+    uint64_t bytesToSamples(size_t numBytes) const;
+    size_t usToSamples(uint64_t us) const;
+    uint32_t bytesPerSample() const;
+
+    void finishCurrentBlock(std::list<std::unique_ptr<C2Work>> *items);
+};
+
+}  // namespace android
+
+#endif  // FRAME_REASSEMBLER_H_
diff --git a/media/codec2/sfplugin/tests/Android.bp b/media/codec2/sfplugin/tests/Android.bp
index 8d1a9c3..51b99a4 100644
--- a/media/codec2/sfplugin/tests/Android.bp
+++ b/media/codec2/sfplugin/tests/Android.bp
@@ -4,6 +4,7 @@
     srcs: [
         "CCodecBuffers_test.cpp",
         "CCodecConfig_test.cpp",
+        "FrameReassembler_test.cpp",
         "ReflectedParamUpdater_test.cpp",
     ],
 
diff --git a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
new file mode 100644
index 0000000..6738ee7
--- /dev/null
+++ b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
@@ -0,0 +1,340 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "FrameReassembler.h"
+
+#include <gtest/gtest.h>
+
+#include <C2PlatformSupport.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+static size_t BytesPerSample(C2Config::pcm_encoding_t encoding) {
+    return encoding == PCM_8 ? 1
+         : encoding == PCM_16 ? 2
+         : encoding == PCM_FLOAT ? 4 : 0;
+}
+
+static uint64_t Diff(c2_cntr64_t a, c2_cntr64_t b) {
+    return std::abs((a - b).peek());
+}
+
+class FrameReassemblerTest : public ::testing::Test {
+public:
+    static const C2MemoryUsage kUsage;
+    static constexpr uint64_t kTimestampToleranceUs = 100;
+
+    FrameReassemblerTest() {
+        mInitStatus = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &mPool);
+    }
+
+    status_t initStatus() const { return mInitStatus; }
+
+    void testPushSameSize(
+            size_t encoderFrameSize,
+            size_t sampleRate,
+            size_t channelCount,
+            C2Config::pcm_encoding_t encoding,
+            size_t inputFrameSizeInBytes,
+            size_t count,
+            size_t expectedOutputSize) {
+        FrameReassembler frameReassembler;
+        frameReassembler.init(
+                mPool,
+                kUsage,
+                encoderFrameSize,
+                sampleRate,
+                channelCount,
+                encoding);
+
+        ASSERT_TRUE(frameReassembler) << "FrameReassembler init failed";
+
+        size_t inputIndex = 0, outputIndex = 0;
+        size_t expectCount = 0;
+        for (size_t i = 0; i < count; ++i) {
+            sp<MediaCodecBuffer> buffer = new MediaCodecBuffer(
+                    new AMessage, new ABuffer(inputFrameSizeInBytes));
+            buffer->setRange(0, inputFrameSizeInBytes);
+            buffer->meta()->setInt64(
+                    "timeUs",
+                    inputIndex * 1000000 / sampleRate / channelCount / BytesPerSample(encoding));
+            if (i == count - 1) {
+                buffer->meta()->setInt32("eos", 1);
+            }
+            for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
+                buffer->base()[j] = (inputIndex & 0xFF);
+            }
+            std::list<std::unique_ptr<C2Work>> items;
+            ASSERT_EQ(C2_OK, frameReassembler.process(buffer, &items));
+            while (!items.empty()) {
+                std::unique_ptr<C2Work> work = std::move(*items.begin());
+                items.erase(items.begin());
+                // Verify timestamp
+                uint64_t expectedTimeUs =
+                    outputIndex * 1000000 / sampleRate / channelCount / BytesPerSample(encoding);
+                EXPECT_GE(
+                        kTimestampToleranceUs,
+                        Diff(expectedTimeUs, work->input.ordinal.timestamp))
+                    << "expected timestamp: " << expectedTimeUs
+                    << " actual timestamp: " << work->input.ordinal.timestamp.peeku()
+                    << " output index: " << outputIndex;
+
+                // Verify buffer
+                ASSERT_EQ(1u, work->input.buffers.size());
+                std::shared_ptr<C2Buffer> buffer = work->input.buffers.front();
+                ASSERT_EQ(C2BufferData::LINEAR, buffer->data().type());
+                ASSERT_EQ(1u, buffer->data().linearBlocks().size());
+                C2ReadView view = buffer->data().linearBlocks().front().map().get();
+                ASSERT_EQ(C2_OK, view.error());
+                ASSERT_EQ(encoderFrameSize * BytesPerSample(encoding), view.capacity());
+                for (size_t j = 0; j < view.capacity(); ++j, ++outputIndex) {
+                    ASSERT_TRUE(outputIndex < inputIndex
+                             || inputIndex == inputFrameSizeInBytes * count);
+                    uint8_t expected = outputIndex < inputIndex ? (outputIndex & 0xFF) : 0;
+                    if (expectCount < 10) {
+                        ++expectCount;
+                        EXPECT_EQ(expected, view.data()[j]) << "output index = " << outputIndex;
+                    }
+                }
+            }
+        }
+
+        ASSERT_EQ(inputFrameSizeInBytes * count, inputIndex);
+        size_t encoderFrameSizeInBytes =
+            encoderFrameSize * channelCount * BytesPerSample(encoding);
+        ASSERT_EQ(0, outputIndex % encoderFrameSizeInBytes)
+            << "output size must be multiple of frame size: output size = " << outputIndex
+            << " frame size = " << encoderFrameSizeInBytes;
+        ASSERT_EQ(expectedOutputSize, outputIndex)
+            << "output size must be smallest multiple of frame size, "
+            << "equal to or larger than input size. output size = " << outputIndex
+            << " input size = " << inputIndex << " frame size = " << encoderFrameSizeInBytes;
+    }
+
+private:
+    status_t mInitStatus;
+    std::shared_ptr<C2BlockPool> mPool;
+};
+
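+// The tests map and read the reassembled blocks on the CPU, hence CPU read/write usage.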
+const C2MemoryUsage FrameReassemblerTest::kUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+// Push frames with exactly the same size as the encoder requested.
+TEST_F(FrameReassemblerTest, PushExactFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 byte/sample */,
+            10 /* count */,
+            10240 /* expected output size = 10 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
+            10 /* count */,
+            20480 /* expected output size = 10 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
+            10 /* count */,
+            40960 /* expected output size = 10 * 4096 bytes/frame */);
+}
+
+// Push frames with half the size that the encoder requested.
+TEST_F(FrameReassemblerTest, PushHalfFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            512 /* input frame size in bytes = 512 samples * 1 channel * 1 byte per sample */,
+            10 /* count */,
+            5120 /* expected output size = 5 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes per sample */,
+            10 /* count */,
+            10240 /* expected output size = 5 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes per sample */,
+            10 /* count */,
+            20480 /* expected output size = 5 * 4096 bytes/frame */);
+}
+
+// Push frames with twice the size that the encoder requested.
+TEST_F(FrameReassemblerTest, PushDoubleFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 byte per sample */,
+            10 /* count */,
+            20480 /* expected output size = 20 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes per sample */,
+            10 /* count */,
+            40960 /* expected output size = 20 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes per sample */,
+            10 /* count */,
+            81920 /* expected output size = 20 * 4096 bytes/frame */);
+}
+
+// Push frames slightly larger (+5 samples) than the requested size.
+TEST_F(FrameReassemblerTest, PushLittleLargerFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 byte per sample */,
+            10 /* count */,
+            11264 /* expected output size = 11 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes per sample */,
+            10 /* count */,
+            22528 /* expected output size = 11 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes per sample */,
+            10 /* count */,
+            45056 /* expected output size = 11 * 4096 bytes/frame */);
+}
+
+// Push frames slightly smaller (-5 samples) than the requested size.
+TEST_F(FrameReassemblerTest, PushLittleSmallerFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 byte per sample */,
+            10 /* count */,
+            10240 /* expected output size = 10 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes per sample */,
+            10 /* count */,
+            20480 /* expected output size = 10 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes per sample */,
+            10 /* count */,
+            40960 /* expected output size = 10 * 4096 bytes/frame */);
+}
+
+// Push single-byte frames.
+TEST_F(FrameReassemblerTest, PushSingleByte) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            1 /* input frame size in bytes */,
+            100000 /* count */,
+            100352 /* expected output size = 98 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            1 /* input frame size in bytes */,
+            100000 /* count */,
+            100352 /* expected output size = 49 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            1 /* input frame size in bytes */,
+            100000 /* count */,
+            102400 /* expected output size = 25 * 4096 bytes/frame */);
+}
+
+// Push one big chunk.
+TEST_F(FrameReassemblerTest, PushBigChunk) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            100000 /* input frame size in bytes */,
+            1 /* count */,
+            100352 /* expected output size = 98 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            100000 /* input frame size in bytes */,
+            1 /* count */,
+            100352 /* expected output size = 49 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            100000 /* input frame size in bytes */,
+            1 /* count */,
+            102400 /* expected output size = 25 * 4096 bytes/frame */);
+}
+
+} // namespace android
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index 2593000..d1f92c1 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -1,6 +1,6 @@
 cc_library {
     name: "libflacextractor",
-    defaults: ["extractor-defaults"],
+    defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
 
     srcs: ["FLACExtractor.cpp"],
 
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index 85d4cce..6a0ef22 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -1,7 +1,7 @@
 cc_library {
     name: "libwavextractor",
 
-    defaults: ["extractor-defaults"],
+    defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
 
     srcs: ["WAVExtractor.cpp"],
 
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index e0ac7e5..709c656 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -1024,7 +1024,6 @@
 // Stream Control
 // ============================================================
 
-#if __ANDROID_API__ >= 30
 /**
  * Free the audio resources associated with a stream created by
  * AAudioStreamBuilder_openStream().
@@ -1042,11 +1041,12 @@
  * On other "Legacy" streams some audio resources will still be in use
  * and some callbacks may still be in process after this call.
  *
+ * Available since API level 30.
+ *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
 AAUDIO_API aaudio_result_t  AAudioStream_release(AAudioStream* stream) __INTRODUCED_IN(30);
-#endif // __ANDROID_API__
 
 /**
  * Delete the internal data structures associated with the stream created
@@ -1054,6 +1054,8 @@
  *
  * If AAudioStream_release() has not been called then it will be called automatically.
  *
+ * Available since API level 26.
+ *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index fab0fea..f64111b 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -14,11 +14,10 @@
     ],
 
     required: [
-        "libaudiohal@2.0",
         "libaudiohal@4.0",
         "libaudiohal@5.0",
         "libaudiohal@6.0",
-//        "libaudiohal@7.0",
+        "libaudiohal@7.0",
     ],
 
     shared_libs: [
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index 7228b22..e420d07 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -35,7 +35,6 @@
     "6.0",
     "5.0",
     "4.0",
-    "2.0",
     nullptr
 };
 
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index df006b5..833c373 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -53,29 +53,15 @@
 }
 
 cc_library_shared {
-    name: "libaudiohal@2.0",
-    defaults: ["libaudiohal_default"],
-    shared_libs: [
-        "android.hardware.audio.common@2.0",
-        "android.hardware.audio.common@2.0-util",
-        "android.hardware.audio.effect@2.0",
-        "android.hardware.audio@2.0",
-    ],
-    cflags: [
-        "-DMAJOR_VERSION=2",
-        "-DMINOR_VERSION=0",
-        "-include common/all-versions/VersionMacro.h",
-    ]
-}
-
-cc_library_shared {
     name: "libaudiohal@4.0",
     defaults: ["libaudiohal_default"],
     shared_libs: [
         "android.hardware.audio.common@4.0",
         "android.hardware.audio.common@4.0-util",
         "android.hardware.audio.effect@4.0",
+        "android.hardware.audio.effect@4.0-util",
         "android.hardware.audio@4.0",
+        "android.hardware.audio@4.0-util",
     ],
     cflags: [
         "-DMAJOR_VERSION=4",
@@ -91,7 +77,9 @@
         "android.hardware.audio.common@5.0",
         "android.hardware.audio.common@5.0-util",
         "android.hardware.audio.effect@5.0",
+        "android.hardware.audio.effect@5.0-util",
         "android.hardware.audio@5.0",
+        "android.hardware.audio@5.0-util",
     ],
     cflags: [
         "-DMAJOR_VERSION=5",
@@ -107,7 +95,9 @@
         "android.hardware.audio.common@6.0",
         "android.hardware.audio.common@6.0-util",
         "android.hardware.audio.effect@6.0",
+        "android.hardware.audio.effect@6.0-util",
         "android.hardware.audio@6.0",
+        "android.hardware.audio@6.0-util",
     ],
     cflags: [
         "-DMAJOR_VERSION=6",
@@ -117,14 +107,15 @@
 }
 
 cc_library_shared {
-    enabled: false,
     name: "libaudiohal@7.0",
     defaults: ["libaudiohal_default"],
     shared_libs: [
         "android.hardware.audio.common@7.0",
         "android.hardware.audio.common@7.0-util",
         "android.hardware.audio.effect@7.0",
+        "android.hardware.audio.effect@7.0-util",
         "android.hardware.audio@7.0",
+        "android.hardware.audio@7.0-util",
     ],
     cflags: [
         "-DMAJOR_VERSION=7",
@@ -132,4 +123,3 @@
         "-include common/all-versions/VersionMacro.h",
     ]
 }
-
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.cpp b/media/libaudiohal/impl/ConversionHelperHidl.cpp
index cf07a47..b6fee6d 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.cpp
+++ b/media/libaudiohal/impl/ConversionHelperHidl.cpp
@@ -120,129 +120,5 @@
     ALOGE("%s %p %s: %s (from rpc)", mClassName, this, funcName, description);
 }
 
-#if MAJOR_VERSION >= 4
-// TODO: Use the same implementation in the hal when it moves to a util library.
-static std::string deviceAddressToHal(const DeviceAddress& address) {
-    // HAL assumes that the address is NUL-terminated.
-    char halAddress[AUDIO_DEVICE_MAX_ADDRESS_LEN];
-    memset(halAddress, 0, sizeof(halAddress));
-    audio_devices_t halDevice = static_cast<audio_devices_t>(address.device);
-    if (getAudioDeviceOutAllA2dpSet().count(halDevice) > 0 ||
-        halDevice == AUDIO_DEVICE_IN_BLUETOOTH_A2DP) {
-        snprintf(halAddress, sizeof(halAddress), "%02X:%02X:%02X:%02X:%02X:%02X",
-                 address.address.mac[0], address.address.mac[1], address.address.mac[2],
-                 address.address.mac[3], address.address.mac[4], address.address.mac[5]);
-    } else if (halDevice == AUDIO_DEVICE_OUT_IP || halDevice == AUDIO_DEVICE_IN_IP) {
-        snprintf(halAddress, sizeof(halAddress), "%d.%d.%d.%d", address.address.ipv4[0],
-                 address.address.ipv4[1], address.address.ipv4[2], address.address.ipv4[3]);
-    } else if (getAudioDeviceOutAllUsbSet().count(halDevice) > 0 ||
-               getAudioDeviceInAllUsbSet().count(halDevice) > 0) {
-        snprintf(halAddress, sizeof(halAddress), "card=%d;device=%d", address.address.alsa.card,
-                 address.address.alsa.device);
-    } else if (halDevice == AUDIO_DEVICE_OUT_BUS || halDevice == AUDIO_DEVICE_IN_BUS) {
-        snprintf(halAddress, sizeof(halAddress), "%s", address.busAddress.c_str());
-    } else if (halDevice == AUDIO_DEVICE_OUT_REMOTE_SUBMIX ||
-               halDevice == AUDIO_DEVICE_IN_REMOTE_SUBMIX) {
-        snprintf(halAddress, sizeof(halAddress), "%s", address.rSubmixAddress.c_str());
-    } else {
-        snprintf(halAddress, sizeof(halAddress), "%s", address.busAddress.c_str());
-    }
-    return halAddress;
-}
-
-//local conversion helpers
-
-static audio_microphone_channel_mapping_t  channelMappingToHal(AudioMicrophoneChannelMapping mapping) {
-    switch (mapping) {
-        case AudioMicrophoneChannelMapping::UNUSED:
-            return AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
-        case AudioMicrophoneChannelMapping::DIRECT:
-            return AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT;
-        case AudioMicrophoneChannelMapping::PROCESSED:
-            return AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED;
-        default:
-            LOG_ALWAYS_FATAL("Unknown channelMappingToHal conversion %d", mapping);
-    }
-}
-
-static audio_microphone_location_t locationToHal(AudioMicrophoneLocation location) {
-    switch (location) {
-        case AudioMicrophoneLocation::UNKNOWN:
-            return AUDIO_MICROPHONE_LOCATION_UNKNOWN;
-        case AudioMicrophoneLocation::MAINBODY:
-            return AUDIO_MICROPHONE_LOCATION_MAINBODY;
-        case AudioMicrophoneLocation::MAINBODY_MOVABLE:
-            return AUDIO_MICROPHONE_LOCATION_MAINBODY_MOVABLE;
-        case AudioMicrophoneLocation::PERIPHERAL:
-            return AUDIO_MICROPHONE_LOCATION_PERIPHERAL;
-        default:
-            LOG_ALWAYS_FATAL("Unknown locationToHal conversion %d", location);
-    }
-}
-static audio_microphone_directionality_t directionalityToHal(AudioMicrophoneDirectionality dir) {
-    switch (dir) {
-        case AudioMicrophoneDirectionality::UNKNOWN:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_UNKNOWN;
-        case AudioMicrophoneDirectionality::OMNI:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_OMNI;
-        case AudioMicrophoneDirectionality::BI_DIRECTIONAL:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_BI_DIRECTIONAL;
-        case AudioMicrophoneDirectionality::CARDIOID:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_CARDIOID;
-        case AudioMicrophoneDirectionality::HYPER_CARDIOID:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_HYPER_CARDIOID;
-        case AudioMicrophoneDirectionality::SUPER_CARDIOID:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_SUPER_CARDIOID;
-        default:
-            LOG_ALWAYS_FATAL("Unknown directionalityToHal conversion %d", dir);
-    }
-}
-
-void microphoneInfoToHal(const MicrophoneInfo& src,
-                         audio_microphone_characteristic_t *pDst) {
-    if (pDst != NULL) {
-        snprintf(pDst->device_id, sizeof(pDst->device_id),
-                 "%s", src.deviceId.c_str());
-        pDst->device = static_cast<audio_devices_t>(src.deviceAddress.device);
-        snprintf(pDst->address, sizeof(pDst->address),
-                 "%s", deviceAddressToHal(src.deviceAddress).c_str());
-        if (src.channelMapping.size() > AUDIO_CHANNEL_COUNT_MAX) {
-            ALOGW("microphoneInfoToStruct found %zu channelMapping elements. Max expected is %d",
-                  src.channelMapping.size(), AUDIO_CHANNEL_COUNT_MAX);
-        }
-        size_t ch;
-        for (ch = 0; ch < src.channelMapping.size() && ch < AUDIO_CHANNEL_COUNT_MAX; ch++) {
-            pDst->channel_mapping[ch] = channelMappingToHal(src.channelMapping[ch]);
-        }
-        for (; ch < AUDIO_CHANNEL_COUNT_MAX; ch++) {
-            pDst->channel_mapping[ch] = AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
-        }
-        pDst->location = locationToHal(src.location);
-        pDst->group = (audio_microphone_group_t)src.group;
-        pDst->index_in_the_group = (unsigned int)src.indexInTheGroup;
-        pDst->sensitivity = src.sensitivity;
-        pDst->max_spl = src.maxSpl;
-        pDst->min_spl = src.minSpl;
-        pDst->directionality = directionalityToHal(src.directionality);
-        pDst->num_frequency_responses = (unsigned int)src.frequencyResponse.size();
-        if (pDst->num_frequency_responses > AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES) {
-            ALOGW("microphoneInfoToStruct found %d frequency responses. Max expected is %d",
-                  pDst->num_frequency_responses, AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES);
-            pDst->num_frequency_responses = AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES;
-        }
-        for (size_t k = 0; k < pDst->num_frequency_responses; k++) {
-            pDst->frequency_responses[0][k] = src.frequencyResponse[k].frequency;
-            pDst->frequency_responses[1][k] = src.frequencyResponse[k].level;
-        }
-        pDst->geometric_location.x = src.position.x;
-        pDst->geometric_location.y = src.position.y;
-        pDst->geometric_location.z = src.position.z;
-        pDst->orientation.x = src.orientation.x;
-        pDst->orientation.y = src.orientation.y;
-        pDst->orientation.z = src.orientation.z;
-    }
-}
-#endif
-
 }  // namespace CPP_VERSION
 }  // namespace android
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.h b/media/libaudiohal/impl/ConversionHelperHidl.h
index fb3bb9d..59122c7 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.h
+++ b/media/libaudiohal/impl/ConversionHelperHidl.h
@@ -82,12 +82,6 @@
     void emitError(const char* funcName, const char* description);
 };
 
-#if MAJOR_VERSION >= 4
-using ::android::hardware::audio::CPP_VERSION::MicrophoneInfo;
-void microphoneInfoToHal(const MicrophoneInfo& src,
-                         audio_microphone_characteristic_t *pDst);
-#endif
-
 }  // namespace CPP_VERSION
 }  // namespace android
 
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 12d70c3..da16477 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -19,22 +19,24 @@
 #define LOG_TAG "DeviceHalHidl"
 //#define LOG_NDEBUG 0
 
-#include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
 #include <cutils/native_handle.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioContainers.h>
 #include <utils/Log.h>
 
+#include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
+#include <HidlUtils.h>
 #include <common/all-versions/VersionUtils.h>
+#include <util/CoreUtils.h>
 
 #include "DeviceHalHidl.h"
 #include "EffectHalHidl.h"
-#include "HidlUtils.h"
+#include "ParameterUtils.h"
 #include "StreamHalHidl.h"
-#include "VersionUtils.h"
 
 using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
 using ::android::hardware::audio::common::utils::EnumBitfield;
+using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
 using ::android::hardware::hidl_string;
 using ::android::hardware::hidl_vec;
 
@@ -46,47 +48,6 @@
 
 using EffectHalHidl = ::android::effect::CPP_VERSION::EffectHalHidl;
 
-namespace {
-
-status_t deviceAddressFromHal(
-        audio_devices_t device, const char* halAddress, DeviceAddress* address) {
-    address->device = AudioDevice(device);
-
-    if (halAddress == nullptr || strnlen(halAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) {
-        return OK;
-    }
-    if (getAudioDeviceOutAllA2dpSet().count(device) > 0
-            || device == AUDIO_DEVICE_IN_BLUETOOTH_A2DP) {
-        int status = sscanf(halAddress,
-                "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
-                &address->address.mac[0], &address->address.mac[1], &address->address.mac[2],
-                &address->address.mac[3], &address->address.mac[4], &address->address.mac[5]);
-        return status == 6 ? OK : BAD_VALUE;
-    } else if (device == AUDIO_DEVICE_OUT_IP || device == AUDIO_DEVICE_IN_IP) {
-        int status = sscanf(halAddress,
-                "%hhu.%hhu.%hhu.%hhu",
-                &address->address.ipv4[0], &address->address.ipv4[1],
-                &address->address.ipv4[2], &address->address.ipv4[3]);
-        return status == 4 ? OK : BAD_VALUE;
-    } else if (getAudioDeviceOutAllUsbSet().count(device) > 0
-            || getAudioDeviceInAllUsbSet().count(device) > 0) {
-        int status = sscanf(halAddress,
-                "card=%d;device=%d",
-                &address->address.alsa.card, &address->address.alsa.device);
-        return status == 2 ? OK : BAD_VALUE;
-    } else if (device == AUDIO_DEVICE_OUT_BUS || device == AUDIO_DEVICE_IN_BUS) {
-        address->busAddress = halAddress;
-        return OK;
-    } else if (device == AUDIO_DEVICE_OUT_REMOTE_SUBMIX
-            || device == AUDIO_DEVICE_IN_REMOTE_SUBMIX) {
-        address->rSubmixAddress = halAddress;
-        return OK;
-    }
-    return OK;
-}
-
-}  // namespace
-
 DeviceHalHidl::DeviceHalHidl(const sp<IDevice>& device)
         : ConversionHelperHidl("Device"), mDevice(device),
           mPrimaryDevice(IPrimaryDevice::castFrom(device)) {
@@ -234,16 +195,22 @@
         sp<StreamOutHalInterface> *outStream) {
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
-    status_t status = deviceAddressFromHal(deviceType, address, &hidlDevice);
-    if (status != OK) return status;
+    if (status_t status = CoreUtils::deviceAddressFromHal(deviceType, address, &hidlDevice);
+            status != OK) {
+        return status;
+    }
     AudioConfig hidlConfig;
-    HidlUtils::audioConfigFromHal(*config, false /*isInput*/, &hidlConfig);
+    if (status_t status = HidlUtils::audioConfigFromHal(*config, false /*isInput*/, &hidlConfig);
+            status != OK) {
+        return status;
+    }
+    CoreUtils::AudioOutputFlags hidlFlags;
+    if (status_t status = CoreUtils::audioOutputFlagsFromHal(flags, &hidlFlags); status != OK) {
+        return status;
+    }
     Result retval = Result::NOT_INITIALIZED;
     Return<void> ret = mDevice->openOutputStream(
-            handle,
-            hidlDevice,
-            hidlConfig,
-            EnumBitfield<AudioOutputFlag>(flags),
+            handle, hidlDevice, hidlConfig, hidlFlags,
 #if MAJOR_VERSION >= 4
             {} /* metadata */,
 #endif
@@ -269,26 +236,45 @@
         sp<StreamInHalInterface> *inStream) {
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
-    status_t status = deviceAddressFromHal(devices, address, &hidlDevice);
-    if (status != OK) return status;
+    if (status_t status = CoreUtils::deviceAddressFromHal(devices, address, &hidlDevice);
+            status != OK) {
+        return status;
+    }
     AudioConfig hidlConfig;
-    HidlUtils::audioConfigFromHal(*config, true /*isInput*/, &hidlConfig);
+    if (status_t status = HidlUtils::audioConfigFromHal(*config, true /*isInput*/, &hidlConfig);
+            status != OK) {
+        return status;
+    }
+    CoreUtils::AudioInputFlags hidlFlags;
+    if (status_t status = CoreUtils::audioInputFlagsFromHal(flags, &hidlFlags); status != OK) {
+        return status;
+    }
     Result retval = Result::NOT_INITIALIZED;
 #if MAJOR_VERSION == 2
     auto sinkMetadata = AudioSource(source);
 #elif MAJOR_VERSION >= 4
     // TODO: correctly propagate the tracks sources and volume
     //       for now, only send the main source at 1dbfs
-    SinkMetadata sinkMetadata = {{{ .source = AudioSource(source), .gain = 1 }}};
+    AudioSource hidlSource;
+    if (status_t status = HidlUtils::audioSourceFromHal(source, &hidlSource); status != OK) {
+        return status;
+    }
+    SinkMetadata sinkMetadata = {{{ .source = std::move(hidlSource), .gain = 1 }}};
 #endif
 #if MAJOR_VERSION < 5
     (void)outputDevice;
     (void)outputDeviceAddress;
 #else
+#if MAJOR_VERSION >= 7
+    (void)HidlUtils::audioChannelMaskFromHal(
+            AUDIO_CHANNEL_NONE, true /*isInput*/, &sinkMetadata.tracks[0].channelMask);
+#endif
     if (outputDevice != AUDIO_DEVICE_NONE) {
         DeviceAddress hidlOutputDevice;
-        status = deviceAddressFromHal(outputDevice, outputDeviceAddress, &hidlOutputDevice);
-        if (status != OK) return status;
+        if (status_t status = CoreUtils::deviceAddressFromHal(
+                        outputDevice, outputDeviceAddress, &hidlOutputDevice); status != OK) {
+            return status;
+        }
         sinkMetadata.tracks[0].destination.device(std::move(hidlOutputDevice));
     }
 #endif
@@ -297,11 +283,7 @@
     flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
 #endif
     Return<void> ret = mDevice->openInputStream(
-            handle,
-            hidlDevice,
-            hidlConfig,
-            EnumBitfield<AudioInputFlag>(flags),
-            sinkMetadata,
+            handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
             [&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
                 retval = r;
                 if (retval == Result::OK) {
@@ -411,7 +393,7 @@
         for (size_t k = 0; k < micArrayHal.size(); k++) {
             audio_microphone_characteristic_t dst;
             //convert
-            microphoneInfoToHal(micArrayHal[k], &dst);
+            (void)CoreUtils::microphoneInfoToHal(micArrayHal[k], &dst);
             media::MicrophoneInfo microphone = media::MicrophoneInfo(dst);
             microphonesInfo->push_back(microphone);
         }
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 506feb8..c589a48 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -23,12 +23,13 @@
 #include <media/EffectsFactoryApi.h>
 #include <utils/Log.h>
 
+#include <util/EffectUtils.h>
+
 #include "EffectBufferHalHidl.h"
 #include "EffectHalHidl.h"
-#include "UuidUtils.h"
 
-using ::android::hardware::audio::common::CPP_VERSION::implementation::UuidUtils;
 using ::android::hardware::audio::common::utils::EnumBitfield;
+using ::android::hardware::audio::effect::CPP_VERSION::implementation::EffectUtils;
 using ::android::hardware::hidl_vec;
 using ::android::hardware::MQDescriptorSync;
 using ::android::hardware::Return;
@@ -42,6 +43,10 @@
 
 EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
         : mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
+    effect_descriptor_t halDescriptor{};
+    if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
+        mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
+    }
 }
 
 EffectHalHidl::~EffectHalHidl() {
@@ -56,59 +61,6 @@
 }
 
 // static
-void EffectHalHidl::effectDescriptorToHal(
-        const EffectDescriptor& descriptor, effect_descriptor_t* halDescriptor) {
-    UuidUtils::uuidToHal(descriptor.type, &halDescriptor->type);
-    UuidUtils::uuidToHal(descriptor.uuid, &halDescriptor->uuid);
-    halDescriptor->flags = static_cast<uint32_t>(descriptor.flags);
-    halDescriptor->cpuLoad = descriptor.cpuLoad;
-    halDescriptor->memoryUsage = descriptor.memoryUsage;
-    memcpy(halDescriptor->name, descriptor.name.data(), descriptor.name.size());
-    memcpy(halDescriptor->implementor,
-            descriptor.implementor.data(), descriptor.implementor.size());
-}
-
-// TODO(mnaganov): These buffer conversion functions should be shared with Effect wrapper
-// via HidlUtils. Move them there when hardware/interfaces will get un-frozen again.
-
-// static
-void EffectHalHidl::effectBufferConfigFromHal(
-        const buffer_config_t& halConfig, EffectBufferConfig* config) {
-    config->samplingRateHz = halConfig.samplingRate;
-    config->channels = EnumBitfield<AudioChannelMask>(halConfig.channels);
-    config->format = AudioFormat(halConfig.format);
-    config->accessMode = EffectBufferAccess(halConfig.accessMode);
-    config->mask = EnumBitfield<EffectConfigParameters>(halConfig.mask);
-}
-
-// static
-void EffectHalHidl::effectBufferConfigToHal(
-        const EffectBufferConfig& config, buffer_config_t* halConfig) {
-    halConfig->buffer.frameCount = 0;
-    halConfig->buffer.raw = NULL;
-    halConfig->samplingRate = config.samplingRateHz;
-    halConfig->channels = static_cast<uint32_t>(config.channels);
-    halConfig->bufferProvider.cookie = NULL;
-    halConfig->bufferProvider.getBuffer = NULL;
-    halConfig->bufferProvider.releaseBuffer = NULL;
-    halConfig->format = static_cast<uint8_t>(config.format);
-    halConfig->accessMode = static_cast<uint8_t>(config.accessMode);
-    halConfig->mask = static_cast<uint8_t>(config.mask);
-}
-
-// static
-void EffectHalHidl::effectConfigFromHal(const effect_config_t& halConfig, EffectConfig* config) {
-    effectBufferConfigFromHal(halConfig.inputCfg, &config->inputCfg);
-    effectBufferConfigFromHal(halConfig.outputCfg, &config->outputCfg);
-}
-
-// static
-void EffectHalHidl::effectConfigToHal(const EffectConfig& config, effect_config_t* halConfig) {
-    effectBufferConfigToHal(config.inputCfg, &halConfig->inputCfg);
-    effectBufferConfigToHal(config.outputCfg, &halConfig->outputCfg);
-}
-
-// static
 status_t EffectHalHidl::analyzeResult(const Result& result) {
     switch (result) {
         case Result::OK: return OK;
@@ -269,7 +221,7 @@
             [&](Result r, const EffectDescriptor& result) {
                 retval = r;
                 if (retval == Result::OK) {
-                    effectDescriptorToHal(result, pDescriptor);
+                    EffectUtils::effectDescriptorToHal(result, pDescriptor);
                 }
             });
     return ret.isOk() ? analyzeResult(retval) : FAILED_TRANSACTION;
@@ -301,14 +253,16 @@
         ret = mEffect->getConfig([&] (Result r, const EffectConfig &hidlConfig) {
             result = analyzeResult(r);
             if (r == Result::OK) {
-                effectConfigToHal(hidlConfig, static_cast<effect_config_t*>(pReplyData));
+                EffectUtils::effectConfigToHal(
+                        hidlConfig, static_cast<effect_config_t*>(pReplyData));
             }
         });
     } else {
         ret = mEffect->getConfigReverse([&] (Result r, const EffectConfig &hidlConfig) {
             result = analyzeResult(r);
             if (r == Result::OK) {
-                effectConfigToHal(hidlConfig, static_cast<effect_config_t*>(pReplyData));
+                EffectUtils::effectConfigToHal(
+                        hidlConfig, static_cast<effect_config_t*>(pReplyData));
             }
         });
     }
@@ -332,7 +286,7 @@
         ALOGE("Buffer provider callbacks are not supported");
     }
     EffectConfig hidlConfig;
-    effectConfigFromHal(*halConfig, &hidlConfig);
+    EffectUtils::effectConfigFromHal(*halConfig, mIsInput, &hidlConfig);
     Return<Result> ret = cmdCode == EFFECT_CMD_SET_CONFIG ?
             mEffect->setConfig(hidlConfig, nullptr, nullptr) :
             mEffect->setConfigReverse(hidlConfig, nullptr, nullptr);
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index 1f238c0..8e46638 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -65,9 +65,6 @@
 
     uint64_t effectId() const { return mEffectId; }
 
-    static void effectDescriptorToHal(
-            const EffectDescriptor& descriptor, effect_descriptor_t* halDescriptor);
-
   private:
     friend class EffectsFactoryHalHidl;
     typedef MessageQueue<Result, hardware::kSynchronizedReadWrite> StatusMQ;
@@ -79,14 +76,9 @@
     bool mBuffersChanged;
     std::unique_ptr<StatusMQ> mStatusMQ;
     EventFlag* mEfGroup;
+    bool mIsInput = false;
 
     static status_t analyzeResult(const Result& result);
-    static void effectBufferConfigFromHal(
-            const buffer_config_t& halConfig, EffectBufferConfig* config);
-    static void effectBufferConfigToHal(
-            const EffectBufferConfig& config, buffer_config_t* halConfig);
-    static void effectConfigFromHal(const effect_config_t& halConfig, EffectConfig* config);
-    static void effectConfigToHal(const EffectConfig& config, effect_config_t* halConfig);
 
     // Can not be constructed directly by clients.
     EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index b48acaa..8d15592 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -19,13 +19,16 @@
 
 #include <cutils/native_handle.h>
 
+#include <UuidUtils.h>
+#include <util/EffectUtils.h>
+
 #include "ConversionHelperHidl.h"
 #include "EffectBufferHalHidl.h"
 #include "EffectHalHidl.h"
 #include "EffectsFactoryHalHidl.h"
-#include "UuidUtils.h"
 
 using ::android::hardware::audio::common::CPP_VERSION::implementation::UuidUtils;
+using ::android::hardware::audio::effect::CPP_VERSION::implementation::EffectUtils;
 using ::android::hardware::Return;
 
 namespace android {
@@ -76,7 +79,7 @@
         if (queryResult != OK) return queryResult;
     }
     if (index >= mLastDescriptors.size()) return NAME_NOT_FOUND;
-    EffectHalHidl::effectDescriptorToHal(mLastDescriptors[index], pDescriptor);
+    EffectUtils::effectDescriptorToHal(mLastDescriptors[index], pDescriptor);
     return OK;
 }
 
@@ -91,7 +94,7 @@
             [&](Result r, const EffectDescriptor& result) {
                 retval = r;
                 if (retval == Result::OK) {
-                    EffectHalHidl::effectDescriptorToHal(result, pDescriptor);
+                    EffectUtils::effectDescriptorToHal(result, pDescriptor);
                 }
             });
     if (ret.isOk()) {
diff --git a/media/libaudiohal/impl/VersionUtils.h b/media/libaudiohal/impl/ParameterUtils.h
similarity index 91%
rename from media/libaudiohal/impl/VersionUtils.h
rename to media/libaudiohal/impl/ParameterUtils.h
index eb0a42a..9cab72e 100644
--- a/media/libaudiohal/impl/VersionUtils.h
+++ b/media/libaudiohal/impl/ParameterUtils.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright (C) 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,8 +14,7 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_HARDWARE_VERSION_UTILS_H
-#define ANDROID_HARDWARE_VERSION_UTILS_H
+#pragma once
 
 #include PATH(android/hardware/audio/FILE_VERSION/types.h)
 #include <hidl/HidlSupport.h>
@@ -59,5 +58,3 @@
 } // namespace utils
 } // namespace CPP_VERSION
 } // namespace android
-
-#endif // ANDROID_HARDWARE_VERSION_UTILS_H
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 8a9eec3..6da8bbd 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -17,18 +17,23 @@
 #define LOG_TAG "StreamHalHidl"
 //#define LOG_NDEBUG 0
 
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutCallback.h)
+#include <android/hidl/manager/1.0/IServiceManager.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioParameter.h>
 #include <mediautils/SchedulingPolicyService.h>
 #include <utils/Log.h>
 
+#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutCallback.h)
+#include <HidlUtils.h>
+#include <util/CoreUtils.h>
+
 #include "DeviceHalHidl.h"
 #include "EffectHalHidl.h"
-#include "HidlUtils.h"
+#include "ParameterUtils.h"
 #include "StreamHalHidl.h"
-#include "VersionUtils.h"
 
+using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
 using ::android::hardware::MQDescriptorSync;
 using ::android::hardware::Return;
 using ::android::hardware::Void;
@@ -50,14 +55,11 @@
 
     // Instrument audio signal power logging.
     // Note: This assumes channel mask, format, and sample rate do not change after creation.
-    if (mStream != nullptr /* && mStreamPowerLog.isUserDebugOrEngBuild() */) {
-        // Obtain audio properties (see StreamHalHidl::getAudioProperties() below).
-        Return<void> ret = mStream->getAudioProperties(
-                [&](auto sr, auto m, auto f) {
-                mStreamPowerLog.init(sr,
-                        static_cast<audio_channel_mask_t>(m),
-                        static_cast<audio_format_t>(f));
-            });
+    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+    if (/* mStreamPowerLog.isUserDebugOrEngBuild() && */
+        StreamHalHidl::getAudioProperties(
+                &config.sample_rate, &config.channel_mask, &config.format) == NO_ERROR) {
+        mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
     }
 }
 
@@ -65,9 +67,12 @@
     mStream = nullptr;
 }
 
+// Note: this method will be removed
 status_t StreamHalHidl::getSampleRate(uint32_t *rate) {
-    if (!mStream) return NO_INIT;
-    return processReturn("getSampleRate", mStream->getSampleRate(), rate);
+    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+    status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
+    *rate = config.sample_rate;
+    return status;
 }
 
 status_t StreamHalHidl::getBufferSize(size_t *size) {
@@ -79,19 +84,26 @@
     return status;
 }
 
+// Note: this method will be removed
 status_t StreamHalHidl::getChannelMask(audio_channel_mask_t *mask) {
-    if (!mStream) return NO_INIT;
-    return processReturn("getChannelMask", mStream->getChannelMask(), mask);
+    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+    status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
+    *mask = config.channel_mask;
+    return status;
 }
 
+// Note: this method will be removed
 status_t StreamHalHidl::getFormat(audio_format_t *format) {
-    if (!mStream) return NO_INIT;
-    return processReturn("getFormat", mStream->getFormat(), format);
+    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+    status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
+    *format = config.format;
+    return status;
 }
 
 status_t StreamHalHidl::getAudioProperties(
         uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
     if (!mStream) return NO_INIT;
+#if MAJOR_VERSION <= 6
     Return<void> ret = mStream->getAudioProperties(
             [&](uint32_t sr, auto m, auto f) {
                 *sampleRate = sr;
@@ -99,6 +111,26 @@
                 *format = static_cast<audio_format_t>(f);
             });
     return processReturn("getAudioProperties", ret);
+#else
+    Result retval;
+    status_t conversionStatus = BAD_VALUE;
+    audio_config_base_t halConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    Return<void> ret = mStream->getAudioProperties(
+            [&](Result r, const AudioConfigBase& config) {
+                retval = r;
+                if (retval == Result::OK) {
+                    conversionStatus = HidlUtils::audioConfigBaseToHal(config, &halConfig);
+                }
+            });
+    if (status_t status = processReturn("getAudioProperties", ret, retval); status == NO_ERROR) {
+        *sampleRate = halConfig.sample_rate;
+        *mask = halConfig.channel_mask;
+        *format = halConfig.format;
+        return conversionStatus;
+    } else {
+        return status;
+    }
+#endif
 }
 
 status_t StreamHalHidl::setParameters(const String8& kvPairs) {
@@ -226,6 +258,24 @@
     return getBufferSize(size);
 }
 
+status_t StreamHalHidl::getHalPid(pid_t *pid) {
+    using ::android::hidl::base::V1_0::DebugInfo;
+    using ::android::hidl::manager::V1_0::IServiceManager;
+
+    DebugInfo debugInfo;
+    auto ret = mStream->getDebugInfo([&] (const auto &info) {
+        debugInfo = info;
+    });
+    if (!ret.isOk()) {
+        return INVALID_OPERATION;
+    }
+    if (debugInfo.pid != (int)IServiceManager::PidConstant::NO_PID) {
+        *pid = debugInfo.pid;
+        return NO_ERROR;
+    }
+    return NAME_NOT_FOUND;
+}
+
 bool StreamHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
     if (mHalThreadPriority == HAL_THREAD_PRIORITY_DEFAULT) {
         return true;
@@ -454,7 +504,7 @@
                     const CommandMQ::Descriptor& commandMQ,
                     const DataMQ::Descriptor& dataMQ,
                     const StatusMQ::Descriptor& statusMQ,
-                    const ThreadInfo& halThreadInfo) {
+                    const auto& halThreadInfo) {
                 retval = r;
                 if (retval == Result::OK) {
                     tempCommandMQ.reset(new CommandMQ(commandMQ));
@@ -463,8 +513,12 @@
                     if (tempDataMQ->isValid() && tempDataMQ->getEventFlagWord()) {
                         EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &mEfGroup);
                     }
+#if MAJOR_VERSION <= 6
                     halThreadPid = halThreadInfo.pid;
                     halThreadTid = halThreadInfo.tid;
+#else
+                    halThreadTid = halThreadInfo;
+#endif
                 }
             });
     if (!ret.isOk() || retval != Result::OK) {
@@ -485,6 +539,11 @@
         ALOGE_IF(!mEfGroup, "Event flag creation for writing failed");
         return NO_INIT;
     }
+#if MAJOR_VERSION >= 7
+    if (status_t status = getHalPid(&halThreadPid); status != NO_ERROR) {
+        return status;
+    }
+#endif
     requestHalThreadPriority(halThreadPid, halThreadTid);
 
     mCommandMQ = std::move(tempCommandMQ);
@@ -598,40 +657,15 @@
     return INVALID_OPERATION;
 }
 #elif MAJOR_VERSION >= 4
-/** Transform a standard collection to an HIDL vector. */
-template <class Values, class ElementConverter>
-static auto transformToHidlVec(const Values& values, ElementConverter converter) {
-    hidl_vec<decltype(converter(*values.begin()))> result{values.size()};
-    using namespace std;
-    transform(begin(values), end(values), begin(result), converter);
-    return result;
-}
-
 status_t StreamOutHalHidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
-    CPP_VERSION::SourceMetadata halMetadata = {
-        .tracks = transformToHidlVec(sourceMetadata.tracks,
-              [](const playback_track_metadata_v7& metadata) -> PlaybackTrackMetadata {
-                  PlaybackTrackMetadata halTrackMetadata = {
-                      .usage=static_cast<AudioUsage>(metadata.base.usage),
-                      .contentType=static_cast<AudioContentType>(metadata.base.content_type),
-                      .gain=metadata.base.gain,
-                  };
-#if MAJOR_VERSION >= 7
-                  HidlUtils::audioChannelMaskFromHal(metadata.channel_mask, false /*isInput*/,
-                                                    &halTrackMetadata.channelMask);
-
-                  std::istringstream tags{metadata.tags};
-                  std::string tag;
-                  while (std::getline(tags, tag, HidlUtils::sAudioTagSeparator)) {
-                      if (!tag.empty()) {
-                          halTrackMetadata.tags.push_back(tag);
-                      }
-                  }
-#endif
-                  return halTrackMetadata;
-              })};
-    return processReturn("updateSourceMetadata", mStream->updateSourceMetadata(halMetadata));
+    CPP_VERSION::SourceMetadata hidlMetadata;
+    if (status_t status = CoreUtils::sourceMetadataFromHalV7(
+                    sourceMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
+            status != OK) {
+        return status;
+    }
+    return processReturn("updateSourceMetadata", mStream->updateSourceMetadata(hidlMetadata));
 }
 #endif
 
@@ -904,7 +938,7 @@
                     const CommandMQ::Descriptor& commandMQ,
                     const DataMQ::Descriptor& dataMQ,
                     const StatusMQ::Descriptor& statusMQ,
-                    const ThreadInfo& halThreadInfo) {
+                    const auto& halThreadInfo) {
                 retval = r;
                 if (retval == Result::OK) {
                     tempCommandMQ.reset(new CommandMQ(commandMQ));
@@ -913,8 +947,12 @@
                     if (tempDataMQ->isValid() && tempDataMQ->getEventFlagWord()) {
                         EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &mEfGroup);
                     }
+#if MAJOR_VERSION <= 6
                     halThreadPid = halThreadInfo.pid;
                     halThreadTid = halThreadInfo.tid;
+#else
+                    halThreadTid = halThreadInfo;
+#endif
                 }
             });
     if (!ret.isOk() || retval != Result::OK) {
@@ -935,6 +973,11 @@
         ALOGE_IF(!mEfGroup, "Event flag creation for reading failed");
         return NO_INIT;
     }
+#if MAJOR_VERSION >= 7
+    if (status_t status = getHalPid(&halThreadPid); status != NO_ERROR) {
+        return status;
+    }
+#endif
     requestHalThreadPriority(halThreadPid, halThreadTid);
 
     mCommandMQ = std::move(tempCommandMQ);
@@ -997,7 +1040,7 @@
         for (size_t k = 0; k < micArrayHal.size(); k++) {
             audio_microphone_characteristic_t dst;
             // convert
-            microphoneInfoToHal(micArrayHal[k], &dst);
+            (void)CoreUtils::microphoneInfoToHal(micArrayHal[k], &dst);
             media::MicrophoneInfo microphone = media::MicrophoneInfo(dst);
             microphonesInfo->push_back(microphone);
         }
@@ -1007,27 +1050,13 @@
 
 status_t StreamInHalHidl::updateSinkMetadata(const
         StreamInHalInterface::SinkMetadata& sinkMetadata) {
-    CPP_VERSION::SinkMetadata halMetadata = {
-        .tracks = transformToHidlVec(sinkMetadata.tracks,
-              [](const record_track_metadata_v7& metadata) -> RecordTrackMetadata {
-                  RecordTrackMetadata halTrackMetadata = {
-                      .source=static_cast<AudioSource>(metadata.base.source),
-                      .gain=metadata.base.gain,
-                  };
-#if MAJOR_VERSION >= 7
-                  HidlUtils::audioChannelMaskFromHal(metadata.channel_mask, true /*isInput*/,
-                                                    &halTrackMetadata.channelMask);
-                  std::istringstream tags{metadata.tags};
-                  std::string tag;
-                  while (std::getline(tags, tag, HidlUtils::sAudioTagSeparator)) {
-                      if (!tag.empty()) {
-                          halTrackMetadata.tags.push_back(tag);
-                      }
-                  }
-#endif
-                  return halTrackMetadata;
-              })};
-    return processReturn("updateSinkMetadata", mStream->updateSinkMetadata(halMetadata));
+    CPP_VERSION::SinkMetadata hidlMetadata;
+    if (status_t status = CoreUtils::sinkMetadataFromHalV7(
+                    sinkMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
+            status != OK) {
+        return status;
+    }
+    return processReturn("updateSinkMetadata", mStream->updateSinkMetadata(hidlMetadata));
 }
 #endif
 
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 2db4973..72ce60b 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -105,6 +105,8 @@
 
     status_t getCachedBufferSize(size_t *size);
 
+    status_t getHalPid(pid_t *pid);
+
     bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
 
     // mStreamPowerLog is used for audio signal power logging.
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
index a3f2fb4..e89b288 100644
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ b/media/libaudiohal/impl/StreamHalLocal.cpp
@@ -22,8 +22,8 @@
 #include <utils/Log.h>
 
 #include "DeviceHalLocal.h"
+#include "ParameterUtils.h"
 #include "StreamHalLocal.h"
-#include "VersionUtils.h"
 
 namespace android {
 namespace CPP_VERSION {
@@ -258,7 +258,7 @@
 
 #if MAJOR_VERSION >= 7
 void StreamOutHalLocal::doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata) {
-    const source_metadata_t metadata {
+    const source_metadata_v7_t metadata {
         .track_count = sourceMetadata.tracks.size(),
         // const cast is fine as it is in a const structure
         .tracks = const_cast<playback_track_metadata_v7*>(sourceMetadata.tracks.data()),
@@ -274,7 +274,7 @@
     }
     doUpdateSourceMetadata(sourceMetadata);
 #else
-    if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2)
+    if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
         if (mStream->update_source_metadata == nullptr) {
             return INVALID_OPERATION;
         }
@@ -446,13 +446,12 @@
 
 status_t StreamInHalLocal::updateSinkMetadata(const SinkMetadata& sinkMetadata) {
 #if MAJOR_VERSION < 7
-
     if (mStream->update_sink_metadata == nullptr) {
         return INVALID_OPERATION;  // not supported by the HAL
     }
     doUpdateSinkMetadata(sinkMetadata);
 #else
-    if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2)
+    if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
         if (mStream->update_sink_metadata == nullptr) {
             return INVALID_OPERATION;  // not supported by the HAL
         }
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 097e9a2..b47f536 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -31,18 +31,22 @@
 class StreamHalInterface : public virtual RefBase
 {
   public:
+    // TODO(mnaganov): Remove
     // Return the sampling rate in Hz - eg. 44100.
     virtual status_t getSampleRate(uint32_t *rate) = 0;
 
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size) = 0;
 
+    // TODO(mnaganov): Remove
     // Return the channel mask.
     virtual status_t getChannelMask(audio_channel_mask_t *mask) = 0;
 
+    // TODO(mnaganov): Remove
     // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
     virtual status_t getFormat(audio_format_t *format) = 0;
 
+    // TODO(mnaganov): Change to use audio_config_base_t
     // Convenience method.
     virtual status_t getAudioProperties(
             uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) = 0;
diff --git a/media/libeffects/preprocessing/tests/correlation.cpp b/media/libeffects/preprocessing/tests/correlation.cpp
index b13dcc7..eb56fc3 100644
--- a/media/libeffects/preprocessing/tests/correlation.cpp
+++ b/media/libeffects/preprocessing/tests/correlation.cpp
@@ -107,12 +107,19 @@
         return EXIT_FAILURE;
     }
 
-    int numFrames = fileSize1 / sizeof(int16_t);
+    size_t numFrames = fileSize1 / sizeof(int16_t);
     std::unique_ptr<int16_t[]> inBuffer1(new int16_t[numFrames]());
     std::unique_ptr<int16_t[]> inBuffer2(new int16_t[numFrames]());
 
-    fread(inBuffer1.get(), sizeof(int16_t), numFrames, fInput1.get());
-    fread(inBuffer2.get(), sizeof(int16_t), numFrames, fInput2.get());
+    if (numFrames != fread(inBuffer1.get(), sizeof(int16_t), numFrames, fInput1.get())) {
+        printf("\nError: Unable to read %zu samples from file %s\n", numFrames, argv[1]);
+        return EXIT_FAILURE;
+    }
+
+    if (numFrames != fread(inBuffer2.get(), sizeof(int16_t), numFrames, fInput2.get())) {
+        printf("\nError: Unable to read %zu samples from file %s\n", numFrames, argv[2]);
+        return EXIT_FAILURE;
+    }
 
     auto pairAutoCorr1 = correlation(inBuffer1.get(), inBuffer1.get(), numFrames, enableCrossCorr);
     auto pairAutoCorr2 = correlation(inBuffer2.get(), inBuffer2.get(), numFrames, enableCrossCorr);
diff --git a/media/libmediahelper/tests/typeconverter_tests.cpp b/media/libmediahelper/tests/typeconverter_tests.cpp
index d7bfb89..181d636 100644
--- a/media/libmediahelper/tests/typeconverter_tests.cpp
+++ b/media/libmediahelper/tests/typeconverter_tests.cpp
@@ -182,8 +182,9 @@
         audio_format_t format;
         EXPECT_TRUE(FormatConverter::fromString(stringVal, format))
                 << "Conversion of \"" << stringVal << "\" failed";
-        EXPECT_TRUE(audio_is_valid_format(format))
-                << "Converted format \"" << stringVal << "\" is invalid";
+        EXPECT_EQ(enumVal != xsd::AudioFormat::AUDIO_FORMAT_DEFAULT,
+                audio_is_valid_format(format))
+                << "Validity of \"" << stringVal << "\" is not as expected";
         EXPECT_EQ(stringVal, toString(format));
     }
 }
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 4fe871f..0ed0de1 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -317,7 +317,7 @@
 
 class MediaCodec::ReleaseSurface {
 public:
-    ReleaseSurface() {
+    explicit ReleaseSurface(uint64_t usage) {
         BufferQueue::createBufferQueue(&mProducer, &mConsumer);
         mSurface = new Surface(mProducer, false /* controlledByApp */);
         struct ConsumerListener : public BnConsumerListener {
@@ -328,6 +328,7 @@
         sp<ConsumerListener> listener{new ConsumerListener};
         mConsumer->consumerConnect(listener, false);
         mConsumer->setConsumerName(String8{"MediaCodec.release"});
+        mConsumer->setConsumerUsageBits(usage);
     }
 
     const sp<Surface> &getSurface() {
@@ -3122,7 +3123,11 @@
             if (asyncNotify != nullptr) {
                 if (mSurface != NULL) {
                     if (!mReleaseSurface) {
-                        mReleaseSurface.reset(new ReleaseSurface);
+                        uint64_t usage = 0;
+                        if (mSurface->getConsumerUsage(&usage) != OK) {
+                            usage = 0;
+                        }
+                        mReleaseSurface.reset(new ReleaseSurface(usage));
                     }
                     if (mSurface != mReleaseSurface->getSurface()) {
                         status_t err = connectToSurface(mReleaseSurface->getSurface());
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index 5f3f72c..4a505d4 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -9,7 +9,6 @@
         "libmedia",
         "libstagefright",
         "libstagefright_foundation",
-        "libstagefright_omx",
         "libutils",
         "liblog",
     ],
@@ -17,11 +16,8 @@
     include_dirs: [
         "frameworks/av/media/libstagefright",
         "frameworks/av/media/libstagefright/include",
-        "frameworks/native/include/media/openmax",
     ],
 
-    compile_multilib: "prefer32",
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 49ff69a..d39dd18 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -8,7 +8,9 @@
     ],
     shared_libs: [
         "libstagefright",
-	"libstagefright_codecbase",
+        "libstagefright_codecbase",
+        "libbase",
+        "libcutils",
         "libutils",
         "libstagefright_foundation",
         "libmedia",
@@ -16,7 +18,10 @@
         "libmedia_omx",
         "libgui",
         "libbinder",
-        "libcutils",
+        "liblog",
+    ],
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
     ],
 }
 
@@ -51,3 +56,27 @@
     ],
     defaults: ["libstagefright_fuzzer_defaults"],
 }
+
+cc_fuzz {
+    name: "libstagefright_frameDecoder_fuzzer",
+    srcs: [
+        "FrameDecoderFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
+
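+// The writer fuzzer links the container writers statically and uses formats.dict as a
+// libFuzzer dictionary of MPEG4/WebM/Ogg format tokens.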
+cc_fuzz {
+    name: "libstagefright_writer_fuzzer",
+    srcs: [
+        "FuzzerMediaUtility.cpp",
+        "WriterFuzzer.cpp",
+    ],
+    dictionary: "dictionaries/formats.dict",
+    defaults: ["libstagefright_fuzzer_defaults"],
+    static_libs: [
+        "libstagefright_webm",
+        "libdatasource",
+        "libstagefright_esds",
+        "libogg",
+    ],
+}
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
new file mode 100644
index 0000000..a628c70
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "include/FrameDecoder.h"
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IMediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AString.h>
+#include "FrameDecoderHelpers.h"
+#include "IMediaSourceFuzzImpl.h"
+
+namespace android {
+
+#define MAX_MEDIA_BUFFER_SIZE 2048
+
+// Fuzzer entry point.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    // Init our wrapper
+    FuzzedDataProvider fdp(data, size);
+
+    std::string name = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+    AString componentName(name.c_str());
+    sp<MetaData> trackMeta = generateMetaData(&fdp);
+    sp<IMediaSource> source = new IMediaSourceFuzzImpl(&fdp, MAX_MEDIA_BUFFER_SIZE);
+
+    // Image or video Decoder?
+    sp<FrameDecoder> decoder;
+    bool isVideoDecoder = fdp.ConsumeBool();
+    if (isVideoDecoder) {
+        decoder = new VideoFrameDecoder(componentName, trackMeta, source);
+    } else {
+        decoder = new ImageDecoder(componentName, trackMeta, source);
+    }
+
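+    // Drive the decoder with a random sequence of init/extractFrame/getMetadataOnly calls
+    // until the fuzzed input is exhausted.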
+    while (fdp.remaining_bytes()) {
+        switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 3)) {
+            case 0:
+                decoder->init(/*frameTimeUs*/ fdp.ConsumeIntegral<int64_t>(),
+                              /*option*/ fdp.ConsumeIntegral<int>(),
+                              /*colorFormat*/ fdp.ConsumeIntegral<int>());
+                break;
+            case 1:
+                decoder->extractFrame();
+                break;
+            case 2: {
+                FrameRect rect;
+                rect.left = fdp.ConsumeIntegral<int32_t>();
+                rect.top = fdp.ConsumeIntegral<int32_t>();
+                rect.right = fdp.ConsumeIntegral<int32_t>();
+                rect.bottom = fdp.ConsumeIntegral<int32_t>();
+                decoder->extractFrame(&rect);
+                break;
+            }
+            case 3: {
+                sp<MetaData> trackMeta = generateMetaData(&fdp);
+                decoder->getMetadataOnly(trackMeta,
+                                         /*colorFormat*/ fdp.ConsumeIntegral<int>(),
+                                         /*thumbnail*/ fdp.ConsumeBool());
+                break;
+            }
+        }
+    }
+
+    generated_mime_types.clear();
+
+    return 0;
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
new file mode 100644
index 0000000..228c04a
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
@@ -0,0 +1,90 @@
+
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/stagefright/MetaData.h>
+#include "MediaMimeTypes.h"
+
+#define MAX_METADATA_BUF_SIZE 512
+
+namespace android {
+
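+// Keeps randomly generated MIME strings alive for the duration of a fuzz iteration;
+// the fuzzer entry point clears this container after each run.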
+std::vector<std::shared_ptr<char>> generated_mime_types;
+
+sp<MetaData> generateMetaData(FuzzedDataProvider *fdp) {
+    sp<MetaData> newMeta = new MetaData();
+
+    // random MIME Type
+    const char *mime_type;
+    size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
+    // Let there be a chance of a true random string
+    if (index == kMimeTypes.size()) {
+        std::string mime_str = fdp->ConsumeRandomLengthString(64);
+        std::shared_ptr<char> mime_cstr(new char[mime_str.length() + 1],
+                                        std::default_delete<char[]>());
+        generated_mime_types.push_back(mime_cstr);
+        strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
+        mime_type = mime_cstr.get();
+    } else {
+        mime_type = kMimeTypes[index];
+    }
+    newMeta->setCString(kKeyMIMEType, mime_type);
+
+    // Thumbnail time
+    newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<int64_t>());
+
+    // Values used by allocVideoFrame
+    newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegral<int32_t>());
+    size_t profile_size =
+        fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
+    std::vector<uint8_t> profile_bytes =
+        fdp->ConsumeBytes<uint8_t>(profile_size);
+    newMeta->setData(kKeyIccProfile,
+                     fdp->ConsumeIntegral<int32_t>(),
+                     profile_bytes.empty() ? nullptr : profile_bytes.data(),
+                     profile_bytes.size());
+    newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyDisplayWidth, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyDisplayHeight, fdp->ConsumeIntegral<int32_t>());
+
+    // Values used by findThumbnailInfo
+    newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<int32_t>());
+    size_t thumbnail_size =
+        fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
+    std::vector<uint8_t> thumb_bytes =
+        fdp->ConsumeBytes<uint8_t>(thumbnail_size);
+    newMeta->setData(kKeyThumbnailHVCC,
+                     fdp->ConsumeIntegral<int32_t>(),
+                     thumb_bytes.empty() ? nullptr : thumb_bytes.data(),
+                     thumb_bytes.size());
+
+    // Values used by findGridInfo
+    newMeta->setInt32(kKeyTileWidth, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyTileHeight, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<int32_t>());
+
+    // A few functions perform a CHECK() that height/width are set
+    newMeta->setInt32(kKeyHeight, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyWidth, fdp->ConsumeIntegral<int32_t>());
+
+    return newMeta;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
new file mode 100644
index 0000000..810ae95
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "FuzzerMediaUtility.h"
+
+#include <media/stagefright/AACWriter.h>
+#include <media/stagefright/AMRWriter.h>
+#include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OggWriter.h>
+
+#include "MediaMimeTypes.h"
+#include "webm/WebmWriter.h"
+
+namespace android {
+std::string genMimeType(FuzzedDataProvider *dataProvider) {
+    uint8_t idx = dataProvider->ConsumeIntegralInRange<uint8_t>(0, kMimeTypes.size() - 1);
+    return std::string(kMimeTypes[idx]);
+}
+
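+// Wraps fuzzer-provided bytes in a base64 "data:" URI labeled with the given MIME type
+// and creates a media extractor over it.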
+sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, std::string mimeType,
+                                      uint16_t maxDataAmount) {
+    uint32_t dataBlobSize = dataProvider->ConsumeIntegralInRange<uint16_t>(0, maxDataAmount);
+    std::vector<uint8_t> data = dataProvider->ConsumeBytes<uint8_t>(dataBlobSize);
+    // data:[<mediatype>][;base64],<data>
+    std::string uri("data:");
+    uri += mimeType;
+    // Currently libstagefright only accepts base64 uris
+    uri += ";base64,";
+    android::AString out;
+    android::encodeBase64(data.data(), data.size(), &out);
+    uri += out.c_str();
+
+    sp<DataSource> source =
+        DataSourceFactory::getInstance()->CreateFromURI(NULL /* httpService */, uri.c_str());
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return MediaExtractorFactory::Create(source);
+}
+
+sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize) {
+    std::string mime = genMimeType(dataProvider);
+    sp<IMediaExtractor> extractor = genMediaExtractor(dataProvider, mime, maxMediaBlobSize);
+
+    if (extractor == NULL) {
+        return NULL;
+    }
+
+    for (size_t i = 0; i < extractor->countTracks(); ++i) {
+        sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+        const char *trackMime;
+        if (meta != NULL && meta->findCString(kKeyMIMEType, &trackMime) &&
+                !strcasecmp(mime.c_str(), trackMime)) {
+            sp<IMediaSource> track = extractor->getTrack(i);
+            if (track == NULL) {
+                return NULL;
+            }
+            return new CallbackMediaSource(track);
+        }
+    }
+
+    return NULL;
+}
+
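+// Instantiates the requested writer over fd and records the matching output format in fileMeta.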
+sp<MediaWriter> createWriter(int fd, StandardWriters writerType, sp<MetaData> fileMeta) {
+    sp<MediaWriter> writer;
+    switch (writerType) {
+        case OGG:
+            writer = new OggWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
+            break;
+        case AAC:
+            writer = new AACWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADIF);
+            break;
+        case AAC_ADTS:
+            writer = new AACWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADTS);
+            break;
+        case WEBM:
+            writer = new WebmWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_WEBM);
+            break;
+        case MPEG4:
+            writer = new MPEG4Writer(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG_4);
+            break;
+        case AMR_NB:
+            writer = new AMRWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_NB);
+            break;
+        case AMR_WB:
+            writer = new AMRWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_WB);
+            break;
+        case MPEG2TS:
+            writer = new MPEG2TSWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG2TS);
+            break;
+        default:
+            return nullptr;
+    }
+    if (writer != nullptr) {
+        fileMeta->setInt32(kKeyRealTimeRecording, false);
+    }
+    return writer;
+}
+}  // namespace android
\ No newline at end of file
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
new file mode 100644
index 0000000..98bfb94
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <datasource/DataSourceFactory.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <android/IMediaExtractor.h>
+#include <media/IMediaHTTPService.h>
+#include <media/mediarecorder.h>
+#include <media/stagefright/CallbackMediaSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MediaWriter.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/base64.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+enum StandardWriters {
+    OGG,
+    AAC,
+    AAC_ADTS,
+    WEBM,
+    MPEG4,
+    AMR_NB,
+    AMR_WB,
+    MPEG2TS,
+    // Allows FuzzedDataProvider to find the end of this enum.
+    kMaxValue = MPEG2TS,
+};
+
+std::string genMimeType(FuzzedDataProvider *dataProvider);
+sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, std::string mimeType,
+                                      uint16_t maxDataAmount);
+sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize);
+
+sp<MediaWriter> createWriter(int32_t fd, StandardWriters writerType, sp<MetaData> fileMeta);
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
new file mode 100644
index 0000000..e769950
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMEDIASOURCEFUZZIMPL_H
+#define IMEDIASOURCEFUZZIMPL_H
+
+#include <media/stagefright/MediaSource.h>
+
+namespace android {
+
+class IMediaSourceFuzzImpl : public IMediaSource {
+ public:
+    IMediaSourceFuzzImpl(FuzzedDataProvider *_fdp, size_t _max_buffer_size) :
+        fdp(_fdp),
+        max_buffer_size(_max_buffer_size) {}
+    status_t start(MetaData*) override { return 0; }
+    status_t stop() override { return 0; }
+    sp<MetaData> getFormat() override { return nullptr; }
+    status_t read(MediaBufferBase**,
+        const MediaSource::ReadOptions*) override;
+    status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
+        const MediaSource::ReadOptions*) override;
+    bool supportReadMultiple() override { return true; }
+    bool supportNonblockingRead() override { return true; }
+    status_t pause() override { return 0; }
+
+ protected:
+    IBinder* onAsBinder() { return nullptr; }
+
+ private:
+    FuzzedDataProvider *fdp;
+    std::vector<std::shared_ptr<MediaBufferBase>> buffer_bases;
+    const size_t max_buffer_size;
+};
+
+// This class is simply to expose the destructor
+class MediaBufferFuzzImpl : public MediaBuffer {
+ public:
+    MediaBufferFuzzImpl(void *data, size_t size) : MediaBuffer(data, size) {}
+    ~MediaBufferFuzzImpl() {}
+};
+
+status_t IMediaSourceFuzzImpl::read(MediaBufferBase **buffer,
+        const MediaSource::ReadOptions *options) {
+    Vector<MediaBufferBase*> buffers;
+    status_t ret = readMultiple(&buffers, 1, options);
+    *buffer = buffers.empty() ? nullptr : buffers[0];
+
+    return ret;
+}
+
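+// Returns up to maxNumBuffers buffers filled from the fuzzed input; the shared_ptr copies
+// kept in buffer_bases own the MediaBuffer objects for the lifetime of this source.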
+status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers,
+        uint32_t maxNumBuffers, const MediaSource::ReadOptions*) {
+    uint32_t num_buffers =
+        fdp->ConsumeIntegralInRange<uint32_t>(0, maxNumBuffers);
+    for(uint32_t i = 0; i < num_buffers; i++) {
+        std::vector<uint8_t> buf = fdp->ConsumeBytes<uint8_t>(
+            fdp->ConsumeIntegralInRange<size_t>(0, max_buffer_size));
+
+        std::shared_ptr<MediaBufferBase> mbb(
+            new MediaBufferFuzzImpl(buf.data(), buf.size()));
+
+        buffer_bases.push_back(mbb);
+        buffers->push_back(mbb.get());
+    }
+
+    // STATUS_OK
+    return 0;
+}
+
+} // namespace android
+
+#endif // IMEDIASOURCEFUZZIMPL_H
+
diff --git a/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp
index e473541..9b26f0b 100644
--- a/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp
@@ -17,52 +17,53 @@
 //          dylan.katz@leviathansecurity.com
 
 #include <fuzzer/FuzzedDataProvider.h>
-#include <media/stagefright/MediaClock.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaClock.h>
 
 namespace android {
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-  sp<MediaClock> mClock(new MediaClock);
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    sp<MediaClock> mClock(new MediaClock);
 
-  bool registered = false;
-  while (fdp.remaining_bytes() > 0) {
-    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 5)) {
-    case 0: {
-      if (registered == false) {
-        mClock->init();
-        registered = true;
-      }
-      break;
+    bool registered = false;
+    while (fdp.remaining_bytes() > 0) {
+        switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 5)) {
+            case 0: {
+                if (registered == false) {
+                    mClock->init();
+                    registered = true;
+                }
+                break;
+            }
+            case 1: {
+                int64_t startingTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+                mClock->setStartingTimeMedia(startingTimeMediaUs);
+                break;
+            }
+            case 2: {
+                mClock->clearAnchor();
+                break;
+            }
+            case 3: {
+                int64_t anchorTimeRealUs = fdp.ConsumeIntegral<int64_t>();
+                int64_t anchorTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+                int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+                mClock->updateAnchor(anchorTimeMediaUs, anchorTimeRealUs,
+                                     maxTimeMediaUs);
+                break;
+            }
+            case 4: {
+                int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+                mClock->updateMaxTimeMedia(maxTimeMediaUs);
+                break;
+            }
+            case 5: {
+                wp<AMessage> msg(new AMessage);
+                mClock->setNotificationMessage(msg.promote());
+            }
+        }
     }
-    case 1: {
-      int64_t startingTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
-      mClock->setStartingTimeMedia(startingTimeMediaUs);
-      break;
-    }
-    case 2: {
-      mClock->clearAnchor();
-      break;
-    }
-    case 3: {
-      int64_t anchorTimeRealUs = fdp.ConsumeIntegral<int64_t>();
-      int64_t anchorTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
-      int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
-      mClock->updateAnchor(anchorTimeMediaUs, anchorTimeRealUs, maxTimeMediaUs);
-      break;
-    }
-    case 4: {
-      int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
-      mClock->updateMaxTimeMedia(maxTimeMediaUs);
-      break;
-    }
-    case 5: {
-      wp<AMessage> msg(new AMessage);
-      mClock->setNotificationMessage(msg.promote());
-    }
-    }
-  }
 
-  return 0;
+    return 0;
 }
-} // namespace android
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
new file mode 100644
index 0000000..9f337ac
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FUZZER_MEDIAMIMETYPES_H_
+#define FUZZER_MEDIAMIMETYPES_H_
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+namespace android {
+
+static const std::vector<const char*> kMimeTypes {
+    MEDIA_MIMETYPE_IMAGE_JPEG,
+    MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
+    MEDIA_MIMETYPE_VIDEO_VP8,
+    MEDIA_MIMETYPE_VIDEO_VP9,
+    MEDIA_MIMETYPE_VIDEO_AV1,
+    MEDIA_MIMETYPE_VIDEO_AVC,
+    MEDIA_MIMETYPE_VIDEO_HEVC,
+    MEDIA_MIMETYPE_VIDEO_MPEG4,
+    MEDIA_MIMETYPE_VIDEO_H263,
+    MEDIA_MIMETYPE_VIDEO_MPEG2,
+    MEDIA_MIMETYPE_VIDEO_RAW,
+    MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+    MEDIA_MIMETYPE_VIDEO_SCRAMBLED,
+    MEDIA_MIMETYPE_VIDEO_DIVX,
+    MEDIA_MIMETYPE_VIDEO_DIVX3,
+    MEDIA_MIMETYPE_VIDEO_XVID,
+    MEDIA_MIMETYPE_VIDEO_MJPEG,
+    MEDIA_MIMETYPE_AUDIO_AMR_NB,
+    MEDIA_MIMETYPE_AUDIO_AMR_WB,
+    MEDIA_MIMETYPE_AUDIO_MPEG,
+    MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+    MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+    MEDIA_MIMETYPE_AUDIO_MIDI,
+    MEDIA_MIMETYPE_AUDIO_AAC,
+    MEDIA_MIMETYPE_AUDIO_QCELP,
+    MEDIA_MIMETYPE_AUDIO_VORBIS,
+    MEDIA_MIMETYPE_AUDIO_OPUS,
+    MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+    MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+    MEDIA_MIMETYPE_AUDIO_RAW,
+    MEDIA_MIMETYPE_AUDIO_FLAC,
+    MEDIA_MIMETYPE_AUDIO_AAC_ADTS,
+    MEDIA_MIMETYPE_AUDIO_MSGSM,
+    MEDIA_MIMETYPE_AUDIO_AC3,
+    MEDIA_MIMETYPE_AUDIO_EAC3,
+    MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+    MEDIA_MIMETYPE_AUDIO_AC4,
+    MEDIA_MIMETYPE_AUDIO_SCRAMBLED,
+    MEDIA_MIMETYPE_AUDIO_ALAC,
+    MEDIA_MIMETYPE_AUDIO_WMA,
+    MEDIA_MIMETYPE_AUDIO_MS_ADPCM,
+    MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM,
+    MEDIA_MIMETYPE_CONTAINER_MPEG4,
+    MEDIA_MIMETYPE_CONTAINER_WAV,
+    MEDIA_MIMETYPE_CONTAINER_OGG,
+    MEDIA_MIMETYPE_CONTAINER_MATROSKA,
+    MEDIA_MIMETYPE_CONTAINER_MPEG2TS,
+    MEDIA_MIMETYPE_CONTAINER_AVI,
+    MEDIA_MIMETYPE_CONTAINER_MPEG2PS,
+    MEDIA_MIMETYPE_CONTAINER_HEIF,
+    MEDIA_MIMETYPE_TEXT_3GPP,
+    MEDIA_MIMETYPE_TEXT_SUBRIP,
+    MEDIA_MIMETYPE_TEXT_VTT,
+    MEDIA_MIMETYPE_TEXT_CEA_608,
+    MEDIA_MIMETYPE_TEXT_CEA_708,
+    MEDIA_MIMETYPE_DATA_TIMED_ID3
+};
+
+}  // namespace android
+
+#endif  // FUZZER_MEDIAMIMETYPES_H_
diff --git a/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp b/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp
index a072b7c..c50c951 100644
--- a/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp
@@ -16,7 +16,6 @@
 // Authors: corbin.souffrant@leviathansecurity.com
 //          dylan.katz@leviathansecurity.com
 
-#include <cutils/ashmem.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/stagefright/StagefrightMediaScanner.h>
 
@@ -24,59 +23,39 @@
 
 namespace android {
 class FuzzMediaScannerClient : public MediaScannerClient {
-public:
-  virtual status_t scanFile(const char *, long long, long long, bool, bool) {
-    return 0;
-  }
+ public:
+    virtual status_t scanFile(const char*, long long, long long, bool, bool) {
+        return 0;
+    }
 
-  virtual status_t handleStringTag(const char *, const char *) { return 0; }
+    virtual status_t handleStringTag(const char*, const char*) { return 0; }
 
-  virtual status_t setMimeType(const char *) { return 0; }
+    virtual status_t setMimeType(const char*) { return 0; }
 };
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-  StagefrightMediaScanner mScanner = StagefrightMediaScanner();
-  // Without this, the fuzzer crashes for some reason.
-  mScanner.setLocale("");
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    StagefrightMediaScanner mScanner = StagefrightMediaScanner();
+    // Without this, the fuzzer crashes for some reason.
+    mScanner.setLocale("");
 
-  size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
-  int fd =
-      ashmem_create_region("stagefrightmediascanner_fuzz_region", data_size);
-  if (fd < 0)
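+    // Randomly interleave processFile() and extractAlbumArt() calls driven by the fuzzed input.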
+    while (fdp.remaining_bytes() > 0) {
+        switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 1)) {
+            case 0: {
+                std::string path = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+                std::string mimeType =
+                    fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+                std::shared_ptr<MediaScannerClient> client(new FuzzMediaScannerClient());
+                mScanner.processFile(path.c_str(), mimeType.c_str(), *client);
+                break;
+            }
+            case 1: {
+                int fd = fdp.ConsumeIntegral<int>();
+                if (fd >= 0 && fd <= 2) fd = 3;
+                mScanner.extractAlbumArt(fd);
+            }
+        }
+    }
     return 0;
-
-  uint8_t *sh_data = static_cast<uint8_t *>(
-      mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
-  if (sh_data == MAP_FAILED)
-    return 0;
-
-  while (fdp.remaining_bytes() > 8) {
-    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 1)) {
-    case 0: {
-      std::string path = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
-      std::string mimeType =
-          fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
-      std::shared_ptr<MediaScannerClient> client(new FuzzMediaScannerClient());
-      mScanner.processFile(path.c_str(), mimeType.c_str(), *client);
-      break;
-    }
-    case 1: {
-      size_t to_copy = fdp.ConsumeIntegralInRange<size_t>(1, data_size);
-      std::vector<uint8_t> rand_buf = fdp.ConsumeBytes<uint8_t>(to_copy);
-
-      // If fdp doesn't have enough bytes left it will just make a shorter
-      // vector.
-      to_copy = std::min(rand_buf.size(), data_size);
-
-      std::copy(sh_data, sh_data + to_copy, rand_buf.begin());
-      mScanner.extractAlbumArt(fd);
-    }
-    }
-  }
-
-  munmap(sh_data, data_size);
-  close(fd);
-  return 0;
 }
-} // namespace android
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
new file mode 100644
index 0000000..03e9b43
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
@@ -0,0 +1,222 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <android-base/file.h>
+#include <ctype.h>
+#include <media/mediarecorder.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <stdlib.h>
+#include <utils/StrongPointer.h>
+#include <utils/Vector.h>
+
+#include <functional>
+#include <string>
+
+#include "FuzzerMediaUtility.h"
+#include "fuzzer/FuzzedDataProvider.h"
+
+static constexpr uint16_t kMaxOperations = 5000;
+static constexpr uint8_t kMaxPackageNameLen = 50;
+// For other strings in mpeg we want a higher limit.
+static constexpr uint16_t kMaxMPEGStrLen = 1000;
+static constexpr uint16_t kMaxMediaBlobSize = 1000;
+
+namespace android {
+
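+// Builds a four-character code from the fuzzed input, replacing embedded NULs and
+// padding with 'a' to reach four characters.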
+std::string getFourCC(FuzzedDataProvider *fdp) {
+    std::string fourCC = fdp->ConsumeRandomLengthString(4);
+    // Replace any existing nulls
+    for (size_t pos = 0; pos < fourCC.length(); pos++) {
+        if (fourCC.at(pos) == '\0') {
+            fourCC.replace(pos, 1, "a");
+        }
+    }
+
+    // If our string is too short, fill the remainder with "a"s.
+    while (fourCC.length() < 4) {
+        fourCC += 'a';
+    }
+    return fourCC;
+}
+
+typedef std::vector<std::function<void(FuzzedDataProvider*,
+                                    sp<MediaWriter>, sp<MetaData>, int tmpFileFd)>> OperationVec;
+typedef std::vector<std::function<void(FuzzedDataProvider*, MPEG4Writer*)>> MPEG4OperationVec;
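+// Operations valid on any MediaWriter; each entry receives the fuzzed data provider, the
+// writer under test, the shared file metadata, and a writable temporary fd.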
+static const OperationVec operations = {
+    [](FuzzedDataProvider*, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->pause();
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
+        bool valid_fd = dataProvider->ConsumeBool();
+        int fd = -1;
+        if (valid_fd) {
+            fd = tmpFd;
+        }
+        // Args don't seem to be used
+        Vector<String16> args;
+        mediaWriter->dump(fd, args);
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
+        bool valid_fd = dataProvider->ConsumeBool();
+        int fd = -1;
+        if (valid_fd) {
+            fd = tmpFd;
+        }
+        mediaWriter->setNextFd(fd);
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setCaptureRate(dataProvider->ConsumeFloatingPoint<float>());
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setStartTimeOffsetMs(dataProvider->ConsumeIntegral<int>());
+
+        // Likely won't do much, but might as well as do a quick check
+        // while we're here.
+        mediaWriter->getStartTimeOffsetMs();
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
+    },
+};
+
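+// Operations specific to MPEG4Writer, only selected when the fuzzed writer type is MPEG4.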
+static const MPEG4OperationVec mpeg4Operations = {
+    [](FuzzedDataProvider*, MPEG4Writer *mediaWriter) { mediaWriter->notifyApproachingLimit(); },
+    // Lower-level write methods.
+    // High-level startBox/endBox/etc. are all called elsewhere.
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint8_t val = dataProvider->ConsumeIntegral<uint8_t>();
+        mediaWriter->writeInt8(val);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint16_t val = dataProvider->ConsumeIntegral<uint16_t>();
+        mediaWriter->writeInt16(val);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint32_t val = dataProvider->ConsumeIntegral<uint32_t>();
+        mediaWriter->writeInt32(val);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint64_t val = dataProvider->ConsumeIntegral<uint64_t>();
+        mediaWriter->writeInt64(val);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        std::string strVal = dataProvider->ConsumeRandomLengthString(kMaxMPEGStrLen);
+        mediaWriter->writeCString(strVal.c_str());
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        std::string fourCC = getFourCC(dataProvider);
+        mediaWriter->writeFourcc(fourCC.c_str());
+    },
+
+    // Misc setters
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint32_t layers = dataProvider->ConsumeIntegral<uint32_t>();
+        mediaWriter->setTemporalLayerCount(layers);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint32_t duration = dataProvider->ConsumeIntegral<uint32_t>();
+        mediaWriter->setInterleaveDuration(duration);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        int lat = dataProvider->ConsumeIntegral<int>();
+        int lon = dataProvider->ConsumeIntegral<int>();
+        mediaWriter->setGeoData(lat, lon);
+    },
+};
+
+// Not all writers can always add new sources, so we'll need additional checks.
+void addSource(FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter) {
+    sp<MediaSource> mediaSource = genMediaSource(dataProvider, kMaxMediaBlobSize);
+    if (mediaSource == NULL) {
+        // There's a static check preventing NULLs in addSource.
+        return;
+    }
+    mediaWriter->addSource(mediaSource);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider dataProvider(data, size);
+    TemporaryFile tf;
+    sp<MetaData> fileMeta = new MetaData;
+    StandardWriters writerType = dataProvider.ConsumeEnum<StandardWriters>();
+    sp<MediaWriter> writer = createWriter(tf.fd, writerType, fileMeta);
+
+    std::string packageName = dataProvider.ConsumeRandomLengthString(kMaxPackageNameLen);
+
+    sp<MediaRecorder> mr = new MediaRecorder(String16(packageName.c_str()));
+    writer->setListener(mr);
+
+    uint8_t baseOpLen = operations.size();
+    uint8_t totalLen = baseOpLen;
+    uint8_t maxSources;
+    // Different writers support different numbers of sources.
+    switch (writerType) {
+        case StandardWriters::AAC:
+        case StandardWriters::AAC_ADTS:
+        case StandardWriters::AMR_NB:
+        case StandardWriters::AMR_WB:
+        case StandardWriters::OGG:
+            maxSources = 1;
+            break;
+        case StandardWriters::WEBM:
+            maxSources = 2;
+            break;
+        default:
+            maxSources = UINT8_MAX;
+            break;
+    }
+    // Initialize some number of sources and add them to our writer.
+    uint8_t sourceCount = dataProvider.ConsumeIntegralInRange<uint8_t>(0, maxSources);
+    for (uint8_t i = 0; i < sourceCount; i++) {
+        addSource(&dataProvider, writer);
+    }
+
+    // Increase our range if additional operations are implemented.
+    // Currently only MPEG4 has additional public operations on its writer.
+    if (writerType == StandardWriters::MPEG4) {
+        totalLen += mpeg4Operations.size();
+    }
+
+    // Many operations require the writer to be started.
+    writer->start(fileMeta.get());
+    for (size_t ops_run = 0; dataProvider.remaining_bytes() > 0 && ops_run < kMaxOperations - 1;
+            ops_run++) {
+        uint8_t op = dataProvider.ConsumeIntegralInRange<uint8_t>(0, totalLen - 1);
+        if (op < baseOpLen) {
+            operations[op](&dataProvider, writer, fileMeta, tf.fd);
+        } else if (writerType == StandardWriters::MPEG4) {
+            mpeg4Operations[op - baseOpLen](&dataProvider, (MPEG4Writer*)writer.get());
+        } else {
+            // Should be unreachable: totalLen only exceeds baseOpLen for MPEG4 writers.
+            operations[op % baseOpLen](&dataProvider, writer, fileMeta, tf.fd);
+        }
+    }
+    writer->stop();
+
+    writer.clear();
+    writer = nullptr;
+    return 0;
+}
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/dictionaries/formats.dict b/media/libstagefright/tests/fuzzers/dictionaries/formats.dict
new file mode 100644
index 0000000..4ab22de
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/dictionaries/formats.dict
@@ -0,0 +1,299 @@
+############################################################################################################
+# This file is a combination of these dictionaries:                                                        #
+# https://github.com/google/fuzzing/blob/0c48531c4d317cea9479b3ec1b0ddb9edc438c3a/dictionaries/mp4.dict    #
+# https://github.com/google/fuzzing/blob/0c48531c4d317cea9479b3ec1b0ddb9edc438c3a/dictionaries/webm.dict   #
+# https://github.com/google/fuzzing/blob/0c48531c4d317cea9479b3ec1b0ddb9edc438c3a/dictionaries/ogg.dict    #
+# MPEG4, OGG, and WEBM are all formats used by MediaWriter.                                                #
+############################################################################################################
+# mp4.dict
+# Taken from https://chromium.googlesource.com/chromium/src/+/master/media/test/mp4.dict
+FOURCC_NULL="\x00\x00\x00\x00"
+FOURCC_AC3 ="\x61\x63\x2d\x33"
+FOURCC_EAC3="\x65\x63\x2d\x33"
+FOURCC_AVC1="\x61\x76\x63\x31"
+FOURCC_AVC3="\x61\x76\x63\x33"
+FOURCC_AVCC="\x61\x76\x63\x43"
+FOURCC_BLOC="\x62\x6C\x6F\x63"
+FOURCC_CENC="\x63\x65\x6e\x63"
+FOURCC_CO64="\x63\x6f\x36\x34"
+FOURCC_CTTS="\x63\x74\x74\x73"
+FOURCC_DINF="\x64\x69\x6e\x66"
+FOURCC_EDTS="\x65\x64\x74\x73"
+FOURCC_EMSG="\x65\x6d\x73\x67"
+FOURCC_ELST="\x65\x6c\x73\x74"
+FOURCC_ENCA="\x65\x6e\x63\x61"
+FOURCC_ENCV="\x65\x6e\x63\x76"
+FOURCC_ESDS="\x65\x73\x64\x73"
+FOURCC_FREE="\x66\x72\x65\x65"
+FOURCC_FRMA="\x66\x72\x6d\x61"
+FOURCC_FTYP="\x66\x74\x79\x70"
+FOURCC_HDLR="\x68\x64\x6c\x72"
+FOURCC_HINT="\x68\x69\x6e\x74"
+FOURCC_HVC1="\x68\x76\x63\x31"
+FOURCC_HVCC="\x68\x76\x63\x43"
+FOURCC_IODS="\x69\x6f\x64\x73"
+FOURCC_MDAT="\x6d\x64\x61\x74"
+FOURCC_MDHD="\x6d\x64\x68\x64"
+FOURCC_MDIA="\x6d\x64\x69\x61"
+FOURCC_MECO="\x6d\x65\x63\x6f"
+FOURCC_MEHD="\x6d\x65\x68\x64"
+FOURCC_META="\x6d\x65\x74\x61"
+FOURCC_MFHD="\x6d\x66\x68\x64"
+FOURCC_MFRA="\x6d\x66\x72\x61"
+FOURCC_MINF="\x6d\x69\x6e\x66"
+FOURCC_MOOF="\x6d\x6f\x6f\x66"
+FOURCC_MOOV="\x6d\x6f\x6f\x76"
+FOURCC_MP4A="\x6d\x70\x34\x61"
+FOURCC_MP4V="\x6d\x70\x34\x76"
+FOURCC_MVEX="\x6d\x76\x65\x78"
+FOURCC_MVHD="\x6d\x76\x68\x64"
+FOURCC_PASP="\x70\x61\x73\x70"
+FOURCC_PDIN="\x70\x64\x69\x6e"
+FOURCC_PRFT="\x70\x72\x66\x74"
+FOURCC_PSSH="\x70\x73\x73\x68"
+FOURCC_SAIO="\x73\x61\x69\x6f"
+FOURCC_SAIZ="\x73\x61\x69\x7a"
+FOURCC_SBGP="\x73\x62\x67\x70"
+FOURCC_SCHI="\x73\x63\x68\x69"
+FOURCC_SCHM="\x73\x63\x68\x6d"
+FOURCC_SDTP="\x73\x64\x74\x70"
+FOURCC_SEIG="\x73\x65\x69\x67"
+FOURCC_SENC="\x73\x65\x6e\x63"
+FOURCC_SGPD="\x73\x67\x70\x64"
+FOURCC_SIDX="\x73\x69\x64\x78"
+FOURCC_SINF="\x73\x69\x6e\x66"
+FOURCC_SKIP="\x73\x6b\x69\x70"
+FOURCC_SMHD="\x73\x6d\x68\x64"
+FOURCC_SOUN="\x73\x6f\x75\x6e"
+FOURCC_SSIX="\x73\x73\x69\x78"
+FOURCC_STBL="\x73\x74\x62\x6c"
+FOURCC_STCO="\x73\x74\x63\x6f"
+FOURCC_STSC="\x73\x74\x73\x63"
+FOURCC_STSD="\x73\x74\x73\x64"
+FOURCC_STSS="\x73\x74\x73\x73"
+FOURCC_STSZ="\x73\x74\x73\x7a"
+FOURCC_STTS="\x73\x74\x74\x73"
+FOURCC_STYP="\x73\x74\x79\x70"
+FOURCC_TENC="\x74\x65\x6e\x63"
+FOURCC_TFDT="\x74\x66\x64\x74"
+FOURCC_TFHD="\x74\x66\x68\x64"
+FOURCC_TKHD="\x74\x6b\x68\x64"
+FOURCC_TRAF="\x74\x72\x61\x66"
+FOURCC_TRAK="\x74\x72\x61\x6b"
+FOURCC_TREX="\x74\x72\x65\x78"
+FOURCC_TRUN="\x74\x72\x75\x6e"
+FOURCC_UDTA="\x75\x64\x74\x61"
+FOURCC_UUID="\x75\x75\x69\x64"
+FOURCC_VIDE="\x76\x69\x64\x65"
+FOURCC_VMHD="\x76\x6d\x68\x64"
+FOURCC_WIDE="\x77\x69\x64\x65"
+
+# ogg.dict
+# https://xiph.org/vorbis/doc/Vorbis_I_spec.html
+
+header="OggS"
+
+# Codecs
+"BBCD\x00"
+"\x7fFLAC"
+"\x80theora"
+"\x01vorbis"
+"CELT    "
+"CMML\x00\x00\x00\x00"
+"\x8bJNG\x0d\x0a\x1a\x0a"
+"\x80kate\x00\x00\x00"
+"OggMIDI\x00"
+"\x8aMNG\x0d\x0a\x1a\x0a"
+"PCM     "
+"\x89PNG\x0d\x0a\x1a\x0a"
+"Speex   "
+"YUV4MPEG"
+
+# Metadata
+"TITLE="
+"VERSION="
+"ALBUM="
+"TRACKNUMBER="
+"ARTIST="
+"PERFORMER="
+"COPYRIGHT="
+"LICENSE="
+"ORGANIZATION="
+"DESCRIPTION="
+"GENRE="
+"DATE="
+"LOCATION="
+"CONTACT="
+"ISRC="
+
+# webm.dict
+# Element IDs.
+IdAesSettingsCipherMode = "\x47\xE8"
+IdAlphaMode = "\x53\xC0"
+IdAspectRatioType = "\x54\xB3"
+IdAudio = "\xE1"
+IdBitDepth = "\x62\x64"
+IdBitsPerChannel = "\x55\xB2"
+IdBlock = "\xA1"
+IdBlockAddId = "\xEE"
+IdBlockAdditional = "\xA5"
+IdBlockAdditions = "\x75\xA1"
+IdBlockDuration = "\x9B"
+IdBlockGroup = "\xA0"
+IdBlockMore = "\xA6"
+IdBlockVirtual = "\xA2"
+IdCbSubsamplingHorz = "\x55\xB5"
+IdCbSubsamplingVert = "\x55\xB6"
+IdChannels = "\x9F"
+IdChapCountry = "\x43\x7E"
+IdChapLanguage = "\x43\x7C"
+IdChapString = "\x85"
+IdChapterAtom = "\xB6"
+IdChapterDisplay = "\x80"
+IdChapterStringUID = "\x56\x54"
+IdChapterStringUid = "\x56\x54"
+IdChapterTimeEnd = "\x92"
+IdChapterTimeStart = "\x91"
+IdChapterUID = "\x73\xC4"
+IdChapterUid = "\x73\xC4"
+IdChapters = "\x10\x43\xA7\x70"
+IdChromaSitingHorz = "\x55\xB7"
+IdChromaSitingVert = "\x55\xB8"
+IdChromaSubsamplingHorz = "\x55\xB3"
+IdChromaSubsamplingVert = "\x55\xB4"
+IdCluster = "\x1F\x43\xB6\x75"
+IdCodecDelay = "\x56\xAA"
+IdCodecID = "\x86"
+IdCodecName = "\x25\x86\x88"
+IdCodecPrivate = "\x63\xA2"
+IdColour = "\x55\xB0"
+IdContentEncAESSettings = "\x47\xE7"
+IdContentEncAesSettings = "\x47\xE7"
+IdContentEncAlgo = "\x47\xE1"
+IdContentEncKeyId = "\x47\xE2"
+IdContentEncoding = "\x62\x40"
+IdContentEncodingOrder = "\x50\x31"
+IdContentEncodingScope = "\x50\x32"
+IdContentEncodingType = "\x50\x33"
+IdContentEncodings = "\x6D\x80"
+IdContentEncryption = "\x50\x35"
+IdCueBlockNumber = "\x53\x78"
+IdCueClusterPosition = "\xF1"
+IdCueDuration = "\xB2"
+IdCuePoint = "\xBB"
+IdCueRelativePosition = "\xF0"
+IdCueTime = "\xB3"
+IdCueTrack = "\xF7"
+IdCueTrackPositions = "\xB7"
+IdCues = "\x1C\x53\xBB\x6B"
+IdDateUTC = "\x44\x61"
+IdDateUtc = "\x44\x61"
+IdDefaultDuration = "\x23\xE3\x83"
+IdDiscardPadding = "\x75\xA2"
+IdDisplayHeight = "\x54\xBA"
+IdDisplayUnit = "\x54\xB2"
+IdDisplayWidth = "\x54\xB0"
+IdDocType = "\x42\x82"
+IdDocTypeReadVersion = "\x42\x85"
+IdDocTypeVersion = "\x42\x87"
+IdDuration = "\x44\x89"
+IdEBML = "\x1A\x45\xDF\xA3"
+IdEBMLMaxIDLength = "\x42\xF2"
+IdEBMLMaxSizeLength = "\x42\xF3"
+IdEBMLReadVersion = "\x42\xF7"
+IdEBMLVersion = "\x42\x86"
+IdEbml = "\x1A\x45\xDF\xA3"
+IdEbmlMaxIdLength = "\x42\xF2"
+IdEbmlMaxSizeLength = "\x42\xF3"
+IdEbmlReadVersion = "\x42\xF7"
+IdEbmlVersion = "\x42\x86"
+IdEditionEntry = "\x45\xB9"
+IdFileUsedEndTime = "\x46\x62"
+IdFileUsedStartTime = "\x46\x61"
+IdFlagDefault = "\x88"
+IdFlagEnabled = "\xB9"
+IdFlagForced = "\x55\xAA"
+IdFlagInterlaced = "\x9A"
+IdFlagLacing = "\x9C"
+IdFrameRate = "\x23\x83\xE3"
+IdInfo = "\x15\x49\xA9\x66"
+IdLaceNumber = "\xCC"
+IdLanguage = "\x22\xB5\x9C"
+IdLuminanceMax = "\x55\xD9"
+IdLuminanceMin = "\x55\xDA"
+IdMasteringMetadata = "\x55\xD0"
+IdMatrixCoefficients = "\x55\xB1"
+IdMaxCll = "\x55\xBC"
+IdMaxFall = "\x55\xBD"
+IdMuxingApp = "\x4D\x80"
+IdName = "\x53\x6E"
+IdOutputSamplingFrequency = "\x78\xB5"
+IdPixelCropBottom = "\x54\xAA"
+IdPixelCropLeft = "\x54\xCC"
+IdPixelCropRight = "\x54\xDD"
+IdPixelCropTop = "\x54\xBB"
+IdPixelHeight = "\xBA"
+IdPixelWidth = "\xB0"
+IdPrevSize = "\xAB"
+IdPrimaries = "\x55\xBB"
+IdPrimaryBChromaticityX = "\x55\xD5"
+IdPrimaryBChromaticityY = "\x55\xD6"
+IdPrimaryGChromaticityX = "\x55\xD3"
+IdPrimaryGChromaticityY = "\x55\xD4"
+IdPrimaryRChromaticityX = "\x55\xD1"
+IdPrimaryRChromaticityY = "\x55\xD2"
+IdProjection = "\x76\x70"
+IdProjectionPosePitch = "\x76\x74"
+IdProjectionPoseRoll = "\x76\x75"
+IdProjectionPoseYaw = "\x76\x73"
+IdProjectionPrivate = "\x76\x72"
+IdProjectionType = "\x76\x71"
+IdRange = "\x55\xB9"
+IdReferenceBlock = "\xFB"
+IdSamplingFrequency = "\xB5"
+IdSeek = "\x4D\xBB"
+IdSeekHead = "\x11\x4D\x9B\x74"
+IdSeekID = "\x53\xAB"
+IdSeekPosition = "\x53\xAC"
+IdSeekPreRoll = "\x56\xBB"
+IdSegment = "\x18\x53\x80\x67"
+IdSimpleBlock = "\xA3"
+IdSimpleTag = "\x67\xC8"
+IdSliceDuration = "\xCF"
+IdSlices = "\x8E"
+IdStereoMode = "\x53\xB8"
+IdTag = "\x73\x73"
+IdTagBinary = "\x44\x85"
+IdTagDefault = "\x44\x84"
+IdTagLanguage = "\x44\x7A"
+IdTagName = "\x45\xA3"
+IdTagString = "\x44\x87"
+IdTagTrackUid = "\x63\xC5"
+IdTags = "\x12\x54\xC3\x67"
+IdTargetType = "\x63\xCA"
+IdTargetTypeValue = "\x68\xCA"
+IdTargets = "\x63\xC0"
+IdTimeSlice = "\xE8"
+IdTimecode = "\xE7"
+IdTimecodeScale = "\x2A\xD7\xB1"
+IdTitle = "\x7B\xA9"
+IdTrackEntry = "\xAE"
+IdTrackNumber = "\xD7"
+IdTrackType = "\x83"
+IdTrackUID = "\x73\xC5"
+IdTracks = "\x16\x54\xAE\x6B"
+IdTransferCharacteristics = "\x55\xBA"
+IdTrickMasterTrackSegmentUID = "\xC4"
+IdTrickMasterTrackUID = "\xC7"
+IdTrickTrackFlag = "\xC6"
+IdTrickTrackSegmentUID = "\xC1"
+IdTrickTrackUID = "\xC0"
+IdVideo = "\xE0"
+IdVoid = "\xEC"
+IdWhitePointChromaticityX = "\x55\xD7"
+IdWhitePointChromaticityY = "\x55\xD8"
+IdWritingApp = "\x57\x41"
+
+# Interesting sizes.
+SizeUnknown = "\xFF"
+
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 62b8624..e19dd3a 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -559,8 +559,6 @@
     int32_t bottom;
 } AImageCropRect;
 
-#if __ANDROID_API__ >= 24
-
 /**
  * Return the image back the the system and delete the AImage object from memory.
  *
@@ -777,10 +775,6 @@
         const AImage* image, int planeIdx,
         /*out*/uint8_t** data, /*out*/int* dataLength) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 26
-
 /**
  * Return the image back the the system and delete the AImage object from memory asynchronously.
  *
@@ -829,8 +823,6 @@
  */
 media_status_t AImage_getHardwareBuffer(const AImage* image, /*out*/AHardwareBuffer** buffer) __INTRODUCED_IN(26);
 
-#endif /* __ANDROID_API__ >= 26 */
-
 __END_DECLS
 
 #endif //_NDK_IMAGE_H
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 600ffc9..d86f3c7 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -53,8 +53,6 @@
  */
 typedef struct AImageReader AImageReader;
 
-#if __ANDROID_API__ >= 24
-
 /**
  * Create a new reader for images of the desired size and format.
  *
@@ -320,10 +318,6 @@
 media_status_t AImageReader_setImageListener(
         AImageReader* reader, AImageReader_ImageListener* listener) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 26
-
 /**
  * AImageReader constructor similar to {@link AImageReader_new} that takes an additional parameter
  * for the consumer usage. All other parameters and the return values are identical to those passed
@@ -510,8 +504,6 @@
     AImageReader *reader, /* out */native_handle_t **handle);
 #endif
 
-#endif /* __ANDROID_API__ >= 26 */
-
 __END_DECLS
 
 #endif //_NDK_IMAGE_READER_H
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 80d5d50..60cdd32 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -121,8 +121,6 @@
       AMediaCodecOnAsyncError           onAsyncError;
 } AMediaCodecOnAsyncNotifyCallback;
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Create codec by name. Use this if you know the exact codec you want to use.
  * When configuring, you will need to specify whether to use the codec as an
@@ -311,8 +309,6 @@
 media_status_t AMediaCodec_releaseOutputBufferAtTime(
         AMediaCodec *mData, size_t idx, int64_t timestampNs) __INTRODUCED_IN(21);
 
-#if __ANDROID_API__ >= 26
-
 /**
  * Creates a Surface that can be used as the input to encoder, in place of input buffers
  *
@@ -393,10 +389,6 @@
  */
 media_status_t AMediaCodec_signalEndOfInputStream(AMediaCodec *mData) __INTRODUCED_IN(26);
 
-#endif /* __ANDROID_API__ >= 26 */
-
-#if __ANDROID_API__ >= 28
-
 /**
  * Get format of the buffer. The specified buffer index must have been previously obtained from
  * dequeueOutputBuffer.
@@ -482,8 +474,6 @@
  */
 bool AMediaCodecActionCode_isTransient(int32_t actionCode) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
 typedef enum {
     AMEDIACODECRYPTOINFO_MODE_CLEAR = 0,
     AMEDIACODECRYPTOINFO_MODE_AES_CTR = 1,
@@ -580,8 +570,6 @@
  */
 media_status_t AMediaCodecCryptoInfo_getEncryptedBytes(AMediaCodecCryptoInfo*, size_t *dst) __INTRODUCED_IN(21);
 
-#endif /* __ANDROID_API__ >= 21 */
-
 __END_DECLS
 
 #endif //_NDK_MEDIA_CODEC_H
diff --git a/media/ndk/include/media/NdkMediaCrypto.h b/media/ndk/include/media/NdkMediaCrypto.h
index 3fa07c7..590d51d 100644
--- a/media/ndk/include/media/NdkMediaCrypto.h
+++ b/media/ndk/include/media/NdkMediaCrypto.h
@@ -47,8 +47,6 @@
 
 typedef uint8_t AMediaUUID[16];
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Available since API level 21.
  */
@@ -69,8 +67,6 @@
  */
 void AMediaCrypto_delete(AMediaCrypto* crypto) __INTRODUCED_IN(21);
 
-#endif /* __ANDROID_API__ >= 21 */
-
 __END_DECLS
 
 #endif // _NDK_MEDIA_CRYPTO_H
diff --git a/media/ndk/include/media/NdkMediaDataSource.h b/media/ndk/include/media/NdkMediaDataSource.h
index 0577df2..4158a97 100644
--- a/media/ndk/include/media/NdkMediaDataSource.h
+++ b/media/ndk/include/media/NdkMediaDataSource.h
@@ -38,8 +38,6 @@
 struct AMediaDataSource;
 typedef struct AMediaDataSource AMediaDataSource;
 
-#if __ANDROID_API__ >= 28
-
 /*
  * AMediaDataSource's callbacks will be invoked on an implementation-defined thread
  * or thread pool. No guarantees are provided about which thread(s) will be used for
@@ -93,8 +91,6 @@
  */
 AMediaDataSource* AMediaDataSource_new() __INTRODUCED_IN(28);
 
-#if __ANDROID_API__ >= 29
-
 /**
  * Called to get an estimate of the number of bytes that can be read from this data source
  * starting at |offset| without blocking for I/O.
@@ -124,8 +120,6 @@
         int numheaders,
         const char * const *key_values) __INTRODUCED_IN(29);
 
-#endif  /*__ANDROID_API__ >= 29 */
-
 /**
  * Delete a previously created media data source.
  *
@@ -185,10 +179,6 @@
         AMediaDataSource*,
         AMediaDataSourceClose) __INTRODUCED_IN(28);
 
-#endif  /*__ANDROID_API__ >= 28 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Close the data source, unblock reads, and release associated resources.
  *
@@ -213,8 +203,6 @@
         AMediaDataSource*,
         AMediaDataSourceGetAvailableSize) __INTRODUCED_IN(29);
 
-#endif  /*__ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif // _NDK_MEDIA_DATASOURCE_H
diff --git a/media/ndk/include/media/NdkMediaDrm.h b/media/ndk/include/media/NdkMediaDrm.h
index 31f5c7d..849a8f9 100644
--- a/media/ndk/include/media/NdkMediaDrm.h
+++ b/media/ndk/include/media/NdkMediaDrm.h
@@ -165,8 +165,6 @@
         const AMediaDrmSessionId *sessionId, const AMediaDrmKeyStatus *keyStatus,
         size_t numKeys, bool hasNewUsableKey);
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Query if the given scheme identified by its UUID is supported on this device, and
  * whether the drm plugin is able to handle the media container format specified by mimeType.
@@ -576,8 +574,6 @@
         const char *macAlgorithm, uint8_t *keyId, const uint8_t *message, size_t messageSize,
         const uint8_t *signature, size_t signatureSize) __INTRODUCED_IN(21);
 
-#endif /* __ANDROID_API__ >= 21 */
-
 __END_DECLS
 
 #endif //_NDK_MEDIA_DRM_H
diff --git a/media/ndk/include/media/NdkMediaExtractor.h b/media/ndk/include/media/NdkMediaExtractor.h
index a1cd9e3..e429820 100644
--- a/media/ndk/include/media/NdkMediaExtractor.h
+++ b/media/ndk/include/media/NdkMediaExtractor.h
@@ -50,8 +50,6 @@
 struct AMediaExtractor;
 typedef struct AMediaExtractor AMediaExtractor;
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Create new media extractor.
  *
@@ -82,8 +80,6 @@
 media_status_t AMediaExtractor_setDataSource(AMediaExtractor*,
         const char *location) __INTRODUCED_IN(21);
 
-#if __ANDROID_API__ >= 28
-
 /**
  * Set the custom data source implementation from which the extractor will read.
  *
@@ -92,8 +88,6 @@
 media_status_t AMediaExtractor_setDataSourceCustom(AMediaExtractor*,
         AMediaDataSource *src) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
 /**
  * Return the number of tracks in the previously specified media file
  *
@@ -211,8 +205,6 @@
     AMEDIAEXTRACTOR_SAMPLE_FLAG_ENCRYPTED = 2,
 };
 
-#if __ANDROID_API__ >= 28
-
 /**
  * Returns the format of the extractor. The caller must free the returned format
  * using AMediaFormat_delete(format).
@@ -266,10 +258,6 @@
 media_status_t AMediaExtractor_getSampleFormat(AMediaExtractor *ex,
         AMediaFormat *fmt) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
-#endif /* __ANDROID_API__ >= 21 */
-
 __END_DECLS
 
 #endif // _NDK_MEDIA_EXTRACTOR_H
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 6371de4..0ce08c1 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -40,18 +40,8 @@
 #include <sys/cdefs.h>
 #include <sys/types.h>
 
-#ifndef __ANDROID__
-// Value copied from 'bionic/libc/include/android/api-level.h' which is not available on
-// non Android systems. It is set to 10000 which is same as __ANDROID_API_FUTURE__ value.
-#ifndef __ANDROID_API__
-#define __ANDROID_API__ 10000
-#endif
-
-// Value copied from 'bionic/libc/include/android/versioning.h' which is not available on
-// non Android systems
-#ifndef __INTRODUCED_IN
-#define __INTRODUCED_IN(api_level)
-#endif
+#if !defined(__INTRODUCED_IN)
+#define __INTRODUCED_IN(__api_level) /* nothing */
 #endif
 
 #include "NdkMediaError.h"
@@ -61,8 +51,6 @@
 struct AMediaFormat;
 typedef struct AMediaFormat AMediaFormat;
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Available since API level 21.
  */
@@ -205,9 +193,7 @@
 extern const char* AMEDIAFORMAT_KEY_TRACK_ID __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_TRACK_INDEX __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_WIDTH __INTRODUCED_IN(21);
-#endif /* __ANDROID_API__ >= 21 */
 
-#if __ANDROID_API__ >= 28
 /**
  * Available since API level 28.
  */
@@ -231,9 +217,7 @@
  */
 void AMediaFormat_setRect(AMediaFormat*, const char* name,
         int32_t left, int32_t top, int32_t right, int32_t bottom) __INTRODUCED_IN(28);
-#endif /* __ANDROID_API__ >= 28 */
 
-#if __ANDROID_API__ >= 29
 /**
  * Remove all key/value pairs from the given AMediaFormat.
  *
@@ -307,9 +291,6 @@
 extern const char* AMEDIAFORMAT_KEY_VALID_SAMPLES __INTRODUCED_IN(29);
 extern const char* AMEDIAFORMAT_KEY_YEAR __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
-#if __ANDROID_API__ >= 30
 /**
  * An optional key describing the low latency decoding mode. This is an optional parameter
  * that applies only to decoders. If enabled, the decoder doesn't hold input and output
@@ -320,7 +301,6 @@
  * Available since API level 30.
  */
 extern const char* AMEDIAFORMAT_KEY_LOW_LATENCY __INTRODUCED_IN(30);
-#endif /* __ANDROID_API__ >= 30 */
 
 __END_DECLS
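
With the #if __ANDROID_API__ blocks removed, every declaration in this header is visible regardless of the build's minSdkVersion, and availability is expressed only through __INTRODUCED_IN(). Code that still runs on older releases therefore gates newer symbols at runtime instead of at preprocessing time. A minimal sketch, assuming the API-30 key symbol can be referenced on older devices (for example via weak linking); enableLowLatencyIfSupported() is a hypothetical helper, not part of this change.

#include <android/api-level.h>
#include <media/NdkMediaFormat.h>

// Sketch only: gate the API-30 AMEDIAFORMAT_KEY_LOW_LATENCY key at runtime
// rather than relying on the removed compile-time guards.
void enableLowLatencyIfSupported(AMediaFormat *format) {
    if (android_get_device_api_level() >= 30) {
        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_LOW_LATENCY, 1);
    }
}
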
 
diff --git a/media/ndk/include/media/NdkMediaMuxer.h b/media/ndk/include/media/NdkMediaMuxer.h
index 9de3fbf..519e249 100644
--- a/media/ndk/include/media/NdkMediaMuxer.h
+++ b/media/ndk/include/media/NdkMediaMuxer.h
@@ -54,8 +54,6 @@
     AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP   = 2,
 } OutputFormat;
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Create new media muxer.
  *
@@ -140,8 +138,6 @@
         size_t trackIdx, const uint8_t *data,
         const AMediaCodecBufferInfo *info) __INTRODUCED_IN(21);
 
-#endif /* __ANDROID_API__ >= 21 */
-
 __END_DECLS
 
 #endif // _NDK_MEDIA_MUXER_H
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 0981bca..54704ea 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -39,6 +39,7 @@
 // TODO(mnaganov): Consider finding an alternative for using HIDL code.
 using hardware::Return;
 using hardware::Status;
+using hardware::Void;
 using utilities::convertTo;
 
 template<typename E, typename C>
@@ -89,7 +90,6 @@
 
     };
 
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
     // No children
 };
 
@@ -106,8 +106,6 @@
         static constexpr const char *format = "format";
         static constexpr const char *channelMasks = "channelMasks";
     };
-
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
 };
 
 struct MixPortTraits : public AndroidCollectionTraits<IOProfile, IOProfileCollection>
@@ -125,7 +123,6 @@
         static constexpr const char *maxActiveCount = "maxActiveCount";
     };
 
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
     // Children: GainTraits
 };
 
@@ -147,7 +144,6 @@
         static constexpr const char *encodedFormats = "encodedFormats";
     };
 
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
     // Children: GainTraits (optional)
 };
 
@@ -166,8 +162,6 @@
     };
 
     typedef HwModule *PtrSerializingCtx;
-
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
 };
 
 struct ModuleTraits : public AndroidCollectionTraits<HwModule, HwModuleCollection>
@@ -187,13 +181,14 @@
 
     typedef AudioPolicyConfig *PtrSerializingCtx;
 
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
     // Children: mixPortTraits, devicePortTraits, and routeTraits
     // Need to call deserialize on each child
 };
 
 struct GlobalConfigTraits
 {
+    typedef void Element;
+
     static constexpr const char *tag = "globalConfiguration";
 
     struct Attributes
@@ -203,14 +198,16 @@
         static constexpr const char *engineLibrarySuffix = "engine_library";
     };
 
-    static status_t deserialize(const xmlNode *root, AudioPolicyConfig *config);
+    typedef AudioPolicyConfig *PtrSerializingCtx;
 };
 
 struct SurroundSoundTraits
 {
+    typedef void Element;
+
     static constexpr const char *tag = "surroundSound";
 
-    static status_t deserialize(const xmlNode *root, AudioPolicyConfig *config);
+    typedef AudioPolicyConfig *PtrSerializingCtx;
     // Children: SurroundSoundFormatTraits
 };
 
@@ -224,28 +221,30 @@
         static constexpr const char *name = "name";
         static constexpr const char *subformats = "subformats";
     };
-
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
 };
 
 class PolicySerializer
 {
 public:
-    PolicySerializer() : mVersion{std::to_string(gMajor) + "." + std::to_string(gMinor)}
-    {
-        ALOGV("%s: Version=%s Root=%s", __func__, mVersion.c_str(), rootName);
-    }
     status_t deserialize(const char *configFile, AudioPolicyConfig *config);
 
+    template <class Trait>
+    status_t deserializeCollection(const xmlNode *cur,
+            typename Trait::Collection *collection,
+            typename Trait::PtrSerializingCtx serializingContext);
+    template <class Trait>
+    Return<typename Trait::Element> deserialize(const xmlNode *cur,
+            typename Trait::PtrSerializingCtx serializingContext);
+
 private:
     static constexpr const char *rootName = "audioPolicyConfiguration";
     static constexpr const char *versionAttribute = "version";
-    static constexpr uint32_t gMajor = 1; /**< the major number of the policy xml format version. */
-    static constexpr uint32_t gMinor = 0; /**< the minor number of the policy xml format version. */
 
     typedef AudioPolicyConfig Element;
 
-    const std::string mVersion;
+    std::string mChannelMasksSeparator = ",";
+    std::string mSamplingRatesSeparator = ",";
+    std::string mFlagsSeparator = "|";
 
     // Children: ModulesTraits, VolumeTraits, SurroundSoundTraits (optional)
 };
@@ -296,7 +295,7 @@
 }
 
 template <class Trait>
-status_t deserializeCollection(const xmlNode *cur,
+status_t PolicySerializer::deserializeCollection(const xmlNode *cur,
         typename Trait::Collection *collection,
         typename Trait::PtrSerializingCtx serializingContext)
 {
@@ -309,7 +308,7 @@
         }
         for (; child != NULL; child = child->next) {
             if (!xmlStrcmp(child->name, reinterpret_cast<const xmlChar*>(Trait::tag))) {
-                auto element = Trait::deserialize(child, serializingContext);
+                auto element = deserialize<Trait>(child, serializingContext);
                 if (element.isOk()) {
                     status_t status = Trait::addElementToCollection(element, collection);
                     if (status != NO_ERROR) {
@@ -329,11 +328,14 @@
     return NO_ERROR;
 }
 
-Return<AudioGainTraits::Element> AudioGainTraits::deserialize(const xmlNode *cur,
-        PtrSerializingCtx /*serializingContext*/)
+template<>
+Return<AudioGainTraits::Element> PolicySerializer::deserialize<AudioGainTraits>(const xmlNode *cur,
+        AudioGainTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = AudioGainTraits::Attributes;
+
     static uint32_t index = 0;
-    Element gain = new AudioGain(index++, true);
+    AudioGainTraits::Element gain = new AudioGain(index++, true);
 
     std::string mode = getXmlAttribute(cur, Attributes::mode);
     if (!mode.empty()) {
@@ -396,16 +398,19 @@
     }
 }
 
-Return<AudioProfileTraits::Element> AudioProfileTraits::deserialize(const xmlNode *cur,
-        PtrSerializingCtx /*serializingContext*/)
+template<>
+Return<AudioProfileTraits::Element> PolicySerializer::deserialize<AudioProfileTraits>(
+        const xmlNode *cur, AudioProfileTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = AudioProfileTraits::Attributes;
+
     std::string samplingRates = getXmlAttribute(cur, Attributes::samplingRates);
     std::string format = getXmlAttribute(cur, Attributes::format);
     std::string channels = getXmlAttribute(cur, Attributes::channelMasks);
 
-    Element profile = new AudioProfile(formatFromString(format, gDynamicFormat),
-            channelMasksFromString(channels, ","),
-            samplingRatesFromString(samplingRates, ","));
+    AudioProfileTraits::Element profile = new AudioProfile(formatFromString(format, gDynamicFormat),
+            channelMasksFromString(channels, mChannelMasksSeparator.c_str()),
+            samplingRatesFromString(samplingRates, mSamplingRatesSeparator.c_str()));
 
     profile->setDynamicFormat(profile->getFormat() == gDynamicFormat);
     profile->setDynamicChannels(profile->getChannels().empty());
@@ -414,15 +419,18 @@
     return profile;
 }
 
-Return<MixPortTraits::Element> MixPortTraits::deserialize(const xmlNode *child,
-        PtrSerializingCtx /*serializingContext*/)
+template<>
+Return<MixPortTraits::Element> PolicySerializer::deserialize<MixPortTraits>(const xmlNode *child,
+        MixPortTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = MixPortTraits::Attributes;
+
     std::string name = getXmlAttribute(child, Attributes::name);
     if (name.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::name);
         return Status::fromStatusT(BAD_VALUE);
     }
-    ALOGV("%s: %s %s=%s", __func__, tag, Attributes::name, name.c_str());
+    ALOGV("%s: %s %s=%s", __func__, MixPortTraits::tag, Attributes::name, name.c_str());
     std::string role = getXmlAttribute(child, Attributes::role);
     if (role.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::role);
@@ -432,7 +440,7 @@
     audio_port_role_t portRole = (role == Attributes::roleSource) ?
             AUDIO_PORT_ROLE_SOURCE : AUDIO_PORT_ROLE_SINK;
 
-    Element mixPort = new IOProfile(name, portRole);
+    MixPortTraits::Element mixPort = new IOProfile(name, portRole);
 
     AudioProfileTraits::Collection profiles;
     status_t status = deserializeCollection<AudioProfileTraits>(child, &profiles, NULL);
@@ -451,10 +459,10 @@
     if (!flags.empty()) {
         // Source role
         if (portRole == AUDIO_PORT_ROLE_SOURCE) {
-            mixPort->setFlags(OutputFlagConverter::maskFromString(flags));
+            mixPort->setFlags(OutputFlagConverter::maskFromString(flags, mFlagsSeparator.c_str()));
         } else {
             // Sink role
-            mixPort->setFlags(InputFlagConverter::maskFromString(flags));
+            mixPort->setFlags(InputFlagConverter::maskFromString(flags, mFlagsSeparator.c_str()));
         }
     }
     std::string maxOpenCount = getXmlAttribute(child, Attributes::maxOpenCount);
@@ -476,9 +484,13 @@
     return mixPort;
 }
 
-Return<DevicePortTraits::Element> DevicePortTraits::deserialize(const xmlNode *cur,
-        PtrSerializingCtx /*serializingContext*/)
+template<>
+Return<DevicePortTraits::Element> PolicySerializer::deserialize<DevicePortTraits>(
+        const xmlNode *cur, DevicePortTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = DevicePortTraits::Attributes;
+    auto& tag = DevicePortTraits::tag;
+
     std::string name = getXmlAttribute(cur, Attributes::tagName);
     if (name.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::tagName);
@@ -514,7 +526,8 @@
         encodedFormats = formatsFromString(encodedFormatsLiteral, " ");
     }
     std::string address = getXmlAttribute(cur, Attributes::address);
-    Element deviceDesc = new DeviceDescriptor(type, name, address, encodedFormats);
+    DevicePortTraits::Element deviceDesc =
+            new DeviceDescriptor(type, name, address, encodedFormats);
 
     AudioProfileTraits::Collection profiles;
     status_t status = deserializeCollection<AudioProfileTraits>(cur, &profiles, NULL);
@@ -539,8 +552,12 @@
     return deviceDesc;
 }
 
-Return<RouteTraits::Element> RouteTraits::deserialize(const xmlNode *cur, PtrSerializingCtx ctx)
+template<>
+Return<RouteTraits::Element> PolicySerializer::deserialize<RouteTraits>(
+        const xmlNode *cur, RouteTraits::PtrSerializingCtx ctx)
 {
+    using Attributes = RouteTraits::Attributes;
+
     std::string type = getXmlAttribute(cur, Attributes::type);
     if (type.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::type);
@@ -549,8 +566,8 @@
     audio_route_type_t routeType = (type == Attributes::typeMix) ?
                 AUDIO_ROUTE_MIX : AUDIO_ROUTE_MUX;
 
-    ALOGV("%s: %s %s=%s", __func__, tag, Attributes::type, type.c_str());
-    Element route = new AudioRoute(routeType);
+    ALOGV("%s: %s %s=%s", __func__, RouteTraits::tag, Attributes::type, type.c_str());
+    RouteTraits::Element route = new AudioRoute(routeType);
 
     std::string sinkAttr = getXmlAttribute(cur, Attributes::sink);
     if (sinkAttr.empty()) {
@@ -596,8 +613,16 @@
     return route;
 }
 
-Return<ModuleTraits::Element> ModuleTraits::deserialize(const xmlNode *cur, PtrSerializingCtx ctx)
+template<>
+Return<ModuleTraits::Element> PolicySerializer::deserialize<ModuleTraits>(
+        const xmlNode *cur, ModuleTraits::PtrSerializingCtx ctx)
 {
+    using Attributes = ModuleTraits::Attributes;
+    auto& tag = ModuleTraits::tag;
+    auto& childAttachedDevicesTag = ModuleTraits::childAttachedDevicesTag;
+    auto& childAttachedDeviceTag = ModuleTraits::childAttachedDeviceTag;
+    auto& childDefaultOutputDeviceTag = ModuleTraits::childDefaultOutputDeviceTag;
+
     std::string name = getXmlAttribute(cur, Attributes::name);
     if (name.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::name);
@@ -611,11 +636,11 @@
               versionMajor, versionMajor);
     }
 
-    ALOGV("%s: %s %s=%s", __func__, tag, Attributes::name, name.c_str());
+    ALOGV("%s: %s %s=%s", __func__, ModuleTraits::tag, Attributes::name, name.c_str());
 
-    Element module = new HwModule(name.c_str(), versionMajor, versionMinor);
+    ModuleTraits::Element module = new HwModule(name.c_str(), versionMajor, versionMinor);
 
-    // Deserialize childrens: Audio Mix Port, Audio Device Ports (Source/Sink), Audio Routes
+    // Deserialize children: Audio Mix Port, Audio Device Ports (Source/Sink), Audio Routes
     MixPortTraits::Collection mixPorts;
     status_t status = deserializeCollection<MixPortTraits>(cur, &mixPorts, NULL);
     if (status != NO_ERROR) {
@@ -678,10 +703,14 @@
     return module;
 }
 
-status_t GlobalConfigTraits::deserialize(const xmlNode *root, AudioPolicyConfig *config)
+template<>
+Return<GlobalConfigTraits::Element> PolicySerializer::deserialize<GlobalConfigTraits>(
+        const xmlNode *root, GlobalConfigTraits::PtrSerializingCtx config)
 {
+    using Attributes = GlobalConfigTraits::Attributes;
+
     for (const xmlNode *cur = root->xmlChildrenNode; cur != NULL; cur = cur->next) {
-        if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(tag))) {
+        if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(GlobalConfigTraits::tag))) {
             bool value;
             std::string attr = getXmlAttribute(cur, Attributes::speakerDrcEnabled);
             if (!attr.empty() &&
@@ -697,33 +726,38 @@
             if (!engineLibrarySuffix.empty()) {
                 config->setEngineLibraryNameSuffix(engineLibrarySuffix);
             }
-            return NO_ERROR;
+            return Void();
         }
     }
-    return NO_ERROR;
+    return Void();
 }
 
-status_t SurroundSoundTraits::deserialize(const xmlNode *root, AudioPolicyConfig *config)
+template<>
+Return<SurroundSoundTraits::Element> PolicySerializer::deserialize<SurroundSoundTraits>(
+        const xmlNode *root, SurroundSoundTraits::PtrSerializingCtx config)
 {
     config->setDefaultSurroundFormats();
 
     for (const xmlNode *cur = root->xmlChildrenNode; cur != NULL; cur = cur->next) {
-        if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(tag))) {
+        if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(SurroundSoundTraits::tag))) {
             AudioPolicyConfig::SurroundFormats formats;
             status_t status = deserializeCollection<SurroundSoundFormatTraits>(
                     cur, &formats, nullptr);
             if (status == NO_ERROR) {
                 config->setSurroundFormats(formats);
             }
-            return NO_ERROR;
+            return Void();
         }
     }
-    return NO_ERROR;
+    return Void();
 }
 
-Return<SurroundSoundFormatTraits::Element> SurroundSoundFormatTraits::deserialize(
-        const xmlNode *cur, PtrSerializingCtx /*serializingContext*/)
+template<>
+Return<SurroundSoundFormatTraits::Element> PolicySerializer::deserialize<SurroundSoundFormatTraits>(
+        const xmlNode *cur, SurroundSoundFormatTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = SurroundSoundFormatTraits::Attributes;
+
     std::string formatLiteral = getXmlAttribute(cur, Attributes::name);
     if (formatLiteral.empty()) {
         ALOGE("%s: No %s found for a surround format", __func__, Attributes::name);
@@ -734,7 +768,8 @@
         ALOGE("%s: Unrecognized format %s", __func__, formatLiteral.c_str());
         return Status::fromStatusT(BAD_VALUE);
     }
-    Element pair = std::make_pair(format, Collection::mapped_type{});
+    SurroundSoundFormatTraits::Element pair = std::make_pair(
+            format, SurroundSoundFormatTraits::Collection::mapped_type{});
 
     std::string subformatsLiteral = getXmlAttribute(cur, Attributes::subformats);
     if (subformatsLiteral.empty()) return pair;
@@ -776,12 +811,14 @@
         ALOGE("%s: No version found in root node %s", __func__, rootName);
         return BAD_VALUE;
     }
-    if (version != mVersion) {
-        ALOGE("%s: Version does not match; expect %s got %s", __func__, mVersion.c_str(),
-              version.c_str());
+    if (version == "7.0") {
+        mChannelMasksSeparator = mSamplingRatesSeparator = mFlagsSeparator = " ";
+    } else if (version != "1.0") {
+        ALOGE("%s: Version does not match; expected \"1.0\" or \"7.0\" got \"%s\"",
+                __func__, version.c_str());
         return BAD_VALUE;
     }
-    // Lets deserialize children
+    // Let's deserialize children
     // Modules
     ModuleTraits::Collection modules;
     status_t status = deserializeCollection<ModuleTraits>(root, &modules, config);
@@ -791,10 +828,10 @@
     config->setHwModules(modules);
 
     // Global Configuration
-    GlobalConfigTraits::deserialize(root, config);
+    deserialize<GlobalConfigTraits>(root, config);
 
     // Surround configuration
-    SurroundSoundTraits::deserialize(root, config);
+    deserialize<SurroundSoundTraits>(root, config);
 
     return android::OK;
 }
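
The PolicySerializer changes above turn the list separators into a property of the parsed config version: the 1.0 format keeps "," for channel masks and sampling rates and "|" for flags, while 7.0 uses a single space for all three (matching the *_7_0.xml files further below). A small standalone illustration of that convention; splitList() is a hypothetical stand-in for the channelMasksFromString()/samplingRatesFromString()/flag converters used in the patch.

#include <sstream>
#include <string>
#include <vector>

// Split "44100,48000" (config version 1.0) or "44100 48000" (version 7.0)
// into individual tokens, depending on the separator chosen for the version.
static std::vector<std::string> splitList(const std::string &values, char separator) {
    std::vector<std::string> out;
    std::istringstream in(values);
    for (std::string token; std::getline(in, token, separator); ) {
        if (!token.empty()) out.push_back(token);
    }
    return out;
}

// version "1.0": splitList("44100,48000", ',') and flags split on '|'
// version "7.0": splitList("44100 48000", ' ') and flags split on ' '
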
diff --git a/services/audiopolicy/config/audio_policy_configuration_7_0.xml b/services/audiopolicy/config/audio_policy_configuration_7_0.xml
index 31c8954..9961a00 100644
--- a/services/audiopolicy/config/audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/audio_policy_configuration_7_0.xml
@@ -14,7 +14,7 @@
      limitations under the License.
 -->
 
-<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+<audioPolicyConfiguration version="7.0" xmlns:xi="http://www.w3.org/2001/XInclude">
     <!-- version section contains a “version” tag in the form “major.minor” e.g version=”1.0” -->
 
     <!-- Global configuration Declaration -->
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
index 7238317..98415b7 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
@@ -12,7 +12,13 @@
         </mixPort>
         <!-- Le Audio Audio Ports -->
         <mixPort name="le audio output" role="source">
-            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT,AUDIO_FORMAT_PCM_24_BIT,AUDIO_FORMAT_PCM_32_BIT"
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
                      samplingRates="8000,16000,24000,32000,44100,48000"
                      channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
@@ -37,6 +43,10 @@
         <!-- Hearing AIDs Audio Ports -->
         <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
         <!-- BLE Audio Ports -->
+        <!-- Note that these device types are not valid in HAL versions < 7. Any device
+             running a pre-V7 HAL and using this file will not pass VTS; it needs to use
+             bluetooth_audio_policy_configuration_7_0.xml instead.
+        -->
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
     </devicePorts>
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
index 2dffe02..fbe7571 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -10,6 +10,18 @@
                      samplingRates="24000 16000"
                      channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
+        <!-- Le Audio Audio Ports -->
+        <mixPort name="le audio output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
     </mixPorts>
     <devicePorts>
         <!-- A2DP Audio Ports -->
@@ -30,6 +42,9 @@
         </devicePort>
         <!-- Hearing AIDs Audio Ports -->
         <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+        <!-- BLE Audio Ports -->
+        <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
+        <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -40,5 +55,9 @@
                sources="a2dp output"/>
         <route type="mix" sink="BT Hearing Aid Out"
                sources="hearing aid output"/>
+        <route type="mix" sink="BLE Headset Out"
+               sources="le audio output"/>
+        <route type="mix" sink="BLE Speaker Out"
+               sources="le audio output"/>
     </routes>
 </module>
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 3e47324..b51918a 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -42,8 +42,8 @@
     cmd: "cp $(locations :audio_policy_configuration_files) $(genDir)/. && " +
          "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
          "$(location buildPolicyCriterionTypes.py) " +
-         // @todo update if 1428659 is merged "--androidaudiobaseheader $(location :android_audio_base_header_file) " +
-         " --androidaudiobaseheader system/media/audio/include/system/audio-base.h " +
+         " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+         " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
          "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
          "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
          "--outputfile $(out)",
@@ -51,6 +51,8 @@
         // The commented inputs must be provided to use this genrule_defaults
         // @todo uncomment if 1428659 is merged":android_audio_base_header_file",
         ":audio_policy_engine_criterion_types_template",
+        ":libaudio_system_audio_base",
+        ":libaudio_system_audio_common_base",
         // ":audio_policy_configuration_top_file",
         // ":audio_policy_configuration_files",
     ],
diff --git a/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py b/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
index b8b60c1..b5885c0 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
@@ -55,6 +55,11 @@
                            metavar="ANDROID_AUDIO_BASE_HEADER",
                            type=argparse.FileType('r'),
                            required=True)
+    argparser.add_argument('--androidaudiocommonbaseheader',
+                           help="Android Audio CommonBase C header file, Mandatory.",
+                           metavar="ANDROID_AUDIO_COMMON_BASE_HEADER",
+                           type=argparse.FileType('r'),
+                           required=True)
     argparser.add_argument('--audiopolicyconfigurationfile',
                            help="Android Audio Policy Configuration file, Mandatory.",
                            metavar="(AUDIO_POLICY_CONFIGURATION_FILE)",
@@ -176,12 +181,12 @@
 #   -Output devices type
 #   -Input devices type
 #
-def parseAndroidAudioFile(androidaudiobaseheaderFile):
+def parseAndroidAudioFile(androidaudiobaseheaderFile, androidaudiocommonbaseheaderFile):
     #
     # Adaptation table between Android Enumeration prefix and Audio PFW Criterion type names
     #
     criterion_mapping_table = {
-        'AUDIO_MODE' : "AndroidModeType",
+        'HAL_AUDIO_MODE' : "AndroidModeType",
         'AUDIO_DEVICE_OUT' : "OutputDevicesMaskType",
         'AUDIO_DEVICE_IN' : "InputDevicesMaskType"}
 
@@ -196,9 +201,9 @@
     ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
 
     criteria_pattern = re.compile(
-        r"\s*(?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
-        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*=\s*" \
-        r"(?P<values>(?:0[xX])?[0-9a-fA-F]+)")
+        r"\s*V\((?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*,\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])?[0-9a-fA-F]+|[0-9]+)")
 
     logging.info("Checking Android Header file {}".format(androidaudiobaseheaderFile))
 
@@ -209,27 +214,91 @@
                 androidaudiobaseheaderFile.name, line_number, line))
 
             criterion_name = criterion_mapping_table[match.groupdict()['type']]
-            literal = ''.join((w.capitalize() for w in match.groupdict()['literal'].split('_')))
-            numerical_value = match.groupdict()['values']
+            criterion_literal = \
+                ''.join((w.capitalize() for w in match.groupdict()['literal'].split('_')))
+            criterion_numerical_value = match.groupdict()['values']
 
-            # for AUDIO_DEVICE_IN: need to remove sign bit
+            # for AUDIO_DEVICE_IN: need to remove sign bit / rename default to stub
             if criterion_name == "InputDevicesMaskType":
-                numerical_value = str(int(numerical_value, 0) & ~2147483648)
+                if criterion_literal == "Default":
+                    criterion_numerical_value = str(int("0x40000000", 0))
+                else:
+                    try:
+                        string_int = int(criterion_numerical_value, 0)
+                        # Value is not a plain number; skip this line
+                        # Handle the exception
+                        logging.info("value {}:{} for criterion {} is not a number, ignoring"
+                            .format(criterion_numerical_value, criterion_literal, criterion_name))
+                        continue
+                    criterion_numerical_value = str(int(criterion_numerical_value, 0) & ~2147483648)
+
+            if criterion_name == "OutputDevicesMaskType":
+                if criterion_literal == "Default":
+                    criterion_numerical_value = str(int("0x40000000", 0))
+
+            try:
+                string_int = int(criterion_numerical_value, 0)
+            except ValueError:
+                # Value is not a plain number; skip this line
+                logging.info("The value {}:{} for criterion {} is not a number, ignoring"
+                    .format(criterion_numerical_value, criterion_literal, criterion_name))
+                continue
 
             # Remove duplicated numerical values
-            if int(numerical_value, 0) in all_criteria[criterion_name].values():
+            if int(criterion_numerical_value, 0) in all_criteria[criterion_name].values():
                 logging.info("criterion {} duplicated values:".format(criterion_name))
-                logging.info("{}:{}".format(numerical_value, literal))
+                logging.info("{}:{}".format(criterion_numerical_value, criterion_literal))
                 logging.info("KEEPING LATEST")
                 for key in list(all_criteria[criterion_name]):
-                    if all_criteria[criterion_name][key] == int(numerical_value, 0):
+                    if all_criteria[criterion_name][key] == int(criterion_numerical_value, 0):
                         del all_criteria[criterion_name][key]
 
-            all_criteria[criterion_name][literal] = int(numerical_value, 0)
+            all_criteria[criterion_name][criterion_literal] = int(criterion_numerical_value, 0)
 
             logging.debug("type:{},".format(criterion_name))
-            logging.debug("iteral:{},".format(literal))
-            logging.debug("values:{}.".format(numerical_value))
+            logging.debug("literal:{},".format(criterion_literal))
+            logging.debug("values:{}.".format(criterion_numerical_value))
+
+    logging.info("Checking Android Common Header file {}".format(androidaudiocommonbaseheaderFile))
+
+    criteria_pattern = re.compile(
+        r"\s*(?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*=\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])?[0-9a-fA-F]+|[0-9]+)")
+
+    for line_number, line in enumerate(androidaudiocommonbaseheaderFile):
+        match = criteria_pattern.match(line)
+        if match:
+            logging.debug("The following line is VALID: {}:{}\n{}".format(
+                androidaudiocommonbaseheaderFile.name, line_number, line))
+
+            criterion_name = criterion_mapping_table[match.groupdict()['type']]
+            criterion_literal = \
+                ''.join((w.capitalize() for w in match.groupdict()['literal'].split('_')))
+            criterion_numerical_value = match.groupdict()['values']
+
+            try:
+                string_int = int(criterion_numerical_value, 0)
+            except ValueError:
+                # Value is not a plain number; skip this line
+                logging.info("The value {}:{} for criterion {} is not a number, ignoring"
+                    .format(criterion_numerical_value, criterion_literal, criterion_name))
+                continue
+
+            # Remove duplicated numerical values
+            if int(criterion_numerical_value, 0) in all_criteria[criterion_name].values():
+                logging.info("criterion {} duplicated values:".format(criterion_name))
+                logging.info("{}:{}".format(criterion_numerical_value, criterion_literal))
+                logging.info("KEEPING LATEST")
+                for key in list(all_criteria[criterion_name]):
+                    if all_criteria[criterion_name][key] == int(criterion_numerical_value, 0):
+                        del all_criteria[criterion_name][key]
+
+            all_criteria[criterion_name][criterion_literal] = int(criterion_numerical_value, 0)
+
+            logging.debug("type:{},".format(criterion_name))
+            logging.debug("literal:{},".format(criterion_literal))
+            logging.debug("values:{}.".format(criterion_numerical_value))
 
     return all_criteria
 
@@ -238,7 +307,8 @@
     logging.root.setLevel(logging.INFO)
     args = parseArgs()
 
-    all_criteria = parseAndroidAudioFile(args.androidaudiobaseheader)
+    all_criteria = parseAndroidAudioFile(args.androidaudiobaseheader,
+                                         args.androidaudiocommonbaseheader)
 
     address_criteria = parseAndroidAudioPolicyConfigurationFile(args.audiopolicyconfigurationfile)
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 7179355..e1d806d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1037,8 +1037,7 @@
     *output = AUDIO_IO_HANDLE_NONE;
     if (!msdDevices.isEmpty()) {
         *output = getOutputForDevices(msdDevices, session, *stream, config, flags);
-        sp<DeviceDescriptor> device = outputDevices.isEmpty() ? nullptr : outputDevices.itemAt(0);
-        if (*output != AUDIO_IO_HANDLE_NONE && setMsdPatch(device) == NO_ERROR) {
+        if (*output != AUDIO_IO_HANDLE_NONE && setMsdPatches(&outputDevices) == NO_ERROR) {
             ALOGV("%s() Using MSD devices %s instead of devices %s",
                   __func__, msdDevices.toString().c_str(), outputDevices.toString().c_str());
         } else {
@@ -1054,6 +1053,12 @@
     }
 
     *selectedDeviceId = getFirstDeviceId(outputDevices);
+    for (auto &outputDevice : outputDevices) {
+        if (outputDevice->getId() == getConfig().getDefaultOutputDevice()->getId()) {
+            *selectedDeviceId = outputDevice->getId();
+            break;
+        }
+    }
 
     if (outputDevices.onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
         *outputType = API_OUTPUT_TELEPHONY_TX;
@@ -1196,24 +1201,9 @@
     sp<SwAudioOutputDescriptor> outputDesc =
             new SwAudioOutputDescriptor(profile, mpClientInterface);
 
-    String8 address = getFirstDeviceAddress(devices);
-
-    // MSD patch may be using the only output stream that can service this request. Release
-    // MSD patch to prioritize this request over any active output on MSD.
-    AudioPatchCollection msdPatches = getMsdPatches();
-    for (size_t i = 0; i < msdPatches.size(); i++) {
-        const auto& patch = msdPatches[i];
-        for (size_t j = 0; j < patch->mPatch.num_sinks; ++j) {
-            const struct audio_port_config *sink = &patch->mPatch.sinks[j];
-            if (sink->type == AUDIO_PORT_TYPE_DEVICE &&
-                    devices.containsDeviceWithType(sink->ext.device.type) &&
-                    (address.isEmpty() || strncmp(sink->ext.device.address, address.string(),
-                            AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0)) {
-                releaseAudioPatch(patch->getHandle(), mUidCached);
-                break;
-            }
-        }
-    }
+    // An MSD patch may be using the only output stream that can service this request. Release
+    // any MSD patches routed to these devices to prioritize this request over active MSD output.
+    releaseMsdPatches(devices);
 
     status_t status = outputDesc->open(config, devices, stream, flags, output);
 
@@ -1386,7 +1376,8 @@
     }
     AudioProfileVector deviceProfiles;
     for (const auto &outProfile : outputProfiles) {
-        if (hwAvSync == ((outProfile->getFlags() & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) != 0)) {
+        if (hwAvSync == ((outProfile->getFlags() & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) != 0) &&
+                outProfile->supportsDevice(outputDevice)) {
             appendAudioProfiles(deviceProfiles, outProfile->getAudioProfiles());
         }
     }
@@ -1454,40 +1445,85 @@
     return patchBuilder;
 }
 
-status_t AudioPolicyManager::setMsdPatch(const sp<DeviceDescriptor> &outputDevice) {
-    sp<DeviceDescriptor> device = outputDevice;
-    if (device == nullptr) {
+status_t AudioPolicyManager::setMsdPatches(const DeviceVector *outputDevices) {
+    DeviceVector devices;
+    if (outputDevices != nullptr && outputDevices->size() > 0) {
+        devices.add(*outputDevices);
+    } else {
         // Use media strategy for unspecified output device. This should only
         // occur on checkForDeviceAndOutputChanges(). Device connection events may
         // therefore invalidate explicit routing requests.
-        DeviceVector devices = mEngine->getOutputDevicesForAttributes(
+        devices = mEngine->getOutputDevicesForAttributes(
                     attributes_initializer(AUDIO_USAGE_MEDIA), nullptr, false /*fromCache*/);
-        LOG_ALWAYS_FATAL_IF(devices.isEmpty(), "no outpudevice to set Msd Patch");
-        device = devices.itemAt(0);
+        LOG_ALWAYS_FATAL_IF(devices.isEmpty(), "no output device to set MSD patch");
     }
-    ALOGV("%s() for device %s", __func__, device->toString().c_str());
-    PatchBuilder patchBuilder = buildMsdPatch(device);
-    const struct audio_patch* patch = patchBuilder.patch();
-    const AudioPatchCollection msdPatches = getMsdPatches();
-    if (!msdPatches.isEmpty()) {
-        LOG_ALWAYS_FATAL_IF(msdPatches.size() > 1,
-                "The current MSD prototype only supports one output patch");
-        sp<AudioPatch> currentPatch = msdPatches.valueAt(0);
-        if (audio_patches_are_equal(&currentPatch->mPatch, patch)) {
-            return NO_ERROR;
+    std::vector<PatchBuilder> patchesToCreate;
+    for (auto i = 0u; i < devices.size(); ++i) {
+        ALOGV("%s() for device %s", __func__, devices[i]->toString().c_str());
+        patchesToCreate.push_back(buildMsdPatch(devices[i]));
+    }
+    // Retain only the MSD patches associated with the requested output devices.
+    // Tear down the others, and create new ones as needed.
+    AudioPatchCollection patchesToRemove = getMsdPatches();
+    for (auto it = patchesToCreate.begin(); it != patchesToCreate.end(); ) {
+        auto retainedPatch = false;
+        for (auto i = 0u; i < patchesToRemove.size(); ++i) {
+            if (audio_patches_are_equal(it->patch(), &patchesToRemove[i]->mPatch)) {
+                patchesToRemove.removeItemsAt(i);
+                retainedPatch = true;
+                break;
+            }
         }
+        if (retainedPatch) {
+            it = patchesToCreate.erase(it);
+            continue;
+        }
+        ++it;
+    }
+    if (patchesToCreate.size() == 0 && patchesToRemove.size() == 0) {
+        return NO_ERROR;
+    }
+    for (auto i = 0u; i < patchesToRemove.size(); ++i) {
+        auto &currentPatch = patchesToRemove.valueAt(i);
         releaseAudioPatch(currentPatch->getHandle(), mUidCached);
     }
-    status_t status = installPatch(__func__, -1 /*index*/, nullptr /*patchHandle*/,
-            patch, 0 /*delayMs*/, mUidCached, nullptr /*patchDescPtr*/);
-    ALOGE_IF(status != NO_ERROR, "%s() error %d creating MSD audio patch", __func__, status);
-    ALOGI_IF(status == NO_ERROR, "%s() Patch created from MSD_IN to "
-           "device:%s (format:%#x channels:%#x samplerate:%d)", __func__,
-             device->toString().c_str(), patch->sources[0].format,
-             patch->sources[0].channel_mask, patch->sources[0].sample_rate);
+    status_t status = NO_ERROR;
+    for (const auto &p : patchesToCreate) {
+        auto currStatus = installPatch(__func__, -1 /*index*/, nullptr /*patchHandle*/,
+                p.patch(), 0 /*delayMs*/, mUidCached, nullptr /*patchDescPtr*/);
+        char message[256];
+        snprintf(message, sizeof(message), "%s() %s: creating MSD patch from device:IN_BUS to "
+            "device:%#x (format:%#x channels:%#x samplerate:%d)", __func__,
+                currStatus == NO_ERROR ? "Success" : "Error",
+                p.patch()->sinks[0].ext.device.type, p.patch()->sources[0].format,
+                p.patch()->sources[0].channel_mask, p.patch()->sources[0].sample_rate);
+        if (currStatus == NO_ERROR) {
+            ALOGD("%s", message);
+        } else {
+            ALOGE("%s", message);
+            if (status == NO_ERROR) {
+                status = currStatus;
+            }
+        }
+    }
     return status;
 }
 
+void AudioPolicyManager::releaseMsdPatches(const DeviceVector& devices) {
+    AudioPatchCollection msdPatches = getMsdPatches();
+    for (size_t i = 0; i < msdPatches.size(); i++) {
+        const auto& patch = msdPatches[i];
+        for (size_t j = 0; j < patch->mPatch.num_sinks; ++j) {
+            const struct audio_port_config *sink = &patch->mPatch.sinks[j];
+            if (sink->type == AUDIO_PORT_TYPE_DEVICE && devices.getDevice(sink->ext.device.type,
+                    String8(sink->ext.device.address), AUDIO_FORMAT_DEFAULT) != nullptr) {
+                releaseAudioPatch(patch->getHandle(), mUidCached);
+                break;
+            }
+        }
+    }
+}
+
 audio_io_handle_t AudioPolicyManager::selectOutput(const SortedVector<audio_io_handle_t>& outputs,
                                                        audio_output_flags_t flags,
                                                        audio_format_t format,
@@ -5309,8 +5345,13 @@
             }
         }
         if (!directOutputOpen) {
-            ALOGV("no direct outputs open, reset MSD patch");
-            setMsdPatch();
+            ALOGV("no direct outputs open, reset MSD patches");
+            // TODO: The MSD patches to be established here may differ from current MSD patches due to
+            // how output devices for patching are resolved. Avoid this by caching and reusing the
+            // arguments to mEngine->getOutputDevicesForAttributes() when resolving which output
+            // devices to patch to. This may be complicated by the fact that devices may become
+            // unavailable.
+            setMsdPatches();
         }
     }
 }
@@ -5377,7 +5418,13 @@
     if (onOutputsChecked != nullptr && onOutputsChecked()) checkA2dpSuspend();
     updateDevicesAndOutputs();
     if (mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD) != 0) {
-        setMsdPatch();
+        // TODO: The MSD patches to be established here may differ from current MSD patches due to how
+        // output devices for patching are resolved. Nevertheless, AudioTracks affected by device
+        // configuration changes will ultimately be rerouted correctly. We can still avoid
+        // unnecessary rerouting by caching and reusing the arguments to
+        // mEngine->getOutputDevicesForAttributes() when resolving which output devices to patch to.
+        // This may be complicated by the fact that devices may become unavailable.
+        setMsdPatches();
     }
 }
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 33639cd..c1c483c 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -844,13 +844,6 @@
         // end point.
         audio_port_handle_t mCallRxSourceClientPort = AUDIO_PORT_HANDLE_NONE;
 
-private:
-        void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
-
-        // Add or remove AC3 DTS encodings based on user preferences.
-        void modifySurroundFormats(const sp<DeviceDescriptor>& devDesc, FormatVector *formatsPtr);
-        void modifySurroundChannelMasks(ChannelMaskSet *channelMasksPtr);
-
         // Support for Multi-Stream Decoder (MSD) module
         sp<DeviceDescriptor> getMsdAudioInDevice() const;
         DeviceVector getMsdAudioOutDevices() const;
@@ -860,7 +853,14 @@
                                            audio_port_config *sourceConfig,
                                            audio_port_config *sinkConfig) const;
         PatchBuilder buildMsdPatch(const sp<DeviceDescriptor> &outputDevice) const;
-        status_t setMsdPatch(const sp<DeviceDescriptor> &outputDevice = nullptr);
+        status_t setMsdPatches(const DeviceVector *outputDevices = nullptr);
+        void releaseMsdPatches(const DeviceVector& devices);
+private:
+        void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
+
+        // Add or remove AC3 DTS encodings based on user preferences.
+        void modifySurroundFormats(const sp<DeviceDescriptor>& devDesc, FormatVector *formatsPtr);
+        void modifySurroundChannelMasks(ChannelMaskSet *channelMasksPtr);
 
         // If any, resolve any "dynamic" fields of an Audio Profiles collection
         void updateAudioProfiles(const sp<DeviceDescriptor>& devDesc, audio_io_handle_t ioHandle,
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 8bab020..c096427 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -29,6 +29,8 @@
     using AudioPolicyManager::getOutputs;
     using AudioPolicyManager::getAvailableOutputDevices;
     using AudioPolicyManager::getAvailableInputDevices;
+    using AudioPolicyManager::releaseMsdPatches;
+    using AudioPolicyManager::setMsdPatches;
     uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
 };
 
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index b5c67a1..e1634cf 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -21,6 +21,7 @@
 #include <gtest/gtest.h>
 
 #include <media/AudioSystem.h>
+#include <media/TypeConverter.h>
 #include <system/audio.h>
 #include <utils/Log.h>
 
@@ -68,9 +69,18 @@
     ASSERT_NE("AudioPolicyConfig::setDefault", manager.getConfig().getSource());
 
     for (auto desc : manager.getConfig().getInputDevices()) {
-        ASSERT_NE(attachedDevices.end(), attachedDevices.find(desc->type()));
+        if (attachedDevices.find(desc->type()) == attachedDevices.end()) {
+            std::string deviceType;
+            (void)DeviceConverter::toString(desc->type(), deviceType);
+            ADD_FAILURE() << "Input device \"" << deviceType << "\" not found";
+        }
     }
     for (auto desc : manager.getConfig().getOutputDevices()) {
-        ASSERT_NE(attachedDevices.end(), attachedDevices.find(desc->type()));
+        if (attachedDevices.find(desc->type()) == attachedDevices.end()) {
+            std::string deviceType;
+            (void)DeviceConverter::toString(desc->type(), deviceType);
+            ADD_FAILURE() << "Output device \"" << deviceType << "\" not found";
+        }
     }
 }
+
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index d379239..f391606 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -319,7 +319,17 @@
 
 // TODO: Add patch creation tests that involve already existing patch
 
-class AudioPolicyManagerTestMsd : public AudioPolicyManagerTest {
+enum
+{
+    MSD_AUDIO_PATCH_COUNT_NUM_AUDIO_PATCHES_INDEX = 0,
+    MSD_AUDIO_PATCH_COUNT_NAME_INDEX = 1
+};
+using MsdAudioPatchCountSpecification = std::tuple<size_t, std::string>;
+
+class AudioPolicyManagerTestMsd : public AudioPolicyManagerTest,
+        public ::testing::WithParamInterface<MsdAudioPatchCountSpecification> {
+  public:
+    AudioPolicyManagerTestMsd();
   protected:
     void SetUpManagerConfig() override;
     void TearDown() override;
@@ -327,8 +337,26 @@
     sp<DeviceDescriptor> mMsdOutputDevice;
     sp<DeviceDescriptor> mMsdInputDevice;
     sp<DeviceDescriptor> mDefaultOutputDevice;
+
+    const size_t mExpectedAudioPatchCount;
+    sp<DeviceDescriptor> mSpdifDevice;
 };
 
+AudioPolicyManagerTestMsd::AudioPolicyManagerTestMsd()
+    : mExpectedAudioPatchCount(std::get<MSD_AUDIO_PATCH_COUNT_NUM_AUDIO_PATCHES_INDEX>(
+            GetParam())) {}
+
+INSTANTIATE_TEST_CASE_P(
+        MsdAudioPatchCount,
+        AudioPolicyManagerTestMsd,
+        ::testing::Values(
+                MsdAudioPatchCountSpecification(1u, "single"),
+                MsdAudioPatchCountSpecification(2u, "dual")
+        ),
+        [](const ::testing::TestParamInfo<MsdAudioPatchCountSpecification> &info) {
+                return std::get<MSD_AUDIO_PATCH_COUNT_NAME_INDEX>(info.param); }
+);
+
 void AudioPolicyManagerTestMsd::SetUpManagerConfig() {
     // TODO: Consider using Serializer to load part of the config from a string.
     AudioPolicyManagerTest::SetUpManagerConfig();
@@ -348,6 +376,19 @@
     config.addDevice(mMsdOutputDevice);
     config.addDevice(mMsdInputDevice);
 
+    if (mExpectedAudioPatchCount == 2) {
+        // Add SPDIF device with PCM output profile as a second device for dual MSD audio patching.
+        mSpdifDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPDIF);
+        mSpdifDevice->addAudioProfile(pcmOutputProfile);
+        config.addDevice(mSpdifDevice);
+
+        sp<OutputProfile> spdifOutputProfile = new OutputProfile("spdif output");
+        spdifOutputProfile->addAudioProfile(pcmOutputProfile);
+        spdifOutputProfile->addSupportedDevice(mSpdifDevice);
+        config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+                addOutputProfile(spdifOutputProfile);
+    }
+
     sp<HwModule> msdModule = new HwModule(AUDIO_HARDWARE_MODULE_ID_MSD, 2 /*halVersionMajor*/);
     HwModuleCollection modules = config.getHwModules();
     modules.add(msdModule);
@@ -383,64 +424,88 @@
             addOutputProfile(primaryEncodedOutputProfile);
 
     mDefaultOutputDevice = config.getDefaultOutputDevice();
+    if (mExpectedAudioPatchCount == 2) {
+        mSpdifDevice->addAudioProfile(dtsOutputProfile);
+        primaryEncodedOutputProfile->addSupportedDevice(mSpdifDevice);
+    }
 }
 
 void AudioPolicyManagerTestMsd::TearDown() {
     mMsdOutputDevice.clear();
     mMsdInputDevice.clear();
     mDefaultOutputDevice.clear();
+    mSpdifDevice.clear();
     AudioPolicyManagerTest::TearDown();
 }
 
-TEST_F(AudioPolicyManagerTestMsd, InitSuccess) {
+TEST_P(AudioPolicyManagerTestMsd, InitSuccess) {
     ASSERT_TRUE(mMsdOutputDevice);
     ASSERT_TRUE(mMsdInputDevice);
     ASSERT_TRUE(mDefaultOutputDevice);
 }
 
-TEST_F(AudioPolicyManagerTestMsd, Dump) {
+TEST_P(AudioPolicyManagerTestMsd, Dump) {
     dumpToLog();
 }
 
-TEST_F(AudioPolicyManagerTestMsd, PatchCreationOnSetForceUse) {
+TEST_P(AudioPolicyManagerTestMsd, PatchCreationOnSetForceUse) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     mManager->setForceUse(AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND,
             AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
+TEST_P(AudioPolicyManagerTestMsd, PatchCreationSetReleaseMsdPatches) {
+    const PatchCountCheck patchCount = snapshotPatchCount();
+    DeviceVector devices = mManager->getAvailableOutputDevices();
+    // Remove MSD output device to avoid patching to itself
+    devices.remove(mMsdOutputDevice);
+    ASSERT_EQ(mExpectedAudioPatchCount, devices.size());
+    mManager->setMsdPatches(&devices);
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    // Dual patch: exercise creating one new audio patch and reusing another existing audio patch.
+    DeviceVector singleDevice(devices[0]);
+    mManager->releaseMsdPatches(singleDevice);
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
+    mManager->setMsdPatches(&devices);
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    mManager->releaseMsdPatches(devices);
+    ASSERT_EQ(0, patchCount.deltaFromSnapshot());
+}
+
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrPcmRoutesToMsd) {
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrPcmRoutesToMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
@@ -449,7 +514,7 @@
     ASSERT_EQ(0, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrFormatSwitching) {
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrFormatSwitching) {
     // Switch between formats that are supported and not supported by MSD.
     {
         const PatchCountCheck patchCount = snapshotPatchCount();
@@ -459,9 +524,9 @@
                 AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
                 nullptr /*output*/, &portId);
         ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-        ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
-        ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
     }
     {
         const PatchCountCheck patchCount = snapshotPatchCount();
@@ -471,7 +536,7 @@
                 AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
                 nullptr /*output*/, &portId);
         ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
-        ASSERT_EQ(-1, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount), patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
         ASSERT_EQ(0, patchCount.deltaFromSnapshot());
     }